#!/usr/bin/env python
#
# Copyright (C) 2016 GNS3 Technologies Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import os
import re
import uuid
import html
import asyncio
import aiohttp

from gns3server.utils.asyncio import asyncio_ensure_future

import logging
log = logging.getLogger(__name__)
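
# Filter types that can be applied to a link. Each entry describes one filter
# and the parameters it accepts (name, bounds, unit and value type).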
FILTERS = [
    {
        "type": "frequency_drop",
        "name": "Frequency drop",
        "description": "It will drop everything with a -1 frequency, drop every Nth packet with a positive frequency, or drop nothing",
        "parameters": [
            {
                "name": "Frequency",
                "minimum": -1,
                "maximum": 32767,
                "type": "int",
                "unit": "th packet"
            }
        ]
    },
    {
        "type": "packet_loss",
        "name": "Packet loss",
        "description": "The percentage represents the chance for a packet to be lost",
        "parameters": [
            {
                "name": "Chance",
                "minimum": 0,
                "maximum": 100,
                "type": "int",
                "unit": "%"
            }
        ]
    },
    {
        "type": "delay",
        "name": "Delay",
        "description": "Delay packets in milliseconds. You can add jitter in milliseconds (+/-) of the delay",
        "parameters": [
            {
                "name": "Latency",
                "minimum": 0,
                "maximum": 32767,
                "unit": "ms",
                "type": "int"
            },
            {
                "name": "Jitter (-/+)",
                "minimum": 0,
                "maximum": 32767,
                "unit": "ms",
                "type": "int"
            }
        ]
    },
    {
        "type": "corrupt",
        "name": "Corrupt",
        "description": "The percentage represents the chance for a packet to be corrupted",
        "parameters": [
            {
                "name": "Chance",
                "minimum": 0,
                "maximum": 100,
                "unit": "%",
                "type": "int"
            }
        ]
    },
    {
        "type": "bpf",
        "name": "Berkeley Packet Filter (BPF)",
        "description": "This filter will drop any packet matching a BPF expression. Put one expression per line",
        "parameters": [
            {
                "name": "Filters",
                "type": "text"
            }
        ]
    }
]


class Link:
    """
    Base class for links.
    """

    def __init__(self, project, link_id=None):

        if link_id:
            self._id = link_id
        else:
            self._id = str(uuid.uuid4())

        self._nodes = []
        self._project = project
        self._capturing = False
        self._capture_file_name = None
        self._streaming_pcap = None
        self._created = False
        self._link_type = "ethernet"
        self._suspended = False
        self._filters = {}

    @property
    def filters(self):
        """
        Get the filters applied to this link
        """
        return self._filters

    def get_active_filters(self):
        """
        Return the active filters.

        Filters are overridden if the link is suspended.
        """

        if self._suspended:
            # this is done to allow all node types to support suspending a link
            return {"frequency_drop": [-1]}
        return self._filters
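
    # The filters argument is a mapping of filter type to a list of parameter
    # values, e.g. {"frequency_drop": [5], "bpf": ["icmp"]} (illustrative
    # values; see FILTERS above for the accepted parameters).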
    @asyncio.coroutine
    def update_filters(self, filters):
        """
        Modify the filters list.
        Filters with a value of 0 are dropped because they are not active
        """
        new_filters = {}
        for (filter, values) in filters.items():
            new_values = []
            for value in values:
                if isinstance(value, str):
                    new_values.append(value.strip("\n"))
                else:
                    new_values.append(int(value))
            values = new_values

            if len(values) != 0 and values[0] != 0 and values[0] != '':
                new_filters[filter] = values

        if new_filters != self.filters:
            self._filters = new_filters
            if self._created:
                yield from self.update()
            self._project.controller.notification.emit("link.updated", self.__json__())
            self._project.dump()

    @asyncio.coroutine
    def update_suspend(self, value):
        if value != self._suspended:
            self._suspended = value
            yield from self.update()
            self._project.controller.notification.emit("link.updated", self.__json__())
            self._project.dump()

    @property
    def created(self):
        """
        :returns: True if the link has been created on the computes
        """
        return self._created
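
    # A link is only created on the compute servers once both of its endpoints
    # have been attached (see the len(self._nodes) == 2 check in add_node).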
    @asyncio.coroutine
    def add_node(self, node, adapter_number, port_number, label=None, dump=True):
        """
        Add a node to the link

        :param dump: Dump project on disk
        """

        port = node.get_port(adapter_number, port_number)
        if port is None:
            raise aiohttp.web.HTTPNotFound(text="Port {}/{} for {} not found".format(adapter_number, port_number, node.name))
        if port.link is not None:
            raise aiohttp.web.HTTPConflict(text="Port is already used")

        self._link_type = port.link_type

        for other_node in self._nodes:
            if other_node["node"] == node:
                raise aiohttp.web.HTTPConflict(text="Cannot connect to itself")

            if node.node_type in ["nat", "cloud"]:
                if other_node["node"].node_type in ["nat", "cloud"]:
                    raise aiohttp.web.HTTPConflict(text="It's not allowed to connect a {} to a {}".format(other_node["node"].node_type, node.node_type))

            # Check that the user is not connecting a serial port to an Ethernet port
            other_port = other_node["node"].get_port(other_node["adapter_number"], other_node["port_number"])
            if other_port is None:
                raise aiohttp.web.HTTPNotFound(text="Port {}/{} for {} not found".format(other_node["adapter_number"], other_node["port_number"], other_node["node"].name))
            if port.link_type != other_port.link_type:
                raise aiohttp.web.HTTPConflict(text="It's not allowed to connect a {} to a {}".format(other_port.link_type, port.link_type))
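
        # Provide a default link label (position, rotation, style and text)
        # when the client did not supply one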
        if label is None:
            label = {
                "x": -10,
                "y": -10,
                "rotation": 0,
                "text": html.escape("{}/{}".format(adapter_number, port_number)),
                "style": "font-size: 10; font-style: Verdana"
            }

        self._nodes.append({
            "node": node,
            "adapter_number": adapter_number,
            "port_number": port_number,
            "port": port,
            "label": label
        })

        if len(self._nodes) == 2:
            yield from self.create()
            for n in self._nodes:
                n["node"].add_link(self)
                n["port"].link = self
            self._created = True
            self._project.controller.notification.emit("link.created", self.__json__())

        if dump:
            self._project.dump()

    @asyncio.coroutine
    def update_nodes(self, nodes):
        for node_data in nodes:
            node = self._project.get_node(node_data["node_id"])
            for port in self._nodes:
                if port["node"] == node:
                    label = node_data.get("label")
                    if label:
                        port["label"] = label
        self._project.controller.notification.emit("link.updated", self.__json__())
        self._project.dump()

    @asyncio.coroutine
    def create(self):
        """
        Create the link
        """

        raise NotImplementedError

    @asyncio.coroutine
    def update(self):
        """
        Update a link
        """

        raise NotImplementedError

    @asyncio.coroutine
    def delete(self):
        """
        Delete the link
        """

        for n in self._nodes:
            # The port's link can be different from self if we are rolling back
            # over an already existing link
            if n["port"].link == self:
                n["port"].link = None
            n["node"].remove_link(self)

    @asyncio.coroutine
    def start_capture(self, data_link_type="DLT_EN10MB", capture_file_name=None):
        """
        Start capture on the link

        :returns: Capture object
        """

        self._capturing = True
        self._capture_file_name = capture_file_name
        self._streaming_pcap = asyncio_ensure_future(self._start_streaming_pcap())
        self._project.controller.notification.emit("link.updated", self.__json__())
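
    # The PCAP is streamed from the compute and written to disk as it arrives,
    # so the capture file can be read live (e.g. in Wireshark) while the
    # capture is still running.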
    @asyncio.coroutine
    def _start_streaming_pcap(self):
        """
        Dump a pcap file on disk
        """

        if os.path.exists(self.capture_file_path):
            try:
                os.remove(self.capture_file_path)
            except OSError as e:
                raise aiohttp.web.HTTPConflict(text="Could not delete old capture file '{}': {}".format(self.capture_file_path, e))

        try:
            stream_content = yield from self.read_pcap_from_source()
        except aiohttp.web.HTTPException as e:
            error_msg = "Could not stream PCAP file: error {}: {}".format(e.status, e.text)
            log.error(error_msg)
            self._capturing = False
            self._project.notification.emit("log.error", {"message": error_msg})
            self._project.controller.notification.emit("link.updated", self.__json__())
            return

        with stream_content as stream:
            try:
                with open(self.capture_file_path, "wb") as f:
                    while self._capturing:
                        # We read 1 byte at a time, otherwise the remaining data
                        # would not be read when the traffic stops
                        data = yield from stream.read(1)
                        if data:
                            f.write(data)
                            # Flush to disk, otherwise the live capture is not really live
                            f.flush()
                        else:
                            break
            except OSError as e:
                raise aiohttp.web.HTTPConflict(text="Could not write capture file '{}': {}".format(self.capture_file_path, e))

    @asyncio.coroutine
    def stop_capture(self):
        """
        Stop capture on the link
        """

        self._capturing = False
        self._project.controller.notification.emit("link.updated", self.__json__())

    @asyncio.coroutine
    def read_pcap_from_source(self):
        """
        Return a FileStream of the PCAP from the compute server
        """

        raise NotImplementedError

    @asyncio.coroutine
    def node_updated(self, node):
        """
        Called when a node member of the link is updated
        """

        raise NotImplementedError
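
    # The default capture file name combines both endpoints, e.g.
    # "PC1_0-0_to_R1_0-1.pcap" (illustrative node names); any character outside
    # [0-9A-Za-z_-] is stripped from the name.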
    def default_capture_file_name(self):
        """
        :returns: File name for a capture on this link
        """

        capture_file_name = "{}_{}-{}_to_{}_{}-{}".format(self._nodes[0]["node"].name,
                                                          self._nodes[0]["adapter_number"],
                                                          self._nodes[0]["port_number"],
                                                          self._nodes[1]["node"].name,
                                                          self._nodes[1]["adapter_number"],
                                                          self._nodes[1]["port_number"])
        return re.sub("[^0-9A-Za-z_-]", "", capture_file_name) + ".pcap"

    @property
    def id(self):
        return self._id

    @property
    def nodes(self):
        """
        Get the current nodes attached to this link
        """
        return [node['node'] for node in self._nodes]

    @property
    def capturing(self):
        return self._capturing

    @property
    def capture_file_path(self):
        """
        Get the path of the capture
        """

        if self._capture_file_name:
            return os.path.join(self._project.captures_directory, self._capture_file_name)
        else:
            return None

    def available_filters(self):
        """
        Return the list of filters compatible with this link

        :returns: Array of filters
        """
        filter_node = self._get_filter_node()
        if filter_node:
            return FILTERS
        return []

    def _get_filter_node(self):
        """
        Return the node where the filter will run

        :returns: None if no node supports filtering, else the node
        """
        for node in self._nodes:
            if node["node"].node_type in ('vpcs',
                                          'vmware',
                                          'dynamips',
                                          'qemu',
                                          'iou',
                                          'cloud',
                                          'nat',
                                          'virtualbox',
                                          'docker'):
                return node["node"]
        return None

    def __eq__(self, other):
        if not isinstance(other, Link):
            return False
        return self.id == other.id

    def __hash__(self):
        return hash(self._id)
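
    # Serialized in two forms: a compact one used when saving the topology on
    # disk (topology_dump=True) and a full one, including capture and project
    # information, used in controller notifications and API responses.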
    def __json__(self, topology_dump=False):
        """
        :param topology_dump: Filter to keep only the properties required for saving on disk
        """
        res = []
        for side in self._nodes:
            res.append({
                "node_id": side["node"].id,
                "adapter_number": side["adapter_number"],
                "port_number": side["port_number"],
                "label": side["label"]
            })
        if topology_dump:
            return {
                "nodes": res,
                "link_id": self._id,
                "filters": self._filters,
                "suspend": self._suspended
            }
        return {
            "nodes": res,
            "link_id": self._id,
            "project_id": self._project.id,
            "capturing": self._capturing,
            "capture_file_name": self._capture_file_name,
            "capture_file_path": self.capture_file_path,
            "link_type": self._link_type,
            "filters": self._filters,
            "suspend": self._suspended
        }