#!/usr/bin/env python
#
# Copyright (C) 2016 GNS3 Technologies Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import os
import re
import uuid
import html
import asyncio
import aiohttp

import logging
log = logging.getLogger(__name__)


class Link:
    """
    Base class for links.

    Concrete link implementations are expected to subclass Link and
    override the methods below that raise NotImplementedError.
    """

    def __init__(self, project, link_id=None):

        if link_id:
            self._id = link_id
        else:
            self._id = str(uuid.uuid4())
        self._nodes = []
        self._project = project
        self._capturing = False
        self._capture_file_name = None
        self._streaming_pcap = None
        self._created = False
        self._link_type = "ethernet"

    @property
    def created(self):
        """
        :returns: True if the link has been created on the computes
        """
        return self._created

    @asyncio.coroutine
    def add_node(self, node, adapter_number, port_number, label=None, dump=True):
        """
        Add a node to the link

        :param node: Node instance to attach to the link
        :param adapter_number: Adapter number used by the node for this link
        :param port_number: Port number used by the node for this link
        :param label: Optional label (position, rotation, text and style)
        :param dump: Dump project on disk
        """

        port = node.get_port(adapter_number, port_number)
        if port.link is not None:
            raise aiohttp.web.HTTPConflict(text="Port is already used")

        self._link_type = port.link_type

        for other_node in self._nodes:
            if other_node["node"] == node:
                raise aiohttp.web.HTTPConflict(text="Cannot connect to itself")

            if node.node_type in ["nat", "cloud"]:
                if other_node["node"].node_type in ["nat", "cloud"]:
                    raise aiohttp.web.HTTPConflict(text="It's not allowed to connect a {} to a {}".format(other_node["node"].node_type, node.node_type))

            # Check that the user is not connecting a serial port to an Ethernet port
            other_port = other_node["node"].get_port(other_node["adapter_number"], other_node["port_number"])
            if port.link_type != other_port.link_type:
                raise aiohttp.web.HTTPConflict(text="It's not allowed to connect a {} to a {}".format(other_port.link_type, port.link_type))

        if label is None:
            label = {
                "x": -10,
                "y": -10,
                "rotation": 0,
                "text": html.escape("{}/{}".format(adapter_number, port_number)),
                "style": "font-size: 10; font-style: Verdana"
            }

        self._nodes.append({
            "node": node,
            "adapter_number": adapter_number,
            "port_number": port_number,
            "port": port,
            "label": label
        })

        if len(self._nodes) == 2:
            yield from self.create()
            for n in self._nodes:
                n["node"].add_link(self)
                n["port"].link = self
            self._created = True
            self._project.controller.notification.emit("link.created", self.__json__())

        if dump:
            self._project.dump()
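
    # A minimal usage sketch (illustrative only; "link", "node_a" and "node_b"
    # are hypothetical names, and the concrete Link subclass comes from the
    # controller). The link is considered created once add_node() has been
    # called for both ends:
    #
    #   link = SomeConcreteLink(project)
    #   yield from link.add_node(node_a, adapter_number=0, port_number=0)
    #   yield from link.add_node(node_b, adapter_number=0, port_number=1)
    #   assert link.created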

    @asyncio.coroutine
    def update_nodes(self, nodes):
        """
        Update the nodes of the link (currently only the port labels)
        """
        for node_data in nodes:
            node = self._project.get_node(node_data["node_id"])
            for port in self._nodes:
                if port["node"] == node:
                    label = node_data.get("label")
                    if label:
                        port["label"] = label
        self._project.controller.notification.emit("link.updated", self.__json__())
        self._project.dump()

    @asyncio.coroutine
    def create(self):
        """
        Create the link
        """

        raise NotImplementedError
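
    # A hypothetical subclass skeleton (illustrative only, not the real GNS3
    # implementation): the concrete link type is responsible for wiring the
    # two ends together on the computes and exposing the capture stream.
    #
    #   class MyLink(Link):
    #
    #       @asyncio.coroutine
    #       def create(self):
    #           ...  # connect both ends on their computes
    #
    #       @asyncio.coroutine
    #       def read_pcap_from_source(self):
    #           ...  # return a stream of the capture from the compute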

    @asyncio.coroutine
    def delete(self):
        """
        Delete the link
        """
        for n in self._nodes:
            # The port's link can be different from self if we roll back an already existing link
            if n["port"].link == self:
                n["port"].link = None
                n["node"].remove_link(self)

    @asyncio.coroutine
    def start_capture(self, data_link_type="DLT_EN10MB", capture_file_name=None):
        """
        Start capture on the link

        :returns: Capture object
        """

        self._capturing = True
        self._capture_file_name = capture_file_name
        self._streaming_pcap = asyncio.async(self._start_streaming_pcap())
        self._project.controller.notification.emit("link.updated", self.__json__())

    @asyncio.coroutine
    def _start_streaming_pcap(self):
        """
        Dump a pcap file on disk
        """

        stream_content = yield from self.read_pcap_from_source()
        with stream_content as stream:
            with open(self.capture_file_path, "wb+") as f:
                while self._capturing:
                    # Read one byte at a time, otherwise the remaining data is not read if the traffic stops
                    data = yield from stream.read(1)
                    if data:
                        f.write(data)
                        # Flush to disk, otherwise the live capture is not really live
                        f.flush()
                    else:
                        break

    @asyncio.coroutine
    def stop_capture(self):
        """
        Stop capture on the link
        """

        self._capturing = False
        self._project.controller.notification.emit("link.updated", self.__json__())

    @asyncio.coroutine
    def read_pcap_from_source(self):
        """
        Return a FileStream of the PCAP from the compute server
        """

        raise NotImplementedError

    @asyncio.coroutine
    def node_updated(self, node):
        """
        Called when a node member of the link is updated
        """
        raise NotImplementedError

    def default_capture_file_name(self):
        """
        :returns: File name for a capture on this link
        """

        capture_file_name = "{}_{}-{}_to_{}_{}-{}".format(self._nodes[0]["node"].name,
                                                          self._nodes[0]["adapter_number"],
                                                          self._nodes[0]["port_number"],
                                                          self._nodes[1]["node"].name,
                                                          self._nodes[1]["adapter_number"],
                                                          self._nodes[1]["port_number"])
        return re.sub("[^0-9A-Za-z_-]", "", capture_file_name) + ".pcap"
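
    # For example, a link between a node named "PC1" (adapter 0, port 0) and a
    # node named "Switch1" (adapter 0, port 1) would yield the capture file
    # name "PC1_0-0_to_Switch1_0-1.pcap" (node names are illustrative).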

    @property
    def id(self):
        return self._id

    @property
    def nodes(self):
        return [node['node'] for node in self._nodes]

    @property
    def capturing(self):
        return self._capturing

    @property
    def capture_file_path(self):
        """
        Get the path of the capture
        """

        if self._capture_file_name:
            return os.path.join(self._project.captures_directory, self._capture_file_name)
        else:
            return None

    def __eq__(self, other):
        if not isinstance(other, Link):
            return False
        return self.id == other.id

    def __hash__(self):
        return hash(self._id)

    def __json__(self, topology_dump=False):
        """
        :param topology_dump: Filter to keep only the properties required for saving on disk
        """
        res = []
        for side in self._nodes:
            res.append({
                "node_id": side["node"].id,
                "adapter_number": side["adapter_number"],
                "port_number": side["port_number"],
                "label": side["label"]
            })
        if topology_dump:
            return {
                "nodes": res,
                "link_id": self._id
            }
        return {
            "nodes": res,
            "link_id": self._id,
            "project_id": self._project.id,
            "capturing": self._capturing,
            "capture_file_name": self._capture_file_name,
            "capture_file_path": self.capture_file_path,
            "link_type": self._link_type
        }
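

# For reference, __json__() returns a structure of this shape (values are
# illustrative; node and link IDs are UUID strings generated at runtime):
#
#   {
#       "nodes": [
#           {"node_id": "<uuid>", "adapter_number": 0, "port_number": 0, "label": {...}},
#           {"node_id": "<uuid>", "adapter_number": 0, "port_number": 1, "label": {...}}
#       ],
#       "link_id": "<uuid>",
#       "project_id": "<uuid>",
#       "capturing": False,
#       "capture_file_name": None,
#       "capture_file_path": None,
#       "link_type": "ethernet"
#   }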