1
0
mirror of https://github.com/GNS3/gns3-server synced 2024-11-24 17:28:08 +00:00
gns3-server/gns3server/compute/project.py

583 lines
19 KiB
Python
Raw Normal View History

2015-01-19 15:23:41 +00:00
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 GNS3 Technologies Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import asyncio
import hashlib
import json
import shutil
import zipfile

import aiohttp
import zipstream

from uuid import UUID, uuid4

from .port_manager import PortManager
from .notification_manager import NotificationManager
from ..config import Config
from ..utils.asyncio import wait_run_in_executor
from ..utils.path import check_path_allowed, get_default_project_directory
2015-01-19 15:23:41 +00:00
2015-01-23 17:37:29 +00:00
import logging
log = logging.getLogger(__name__)
2015-01-19 15:23:41 +00:00
class Project:

    """
    A project contains a list of nodes.
    In theory nodes are isolated project/project.

    :param name: project name
    :param project_id: force project identifier (None by default auto generate an UUID)
    :param path: path of the project. (None use the standard directory)
    """

    def __init__(self, name=None, project_id=None, path=None):

        self._name = name
        if project_id is None:
            # No identifier supplied: auto-generate one, as documented above.
            # (Previously UUID(None) raised an uncaught TypeError.)
            self._id = str(uuid4())
        else:
            try:
                UUID(project_id, version=4)
            except ValueError:
                raise aiohttp.web.HTTPBadRequest(text="{} is not a valid UUID".format(project_id))
            self._id = project_id

        self._nodes = set()
        self._nodes_to_destroy = set()
        self._used_tcp_ports = set()
        self._used_udp_ports = set()

        if path is None:
            location = get_default_project_directory()
            path = os.path.join(location, self._id)
        try:
            os.makedirs(path, exist_ok=True)
        except OSError as e:
            raise aiohttp.web.HTTPInternalServerError(text="Could not create project directory: {}".format(e))
        self.path = path

        try:
            # Remove temporary state left over from a previous run
            if os.path.exists(self.tmp_working_directory()):
                shutil.rmtree(self.tmp_working_directory())
        except OSError as e:
            # "as e" was missing before, so formatting the message raised NameError
            raise aiohttp.web.HTTPInternalServerError(text="Could not clean project directory: {}".format(e))

        log.info("Project {id} with path '{path}' created".format(path=self._path, id=self._id))

    def __json__(self):
        """
        :returns: JSON-serializable representation of the project
        """
        return {
            "name": self._name,
            "project_id": self._id
        }

    def _config(self):
        """
        :returns: the "Server" section of the server configuration
        """
        return Config.instance().get_section_config("Server")

    def is_local(self):
        """
        :returns: True if the server runs on the local machine (config key "local")
        """
        return self._config().getboolean("local", False)

    @property
    def id(self):
        """
        :returns: project unique identifier
        """
        return self._id

    @property
    def path(self):
        """
        :returns: path of the project directory
        """
        return self._path

    @path.setter
    def path(self, path):
        check_path_allowed(path)
        if hasattr(self, "_path"):
            # Once set, the path may only change when the server is local
            if path != self._path and self.is_local() is False:
                raise aiohttp.web.HTTPForbidden(text="You are not allowed to modify the project directory path")
        self._path = path

    @property
    def name(self):
        """
        :returns: project name
        """
        return self._name

    @name.setter
    def name(self, name):
        # Reject path separators to prevent directory traversal via the name
        if "/" in name or "\\" in name:
            raise aiohttp.web.HTTPForbidden(text="Name can not contain path separator")
        self._name = name

    @property
    def nodes(self):
        """
        :returns: set of nodes belonging to this project
        """
        return self._nodes

    def record_tcp_port(self, port):
        """
        Associate a reserved TCP port number with this project.

        :param port: TCP port number
        """
        self._used_tcp_ports.add(port)

    def record_udp_port(self, port):
        """
        Associate a reserved UDP port number with this project.

        :param port: UDP port number
        """
        self._used_udp_ports.add(port)

    def remove_tcp_port(self, port):
        """
        Removes an associated TCP port number from this project.

        :param port: TCP port number
        """
        self._used_tcp_ports.discard(port)

    def remove_udp_port(self, port):
        """
        Removes an associated UDP port number from this project.

        :param port: UDP port number
        """
        self._used_udp_ports.discard(port)

    def module_working_directory(self, module_name):
        """
        Returns a working directory for the module.
        The directory is created if it doesn't exist.

        :param module_name: name for the module
        :returns: working directory
        """
        workdir = self.module_working_path(module_name)
        try:
            os.makedirs(workdir, exist_ok=True)
        except OSError as e:
            raise aiohttp.web.HTTPInternalServerError(text="Could not create module working directory: {}".format(e))
        return workdir

    def module_working_path(self, module_name):
        """
        Returns the working directory for the module. If you want
        to be sure to have the directory on disk take a look on:
        module_working_directory

        :param module_name: name for the module
        :returns: path (not created on disk)
        """
        return os.path.join(self._path, "project-files", module_name)

    def node_working_directory(self, node):
        """
        Returns a working directory for a specific node.
        If the directory doesn't exist, the directory is created.

        :param node: Node instance
        :returns: Node working directory
        """
        workdir = os.path.join(self._path, "project-files", node.manager.module_name.lower(), node.id)
        try:
            os.makedirs(workdir, exist_ok=True)
        except OSError as e:
            raise aiohttp.web.HTTPInternalServerError(text="Could not create the node working directory: {}".format(e))
        return workdir

    def tmp_working_directory(self):
        """
        A temporary directory. Will be cleaned at project open and close.

        :returns: path to the directory (not created on disk)
        """
        return os.path.join(self._path, "tmp")

    def capture_working_directory(self):
        """
        Returns a working directory where to temporarily store packet capture files.

        :returns: path to the directory
        """
        workdir = os.path.join(self._path, "tmp", "captures")
        try:
            os.makedirs(workdir, exist_ok=True)
        except OSError as e:
            raise aiohttp.web.HTTPInternalServerError(text="Could not create the capture working directory: {}".format(e))
        return workdir

    def mark_node_for_destruction(self, node):
        """
        Remove the node from the project and schedule its deletion
        for the next commit().

        :param node: An instance of Node
        """
        self.remove_node(node)
        self._nodes_to_destroy.add(node)

    def add_node(self, node):
        """
        Adds a node to the project.
        In theory this should be called by the node manager.

        :param node: Node instance
        """
        self._nodes.add(node)

    def remove_node(self, node):
        """
        Removes a node from the project.
        In theory this should be called by the node manager.

        :param node: Node instance
        """
        self._nodes.discard(node)

    async def close(self):
        """
        Closes the project, but keep information on disk
        """
        for module in self.compute():
            await module.instance().project_closing(self)
        await self._close_and_clean(False)
        for module in self.compute():
            await module.instance().project_closed(self)

        try:
            # Best-effort cleanup of the temporary directory
            if os.path.exists(self.tmp_working_directory()):
                shutil.rmtree(self.tmp_working_directory())
        except OSError:
            pass

    async def _close_and_clean(self, cleanup):
        """
        Closes the project, and cleanup the disk if cleanup is True

        :param cleanup: Whether to delete the project directory
        """
        # Close all the nodes concurrently.
        # asyncio.ensure_future replaces asyncio.async, which is a syntax
        # error since Python 3.7 ("async" became a keyword).
        tasks = [asyncio.ensure_future(node.manager.close_node(node.id)) for node in self._nodes]

        if tasks:
            done, _ = await asyncio.wait(tasks)
            for future in done:
                try:
                    future.result()
                except (Exception, GeneratorExit) as e:
                    log.error("Could not close node {}".format(e), exc_info=1)

        if cleanup and os.path.exists(self.path):
            try:
                # rmtree is blocking; run it off the event loop
                await wait_run_in_executor(shutil.rmtree, self.path)
                log.info("Project {id} with path '{path}' deleted".format(path=self._path, id=self._id))
            except OSError as e:
                raise aiohttp.web.HTTPInternalServerError(text="Could not delete the project directory: {}".format(e))
        else:
            log.info("Project {id} with path '{path}' closed".format(path=self._path, id=self._id))

        if self._used_tcp_ports:
            log.warning("Project {} has TCP ports still in use: {}".format(self.id, self._used_tcp_ports))
        if self._used_udp_ports:
            log.warning("Project {} has UDP ports still in use: {}".format(self.id, self._used_udp_ports))

        # clean the remaining ports that have not been cleaned by their respective node.
        port_manager = PortManager.instance()
        for port in self._used_tcp_ports.copy():
            port_manager.release_tcp_port(port, self)
        for port in self._used_udp_ports.copy():
            port_manager.release_udp_port(port, self)

    async def commit(self):
        """
        Writes project changes on disk
        """
        while self._nodes_to_destroy:
            node = self._nodes_to_destroy.pop()
            await node.delete()
            self.remove_node(node)
        for module in self.compute():
            await module.instance().project_committed(self)

    async def delete(self):
        """
        Removes project from disk
        """
        for module in self.compute():
            await module.instance().project_closing(self)
        await self._close_and_clean(True)
        for module in self.compute():
            await module.instance().project_closed(self)

    def compute(self):
        """
        Returns all loaded modules from compute.
        """
        # We import it at the last time to avoid circular dependencies
        from ..compute import MODULES
        return MODULES

    def emit(self, action, event):
        """
        Send an event to all the clients listening for notifications

        :param action: Action name
        :param event: Event to send
        """
        NotificationManager.instance().emit(action, event, project_id=self.id)

    async def list_files(self):
        """
        :returns: Array of files in project without temporary files.
            The files are dictionaries {"path": "test.bin", "md5sum": "aaaaa"}
        """
        files = []
        for dirpath, dirnames, filenames in os.walk(self.path):
            for filename in filenames:
                if filename.endswith(".ghost"):
                    # .ghost files are temporary and must not be listed
                    continue
                path = os.path.normpath(os.path.join(os.path.relpath(dirpath, self.path), filename))
                try:
                    # Hashing is blocking I/O; run it in an executor
                    md5sum = await wait_run_in_executor(self._hash_file, os.path.join(dirpath, filename))
                except OSError:
                    # File vanished or is unreadable: skip it
                    continue
                files.append({"path": path, "md5sum": md5sum})
        return files

    def _hash_file(self, path):
        """
        Compute the md5 hash of a file.

        :param path: path of the file to hash
        :returns: hexadecimal md5 digest
        """
        m = hashlib.md5()
        with open(path, "rb") as f:
            # 64 KiB chunks: the previous 128-byte reads caused one syscall
            # per 128 bytes; the digest is identical either way.
            while True:
                buf = f.read(65536)
                if not buf:
                    break
                m.update(buf)
        return m.hexdigest()

    def export(self, include_images=False):
        """
        Export the project as zip. It's a ZipStream object.
        The file will be read chunk by chunk when you iterate over the zip.
        It will ignore some files like snapshots and temporary data.

        :param include_images: also embed node images in the archive
        :returns: ZipStream object
        """
        z = zipstream.ZipFile()
        # topdown allows to modify the list of directory in order to ignore the directory
        for root, dirs, files in os.walk(self._path, topdown=True):
            # Remove snapshots and capture
            if os.path.split(root)[-1:][0] == "project-files":
                dirs[:] = [d for d in dirs if d not in ("snapshots", "tmp")]

            # Ignore log files and OS noise
            files = [f for f in files if not f.endswith('_log.txt') and not f.endswith('.log') and f != '.DS_Store']

            for file in files:
                path = os.path.join(root, file)
                # Check the file is readable before streaming it
                try:
                    open(path).close()
                except OSError as e:
                    msg = "Could not export file {}: {}".format(path, e)
                    # log.warn is a deprecated alias of log.warning
                    log.warning(msg)
                    self.emit("log.warning", {"message": msg})
                    continue
                # We rename the .gns3 project.gns3 to avoid the task to the client to guess the file name
                if file.endswith(".gns3"):
                    self._export_project_file(path, z, include_images)
                else:
                    # We merge the data from all server in the same project-files directory
                    node_directory = os.path.join(self._path, "servers", "vm")
                    if os.path.commonprefix([root, node_directory]) == node_directory:
                        z.write(path, os.path.relpath(path, node_directory))
                    else:
                        z.write(path, os.path.relpath(path, self._path))
        return z

    def _export_images(self, image, type, z):
        """
        Take a project file (.gns3) and export images to the zip

        :param image: Image path
        :param type: Type of image
        :param z: Zipfile instance for the export
        """
        from . import MODULES

        for module in MODULES:
            try:
                img_directory = module.instance().get_images_directory()
            except NotImplementedError:
                # Some modules don't have images
                continue

            directory = os.path.split(img_directory)[-1:][0]
            if os.path.exists(image):
                path = image
            else:
                path = os.path.join(img_directory, image)
            if os.path.exists(path):
                arcname = os.path.join("images", directory, os.path.basename(image))
                z.write(path, arcname)
                break

    def _export_project_file(self, path, z, include_images):
        """
        Take a project file (.gns3) and patch it for the export:
        image paths are reduced to their basename and the file itself
        is stored in the archive as "project.gns3".

        :param path: Path of the .gns3
        :param z: Zipfile instance for the export
        :param include_images: also embed the referenced images
        """
        with open(path) as f:
            topology = json.load(f)
        if "topology" in topology and "nodes" in topology["topology"]:
            for node in topology["topology"]["nodes"]:
                if "properties" in node and node["type"] != "DockerVM":
                    for prop, value in node["properties"].items():
                        if prop.endswith("image"):
                            node["properties"][prop] = os.path.basename(value)
                            if include_images is True:
                                self._export_images(value, node["type"], z)
        z.writestr("project.gns3", json.dumps(topology).encode())

    def import_zip(self, stream, gns3vm=True):
        """
        Import a project contain in a zip file

        :param stream: A io.BytesIO of the zipfile
        :param gns3vm: True move Docker, IOU and Qemu to the GNS3 VM
        """
        with zipfile.ZipFile(stream) as myzip:
            # NOTE(review): the archive may come from an untrusted user.
            # ZipFile.extractall sanitizes absolute paths and ".." members,
            # but the extracted content itself is not validated.
            myzip.extractall(self.path)

        project_file = os.path.join(self.path, "project.gns3")
        if os.path.exists(project_file):
            with open(project_file) as f:
                topology = json.load(f)
                topology["project_id"] = self.id
                topology["name"] = self.name
                topology.setdefault("topology", {})
                topology["topology"].setdefault("nodes", [])
                topology["topology"]["servers"] = [
                    {
                        "id": 1,
                        "local": True,
                        "vm": False
                    }
                ]

            # By default all node run on local server
            for node in topology["topology"]["nodes"]:
                node["server_id"] = 1

            if gns3vm:
                # Move to servers/vm directory the data that should be import on remote server
                modules_to_vm = {
                    "qemu": "QemuVM",
                    "iou": "IOUDevice",
                    "docker": "DockerVM"
                }
                node_directory = os.path.join(self.path, "servers", "vm", "project-files")
                vm_server_use = False

                for module, vm_type in modules_to_vm.items():
                    module_directory = os.path.join(self.path, "project-files", module)
                    if os.path.exists(module_directory):
                        os.makedirs(node_directory, exist_ok=True)
                        shutil.move(module_directory, os.path.join(node_directory, module))

                        # Patch node to use the GNS3 VM
                        for node in topology["topology"]["nodes"]:
                            if node["type"] == vm_type:
                                node["server_id"] = 2
                                vm_server_use = True

                # We use the GNS3 VM. We need to add the server to the list
                if vm_server_use:
                    topology["topology"]["servers"].append({
                        "id": 2,
                        "vm": True,
                        "local": False
                    })

            # Write the modified topology
            with open(project_file, "w") as f:
                json.dump(topology, f, indent=4)

            # Rename to a human distinctive name
            shutil.move(project_file, os.path.join(self.path, self.name + ".gns3"))
        if os.path.exists(os.path.join(self.path, "images")):
            self._import_images()

    def _import_images(self):
        """
        Copy images to the images directory or delete them if they
        already exists.
        """
        image_dir = self._config().get("images_path")

        root = os.path.join(self.path, "images")
        for (dirpath, dirnames, filenames) in os.walk(root):
            for filename in filenames:
                path = os.path.join(dirpath, filename)
                dst = os.path.join(image_dir, os.path.relpath(path, root))
                os.makedirs(os.path.dirname(dst), exist_ok=True)
                shutil.move(path, dst)

        # Cleanup the project
        shutil.rmtree(root, ignore_errors=True)