Export project with the GNS3VM

pull/638/head
Julien Duponchelle 8 years ago
parent 88ffd43c97
commit 9b499dc51e
No known key found for this signature in database
GPG Key ID: CE8B29639E07F5E8

@ -270,7 +270,23 @@ class Compute:
}
@asyncio.coroutine
def download_file(self, project, path):
    """
    Fetch a project file from the compute and expose it for download.

    :param project: A project object
    :param path: Path of the file inside the project
    :returns: A file stream (the HTTP response content)
    :raises aiohttp.web.HTTPNotFound: when the compute does not have the file
    """
    url = self._getUrl("/projects/{}/files/{}".format(project.id, path))
    resp = yield from self._session().request("GET", url, auth=self._auth)
    # Translate the compute's 404 into a controller-side HTTP error
    if resp.status == 404:
        raise aiohttp.web.HTTPNotFound(text="{} not found on compute".format(path))
    return resp.content
@asyncio.coroutine
def stream_file(self, project, path):
"""
Read file of a project and stream it
@ -447,3 +463,13 @@ class Compute:
if image not in [i['filename'] for i in images]:
images.append({"filename": image, "path": image})
return images
@asyncio.coroutine
def list_files(self, project):
    """
    Ask this compute for the list of files it stores for a project.

    :param project: A project object
    :returns: Parsed JSON body of the compute's answer (list of file entries)
    """
    endpoint = "/projects/{}/files".format(project.id)
    # Listing can be slow on big projects, hence the generous timeout
    response = yield from self.http_query("GET", endpoint, timeout=120)
    return response.json

@ -17,12 +17,15 @@
import os
import json
import asyncio
import aiohttp
import zipfile
import tempfile
import zipstream
def export_project(project, include_images=False):
@asyncio.coroutine
def export_project(project, temporary_dir, include_images=False):
"""
Export the project as zip. It's a ZipStream object.
The file will be read chunk by chunk when you iterate on
@ -30,6 +33,7 @@ def export_project(project, include_images=False):
It will ignore some files like snapshots and temporary files
:param temporary_dir: A temporary dir where to store intermediate data
:returns: ZipStream object
"""
@ -45,12 +49,7 @@ def export_project(project, include_images=False):
_export_project_file(project, os.path.join(project._path, file), z, include_images)
for root, dirs, files in os.walk(project._path, topdown=True):
# Remove snapshots and capture
if os.path.split(root)[-1:][0] == "project-files":
dirs[:] = [d for d in dirs if d not in ("snapshots", "tmp")]
# Ignore log files and OS noise
files = [f for f in files if not f.endswith('_log.txt') and not f.endswith('.log') and f != '.DS_Store']
files = [f for f in files if not _filter_files(os.path.join(root, f))]
for file in files:
path = os.path.join(root, file)
@ -66,9 +65,44 @@ def export_project(project, include_images=False):
pass
else:
z.write(path, os.path.relpath(path, project._path), compress_type=zipfile.ZIP_DEFLATED)
for compute in project.computes:
if compute.id == "vm":
compute_files = yield from compute.list_files(project)
for compute_file in compute_files:
if not _filter_files(compute_file["path"]):
(fp, temp_path) = tempfile.mkstemp(dir=temporary_dir)
stream = yield from compute.download_file(project, compute_file["path"])
while True:
data = yield from stream.read(512)
if not data:
break
fp.write(data)
z.write(temp_path, arcname=compute_file["path"], compress_type=zipfile.ZIP_DEFLATED)
return z
def _filter_files(path):
"""
:returns: True if file should not be included in the final archive
"""
s = os.path.normpath(path).split(os.path.sep)
try:
i = s.index("project-files")
if s[i + 1] in ("tmp", "captures", "snapshots"):
return True
except (ValueError, IndexError):
pass
file_name = os.path.basename(path)
# Ignore log files and OS noises
if file_name.endswith('_log.txt') or file_name.endswith('.log') or file_name == '.DS_Store':
return True
return False
def _export_project_file(project, path, z, include_images):
"""
Take a project file (.gns3) and patch it for the export

@ -149,9 +149,9 @@ class Project:
@property
def computes(self):
"""
:return: Dictionary of computes used by the project
:return: List of computes used by the project
"""
return self._computes
return self._project_created_on_compute
def remove_allocated_node_name(self, name):
"""

@ -138,4 +138,4 @@ class UDPLink(Link):
"""
if self._capture_node:
compute = self._capture_node["node"].compute
return compute.steam_file(self._project, "tmp/captures/" + self._capture_file_name)
return compute.stream_file(self._project, "tmp/captures/" + self._capture_file_name)

@ -235,24 +235,23 @@ class ProjectHandler:
controller = Controller.instance()
project = controller.get_project(request.match_info["project_id"])
started = False
for data in export_project(project, include_images=bool(request.GET.get("include_images", "0"))):
with tempfile.TemporaryDirectory() as tmp_dir:
datas = yield from export_project(project, tmp_dir, include_images=bool(request.GET.get("include_images", "0")))
# We need to do this now because the export could fail and raise an HTTP error;
# that is why starting the response needs to happen as late as possible
if not started:
response.content_type = 'application/gns3project'
response.headers['CONTENT-DISPOSITION'] = 'attachment; filename="{}.gns3project"'.format(project.name)
response.enable_chunked_encoding()
# Very important: do not send a content length otherwise QT closes the connection (curl can consume the feed)
response.content_length = None
response.start(request)
started = True
response.write(data)
yield from response.drain()
yield from response.write_eof()
response.content_type = 'application/gns3project'
response.headers['CONTENT-DISPOSITION'] = 'attachment; filename="{}.gns3project"'.format(project.name)
response.enable_chunked_encoding()
# Very important: do not send a content length otherwise QT closes the connection (curl can consume the feed)
response.content_length = None
response.start(request)
for data in datas:
response.write(data)
yield from response.drain()
yield from response.write_eof()
@Route.post(
r"/projects/{project_id}/import",

@ -257,7 +257,7 @@ def test_streamFile(project, async_run, compute):
response = MagicMock()
response.status = 200
with asyncio_patch("aiohttp.ClientSession.request", return_value=response) as mock:
async_run(compute.steam_file(project, "test/titi"))
async_run(compute.stream_file(project, "test/titi"))
mock.assert_called_with("GET", "https://example.com:84/v2/compute/projects/{}/stream/test/titi".format(project.id), auth=None)

@ -27,7 +27,7 @@ from unittest.mock import MagicMock
from tests.utils import AsyncioMagicMock
from gns3server.controller.project import Project
from gns3server.controller.export_project import export_project
from gns3server.controller.export_project import export_project, _filter_files
@pytest.fixture
@ -48,7 +48,16 @@ def node(controller, project, async_run):
return node
def test_export(tmpdir, project):
def test_filter_files():
    """Check which paths are kept in, or dropped from, an export archive."""
    kept = ["hello/world"]
    dropped = [
        "project-files/tmp",
        "project-files/test_log.txt",
        "project-files/test.log",
        "test/project-files/snapshots",
        "test/project-files/snapshots/test.gns3p",
    ]
    for path in kept:
        assert not _filter_files(path)
    for path in dropped:
        assert _filter_files(path)
def test_export(tmpdir, project, async_run):
path = project.path
os.makedirs(os.path.join(path, "vm-1", "dynamips"))
@ -64,7 +73,7 @@ def test_export(tmpdir, project):
with open(os.path.join(path, "project-files", "snapshots", "test"), 'w+') as f:
f.write("WORLD")
z = export_project(project)
z = async_run(export_project(project, str(tmpdir)))
with open(str(tmpdir / 'zipfile.zip'), 'wb') as f:
for data in z:
@ -81,7 +90,7 @@ def test_export(tmpdir, project):
assert 'vm-1/dynamips/test_log.txt' not in myzip.namelist()
def test_export_disallow_running(tmpdir, project, node):
def test_export_disallow_running(tmpdir, project, node, async_run):
"""
Disallow export when a node is running
"""
@ -103,10 +112,10 @@ def test_export_disallow_running(tmpdir, project, node):
node._status = "started"
with pytest.raises(aiohttp.web.HTTPConflict):
z = export_project(project)
z = async_run(export_project(project, str(tmpdir)))
def test_export_disallow_some_type(tmpdir, project):
def test_export_disallow_some_type(tmpdir, project, async_run):
"""
Disallow export for some node types
"""
@ -127,10 +136,10 @@ def test_export_disallow_some_type(tmpdir, project):
json.dump(topology, f)
with pytest.raises(aiohttp.web.HTTPConflict):
z = export_project(project)
z = async_run(export_project(project, str(tmpdir)))
def test_export_fix_path(tmpdir, project):
def test_export_fix_path(tmpdir, project, async_run):
"""
Fix absolute image path
"""
@ -153,7 +162,7 @@ def test_export_fix_path(tmpdir, project):
with open(os.path.join(path, "test.gns3"), 'w+') as f:
json.dump(topology, f)
z = export_project(project)
z = async_run(export_project(project, str(tmpdir)))
with open(str(tmpdir / 'zipfile.zip'), 'wb') as f:
for data in z:
f.write(data)
@ -165,7 +174,7 @@ def test_export_fix_path(tmpdir, project):
assert topology["topology"]["nodes"][0]["properties"]["image"] == "c3725-adventerprisek9-mz.124-25d.image"
def test_export_with_images(tmpdir, project):
def test_export_with_images(tmpdir, project, async_run):
"""
Fix absolute image path
"""
@ -192,7 +201,7 @@ def test_export_with_images(tmpdir, project):
json.dump(topology, f)
with patch("gns3server.compute.Dynamips.get_images_directory", return_value=str(tmpdir / "IOS"),):
z = export_project(project, include_images=True)
z = async_run(export_project(project, str(tmpdir), include_images=True))
with open(str(tmpdir / 'zipfile.zip'), 'wb') as f:
for data in z:
f.write(data)

@ -169,4 +169,4 @@ def test_read_pcap_from_source(project, async_run):
assert link._capture_node is not None
async_run(link.read_pcap_from_source())
link._capture_node["node"].compute.steam_file.assert_called_with(project, "tmp/captures/" + link._capture_file_name)
link._capture_node["node"].compute.stream_file.assert_called_with(project, "tmp/captures/" + link._capture_file_name)

Loading…
Cancel
Save