mirror of https://github.com/GNS3/gns3-server
synced 2024-12-24 15:58:08 +00:00
commit 7b25ce04e8
@@ -20,6 +20,8 @@ import asyncio
 import json
 import os
 import psutil
+import tempfile
+import zipfile

 from ...web.route import Route
 from ...schemas.project import PROJECT_OBJECT_SCHEMA, PROJECT_CREATE_SCHEMA, PROJECT_UPDATE_SCHEMA, PROJECT_FILE_LIST_SCHEMA, PROJECT_LIST_SCHEMA
@@ -301,7 +303,7 @@ class ProjectHandler:
         except FileNotFoundError:
             raise aiohttp.web.HTTPNotFound()
         except PermissionError:
-            raise aiohttp.web.HTTPForbidden
+            raise aiohttp.web.HTTPForbidden()

     @classmethod
     @Route.post(
@@ -341,4 +343,70 @@ class ProjectHandler:
         except FileNotFoundError:
             raise aiohttp.web.HTTPNotFound()
         except PermissionError:
-            raise aiohttp.web.HTTPForbidden
+            raise aiohttp.web.HTTPForbidden()
+
+    @classmethod
+    @Route.get(
+        r"/projects/{project_id}/export",
+        description="Export a project as a portable archive",
+        parameters={
+            "project_id": "The UUID of the project",
+        },
+        raw=True,
+        status_codes={
+            200: "Return the file",
+            404: "The project doesn't exist"
+        })
+    def export_project(request, response):
+
+        pm = ProjectManager.instance()
+        project = pm.get_project(request.match_info["project_id"])
+        response.content_type = 'application/gns3z'
+        response.headers['CONTENT-DISPOSITION'] = 'attachment; filename="{}.gns3z"'.format(project.name)
+        response.enable_chunked_encoding()
+        # Very important: do not send a content length, otherwise Qt closes the connection (curl, however, can still consume the feed)
+        response.content_length = None
+        response.start(request)
+
+        for data in project.export():
+            response.write(data)
+            yield from response.drain()
+
+        yield from response.write_eof()
+
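For illustration only (not part of this diff): because the handler streams the archive with chunked encoding and deliberately sends no Content-Length, a client should read the body incrementally instead of buffering it. A minimal sketch using only the Python standard library; the server address, URL prefix and project UUID below are assumed values, not taken from this commit.

    # Illustrative client-side sketch -- not part of this commit.
    # The base URL and the project UUID are made up; the exact path prefix
    # depends on how Route mounts the handler on the running server.
    import shutil
    import urllib.request

    project_id = "6f1aa55a-57b4-4c23-9c3d-3f3d9d3d9d3d"  # hypothetical UUID
    url = "http://localhost:3080/projects/{}/export".format(project_id)

    # The response is chunked, so copy it to disk as a stream instead of
    # reading the whole archive into memory.
    with urllib.request.urlopen(url) as response, open("project.gns3z", "wb") as out:
        shutil.copyfileobj(response, out, length=64 * 1024)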
+    @classmethod
+    @Route.post(
+        r"/projects/{project_id}/import",
+        description="Import a project from a portable archive",
+        parameters={
+            "project_id": "The UUID of the project",
+        },
+        raw=True,
+        status_codes={
+            201: "Project imported"
+        })
+    def import_project(request, response):
+
+        pm = ProjectManager.instance()
+        project_id = request.match_info["project_id"]
+        project = pm.create_project(project_id=project_id)
+
+        # Write the content to a temporary location and extract everything
+        # afterwards. Streaming the extraction would be more efficient, but
+        # that is not supported by Python's zipfile module.
+        #
+        # A spooled temporary file is kept in RAM until max_size is exceeded,
+        # then it is rolled over to disk.
+        try:
+            with tempfile.SpooledTemporaryFile(max_size=10000) as temp:
+                while True:
+                    packet = yield from request.content.read(512)
+                    if not packet:
+                        break
+                    temp.write(packet)
+
+                with zipfile.ZipFile(temp) as myzip:
+                    myzip.extractall(project.path)
+        except OSError as e:
+            raise aiohttp.web.HTTPInternalServerError(text="Could not import the project: {}".format(e))
+
+        response.set_status(201)
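Similarly, for illustration only (not part of this diff): importing is a plain POST of the raw zip bytes to the import URL, and the handler answers 201 once the archive has been extracted into the new project directory. Same assumptions about server address and path prefix as above.

    # Illustrative client-side sketch -- not part of this commit.
    import urllib.request
    import uuid

    project_id = str(uuid.uuid4())  # the server creates the project under this ID
    url = "http://localhost:3080/projects/{}/import".format(project_id)

    with open("project.gns3z", "rb") as f:
        archive = f.read()

    req = urllib.request.Request(url, data=archive, method="POST")
    with urllib.request.urlopen(req) as response:
        print(response.status)  # expected: 201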
@@ -20,6 +20,8 @@ import os
 import shutil
 import asyncio
 import hashlib
+import zipstream
+import zipfile

 from uuid import UUID, uuid4
 from .port_manager import PortManager
@@ -507,3 +509,34 @@ class Project:
                     break
                 m.update(buf)
         return m.hexdigest()
+
+    def export(self):
+        """
+        Export the project as a zip archive. The return value is a ZipStream
+        object: the file is read chunk by chunk as you iterate over it.
+
+        Some files are ignored, such as snapshots and log files.
+
+        :returns: ZipStream object
+        """
+
+        z = zipstream.ZipFile()
+        # topdown=True allows us to modify the list of directories in place
+        # in order to skip some of them
+        for root, dirs, files in os.walk(self._path, topdown=True):
+            # Remove snapshots
+            if "project-files" in root:
+                dirs[:] = [d for d in dirs if d != "snapshots"]
+
+            # Ignore log files and OS noise
+            files = [f for f in files if not f.endswith('_log.txt') and not f.endswith('.log') and f != '.DS_Store']
+
+            for file in files:
+                path = os.path.join(root, file)
+                # Rename the .gns3 file to project.gns3 so the client does not
+                # have to guess the project file name when importing
+                if file.endswith(".gns3"):
+                    z.write(path, "project.gns3")
+                else:
+                    z.write(path, os.path.relpath(path, self._path))
+        return z
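For background, also illustration only (not part of this diff): zipstream.ZipFile merely queues the files passed to write() and produces the compressed bytes lazily while it is iterated, which is what lets export_project() send the archive chunk by chunk without first building it on disk. A minimal sketch of that pattern with placeholder file names:

    # Illustrative sketch of the lazy zipstream pattern -- not part of this
    # commit. "config.txt" and "disk.img" are placeholder file names that must
    # exist when the stream is iterated.
    import zipstream

    z = zipstream.ZipFile()
    z.write("config.txt", "config.txt")     # queued, nothing is compressed yet
    z.write("disk.img", "images/disk.img")

    with open("archive.zip", "wb") as out:
        for chunk in z:                     # compression happens here, chunk by chunk
            out.write(chunk)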
@@ -79,8 +79,6 @@ class ProjectManager:

         if project_id is not None and project_id in self._projects:
             return self._projects[project_id]
-        # FIXME: should we have an error?
-        #raise aiohttp.web.HTTPConflict(text="Project ID {} is already in use on this server".format(project_id))
         project = Project(name=name, project_id=project_id, path=path, temporary=temporary)
         self._projects[project.id] = project
         return project
@@ -3,3 +3,4 @@ aiohttp==0.19.0
 Jinja2>=2.7.3
 raven>=5.2.0
 psutil>=3.0.0
+zipstream>=1.1.3
@@ -23,6 +23,7 @@ import uuid
 import os
 import asyncio
 import aiohttp
+import zipfile

 from unittest.mock import patch
 from tests.utils import asyncio_patch
@@ -283,3 +284,40 @@ def test_write_file(server, tmpdir):

     response = server.post("/projects/{project_id}/files/../hello".format(project_id=project.id), body="universe", raw=True)
     assert response.status == 403
+
+
+def test_export(server, tmpdir, loop, project):
+
+    os.makedirs(project.path, exist_ok=True)
+    with open(os.path.join(project.path, 'a'), 'w+') as f:
+        f.write('hello')
+
+    response = server.get("/projects/{project_id}/export".format(project_id=project.id), raw=True)
+    assert response.status == 200
+    assert response.headers['CONTENT-TYPE'] == 'application/gns3z'
+    assert response.headers['CONTENT-DISPOSITION'] == 'attachment; filename="{}.gns3z"'.format(project.name)
+
+    with open(str(tmpdir / 'project.zip'), 'wb+') as f:
+        f.write(response.body)
+
+    with zipfile.ZipFile(str(tmpdir / 'project.zip')) as myzip:
+        with myzip.open("a") as myfile:
+            content = myfile.read()
+            assert content == b"hello"
+
+
+def test_import(server, tmpdir, loop):
+
+    with zipfile.ZipFile(str(tmpdir / "test.zip"), 'w') as myzip:
+        myzip.writestr("demo", b"hello")
+
+    project_id = str(uuid.uuid4())
+
+    with open(str(tmpdir / "test.zip"), "rb") as f:
+        response = server.post("/projects/{project_id}/import".format(project_id=project_id), body=f.read(), raw=True)
+        assert response.status == 201
+
+    project = ProjectManager.instance().get_project(project_id=project_id)
+    with open(os.path.join(project.path, "demo")) as f:
+        content = f.read()
+        assert content == "hello"
@@ -219,7 +219,6 @@ def test_backup_projects(server, tmpdir, loop):
     assert response.headers['CONTENT-TYPE'] == 'application/x-gtar'

     with open(str(tmpdir / 'projects.tar'), 'wb+') as f:
-        print(len(response.body))
         f.write(response.body)

     tar = tarfile.open(str(tmpdir / 'projects.tar'), 'r')
@@ -20,6 +20,7 @@ import os
 import asyncio
 import pytest
 import aiohttp
+import zipfile
 from uuid import uuid4
 from unittest.mock import patch

@@ -258,3 +259,37 @@ def test_list_files(tmpdir, loop):
             "md5sum": "098f6bcd4621d373cade4e832627b4f6"
         }
     ]
+
+
+def test_export(tmpdir):
+    project = Project()
+    path = project.path
+    os.makedirs(os.path.join(path, "vm-1", "dynamips"))
+
+    # The .gns3 file should be renamed to project.gns3 in order to simplify import
+    with open(os.path.join(path, "test.gns3"), 'w+') as f:
+        f.write("{}")
+
+    with open(os.path.join(path, "vm-1", "dynamips", "test"), 'w+') as f:
+        f.write("HELLO")
+    with open(os.path.join(path, "vm-1", "dynamips", "test_log.txt"), 'w+') as f:
+        f.write("LOG")
+    os.makedirs(os.path.join(path, "project-files", "snapshots"))
+    with open(os.path.join(path, "project-files", "snapshots", "test"), 'w+') as f:
+        f.write("WORLD")
+
+    z = project.export()
+
+    with open(str(tmpdir / 'zipfile.zip'), 'wb') as f:
+        for data in z:
+            f.write(data)
+
+    with zipfile.ZipFile(str(tmpdir / 'zipfile.zip')) as myzip:
+        with myzip.open("vm-1/dynamips/test") as myfile:
+            content = myfile.read()
+            assert content == b"HELLO"
+
+        assert 'test.gns3' not in myzip.namelist()
+        assert 'project.gns3' in myzip.namelist()
+        assert 'project-files/snapshots/test' not in myzip.namelist()
+        assert 'vm-1/dynamips/test_log.txt' not in myzip.namelist()