1
0
mirror of https://github.com/GNS3/gns3-server synced 2024-11-15 12:59:06 +00:00

Import API

This commit is contained in:
Julien Duponchelle 2016-03-30 16:52:36 +02:00
parent 879591eaf5
commit bd71f0cf4c
No known key found for this signature in database
GPG Key ID: F1E2485547D4595D
3 changed files with 60 additions and 6 deletions

View File

@ -20,6 +20,8 @@ import asyncio
import json
import os
import psutil
import tempfile
import zipfile
from ...web.route import Route
from ...schemas.project import PROJECT_OBJECT_SCHEMA, PROJECT_CREATE_SCHEMA, PROJECT_UPDATE_SCHEMA, PROJECT_FILE_LIST_SCHEMA, PROJECT_LIST_SCHEMA
@ -301,7 +303,7 @@ class ProjectHandler:
except FileNotFoundError:
raise aiohttp.web.HTTPNotFound()
except PermissionError:
raise aiohttp.web.HTTPForbidden
raise aiohttp.web.HTTPForbidden()
@classmethod
@Route.post(
@ -341,7 +343,7 @@ class ProjectHandler:
except FileNotFoundError:
raise aiohttp.web.HTTPNotFound()
except PermissionError:
raise aiohttp.web.HTTPForbidden
raise aiohttp.web.HTTPForbidden()
@classmethod
@Route.get(
@ -353,9 +355,9 @@ class ProjectHandler:
raw=True,
status_codes={
200: "Return the file",
404: "The path doesn't exist"
404: "The project doesn't exist"
})
def export(request, response):
def export_project(request, response):
pm = ProjectManager.instance()
project = pm.get_project(request.match_info["project_id"])
@ -371,3 +373,40 @@ class ProjectHandler:
yield from response.drain()
yield from response.write_eof()
@classmethod
@Route.post(
    r"/projects/{project_id}/import",
    description="Import a project from a portable archive",
    parameters={
        "project_id": "The UUID of the project",
    },
    raw=True,
    status_codes={
        # The handler sets 201 on success; the old doc claimed 200.
        201: "Project imported"
    })
def import_project(request, response):
    """Create a new project and populate it from an uploaded ZIP archive.

    The raw request body is the archive. It is buffered into a spooled
    temporary file (zipfile needs a seekable object and the standard
    library cannot extract from a stream), then every entry is extracted
    into the new project's directory. Responds 201 on success.
    """

    pm = ProjectManager.instance()
    project_id = request.match_info["project_id"]
    project = pm.create_project(project_id=project_id)

    # SpooledTemporaryFile keeps the upload in RAM until it grows past
    # max_size, then transparently spills to disk.
    try:
        with tempfile.SpooledTemporaryFile(max_size=10000) as temp:
            while True:
                packet = yield from request.content.read(512)
                if not packet:
                    break
                temp.write(packet)
            # NOTE(review): extractall() does not guard against entries
            # with absolute paths or ".." components (zip-slip); the
            # archive is assumed to come from a trusted client — confirm.
            with zipfile.ZipFile(temp) as myzip:
                myzip.extractall(project.path)
    except zipfile.BadZipFile:
        # A truncated or non-ZIP body previously escaped as an unhandled
        # exception (BadZipFile is not an OSError); surface it to the
        # client instead of crashing the handler.
        raise aiohttp.web.HTTPConflict(text="Invalid portable archive")
    except OSError as e:
        raise aiohttp.web.HTTPInternalServerError(text="Could not import the project: {}".format(e))

    response.set_status(201)

View File

@ -79,8 +79,6 @@ class ProjectManager:
if project_id is not None and project_id in self._projects:
return self._projects[project_id]
# FIXME: should we have an error?
#raise aiohttp.web.HTTPConflict(text="Project ID {} is already in use on this server".format(project_id))
project = Project(name=name, project_id=project_id, path=path, temporary=temporary)
self._projects[project.id] = project
return project

View File

@ -304,3 +304,20 @@ def test_export(server, tmpdir, loop, project):
with myzip.open("a") as myfile:
content = myfile.read()
assert content == b"hello"
def test_import(server, tmpdir, loop):
    """POSTing a portable archive creates the project and extracts its files."""

    # Build a one-entry archive on disk to use as the upload payload.
    archive_path = str(tmpdir / "test.zip")
    with zipfile.ZipFile(archive_path, 'w') as archive:
        archive.writestr("demo", b"hello")

    project_id = str(uuid.uuid4())
    with open(archive_path, "rb") as archive_file:
        payload = archive_file.read()

    response = server.post("/projects/{project_id}/import".format(project_id=project_id), body=payload, raw=True)
    assert response.status == 201

    # The archive entry must have been extracted into the project directory.
    project = ProjectManager.instance().get_project(project_id=project_id)
    with open(os.path.join(project.path, "demo")) as extracted:
        assert extracted.read() == "hello"