Merge pull request #189 from GNS3/project_files_api
API for list & download files of a project
Commit aeda0bec16
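
The commit adds two read-only endpoints: GET /projects/{project_id}/files returns a JSON list of {"path", "md5sum"} entries, and GET /projects/{project_id}/files/{path} streams a file back in chunks. A minimal client-side sketch using only the standard library (the base URL, any API version prefix, the project UUID and the output filename are placeholders, not part of this commit):

import json
import shutil
import urllib.request

# Assumptions: server address/port and any API version prefix are
# deployment-specific; PROJECT_ID is a placeholder UUID.
BASE = "http://localhost:8000"
PROJECT_ID = "a1e920ca-338a-4e9f-b363-aa607b09dd80"

# GET /projects/{project_id}/files -> JSON list of {"path": ..., "md5sum": ...}
with urllib.request.urlopen("{}/projects/{}/files".format(BASE, PROJECT_ID)) as resp:
    files = json.loads(resp.read().decode("utf-8"))

for entry in files:
    print(entry["path"], entry["md5sum"])

# GET /projects/{project_id}/files/{path} -> raw file body, streamed
if files:
    url = "{}/projects/{}/files/{}".format(BASE, PROJECT_ID, files[0]["path"])
    with urllib.request.urlopen(url) as resp, open("downloaded.bin", "wb") as out:
        shutil.copyfileobj(resp, out)
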
@@ -15,13 +15,16 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
+import aiohttp
 import asyncio
 import json
+import os
 
 from ...web.route import Route
-from ...schemas.project import PROJECT_OBJECT_SCHEMA, PROJECT_CREATE_SCHEMA, PROJECT_UPDATE_SCHEMA
+from ...schemas.project import PROJECT_OBJECT_SCHEMA, PROJECT_CREATE_SCHEMA, PROJECT_UPDATE_SCHEMA, PROJECT_FILE_LIST_SCHEMA
 from ...modules.project_manager import ProjectManager
 from ...modules import MODULES
+from ...utils.asyncio import wait_run_in_executor
 
 import logging
 log = logging.getLogger()
@@ -198,3 +201,71 @@ class ProjectHandler:
                 response.write("{\"action\": \"ping\"}\n".encode("utf-8"))
         project.stop_listen_queue(queue)
         ProjectHandler._notifications_listening -= 1
+
+    @classmethod
+    @Route.get(
+        r"/projects/{project_id}/files",
+        description="List files of a project",
+        parameters={
+            "project_id": "The UUID of the project",
+        },
+        status_codes={
+            200: "Return list of files",
+            404: "The project doesn't exist"
+        },
+        output=PROJECT_FILE_LIST_SCHEMA)
+    def list_files(request, response):
+
+        pm = ProjectManager.instance()
+        project = pm.get_project(request.match_info["project_id"])
+        files = yield from project.list_files()
+        response.json(files)
+        response.set_status(200)
+
+    @classmethod
+    @Route.get(
+        r"/projects/{project_id}/files/{path:.+}",
+        description="Get a file of a project",
+        parameters={
+            "project_id": "The UUID of the project",
+        },
+        status_codes={
+            200: "Return the file",
+            403: "Permission denied",
+            404: "The file doesn't exist"
+        })
+    def get_file(request, response):
+
+        pm = ProjectManager.instance()
+        project = pm.get_project(request.match_info["project_id"])
+        path = request.match_info["path"]
+        path = os.path.normpath(path)
+
+        # Raise an error if the user tries to escape the project directory
+        if path[0] == ".":
+            raise aiohttp.web.HTTPForbidden()
+        path = os.path.join(project.path, path)
+
+        response.content_type = "application/octet-stream"
+        response.set_status(200)
+        response.enable_chunked_encoding()
+        # Very important: do not send a content length, otherwise Qt closes the connection (but curl can still consume the feed)
+        response.content_length = None
+
+        try:
+            yield from wait_run_in_executor(ProjectHandler._read_file, path, request, response)
+        except FileNotFoundError:
+            raise aiohttp.web.HTTPNotFound()
+        except PermissionError:
+            raise aiohttp.web.HTTPForbidden()
+
+    @staticmethod
+    def _read_file(path, request, response):
+
+        with open(path, "rb") as f:
+            response.start(request)
+            while True:
+                data = f.read(4096)
+                if not data:
+                    break
+                response.write(data)
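
The escape check in get_file() relies on os.path.normpath() collapsing ".." segments: any requested path that would climb out of the project directory normalizes to a string starting with ".", so it is rejected with a 403 before being joined to project.path. A standalone sketch of that rule (the sample paths are made up):

import os

# Paths a client might request for /projects/{project_id}/files/{path}
requested = [
    "vm-1/dynamips/test.bin",   # normal case, kept
    "./test.txt",               # normalizes to "test.txt", kept
    "../hello",                 # normalizes to "../hello", rejected
    "vm-1/../../etc/passwd",    # normalizes to "../etc/passwd", rejected
]

for path in requested:
    normalized = os.path.normpath(path)
    # Same rule as the handler: anything starting with "." after
    # normalization is treated as an escape attempt (403).
    allowed = not normalized.startswith(".")
    print("{!r:30} -> {!r:20} allowed={}".format(path, normalized, allowed))
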
@@ -19,6 +19,7 @@ import aiohttp
 import os
 import shutil
 import asyncio
+import hashlib
 
 from uuid import UUID, uuid4
 from .port_manager import PortManager
@@ -457,3 +458,42 @@ class Project:
         """Stop sending notification to this clients"""
 
         self._listeners.remove(queue)
+
+    @asyncio.coroutine
+    def list_files(self):
+        """
+        :returns: List of files in the project, excluding temporary files. Each file is a dictionary {"path": "test.bin", "md5sum": "aaaaa"}
+        """
+
+        files = []
+        for (dirpath, dirnames, filenames) in os.walk(self.path):
+            for filename in filenames:
+                if not filename.endswith(".ghost"):
+                    path = os.path.relpath(dirpath, self.path)
+                    path = os.path.join(path, filename)
+                    path = os.path.normpath(path)
+                    file_info = {"path": path}
+
+                    try:
+                        file_info["md5sum"] = yield from wait_run_in_executor(self._hash_file, os.path.join(dirpath, filename))
+                    except OSError:
+                        continue
+                    files.append(file_info)
+
+        return files
+
+    def _hash_file(self, path):
+        """
+        Compute an MD5 hash for a file
+
+        :returns: hexadecimal md5
+        """
+
+        m = hashlib.md5()
+        with open(path, "rb") as f:
+            while True:
+                buf = f.read(128)
+                if not buf:
+                    break
+                m.update(buf)
+        return m.hexdigest()
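
Because list_files() reports an md5sum per entry, a client can verify a downloaded file against the listing using the same incremental-hash approach as the _hash_file() helper. A self-contained sketch (the file name and entry are invented; the digest is simply the MD5 of the bytes b"test", which also appears in the test fixtures below):

import hashlib

def md5_of_file(path, chunk_size=4096):
    """Incremental MD5 of a local file, mirroring the server-side helper."""
    digest = hashlib.md5()
    with open(path, "rb") as f:
        while True:
            chunk = f.read(chunk_size)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()

# Self-contained check: write the content "test" locally and compare it
# against the md5sum the project listing would report for that content.
with open("downloaded.bin", "wb") as f:
    f.write(b"test")

entry = {"path": "vm-1/dynamips/test.bin", "md5sum": "098f6bcd4621d373cade4e832627b4f6"}
assert md5_of_file("downloaded.bin") == entry["md5sum"]
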
@@ -103,3 +103,26 @@ PROJECT_OBJECT_SCHEMA = {
     "additionalProperties": False,
     "required": ["location", "project_id", "temporary"]
 }
+
+PROJECT_FILE_LIST_SCHEMA = {
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "description": "List files in the project",
+    "type": "array",
+    "items": [
+        {
+            "type": "object",
+            "properties": {
+                "path": {
+                    "description": "File path",
+                    "type": ["string"]
+                },
+                "md5sum": {
+                    "description": "MD5 hash of the file",
+                    "type": ["string"]
+                },
+
+            },
+        }
+    ],
+    "additionalProperties": False,
+}
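
A response produced for GET /projects/{project_id}/files can be checked against this schema with the third-party jsonschema package, for example (a sketch assuming jsonschema is installed and gns3server is importable; the sample payload is made up):

from jsonschema import validate  # third-party package, not part of gns3-server

from gns3server.schemas.project import PROJECT_FILE_LIST_SCHEMA

# Hypothetical response body for GET /projects/{project_id}/files
sample = [
    {"path": "test.txt", "md5sum": "ad0234829205b9033196ba818f7a872b"},
    {"path": "vm-1/dynamips/test.bin", "md5sum": "098f6bcd4621d373cade4e832627b4f6"},
]

# Raises jsonschema.exceptions.ValidationError if the payload does not match.
validate(sample, PROJECT_FILE_LIST_SCHEMA)
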
@@ -24,6 +24,8 @@ import shutil
 import os
 import sys
 from aiohttp import web
+from unittest.mock import patch
+
 
 sys._called_from_test = True
 # Prevent execution of external binaries
@@ -100,10 +102,11 @@ def server(request, loop, port_manager, monkeypatch):
 
 
 @pytest.fixture(scope="function")
-def project():
+def project(tmpdir):
     """A GNS3 lab"""
 
-    return ProjectManager.instance().create_project(project_id="a1e920ca-338a-4e9f-b363-aa607b09dd80")
+    p = ProjectManager.instance().create_project(project_id="a1e920ca-338a-4e9f-b363-aa607b09dd80")
+    return p
 
 
 @pytest.fixture(scope="session")
@@ -20,12 +20,14 @@ This test suite check /project endpoint
 """
 
 import uuid
+import os
 import asyncio
 import aiohttp
 from unittest.mock import patch
 from tests.utils import asyncio_patch
 
 from gns3server.handlers.api.project_handler import ProjectHandler
+from gns3server.modules.project_manager import ProjectManager
 
 
 def test_create_project_with_path(server, tmpdir):
@@ -175,6 +177,42 @@ def test_notification(server, project, loop):
     assert response.body == b'{"action": "ping"}\n{"action": "vm.created", "event": {"a": "b"}}\n'
 
 
-def test_notification_invalid_id(server, project):
+def test_notification_invalid_id(server):
     response = server.get("/projects/{project_id}/notifications".format(project_id=uuid.uuid4()))
     assert response.status == 404
+
+
+def test_list_files(server, project):
+    files = [
+        {
+            "path": "test.txt",
+            "md5sum": "ad0234829205b9033196ba818f7a872b"
+        },
+        {
+            "path": "vm-1/dynamips/test.bin",
+            "md5sum": "098f6bcd4621d373cade4e832627b4f6"
+        }
+    ]
+    with asyncio_patch("gns3server.modules.project.Project.list_files", return_value=files) as mock:
+        response = server.get("/projects/{project_id}/files".format(project_id=project.id), example=True)
+        assert response.status == 200
+        assert response.json == files
+
+
+def test_get_file(server, tmpdir):
+
+    with patch("gns3server.config.Config.get_section_config", return_value={"project_directory": str(tmpdir)}):
+        project = ProjectManager.instance().create_project()
+
+    with open(os.path.join(project.path, "hello"), "w+") as f:
+        f.write("world")
+
+    response = server.get("/projects/{project_id}/files/hello".format(project_id=project.id), raw=True, example=True)
+    assert response.status == 200
+    assert response.body == b"world"
+
+    response = server.get("/projects/{project_id}/files/false".format(project_id=project.id), raw=True)
+    assert response.status == 404
+
+    response = server.get("/projects/{project_id}/files/../hello".format(project_id=project.id), raw=True)
+    assert response.status == 403
@@ -229,3 +229,29 @@ def test_clean_project_directory(tmpdir):
     assert os.path.exists(str(project1))
     assert os.path.exists(str(oldproject))
     assert not os.path.exists(str(project2))
+
+
+def test_list_files(tmpdir, loop):
+
+    with patch("gns3server.config.Config.get_section_config", return_value={"project_directory": str(tmpdir)}):
+        project = Project()
+        path = project.path
+        os.makedirs(os.path.join(path, "vm-1", "dynamips"))
+        with open(os.path.join(path, "vm-1", "dynamips", "test.bin"), "w+") as f:
+            f.write("test")
+        open(os.path.join(path, "vm-1", "dynamips", "test.ghost"), "w+").close()
+        with open(os.path.join(path, "test.txt"), "w+") as f:
+            f.write("test2")
+
+        files = loop.run_until_complete(asyncio.async(project.list_files()))
+
+        assert files == [
+            {
+                "path": "test.txt",
+                "md5sum": "ad0234829205b9033196ba818f7a872b"
+            },
+            {
+                "path": os.path.join("vm-1", "dynamips", "test.bin"),
+                "md5sum": "098f6bcd4621d373cade4e832627b4f6"
+            }
+        ]
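
One portability note on the test above: asyncio.async() was renamed asyncio.ensure_future() in Python 3.4.4, and on Python 3.7+ the old spelling no longer parses because async is a keyword. A runnable sketch of the modern equivalent, with a stand-in coroutine instead of a real Project:

import asyncio

async def fake_list_files():
    # Stand-in for Project.list_files() so the sketch runs without a project.
    return [{"path": "test.txt", "md5sum": "ad0234829205b9033196ba818f7a872b"}]

loop = asyncio.new_event_loop()
try:
    # asyncio.ensure_future() is the modern spelling of asyncio.async()
    task = asyncio.ensure_future(fake_list_files(), loop=loop)
    files = loop.run_until_complete(task)
finally:
    loop.close()

assert files[0]["md5sum"] == "ad0234829205b9033196ba818f7a872b"
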