GNS3/gns3-server (https://github.com/GNS3/gns3-server)

Commit 94a262cd46 (parent 9b499dc51e)

When importing a project fix the GNS3 version
@@ -71,6 +71,7 @@ class Compute:
     """
 
     def __init__(self, compute_id, controller=None, protocol="http", host="localhost", port=3080, user=None, password=None, name=None):
+        self._http_session = None
         assert controller is not None
         log.info("Create compute %s", compute_id)
 
@@ -87,7 +88,6 @@ class Compute:
         self._connected = False
         self._controller = controller
         self._set_auth(user, password)
-        self._http_session = None
         self._version = None
         self._cpu_usage_percent = None
         self._memory_usage_percent = None
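Note on the two __init__ hunks above: the change only moves the `self._http_session = None` assignment ahead of the `assert controller is not None` check. A plausible reason, inferred from the diff rather than stated in the commit, is that the attribute should exist even when the constructor raises, so later cleanup code that touches `_http_session` (for example in `__del__` or `close()`) does not fail with AttributeError on a half-built object. A minimal, self-contained sketch of that failure mode, with hypothetical class names not taken from gns3-server:

# Hypothetical classes illustrating why cleanup attributes are assigned first.
class Fragile:
    def __init__(self, controller):
        assert controller is not None   # raises before _session is defined
        self._session = None

    def __del__(self):
        if self._session:               # AttributeError on a half-built object
            self._session.close()


class Robust:
    def __init__(self, controller):
        self._session = None            # defined before anything can raise
        assert controller is not None

    def __del__(self):
        if self._session:
            self._session.close()


try:
    Fragile(None)                       # CPython later reports "Exception ignored in __del__"
except AssertionError:
    pass

try:
    Robust(None)                        # cleans up silently
except AssertionError:
    pass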
@@ -472,4 +472,3 @@ class Compute:
         path = "/projects/{}/files".format(project.id)
         res = yield from self.http_query("GET", path, timeout=120)
         return res.json
-
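`list_files()` above returns the parsed JSON body of `GET /projects/{id}/files`; the new `test_downloadFile` further down exercises a matching streaming endpoint, `GET /projects/{id}/files/{path}`, whose body `export_project()` reads in chunks. The Compute implementation of that download is not part of this diff; below is a self-contained sketch of the request pattern it implies, in the same pre-async/await coroutine style as the codebase. The function and parameter names are illustrative, not the project's API.

# Illustrative sketch only: stream one project file from a compute over HTTP.
# URL layout inferred from the tests below; names are not gns3-server's.
import asyncio

import aiohttp


@asyncio.coroutine
def download_project_file(session, base_url, project_id, file_path, auth=None):
    url = "{}/projects/{}/files/{}".format(base_url, project_id, file_path)
    response = yield from session.request("GET", url, auth=auth)
    if response.status != 200:
        raise RuntimeError("Can't download {}: HTTP {}".format(file_path, response.status))
    # aiohttp exposes the body as a stream; callers read it chunk by chunk,
    # e.g. `data = yield from stream.read(512)` as export_project() does.
    return response.content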
@@ -67,17 +67,19 @@ def export_project(project, temporary_dir, include_images=False):
                 z.write(path, os.path.relpath(path, project._path), compress_type=zipfile.ZIP_DEFLATED)
 
     for compute in project.computes:
-        if compute.id == "vm":
+        if compute.id != "local":
             compute_files = yield from compute.list_files(project)
             for compute_file in compute_files:
                 if not _filter_files(compute_file["path"]):
-                    (fp, temp_path) = tempfile.mkstemp(dir=temporary_dir)
+                    (fd, temp_path) = tempfile.mkstemp(dir=temporary_dir)
+                    f = open(fd, "wb", closefd=True)
                     stream = yield from compute.download_file(project, compute_file["path"])
                     while True:
                         data = yield from stream.read(512)
                         if not data:
                             break
-                        fp.write(data)
+                        f.write(data)
+                    f.close()
                     z.write(temp_path, arcname=compute_file["path"], compress_type=zipfile.ZIP_DEFLATED)
     return z
 
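Two fixes in this hunk: files are now exported from every non-local compute instead of only the one with id "vm", and the temporary-file handling is corrected. `tempfile.mkstemp()` returns a raw OS file descriptor plus a path, so the old `fp.write(data)` was calling `.write()` on an integer; the new code wraps the descriptor with `open(fd, "wb", closefd=True)` and closes it afterwards, which also avoids leaking descriptors. A standalone demonstration of the corrected pattern (not project code):

# Standalone demo of the mkstemp pattern used above.
import os
import tempfile

(fd, temp_path) = tempfile.mkstemp()
f = open(fd, "wb", closefd=True)   # wrap the raw descriptor; closing f closes fd too
f.write(b"HELLO")
f.close()

with open(temp_path, "rb") as check:
    assert check.read() == b"HELLO"
os.remove(temp_path)               # mkstemp does not delete the file for you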
@@ -102,7 +104,6 @@ def _filter_files(path):
     return False
 
 
-
 def _export_project_file(project, path, z, include_images):
     """
     Take a project file (.gns3) and patch it for the export
@@ -24,6 +24,7 @@ import zipfile
 import aiohttp
 
 from ..config import Config
+from .topology import load_topology
 
 
 """
@@ -51,15 +52,18 @@ def import_project(controller, project_id, stream):
         try:
             topology = json.loads(myzip.read("project.gns3").decode())
             # If the project name is already used we generate a new one
-            topology["name"] = controller.get_free_project_name(topology["name"])
+            project_name = controller.get_free_project_name(topology["name"])
         except KeyError:
             raise aiohttp.web.HTTPConflict(text="Can't import topology the .gns3 is corrupted or missing")
 
-        path = os.path.join(projects_path, topology["name"])
+        path = os.path.join(projects_path, project_name)
         os.makedirs(path)
         myzip.extractall(path)
 
-        dot_gns3_path = os.path.join(path, topology["name"] + ".gns3")
+        topology = load_topology(os.path.join(path, "project.gns3"))
+        topology["name"] = project_name
+
+        dot_gns3_path = os.path.join(path, project_name + ".gns3")
         # We change the project_id to avoid erasing the project
         topology["project_id"] = project_id
         with open(dot_gns3_path, "w+") as f:
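This is the hunk the commit title refers to: instead of reusing the topology dictionary read straight out of the archive, the importer reloads the extracted `project.gns3` through `load_topology()` before writing it back under the project's (possibly renamed) name. Per the new `test_import_upgrade` further down, the reloaded topology carries the running server's version rather than the version stored in the archive (e.g. "1.4.2"). The actual `load_topology()` lives in `gns3server.controller.topology` and is not shown in this diff; a minimal sketch of the idea, under that assumption:

# Minimal sketch of the behaviour implied by test_import_upgrade; not the
# real gns3server.controller.topology.load_topology implementation.
import json

CURRENT_VERSION = "2.0.0"   # stand-in for gns3server.version.__version__


def load_topology(path):
    """Read a .gns3 file and stamp it with the running server's version."""
    with open(path) as f:
        topo = json.load(f)
    # the real loader would also convert old-format topologies here
    topo["version"] = CURRENT_VERSION
    return topo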
@@ -235,7 +235,6 @@ class ProjectHandler:
         controller = Controller.instance()
         project = controller.get_project(request.match_info["project_id"])
 
-
         with tempfile.TemporaryDirectory() as tmp_dir:
             datas = yield from export_project(project, tmp_dir, include_images=bool(request.GET.get("include_images", "0")))
             # We need to do that now because export could failed and raise an HTTP error
@@ -261,6 +261,14 @@ def test_streamFile(project, async_run, compute):
     mock.assert_called_with("GET", "https://example.com:84/v2/compute/projects/{}/stream/test/titi".format(project.id), auth=None)
 
 
+def test_downloadFile(project, async_run, compute):
+    response = MagicMock()
+    response.status = 200
+    with asyncio_patch("aiohttp.ClientSession.request", return_value=response) as mock:
+        async_run(compute.download_file(project, "test/titi"))
+    mock.assert_called_with("GET", "https://example.com:84/v2/compute/projects/{}/files/test/titi".format(project.id), auth=None)
+
+
 def test_close(compute, async_run):
     assert compute.connected is True
     async_run(compute.close())
@@ -318,3 +326,13 @@ def test_images(compute, async_run, images_dir):
         mock.assert_called_with("GET", "https://example.com:84/v2/compute/qemu/images", auth=None, data=None, headers={'content-type': 'application/json'}, chunked=False)
 
     assert images == [{"filename": "linux.qcow2", "path": "linux.qcow2"}, {"filename": "asa.qcow2", "path": "asa.qcow2"}]
+
+
+def test_list_files(project, async_run, compute):
+    res = [{"path": "test"}]
+    response = AsyncioMagicMock()
+    response.read = AsyncioMagicMock(return_value=json.dumps(res).encode())
+    response.status = 200
+    with asyncio_patch("aiohttp.ClientSession.request", return_value=response) as mock:
+        assert async_run(compute.list_files(project)) == res
+        mock.assert_any_call("GET", "https://example.com:84/v2/compute/projects/{}/files".format(project.id), auth=None, chunked=False, data=None, headers={'content-type': 'application/json'})
@@ -24,9 +24,10 @@ import zipfile
 
 from unittest.mock import patch
 from unittest.mock import MagicMock
-from tests.utils import AsyncioMagicMock
+from tests.utils import AsyncioMagicMock, AsyncioBytesIO
 
 from gns3server.controller.project import Project
+from gns3server.controller.compute import Compute
 from gns3server.controller.export_project import export_project, _filter_files
 
 
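`AsyncioBytesIO` is imported from `tests.utils` and is not defined in this diff. In `test_export_vm` below it stands in for the aiohttp stream returned by `compute.download_file()`, so it presumably behaves like an `io.BytesIO` whose `read`/`write` are coroutines. A sketch of such a helper, stated as an assumption rather than the project's actual code:

# Assumed shape of tests.utils.AsyncioBytesIO; not the project's actual helper.
import asyncio
import io


class AsyncioBytesIO(io.BytesIO):
    """In-memory buffer with coroutine read/write, usable as a fake stream."""

    @asyncio.coroutine
    def read(self, size=-1):
        return super().read(size)

    @asyncio.coroutine
    def write(self, data):
        return super().write(data)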
@@ -90,6 +91,44 @@ def test_export(tmpdir, project, async_run):
         assert 'vm-1/dynamips/test_log.txt' not in myzip.namelist()
 
 
+def test_export_vm(tmpdir, project, async_run, controller):
+    """
+    If data is on a remote server export it locally before
+    sending it in the archive.
+    """
+
+    compute = MagicMock()
+    compute.id = "vm"
+    compute.list_files = AsyncioMagicMock(return_value=[{"path": "vm-1/dynamips/test"}])
+
+    # Fake file that will be download from the vm
+    file_content = AsyncioBytesIO()
+    async_run(file_content.write(b"HELLO"))
+    file_content.seek(0)
+    compute.download_file = AsyncioMagicMock(return_value=file_content)
+
+    project._project_created_on_compute.add(compute)
+
+    path = project.path
+    os.makedirs(os.path.join(path, "vm-1", "dynamips"))
+
+    # The .gns3 should be renamed project.gns3 in order to simplify import
+    with open(os.path.join(path, "test.gns3"), 'w+') as f:
+        f.write("{}")
+
+    z = async_run(export_project(project, str(tmpdir)))
+    assert compute.list_files.called
+
+    with open(str(tmpdir / 'zipfile.zip'), 'wb') as f:
+        for data in z:
+            f.write(data)
+
+    with zipfile.ZipFile(str(tmpdir / 'zipfile.zip')) as myzip:
+        with myzip.open("vm-1/dynamips/test") as myfile:
+            content = myfile.read()
+            assert content == b"HELLO"
+
+
 def test_export_disallow_running(tmpdir, project, node, async_run):
     """
     Dissallow export when a node is running
@@ -23,6 +23,7 @@ import zipfile
 
 from gns3server.controller.project import Project
 from gns3server.controller.import_project import import_project
+from gns3server.version import __version__
 
 
 def test_import_project(async_run, tmpdir, controller):
@@ -66,6 +67,32 @@ def test_import_project(async_run, tmpdir, controller):
     assert project.name != "test"
 
 
+def test_import_upgrade(async_run, tmpdir, controller):
+    project_id = str(uuid.uuid4())
+
+    topology = {
+        "project_id": str(uuid.uuid4()),
+        "name": "test",
+        "topology": {
+        },
+        "version": "1.4.2"
+    }
+
+    with open(str(tmpdir / "project.gns3"), 'w+') as f:
+        json.dump(topology, f)
+
+    zip_path = str(tmpdir / "project.zip")
+    with zipfile.ZipFile(zip_path, 'w') as myzip:
+        myzip.write(str(tmpdir / "project.gns3"), "project.gns3")
+
+    with open(zip_path, "rb") as f:
+        project = async_run(import_project(controller, project_id, f))
+
+    with open(os.path.join(project.path, "test.gns3")) as f:
+        topo = json.load(f)
+        assert topo["version"] == __version__
+
+
 def test_import_with_images(tmpdir, async_run, controller):
 
     project_id = str(uuid.uuid4())
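`test_import_upgrade` stores its `.gns3` file in the archive under the fixed name `project.gns3`, which is what `import_project()` looks for (a missing member raises the HTTPConflict shown earlier); after import the file reappears as `<project name>.gns3` (here `test.gns3`) with its version bumped. A hedged helper sketch, not part of gns3-server, for packaging an existing project directory into an archive the importer will accept:

# Hypothetical helper: zip a project directory so import_project() accepts it.
import os
import zipfile


def make_importable_zip(project_dir, gns3_filename, zip_path):
    with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as z:
        for root, dirs, files in os.walk(project_dir):
            for name in files:
                full = os.path.join(root, name)
                arcname = os.path.relpath(full, project_dir)
                if name == gns3_filename:
                    arcname = "project.gns3"   # fixed name the importer expects
                z.write(full, arcname)
    return zip_path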
@@ -92,8 +119,6 @@ def test_import_with_images(tmpdir, async_run, controller):
     with open(zip_path, "rb") as f:
         project = async_run(import_project(controller, project_id, f))
 
-    print(project._config().get("images_path"))
-    # TEST import images
     assert not os.path.exists(os.path.join(project.path, "images/IOS/test.image"))
 
     path = os.path.join(project._config().get("images_path"), "IOS", "test.image")