Merge branch '2.2' into 3.0

# Conflicts:
#	gns3server/compute/qemu/qemu_vm.py
#	gns3server/config_samples/gns3_server.conf
#	gns3server/controller/export_project.py
#	gns3server/controller/project.py
#	gns3server/controller/snapshot.py
#	gns3server/handlers/api/controller/project_handler.py
#	tests/controller/test_export_project.py
#	tests/controller/test_import_project.py
#	tests/controller/test_snapshot.py
commit 6ed18c561f by grossmj (3 months ago), from pull/2406/head

@@ -54,6 +54,12 @@ import logging

 log = logging.getLogger(__name__)

+# forbidden additional options
+FORBIDDEN_OPTIONS = {"-blockdev", "-drive", "-hda", "-hdb", "-hdc", "-hdd",
+                     "-fsdev", "-virtfs"}
+FORBIDDEN_OPTIONS |= {"-" + opt for opt in FORBIDDEN_OPTIONS
+                      if opt.startswith("-") and not opt.startswith("--")}
+
 class QemuVM(BaseNode):

     module_name = "qemu"
@@ -2643,9 +2649,16 @@ class QemuVM(BaseNode):
            command.extend(self._tpm_options())
        if additional_options:
            try:
-               command.extend(shlex.split(additional_options))
+               additional_opt_list = shlex.split(additional_options)
            except ValueError as e:
                raise QemuError(f"Invalid additional options: {additional_options} error {e}")
+           allow_unsafe_options = self.manager.config.settings.Qemu.allow_unsafe_options
+           if allow_unsafe_options is False:
+               for opt in additional_opt_list:
+                   if opt in FORBIDDEN_OPTIONS:
+                       raise QemuError("Forbidden additional option: {}".format(opt))
+           command.extend(additional_opt_list)
+
        # avoiding mouse offset (see https://github.com/GNS3/gns3-server/issues/2335)
        if self._console_type == "vnc":
            command.extend(['-machine', 'usb=on', '-device', 'usb-tablet'])
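
Taken together, the two qemu_vm.py hunks above mean that user-supplied additional options are now tokenized first and screened against FORBIDDEN_OPTIONS (including their double-dash spellings) before being appended to the command line, unless allow_unsafe_options is enabled. A standalone sketch of that behaviour; parse_additional_options is an illustrative helper, not gns3server's own QemuVM method:

import shlex

# Same construction as in qemu_vm.py: base set plus "--"-prefixed variants
FORBIDDEN_OPTIONS = {"-blockdev", "-drive", "-hda", "-hdb", "-hdc", "-hdd",
                     "-fsdev", "-virtfs"}
FORBIDDEN_OPTIONS |= {"-" + opt for opt in FORBIDDEN_OPTIONS
                      if opt.startswith("-") and not opt.startswith("--")}


def parse_additional_options(additional_options, allow_unsafe_options=False):
    """Tokenize user-supplied options and reject forbidden ones."""
    try:
        opts = shlex.split(additional_options)
    except ValueError as e:
        raise ValueError(f"Invalid additional options: {additional_options} error {e}")
    if not allow_unsafe_options:
        for opt in opts:
            if opt in FORBIDDEN_OPTIONS:
                raise ValueError(f"Forbidden additional option: {opt}")
    return opts


print(parse_additional_options("-vga std"))          # ['-vga', 'std']
# parse_additional_options("-drive file=disk.img")   # raises ValueError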

@@ -148,3 +148,5 @@ monitor_host = 127.0.0.1
 enable_hardware_acceleration = True
 ; Require hardware acceleration in order to start VMs
 require_hardware_acceleration = False
+; Allow unsafe additional command line options
+allow_unsafe_options = False
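
The new flag defaults to off, so forbidden options are rejected unless an administrator opts in. A minimal sketch of reading it back, assuming the INI-style gns3_server.conf with a [Qemu] section; this is not gns3server's own settings loader, which exposes the value as config.settings.Qemu.allow_unsafe_options:

from configparser import ConfigParser

config = ConfigParser()
config.read("gns3_server.conf")
# Falls back to the safe default when the section or option is absent
allow_unsafe = config.getboolean("Qemu", "allow_unsafe_options", fallback=False)
print(allow_unsafe)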

@@ -39,7 +39,7 @@ async def export_project(
     temporary_dir,
     include_images=False,
     include_snapshots=False,
-    keep_compute_id=False,
+    keep_compute_ids=False,
     allow_all_nodes=False,
     reset_mac_addresses=False,
 ):
@@ -54,9 +54,9 @@ async def export_project(
     :param temporary_dir: A temporary dir where to store intermediate data
     :param include_images: save OS images to the zip file
     :param include_snapshots: save snapshots to the zip file
-    :param keep_compute_id: If false replace all compute id by local (standard behavior for .gns3project to make it portable)
-    :param allow_all_nodes: Allow all nodes type to be include in the zip even if not portable
-    :param reset_mac_addresses: Reset MAC addresses for every nodes.
+    :param keep_compute_ids: If false replace all compute IDs by local (standard behavior for .gns3project to make it portable)
+    :param allow_all_nodes: Allow all nodes type to be included in the zip even if not portable
+    :param reset_mac_addresses: Reset MAC addresses for each node.
     """

     # To avoid issue with data not saved we disallow the export of a running project
@@ -77,7 +77,7 @@ async def export_project(
                 os.path.join(project._path, file),
                 zstream,
                 include_images,
-                keep_compute_id,
+                keep_compute_ids,
                 allow_all_nodes,
                 temporary_dir,
                 reset_mac_addresses,
@@ -193,7 +193,7 @@ def _is_exportable(path, include_snapshots=False):

 async def _patch_project_file(
-    project, path, zstream, include_images, keep_compute_id, allow_all_nodes, temporary_dir, reset_mac_addresses
+    project, path, zstream, include_images, keep_compute_ids, allow_all_nodes, temporary_dir, reset_mac_addresses
 ):
     """
     Patch a project file (.gns3) to export a project.
@@ -225,7 +225,7 @@ async def _patch_project_file(
         if not allow_all_nodes and node["node_type"] in ["virtualbox", "vmware"]:
             raise ControllerError("Projects with a {} node cannot be exported".format(node["node_type"]))

-        if not keep_compute_id:
+        if not keep_compute_ids:
             node["compute_id"] = "local"  # To make project portable all node by default run on local

         if "properties" in node and node["node_type"] != "docker":
@@ -243,13 +243,13 @@ async def _patch_project_file(
                 if value is None or value.strip() == "":
                     continue

-                if not keep_compute_id:  # If we keep the original compute we can keep the image path
+                if not keep_compute_ids:  # If we keep the original compute we can keep the image path
                     node["properties"][prop] = os.path.basename(value)

                 if include_images is True:
                     images.append({"compute_id": compute_id, "image": value, "image_type": node["node_type"]})

-    if not keep_compute_id:
+    if not keep_compute_ids:
         topology["topology"][
             "computes"
         ] = []  # Strip compute information because could contain secret info like password
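
The renamed keep_compute_ids flag still controls the portability behaviour visible in these hunks: when it is false, every node is pinned to the "local" compute and the computes list is emptied because it can hold credentials. A rough standalone sketch of that transformation on a loaded .gns3 topology dict; the helper is illustrative, not the project's _patch_project_file:

def make_topology_portable(topology, keep_compute_ids=False):
    """Strip compute-specific information from a .gns3 topology dict."""
    if keep_compute_ids:
        return topology  # caller wants to restore onto the same computes
    for node in topology["topology"]["nodes"]:
        node["compute_id"] = "local"
    # Compute entries can contain secrets such as passwords, so drop them
    topology["topology"]["computes"] = []
    return topology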

@@ -40,7 +40,7 @@ Handle the import of project from a .gns3project
 """


-async def import_project(controller, project_id, stream, location=None, name=None, keep_compute_id=False,
+async def import_project(controller, project_id, stream, location=None, name=None, keep_compute_ids=False,
                          auto_start=False, auto_open=False, auto_close=True):
     """
     Import a project contain in a zip file
@@ -52,7 +52,7 @@ async def import_project(controller, project_id, stream, location=None, name=Non
     :param stream: A io.BytesIO of the zipfile
     :param location: Directory for the project if None put in the default directory
     :param name: Wanted project name, generate one from the .gns3 if None
-    :param keep_compute_id: If true do not touch the compute id
+    :param keep_compute_ids: keep compute IDs unchanged
     :returns: Project
     """
@@ -126,7 +126,7 @@ async def import_project(controller, project_id, stream, location=None, name=Non
             drawing["drawing_id"] = str(uuid.uuid4())

     # Modify the compute id of the node depending of compute capacity
-    if not keep_compute_id:
+    if not keep_compute_ids:
         # For some VM type we move them to the GNS3 VM if possible
         # unless it's a linux host without GNS3 VM
         if not sys.platform.startswith("linux") or controller.has_compute("vm"):

@@ -210,7 +210,11 @@ class Project:
         if os.path.exists(snapshot_dir):
             for snap in os.listdir(snapshot_dir):
                 if snap.endswith(".gns3project"):
-                    snapshot = Snapshot(self, filename=snap)
+                    try:
+                        snapshot = Snapshot(self, filename=snap)
+                    except ValueError:
+                        log.error("Invalid snapshot file: {}".format(snap))
+                        continue
                     self._snapshots[snapshot.id] = snapshot

         # Create the project on demand on the compute node
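
With the Snapshot constructor now raising ValueError on malformed filenames (see the snapshot.py hunk further down), project loading logs and skips such files instead of failing outright. The same tolerant-scan pattern in isolation; scan_snapshots is a hypothetical helper, not gns3server code:

import logging
import os

log = logging.getLogger(__name__)


def scan_snapshots(snapshot_dir, factory):
    """Build snapshot objects for *.gns3project files, skipping invalid names."""
    snapshots = {}
    for snap in os.listdir(snapshot_dir):
        if not snap.endswith(".gns3project"):
            continue
        try:
            snapshot = factory(snap)  # may raise ValueError on a bad filename
        except ValueError:
            log.error("Invalid snapshot file: {}".format(snap))
            continue
        snapshots[snapshot.id] = snapshot
    return snapshots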
@@ -1087,7 +1091,7 @@ class Project:
                     zstream,
                     self,
                     tmpdir,
-                    keep_compute_id=True,
+                    keep_compute_ids=True,
                     allow_all_nodes=True,
                     reset_mac_addresses=reset_mac_addresses,
                 )
@@ -1106,7 +1110,7 @@ class Project:
                     str(uuid.uuid4()),
                     f,
                     name=name,
-                    keep_compute_id=True
+                    keep_compute_ids=True
                 )

         log.info(f"Project '{project.name}' duplicated in {time.time() - begin:.4f} seconds")

@@ -59,14 +59,9 @@ class Snapshot:
                 + ".gns3project"
             )
         else:
-            self._name = filename.split("_")[0]
+            self._name = filename.rsplit("_", 2)[0]
             datestring = filename.replace(self._name + "_", "").split(".")[0]
-            try:
-                self._created_at = (
-                    datetime.strptime(datestring, "%d%m%y_%H%M%S").replace(tzinfo=timezone.utc).timestamp()
-                )
-            except ValueError:
-                self._created_at = datetime.now(timezone.utc)
+            self._created_at = (datetime.strptime(datestring, "%d%m%y_%H%M%S").replace(tzinfo=timezone.utc).timestamp())

         self._path = os.path.join(project.path, "snapshots", filename)

     @property
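
The switch from split("_")[0] to rsplit("_", 2)[0] matters for snapshot names that themselves contain underscores: only the trailing date and time fields are stripped, and a filename without a valid timestamp now raises ValueError instead of silently falling back to the current time. A quick worked check of the parsing above, using the filename from the updated tests:

from datetime import datetime, timezone

filename = "snapshot_test_260716_100439.gns3project"

name = filename.rsplit("_", 2)[0]          # 'snapshot_test' (split("_")[0] would give 'snapshot')
datestring = filename.replace(name + "_", "").split(".")[0]   # '260716_100439'
created_at = datetime.strptime(datestring, "%d%m%y_%H%M%S").replace(tzinfo=timezone.utc).timestamp()

print(name, int(created_at))               # snapshot_test 1469527479

# A name like "snapshot_test_invalid_file.gns3project" makes strptime raise
# ValueError, which Project now catches when it scans the snapshots directory.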
@@ -104,7 +99,7 @@ class Snapshot:
         with tempfile.TemporaryDirectory(dir=snapshot_directory) as tmpdir:
             # Do not compress the snapshots
             with aiozipstream.ZipFile(compression=zipfile.ZIP_STORED) as zstream:
-                await export_project(zstream, self._project, tmpdir, keep_compute_id=True, allow_all_nodes=True)
+                await export_project(zstream, self._project, tmpdir, keep_compute_ids=True, allow_all_nodes=True)
             async with aiofiles.open(self.path, "wb") as f:
                 async for chunk in zstream:
                     await f.write(chunk)

@@ -792,6 +792,14 @@ async def test_build_command_with_invalid_options(vm):
         await vm._build_command()


+@pytest.mark.skipif(sys.platform.startswith("win"), reason="Not supported on Windows")
+async def test_build_command_with_forbidden_options(vm):
+    vm.options = "-blockdev"
+    with pytest.raises(QemuError):
+        await vm._build_command()
+
+
 def test_hda_disk_image(vm, images_dir):
     open(os.path.join(images_dir, "test1"), "w+").close()

@@ -334,7 +334,7 @@ async def test_export_with_images(tmpdir, project):

 @pytest.mark.asyncio
-async def test_export_keep_compute_id(tmpdir, project):
+async def test_export_keep_compute_ids(tmpdir, project):
     """
     If we want to restore the same computes we could ask to keep them
     in the file
@@ -363,7 +363,7 @@ async def test_export_keep_compute_id(tmpdir, project):
         json.dump(data, f)

     with aiozipstream.ZipFile() as z:
-        await export_project(z, project, str(tmpdir), keep_compute_id=True)
+        await export_project(z, project, str(tmpdir), keep_compute_ids=True)
         await write_file(str(tmpdir / 'zipfile.zip'), z)

     with zipfile.ZipFile(str(tmpdir / 'zipfile.zip')) as myzip:
@@ -469,7 +469,7 @@ async def test_export_with_ignoring_snapshots(tmpdir, project):
     Path(os.path.join(snapshots_dir, 'snap.gns3project')).touch()

     with aiozipstream.ZipFile() as z:
-        await export_project(z, project, str(tmpdir), keep_compute_id=True)
+        await export_project(z, project, str(tmpdir), keep_compute_ids=True)
         await write_file(str(tmpdir / 'zipfile.zip'), z)

     with zipfile.ZipFile(str(tmpdir / 'zipfile.zip')) as myzip:

@@ -462,7 +462,7 @@ async def test_import_node_id(linux_platform, tmpdir, controller):

 @pytest.mark.asyncio
-async def test_import_keep_compute_id(windows_platform, tmpdir, controller):
+async def test_import_keep_compute_ids(windows_platform, tmpdir, controller):
     """
     On linux host IOU should be moved to the GNS3 VM
     """
@@ -500,7 +500,7 @@ async def test_import_keep_compute_id(windows_platform, tmpdir, controller):
         myzip.write(str(tmpdir / "project.gns3"), "project.gns3")

     with open(zip_path, "rb") as f:
-        project = await import_project(controller, project_id, f, keep_compute_id=True)
+        project = await import_project(controller, project_id, f, keep_compute_ids=True)

     with open(os.path.join(project.path, "test.gns3")) as f:
         topo = json.load(f)

@@ -786,7 +786,7 @@ def test_snapshots(project):

 def test_get_snapshot(project):
     os.makedirs(os.path.join(project.path, "snapshots"))
-    open(os.path.join(project.path, "snapshots", "test1.gns3project"), "w+").close()
+    open(os.path.join(project.path, "snapshots", "test1_260716_103713.gns3project"), "w+").close()
     project.reset()

     snapshot = list(project.snapshots.values())[0]

@@ -61,15 +61,21 @@ def test_snapshot_filename(project):

 def test_json(project):

-    snapshot = Snapshot(project, filename="test1_260716_100439.gns3project")
+    snapshot = Snapshot(project, filename="snapshot_test_260716_100439.gns3project")

     assert snapshot.asdict() == {
         "snapshot_id": snapshot._id,
-        "name": "test1",
+        "name": "snapshot_test",
         "project_id": project.id,
         "created_at": 1469527479
     }


+def test_invalid_snapshot_filename(project):
+
+    with pytest.raises(ValueError):
+        Snapshot(project, filename="snapshot_test_invalid_file.gns3project")
+
+
 @pytest.mark.asyncio
 async def test_restore(project, controller, config):
