Mirror of https://github.com/GNS3/gns3-server, synced 2024-11-12 19:38:57 +00:00
Rename __json__() to asdict()
This commit is contained in:
parent 44074ff7c9
commit cefab8d362
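The change is mechanical: every model class that previously exposed a __json__() serialization hook now exposes asdict(), and every caller (API route handlers, notifications, topology dumps) is updated to match. A minimal sketch of the before/after pattern, using a hypothetical DemoNode class and handler (names are illustrative, not taken from the diff):

class DemoNode:
    """Hypothetical node model; only the method name changes in this commit."""

    def __init__(self, name, node_id):
        self._name = name
        self._id = node_id

    # Previously named __json__(); the body stays the same, only the name differs.
    def asdict(self):
        return {"name": self._name, "node_id": self._id}


# Callers switch from node.__json__() to node.asdict():
def get_demo_node(node):
    return node.asdict()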
@@ -65,7 +65,7 @@ async def create_atm_switch(project_id: UUID, node_data: schemas.ATMSwitchCreate
node_type="atm_switch",
mappings=node_data.get("mappings"),
)
-return node.__json__()
+return node.asdict()
@router.get("/{node_id}", response_model=schemas.ATMSwitch)

@@ -74,7 +74,7 @@ def get_atm_switch(node: ATMSwitch = Depends(dep_node)):
Return an ATM switch node.
"""
-return node.__json__()
+return node.asdict()
@router.post("/{node_id}/duplicate", response_model=schemas.ATMSwitch, status_code=status.HTTP_201_CREATED)

@@ -84,7 +84,7 @@ async def duplicate_atm_switch(destination_node_id: UUID = Body(..., embed=True)
"""
new_node = await Dynamips.instance().duplicate_node(node.id, str(destination_node_id))
-return new_node.__json__()
+return new_node.asdict()
@router.put("/{node_id}", response_model=schemas.ATMSwitch)

@@ -99,7 +99,7 @@ async def update_atm_switch(node_data: schemas.ATMSwitchUpdate, node: ATMSwitch
if "mappings" in node_data:
node.mappings = node_data["mappings"]
node.updated()
-return node.__json__()
+return node.asdict()
@router.delete("/{node_id}", status_code=status.HTTP_204_NO_CONTENT)

@@ -156,7 +156,7 @@ async def create_nio(
nio = await Dynamips.instance().create_nio(node, jsonable_encoder(nio_data, exclude_unset=True))
await node.add_nio(nio, port_number)
-return nio.__json__()
+return nio.asdict()
@router.delete("/{node_id}/adapters/{adapter_number}/ports/{port_number}/nio", status_code=status.HTTP_204_NO_CONTENT)

@@ -72,7 +72,7 @@ async def create_cloud(project_id: UUID, node_data: schemas.CloudCreate):
node.remote_console_type = node_data.get("remote_console_type", node.remote_console_type)
node.remote_console_http_path = node_data.get("remote_console_http_path", node.remote_console_http_path)
node.usage = node_data.get("usage", "")
-return node.__json__()
+return node.asdict()
@router.get("/{node_id}", response_model=schemas.Cloud)

@@ -81,7 +81,7 @@ def get_cloud(node: Cloud = Depends(dep_node)):
Return a cloud node.
"""
-return node.__json__()
+return node.asdict()
@router.put("/{node_id}", response_model=schemas.Cloud)

@@ -95,7 +95,7 @@ def update_cloud(node_data: schemas.CloudUpdate, node: Cloud = Depends(dep_node)
if hasattr(node, name) and getattr(node, name) != value:
setattr(node, name, value)
node.updated()
-return node.__json__()
+return node.asdict()
@router.delete("/{node_id}", status_code=status.HTTP_204_NO_CONTENT)

@@ -154,7 +154,7 @@ async def create_cloud_nio(
nio = Builtin.instance().create_nio(jsonable_encoder(nio_data, exclude_unset=True))
await node.add_nio(nio, port_number)
-return nio.__json__()
+return nio.asdict()
@router.put(

@@ -177,7 +177,7 @@ async def update_cloud_nio(
if nio_data.filters:
nio.filters = nio_data.filters
await node.update_nio(port_number, nio)
-return nio.__json__()
+return nio.asdict()
@router.delete("/{node_id}/adapters/{adapter_number}/ports/{port_number}/nio", status_code=status.HTTP_204_NO_CONTENT)

@@ -72,7 +72,7 @@ def network_ports() -> dict:
"""
m = PortManager.instance()
-return m.__json__()
+return m.asdict()
@router.get("/version")

@@ -82,7 +82,7 @@ async def create_docker_node(project_id: UUID, node_data: schemas.DockerCreate):
if hasattr(container, name) and getattr(container, name) != value:
setattr(container, name, value)
-return container.__json__()
+return container.asdict()
@router.get("/{node_id}", response_model=schemas.Docker)

@@ -91,7 +91,7 @@ def get_docker_node(node: DockerVM = Depends(dep_node)):
Return a Docker node.
"""
-return node.__json__()
+return node.asdict()
@router.put("/{node_id}", response_model=schemas.Docker)

@@ -128,7 +128,7 @@ async def update_docker_node(node_data: schemas.DockerUpdate, node: DockerVM = D
if changed:
await node.update()
node.updated()
-return node.__json__()
+return node.asdict()
@router.post("/{node_id}/start", status_code=status.HTTP_204_NO_CONTENT)

@@ -201,7 +201,7 @@ async def duplicate_docker_node(destination_node_id: UUID = Body(..., embed=True
"""
new_node = await Docker.instance().duplicate_node(node.id, str(destination_node_id))
-return new_node.__json__()
+return new_node.asdict()
@router.post(

@@ -219,7 +219,7 @@ async def create_docker_node_nio(
nio = Docker.instance().create_nio(jsonable_encoder(nio_data, exclude_unset=True))
await node.adapter_add_nio_binding(adapter_number, nio)
-return nio.__json__()
+return nio.asdict()
@router.put(

@@ -239,7 +239,7 @@ async def update_docker_node_nio(
if nio_data.filters:
nio.filters = nio_data.filters
await node.adapter_update_nio_binding(adapter_number, nio)
-return nio.__json__()
+return nio.asdict()
@router.delete("/{node_id}/adapters/{adapter_number}/ports/{port_number}/nio", status_code=status.HTTP_204_NO_CONTENT)

@@ -81,7 +81,7 @@ async def create_router(project_id: UUID, node_data: schemas.DynamipsCreate):
node_type="dynamips",
)
await dynamips_manager.update_vm_settings(vm, node_data)
-return vm.__json__()
+return vm.asdict()
@router.get("/{node_id}", response_model=schemas.Dynamips)

@@ -90,7 +90,7 @@ def get_router(node: Router = Depends(dep_node)):
Return Dynamips router.
"""
-return node.__json__()
+return node.asdict()
@router.put("/{node_id}", response_model=schemas.Dynamips)

@@ -101,7 +101,7 @@ async def update_router(node_data: schemas.DynamipsUpdate, node: Router = Depend
await Dynamips.instance().update_vm_settings(node, jsonable_encoder(node_data, exclude_unset=True))
node.updated()
-return node.__json__()
+return node.asdict()
@router.delete("/{node_id}", status_code=status.HTTP_204_NO_CONTENT)

@@ -171,7 +171,7 @@ async def create_nio(adapter_number: int, port_number: int, nio_data: schemas.UD
nio = await Dynamips.instance().create_nio(node, jsonable_encoder(nio_data, exclude_unset=True))
await node.slot_add_nio_binding(adapter_number, port_number, nio)
-return nio.__json__()
+return nio.asdict()
@router.put(

@@ -188,7 +188,7 @@ async def update_nio(adapter_number: int, port_number: int, nio_data: schemas.UD
if nio_data.filters:
nio.filters = nio_data.filters
await node.slot_update_nio_binding(adapter_number, port_number, nio)
-return nio.__json__()
+return nio.asdict()
@router.delete("/{node_id}/adapters/{adapter_number}/ports/{port_number}/nio", status_code=status.HTTP_204_NO_CONTENT)

@@ -273,7 +273,7 @@ async def duplicate_router(destination_node_id: UUID, node: Router = Depends(dep
"""
new_node = await Dynamips.instance().duplicate_node(node.id, str(destination_node_id))
-return new_node.__json__()
+return new_node.asdict()
@router.websocket("/{node_id}/console/ws")

@@ -65,7 +65,7 @@ async def create_ethernet_hub(project_id: UUID, node_data: schemas.EthernetHubCr
node_type="ethernet_hub",
ports=node_data.get("ports_mapping"),
)
-return node.__json__()
+return node.asdict()
@router.get("/{node_id}", response_model=schemas.EthernetHub)

@@ -74,7 +74,7 @@ def get_ethernet_hub(node: EthernetHub = Depends(dep_node)):
Return an Ethernet hub.
"""
-return node.__json__()
+return node.asdict()
@router.post("/{node_id}/duplicate", response_model=schemas.EthernetHub, status_code=status.HTTP_201_CREATED)

@@ -86,7 +86,7 @@ async def duplicate_ethernet_hub(
"""
new_node = await Dynamips.instance().duplicate_node(node.id, str(destination_node_id))
-return new_node.__json__()
+return new_node.asdict()
@router.put("/{node_id}", response_model=schemas.EthernetHub)

@@ -101,7 +101,7 @@ async def update_ethernet_hub(node_data: schemas.EthernetHubUpdate, node: Ethern
if "ports_mapping" in node_data:
node.ports_mapping = node_data["ports_mapping"]
node.updated()
-return node.__json__()
+return node.asdict()
@router.delete("/{node_id}", status_code=status.HTTP_204_NO_CONTENT)

@@ -158,7 +158,7 @@ async def create_nio(
nio = await Dynamips.instance().create_nio(node, jsonable_encoder(nio_data, exclude_unset=True))
await node.add_nio(nio, port_number)
-return nio.__json__()
+return nio.asdict()
@router.delete("/{node_id}/adapters/{adapter_number}/ports/{port_number}/nio", status_code=status.HTTP_204_NO_CONTENT)
@@ -68,13 +68,13 @@ async def create_ethernet_switch(project_id: UUID, node_data: schemas.EthernetSw
ports=node_data.get("ports_mapping"),
)
-return node.__json__()
+return node.asdict()
@router.get("/{node_id}", response_model=schemas.EthernetSwitch)
def get_ethernet_switch(node: EthernetSwitch = Depends(dep_node)):
-return node.__json__()
+return node.asdict()
@router.post("/{node_id}/duplicate", response_model=schemas.EthernetSwitch, status_code=status.HTTP_201_CREATED)

@@ -86,7 +86,7 @@ async def duplicate_ethernet_switch(
"""
new_node = await Dynamips.instance().duplicate_node(node.id, str(destination_node_id))
-return new_node.__json__()
+return new_node.asdict()
@router.put("/{node_id}", response_model=schemas.EthernetSwitch)

@@ -104,7 +104,7 @@ async def update_ethernet_switch(node_data: schemas.EthernetSwitchUpdate, node:
if "console_type" in node_data:
node.console_type = node_data["console_type"]
node.updated()
-return node.__json__()
+return node.asdict()
@router.delete("/{node_id}", status_code=status.HTTP_204_NO_CONTENT)

@@ -157,7 +157,7 @@ async def create_nio(
nio = await Dynamips.instance().create_nio(node, jsonable_encoder(nio_data, exclude_unset=True))
await node.add_nio(nio, port_number)
-return nio.__json__()
+return nio.asdict()
@router.delete("/{node_id}/adapters/{adapter_number}/ports/{port_number}/nio", status_code=status.HTTP_204_NO_CONTENT)

@@ -65,7 +65,7 @@ async def create_frame_relay_switch(project_id: UUID, node_data: schemas.FrameRe
node_type="frame_relay_switch",
mappings=node_data.get("mappings"),
)
-return node.__json__()
+return node.asdict()
@router.get("/{node_id}", response_model=schemas.FrameRelaySwitch)

@@ -74,7 +74,7 @@ def get_frame_relay_switch(node: FrameRelaySwitch = Depends(dep_node)):
Return a Frame Relay switch node.
"""
-return node.__json__()
+return node.asdict()
@router.post("/{node_id}/duplicate", response_model=schemas.FrameRelaySwitch, status_code=status.HTTP_201_CREATED)

@@ -86,7 +86,7 @@ async def duplicate_frame_relay_switch(
"""
new_node = await Dynamips.instance().duplicate_node(node.id, str(destination_node_id))
-return new_node.__json__()
+return new_node.asdict()
@router.put("/{node_id}", response_model=schemas.FrameRelaySwitch)

@@ -103,7 +103,7 @@ async def update_frame_relay_switch(
if "mappings" in node_data:
node.mappings = node_data["mappings"]
node.updated()
-return node.__json__()
+return node.asdict()
@router.delete("/{node_id}", status_code=status.HTTP_204_NO_CONTENT)

@@ -160,7 +160,7 @@ async def create_nio(
nio = await Dynamips.instance().create_nio(node, jsonable_encoder(nio_data, exclude_unset=True))
await node.add_nio(nio, port_number)
-return nio.__json__()
+return nio.asdict()
@router.delete("/{node_id}/adapters/{adapter_number}/ports/{port_number}/nio", status_code=status.HTTP_204_NO_CONTENT)

@@ -79,7 +79,7 @@ async def create_iou_node(project_id: UUID, node_data: schemas.IOUCreate):
if node_data.get("use_default_iou_values") and (name == "ram" or name == "nvram"):
continue
setattr(vm, name, value)
-return vm.__json__()
+return vm.asdict()
@router.get("/{node_id}", response_model=schemas.IOU)

@@ -88,7 +88,7 @@ def get_iou_node(node: IOUVM = Depends(dep_node)):
Return an IOU node.
"""
-return node.__json__()
+return node.asdict()
@router.put("/{node_id}", response_model=schemas.IOU)

@@ -109,7 +109,7 @@ async def update_iou_node(node_data: schemas.IOUUpdate, node: IOUVM = Depends(de
# this is important to have the correct NVRAM amount in order to correctly push the configs to the NVRAM
await node.update_default_iou_values()
node.updated()
-return node.__json__()
+return node.asdict()
@router.delete("/{node_id}", status_code=status.HTTP_204_NO_CONTENT)

@@ -128,7 +128,7 @@ async def duplicate_iou_node(destination_node_id: UUID = Body(..., embed=True),
"""
new_node = await IOU.instance().duplicate_node(node.id, str(destination_node_id))
-return new_node.__json__()
+return new_node.asdict()
@router.post("/{node_id}/start", status_code=status.HTTP_204_NO_CONTENT)

@@ -143,7 +143,7 @@ async def start_iou_node(start_data: schemas.IOUStart, node: IOUVM = Depends(dep
setattr(node, name, value)
await node.start()
-return node.__json__()
+return node.asdict()
@router.post("/{node_id}/stop", status_code=status.HTTP_204_NO_CONTENT)

@@ -191,7 +191,7 @@ async def create_iou_node_nio(
nio = IOU.instance().create_nio(jsonable_encoder(nio_data, exclude_unset=True))
await node.adapter_add_nio_binding(adapter_number, port_number, nio)
-return nio.__json__()
+return nio.asdict()
@router.put(

@@ -213,7 +213,7 @@ async def update_iou_node_nio(
if nio_data.filters:
nio.filters = nio_data.filters
await node.adapter_update_nio_binding(adapter_number, port_number, nio)
-return nio.__json__()
+return nio.asdict()
@router.delete("/{node_id}/adapters/{adapter_number}/ports/{port_number}/nio", status_code=status.HTTP_204_NO_CONTENT)

@@ -67,7 +67,7 @@ async def create_nat_node(project_id: UUID, node_data: schemas.NATCreate):
)
node.usage = node_data.get("usage", "")
-return node.__json__()
+return node.asdict()
@router.get("/{node_id}", response_model=schemas.NAT)

@@ -76,7 +76,7 @@ def get_nat_node(node: Nat = Depends(dep_node)):
Return a NAT node.
"""
-return node.__json__()
+return node.asdict()
@router.put("/{node_id}", response_model=schemas.NAT)

@@ -90,7 +90,7 @@ def update_nat_node(node_data: schemas.NATUpdate, node: Nat = Depends(dep_node))
if hasattr(node, name) and getattr(node, name) != value:
setattr(node, name, value)
node.updated()
-return node.__json__()
+return node.asdict()
@router.delete("/{node_id}", status_code=status.HTTP_204_NO_CONTENT)

@@ -149,7 +149,7 @@ async def create_nat_node_nio(
nio = Builtin.instance().create_nio(jsonable_encoder(nio_data, exclude_unset=True))
await node.add_nio(nio, port_number)
-return nio.__json__()
+return nio.asdict()
@router.put(

@@ -172,7 +172,7 @@ async def update_nat_node_nio(
if nio_data.filters:
nio.filters = nio_data.filters
await node.update_nio(port_number, nio)
-return nio.__json__()
+return nio.asdict()
@router.delete("/{node_id}/adapters/{adapter_number}/ports/{port_number}/nio", status_code=status.HTTP_204_NO_CONTENT)

@@ -58,7 +58,7 @@ def get_compute_projects():
"""
pm = ProjectManager.instance()
-return [p.__json__() for p in pm.projects]
+return [p.asdict() for p in pm.projects]
@router.post("/projects", status_code=status.HTTP_201_CREATED, response_model=schemas.Project)

@@ -75,7 +75,7 @@ def create_compute_project(project_data: schemas.ProjectCreate):
project_id=project_data.get("project_id"),
variables=project_data.get("variables", None),
)
-return project.__json__()
+return project.asdict()
@router.put("/projects/{project_id}", response_model=schemas.Project)

@@ -85,7 +85,7 @@ async def update_compute_project(project_data: schemas.ProjectUpdate, project: P
"""
await project.update(variables=project_data.variables)
-return project.__json__()
+return project.asdict()
@router.get("/projects/{project_id}", response_model=schemas.Project)

@@ -94,7 +94,7 @@ def get_compute_project(project: Project = Depends(dep_project)):
Return a project from the compute.
"""
-return project.__json__()
+return project.asdict()
@router.post("/projects/{project_id}/close", status_code=status.HTTP_204_NO_CONTENT)

@@ -152,8 +152,8 @@ async def delete_compute_project(project: Project = Depends(dep_project)):
# while True:
# try:
# (action, msg) = await asyncio.wait_for(queue.get(), 5)
-# if hasattr(msg, "__json__"):
-# msg = json.dumps({"action": action, "event": msg.__json__()}, sort_keys=True)
+# if hasattr(msg, "asdict"):
+# msg = json.dumps({"action": action, "event": msg.asdict()}, sort_keys=True)
# else:
# msg = json.dumps({"action": action, "event": msg}, sort_keys=True)
# log.debug("Send notification: %s", msg)
@@ -76,7 +76,7 @@ async def create_qemu_node(project_id: UUID, node_data: schemas.QemuCreate):
if hasattr(vm, name) and getattr(vm, name) != value:
setattr(vm, name, value)
-return vm.__json__()
+return vm.asdict()
@router.get("/{node_id}", response_model=schemas.Qemu)

@@ -85,7 +85,7 @@ def get_qemu_node(node: QemuVM = Depends(dep_node)):
Return a Qemu node.
"""
-return node.__json__()
+return node.asdict()
@router.put("/{node_id}", response_model=schemas.Qemu)

@@ -101,7 +101,7 @@ async def update_qemu_node(node_data: schemas.QemuUpdate, node: QemuVM = Depends
if hasattr(node, name) and getattr(node, name) != value:
await node.update_property(name, value)
node.updated()
-return node.__json__()
+return node.asdict()
@router.delete("/{node_id}", status_code=status.HTTP_204_NO_CONTENT)

@@ -120,7 +120,7 @@ async def duplicate_qemu_node(destination_node_id: UUID = Body(..., embed=True),
"""
new_node = await Qemu.instance().duplicate_node(node.id, str(destination_node_id))
-return new_node.__json__()
+return new_node.asdict()
@router.post("/{node_id}/resize_disk", status_code=status.HTTP_204_NO_CONTENT)

@@ -196,7 +196,7 @@ async def create_qemu_node_nio(
nio = Qemu.instance().create_nio(jsonable_encoder(nio_data, exclude_unset=True))
await node.adapter_add_nio_binding(adapter_number, nio)
-return nio.__json__()
+return nio.asdict()
@router.put(

@@ -218,7 +218,7 @@ async def update_qemu_node_nio(
if nio_data.suspend:
nio.suspend = nio_data.suspend
await node.adapter_update_nio_binding(adapter_number, nio)
-return nio.__json__()
+return nio.asdict()
@router.delete("/{node_id}/adapters/{adapter_number}/ports/{port_number}/nio", status_code=status.HTTP_204_NO_CONTENT)

@@ -80,7 +80,7 @@ async def create_virtualbox_node(project_id: UUID, node_data: schemas.VirtualBox
if hasattr(vm, name) and getattr(vm, name) != value:
setattr(vm, name, value)
-return vm.__json__()
+return vm.asdict()
@router.get("/{node_id}", response_model=schemas.VirtualBox)

@@ -89,7 +89,7 @@ def get_virtualbox_node(node: VirtualBoxVM = Depends(dep_node)):
Return a VirtualBox node.
"""
-return node.__json__()
+return node.asdict()
@router.put("/{node_id}", response_model=schemas.VirtualBox)

@@ -131,7 +131,7 @@ async def update_virtualbox_node(node_data: schemas.VirtualBoxUpdate, node: Virt
setattr(node, name, value)
node.updated()
-return node.__json__()
+return node.asdict()
@router.delete("/{node_id}", status_code=status.HTTP_204_NO_CONTENT)

@@ -203,7 +203,7 @@ async def create_virtualbox_node_nio(
nio = VirtualBox.instance().create_nio(jsonable_encoder(nio_data, exclude_unset=True))
await node.adapter_add_nio_binding(adapter_number, nio)
-return nio.__json__()
+return nio.asdict()
@router.put(

@@ -225,7 +225,7 @@ async def update_virtualbox_node_nio(
if nio_data.suspend:
nio.suspend = nio_data.suspend
await node.adapter_update_nio_binding(adapter_number, nio)
-return nio.__json__()
+return nio.asdict()
@router.delete("/{node_id}/adapters/{adapter_number}/ports/{port_number}/nio", status_code=status.HTTP_204_NO_CONTENT)

@@ -73,7 +73,7 @@ async def create_vmware_node(project_id: UUID, node_data: schemas.VMwareCreate):
if hasattr(vm, name) and getattr(vm, name) != value:
setattr(vm, name, value)
-return vm.__json__()
+return vm.asdict()
@router.get("/{node_id}", response_model=schemas.VMware)

@@ -82,7 +82,7 @@ def get_vmware_node(node: VMwareVM = Depends(dep_node)):
Return a VMware node.
"""
-return node.__json__()
+return node.asdict()
@router.put("/{node_id}", response_model=schemas.VMware)

@@ -99,7 +99,7 @@ def update_vmware_node(node_data: schemas.VMwareUpdate, node: VMwareVM = Depends
setattr(node, name, value)
node.updated()
-return node.__json__()
+return node.asdict()
@router.delete("/{node_id}", status_code=status.HTTP_204_NO_CONTENT)

@@ -171,7 +171,7 @@ async def create_vmware_node_nio(
nio = VMware.instance().create_nio(jsonable_encoder(nio_data, exclude_unset=True))
await node.adapter_add_nio_binding(adapter_number, nio)
-return nio.__json__()
+return nio.asdict()
@router.put(

@@ -191,7 +191,7 @@ async def update_vmware_node_nio(
if nio_data.filters:
nio.filters = nio_data.filters
await node.adapter_update_nio_binding(adapter_number, nio)
-return nio.__json__()
+return nio.asdict()
@router.delete("/{node_id}/adapters/{adapter_number}/ports/{port_number}/nio", status_code=status.HTTP_204_NO_CONTENT)

@@ -66,7 +66,7 @@ async def create_vpcs_node(project_id: UUID, node_data: schemas.VPCSCreate):
startup_script=node_data.get("startup_script"),
)
-return vm.__json__()
+return vm.asdict()
@router.get("/{node_id}", response_model=schemas.VPCS)

@@ -75,7 +75,7 @@ def get_vpcs_node(node: VPCSVM = Depends(dep_node)):
Return a VPCS node.
"""
-return node.__json__()
+return node.asdict()
@router.put("/{node_id}", response_model=schemas.VPCS)

@@ -89,7 +89,7 @@ def update_vpcs_node(node_data: schemas.VPCSUpdate, node: VPCSVM = Depends(dep_n
node.console = node_data.get("console", node.console)
node.console_type = node_data.get("console_type", node.console_type)
node.updated()
-return node.__json__()
+return node.asdict()
@router.delete("/{node_id}", status_code=status.HTTP_204_NO_CONTENT)

@@ -108,7 +108,7 @@ async def duplicate_vpcs_node(destination_node_id: UUID = Body(..., embed=True),
"""
new_node = await VPCS.instance().duplicate_node(node.id, str(destination_node_id))
-return new_node.__json__()
+return new_node.asdict()
@router.post("/{node_id}/start", status_code=status.HTTP_204_NO_CONTENT)

@@ -163,7 +163,7 @@ async def create_vpcs_node_nio(
nio = VPCS.instance().create_nio(jsonable_encoder(nio_data, exclude_unset=True))
await node.port_add_nio_binding(port_number, nio)
-return nio.__json__()
+return nio.asdict()
@router.put(

@@ -183,7 +183,7 @@ async def update_vpcs_node_nio(
if nio_data.filters:
nio.filters = nio_data.filters
await node.port_update_nio_binding(port_number, nio)
-return nio.__json__()
+return nio.asdict()
@router.delete("/{node_id}/adapters/{adapter_number}/ports/{port_number}/nio", status_code=status.HTTP_204_NO_CONTENT)

@@ -36,4 +36,4 @@ async def get_appliances(update: Optional[bool] = None, symbol_theme: Optional[s
if update:
await controller.appliance_manager.download_appliances()
controller.appliance_manager.load_appliances(symbol_theme=symbol_theme)
-return [c.__json__() for c in controller.appliance_manager.appliances.values()]
+return [c.asdict() for c in controller.appliance_manager.appliances.values()]

@@ -38,7 +38,7 @@ async def get_drawings(project_id: UUID):
"""
project = await Controller.instance().get_loaded_project(str(project_id))
-return [v.__json__() for v in project.drawings.values()]
+return [v.asdict() for v in project.drawings.values()]
@router.post("", status_code=status.HTTP_201_CREATED, response_model=schemas.Drawing)

@@ -49,7 +49,7 @@ async def create_drawing(project_id: UUID, drawing_data: schemas.Drawing):
project = await Controller.instance().get_loaded_project(str(project_id))
drawing = await project.add_drawing(**jsonable_encoder(drawing_data, exclude_unset=True))
-return drawing.__json__()
+return drawing.asdict()
@router.get("/{drawing_id}", response_model=schemas.Drawing, response_model_exclude_unset=True)

@@ -60,7 +60,7 @@ async def get_drawing(project_id: UUID, drawing_id: UUID):
project = await Controller.instance().get_loaded_project(str(project_id))
drawing = project.get_drawing(str(drawing_id))
-return drawing.__json__()
+return drawing.asdict()
@router.put("/{drawing_id}", response_model=schemas.Drawing, response_model_exclude_unset=True)

@@ -72,7 +72,7 @@ async def update_drawing(project_id: UUID, drawing_id: UUID, drawing_data: schem
project = await Controller.instance().get_loaded_project(str(project_id))
drawing = project.get_drawing(str(drawing_id))
await drawing.update(**jsonable_encoder(drawing_data, exclude_unset=True))
-return drawing.__json__()
+return drawing.asdict()
@router.delete("/{drawing_id}", status_code=status.HTTP_204_NO_CONTENT)
@@ -54,7 +54,7 @@ async def get_gns3vm_settings():
Return the GNS3 VM settings.
"""
-return Controller.instance().gns3vm.__json__()
+return Controller.instance().gns3vm.asdict()
@router.put("", response_model=schemas.GNS3VM, response_model_exclude_unset=True)

@@ -67,4 +67,4 @@ async def update_gns3vm_settings(gns3vm_data: schemas.GNS3VM):
gns3_vm = controller.gns3vm
await gns3_vm.update_settings(jsonable_encoder(gns3vm_data, exclude_unset=True))
controller.save()
-return gns3_vm.__json__()
+return gns3_vm.asdict()

@@ -59,7 +59,7 @@ async def get_links(project_id: UUID):
"""
project = await Controller.instance().get_loaded_project(str(project_id))
-return [v.__json__() for v in project.links.values()]
+return [v.asdict() for v in project.links.values()]
@router.post(

@@ -94,7 +94,7 @@ async def create_link(project_id: UUID, link_data: schemas.LinkCreate):
except ControllerError as e:
await project.delete_link(link.id)
raise e
-return link.__json__()
+return link.asdict()
@router.get("/{link_id}/available_filters")

@@ -112,7 +112,7 @@ async def get_link(link: Link = Depends(dep_link)):
Return a link.
"""
-return link.__json__()
+return link.asdict()
@router.put("/{link_id}", response_model=schemas.Link, response_model_exclude_unset=True)

@@ -128,7 +128,7 @@ async def update_link(link_data: schemas.LinkUpdate, link: Link = Depends(dep_li
await link.update_suspend(link_data["suspend"])
if "nodes" in link_data:
await link.update_nodes(link_data["nodes"])
-return link.__json__()
+return link.asdict()
@router.delete("/{link_id}", status_code=status.HTTP_204_NO_CONTENT)

@@ -148,7 +148,7 @@ async def reset_link(link: Link = Depends(dep_link)):
"""
await link.reset()
-return link.__json__()
+return link.asdict()
@router.post("/{link_id}/capture/start", status_code=status.HTTP_201_CREATED, response_model=schemas.Link)

@@ -161,7 +161,7 @@ async def start_capture(capture_data: dict, link: Link = Depends(dep_link)):
data_link_type=capture_data.get("data_link_type", "DLT_EN10MB"),
capture_file_name=capture_data.get("capture_file_name"),
)
-return link.__json__()
+return link.asdict()
@router.post("/{link_id}/capture/stop", status_code=status.HTTP_204_NO_CONTENT)

@@ -117,7 +117,7 @@ async def create_node(node_data: schemas.NodeCreate, project: Project = Depends(
compute = controller.get_compute(str(node_data.compute_id))
node_data = jsonable_encoder(node_data, exclude_unset=True)
node = await project.add_node(compute, node_data.pop("name"), node_data.pop("node_id", None), **node_data)
-return node.__json__()
+return node.asdict()
@router.get("", response_model=List[schemas.Node], response_model_exclude_unset=True)

@@ -126,7 +126,7 @@ async def get_nodes(project: Project = Depends(dep_project)):
Return all nodes belonging to a given project.
"""
-return [v.__json__() for v in project.nodes.values()]
+return [v.asdict() for v in project.nodes.values()]
@router.post("/start", status_code=status.HTTP_204_NO_CONTENT)

@@ -172,7 +172,7 @@ def get_node(node: Node = Depends(dep_node)):
Return a node from a given project.
"""
-return node.__json__()
+return node.asdict()
@router.put("/{node_id}", response_model=schemas.Node, response_model_exclude_unset=True)

@@ -189,7 +189,7 @@ async def update_node(node_data: schemas.NodeUpdate, node: Node = Depends(dep_no
node_data.pop("compute_id", None)
await node.update(**node_data)
-return node.__json__()
+return node.asdict()
@router.delete(

@@ -212,7 +212,7 @@ async def duplicate_node(duplicate_data: schemas.NodeDuplicate, node: Node = Dep
"""
new_node = await node.project.duplicate_node(node, duplicate_data.x, duplicate_data.y, duplicate_data.z)
-return new_node.__json__()
+return new_node.asdict()
@router.post("/{node_id}/start", status_code=status.HTTP_204_NO_CONTENT)

@@ -259,7 +259,7 @@ async def get_node_links(node: Node = Depends(dep_node)):
links = []
for link in node.links:
-links.append(link.__json__())
+links.append(link.asdict())
return links

@@ -70,7 +70,7 @@ def get_projects():
"""
controller = Controller.instance()
-return [p.__json__() for p in controller.projects.values()]
+return [p.asdict() for p in controller.projects.values()]
@router.post(

@@ -87,7 +87,7 @@ async def create_project(project_data: schemas.ProjectCreate):
controller = Controller.instance()
project = await controller.add_project(**jsonable_encoder(project_data, exclude_unset=True))
-return project.__json__()
+return project.asdict()
@router.get("/{project_id}", response_model=schemas.Project)

@@ -96,7 +96,7 @@ def get_project(project: Project = Depends(dep_project)):
Return a project.
"""
-return project.__json__()
+return project.asdict()
@router.put("/{project_id}", response_model=schemas.Project, response_model_exclude_unset=True)

@@ -106,7 +106,7 @@ async def update_project(project_data: schemas.ProjectUpdate, project: Project =
"""
await project.update(**jsonable_encoder(project_data, exclude_unset=True))
-return project.__json__()
+return project.asdict()
@router.delete("/{project_id}", status_code=status.HTTP_204_NO_CONTENT)

@@ -154,7 +154,7 @@ async def open_project(project: Project = Depends(dep_project)):
"""
await project.open()
-return project.__json__()
+return project.asdict()
@router.post(

@@ -176,7 +176,7 @@ async def load_project(path: str = Body(..., embed=True)):
project = await controller.load_project(
dot_gns3_file,
)
-return project.__json__()
+return project.asdict()
@router.get("/{project_id}/notifications")

@@ -323,7 +323,7 @@ async def import_project(project_id: UUID, request: Request, path: Optional[Path
log.info(f"Project '{project.name}' imported in {time.time() - begin:.4f} seconds")
except OSError as e:
raise ControllerError(f"Could not import the project: {e}")
-return project.__json__()
+return project.asdict()
@router.post(

@@ -348,7 +348,7 @@ async def duplicate_project(project_data: schemas.ProjectDuplicate, project: Pro
new_project = await project.duplicate(
name=project_data.name, location=location, reset_mac_addresses=reset_mac_addresses
)
-return new_project.__json__()
+return new_project.asdict()
@router.get("/{project_id}/files/{file_path:path}")

@@ -52,7 +52,7 @@ async def create_snapshot(snapshot_data: schemas.SnapshotCreate, project: Projec
"""
snapshot = await project.snapshot(snapshot_data.name)
-return snapshot.__json__()
+return snapshot.asdict()
@router.get("", response_model=List[schemas.Snapshot], response_model_exclude_unset=True)

@@ -62,7 +62,7 @@ def get_snapshots(project: Project = Depends(dep_project)):
"""
snapshots = [s for s in project.snapshots.values()]
-return [s.__json__() for s in sorted(snapshots, key=lambda s: (s.created_at, s.name))]
+return [s.asdict() for s in sorted(snapshots, key=lambda s: (s.created_at, s.name))]
@router.delete("/{snapshot_id}", status_code=status.HTTP_204_NO_CONTENT)

@@ -82,4 +82,4 @@ async def restore_snapshot(snapshot_id: UUID, project: Project = Depends(dep_pro
snapshot = project.get_snapshot(str(snapshot_id))
project = await snapshot.restore()
-return project.__json__()
+return project.asdict()

@@ -145,4 +145,4 @@ async def create_node_from_template(
node = await project.add_node_from_template(
template, x=template_usage.x, y=template_usage.y, compute_id=template_usage.compute_id
)
-return node.__json__()
+return node.asdict()
@@ -77,7 +77,7 @@ class Cloud(BaseNode):
def _interfaces(self):
return gns3server.utils.interfaces.interfaces()
-def __json__(self):
+def asdict(self):
host_interfaces = []
network_interfaces = gns3server.utils.interfaces.interfaces()

@@ -38,7 +38,7 @@ class EthernetHub(BaseNode):
super().__init__(name, node_id, project, manager)
-def __json__(self):
+def asdict(self):
return {
"name": self.name,

@@ -38,7 +38,7 @@ class EthernetSwitch(BaseNode):
super().__init__(name, node_id, project, manager)
-def __json__(self):
+def asdict(self):
return {
"name": self.name,

@@ -77,7 +77,7 @@ class Nat(Cloud):
def is_supported(self):
return True
-def __json__(self):
+def asdict(self):
return {
"name": self.name,
"usage": self.usage,

@@ -133,7 +133,7 @@ class DockerVM(BaseNode):
)
)
-def __json__(self):
+def asdict(self):
return {
"name": self._name,
"usage": self.usage,

@@ -65,7 +65,7 @@ class NIOGenericEthernet(NIO):
return self._ethernet_device
-def __json__(self):
+def asdict(self):
return {
"type": "nio_generic_ethernet",

@@ -64,7 +64,7 @@ class NIOLinuxEthernet(NIO):
return self._ethernet_device
-def __json__(self):
+def asdict(self):
return {
"type": "nio_linux_ethernet",

@@ -46,6 +46,6 @@ class NIONull(NIO):
await self._hypervisor.send(f"nio create_null {self._name}")
log.info(f"NIO NULL {self._name} created.")
-def __json__(self):
+def asdict(self):
return {"type": "nio_null"}

@@ -58,7 +58,7 @@ class NIOTAP(NIO):
return self._tap_device
-def __json__(self):
+def asdict(self):
return {
"type": "nio_tap",

@@ -126,7 +126,7 @@ class NIOUDP(NIO):
return self._rport
-def __json__(self):
+def asdict(self):
return {
"type": "nio_udp",

@@ -79,7 +79,7 @@ class NIOUNIX(NIO):
return self._remote_file
-def __json__(self):
+def asdict(self):
return {
"type": "nio_unix",

@@ -79,7 +79,7 @@ class NIOVDE(NIO):
return self._local_file
-def __json__(self):
+def asdict(self):
return {
"type": "nio_vde",

@@ -52,7 +52,7 @@ class ATMSwitch(Device):
if mappings:
self._mappings = mappings
-def __json__(self):
+def asdict(self):
mappings = {}
for source, destination in self._mappings.items():

@@ -76,11 +76,11 @@ class C1700(Router):
self._clock_divisor = 8
self._sparsemem = False # never activate sparsemem for c1700 (unstable)
-def __json__(self):
+def asdict(self):
c1700_router_info = {"iomem": self._iomem, "chassis": self._chassis, "sparsemem": self._sparsemem}
-router_info = Router.__json__(self)
+router_info = Router.asdict(self)
router_info.update(c1700_router_info)
return router_info
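The Dynamips router subclasses (C1700 above, and C2600, C2691, C3600, C3725, C3745, C7200 in the hunks that follow) all use the same shape: the subclass builds its model-specific fields and merges them into the dictionary returned by the parent, now via Router.asdict(self) instead of Router.__json__(self). A minimal sketch of that inheritance pattern, with hypothetical class and field names (not taken from the repository):

class DemoRouter:
    """Hypothetical stand-in for the Router base class."""

    def __init__(self, name):
        self.name = name

    def asdict(self):
        return {"name": self.name, "node_type": "dynamips"}


class DemoC1700(DemoRouter):
    """Hypothetical stand-in for a platform subclass such as C1700."""

    def __init__(self, name, iomem=15):
        super().__init__(name)
        self._iomem = iomem

    def asdict(self):
        # Merge platform-specific fields into the base-class dictionary,
        # mirroring how the hunks above call Router.asdict(self) and update it.
        router_info = DemoRouter.asdict(self)
        router_info.update({"iomem": self._iomem})
        return router_info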
@@ -93,11 +93,11 @@ class C2600(Router):
self._clock_divisor = 8
self._sparsemem = False # never activate sparsemem for c2600 (unstable)
-def __json__(self):
+def asdict(self):
c2600_router_info = {"iomem": self._iomem, "chassis": self._chassis, "sparsemem": self._sparsemem}
-router_info = Router.__json__(self)
+router_info = Router.asdict(self)
router_info.update(c2600_router_info)
return router_info

@@ -77,11 +77,11 @@ class C2691(Router):
if chassis is not None:
raise DynamipsError("c2691 routers do not have chassis")
-def __json__(self):
+def asdict(self):
c2691_router_info = {"iomem": self._iomem}
-router_info = Router.__json__(self)
+router_info = Router.asdict(self)
router_info.update(c2691_router_info)
return router_info

@@ -73,11 +73,11 @@ class C3600(Router):
self._chassis = chassis
self._clock_divisor = 4
-def __json__(self):
+def asdict(self):
c3600_router_info = {"iomem": self._iomem, "chassis": self._chassis}
-router_info = Router.__json__(self)
+router_info = Router.asdict(self)
router_info.update(c3600_router_info)
return router_info

@@ -77,11 +77,11 @@ class C3725(Router):
if chassis is not None:
raise DynamipsError("c3725 routers do not have chassis")
-def __json__(self):
+def asdict(self):
c3725_router_info = {"iomem": self._iomem}
-router_info = Router.__json__(self)
+router_info = Router.asdict(self)
router_info.update(c3725_router_info)
return router_info

@@ -77,11 +77,11 @@ class C3745(Router):
if chassis is not None:
raise DynamipsError("c3745 routers do not have chassis")
-def __json__(self):
+def asdict(self):
c3745_router_info = {"iomem": self._iomem}
-router_info = Router.__json__(self)
+router_info = Router.asdict(self)
router_info.update(c3745_router_info)
return router_info

@@ -92,7 +92,7 @@ class C7200(Router):
if chassis is not None:
raise DynamipsError("c7200 routers do not have chassis")
-def __json__(self):
+def asdict(self):
c7200_router_info = {
"npe": self._npe,

@@ -101,7 +101,7 @@ class C7200(Router):
"power_supplies": self._power_supplies,
}
-router_info = Router.__json__(self)
+router_info = Router.asdict(self)
router_info.update(c7200_router_info)
return router_info

@@ -53,7 +53,7 @@ class EthernetHub(Bridge):
else:
self._ports = ports
-def __json__(self):
+def asdict(self):
return {
"name": self.name,

@@ -109,7 +109,7 @@ class EthernetSwitch(Device):
else:
self._ports = ports
-def __json__(self):
+def asdict(self):
ethernet_switch_info = {
"name": self.name,

@@ -51,7 +51,7 @@ class FrameRelaySwitch(Device):
if mappings:
self._mappings = mappings
-def __json__(self):
+def asdict(self):
mappings = {}
for source, destination in self._mappings.items():

@@ -155,7 +155,7 @@ class Router(BaseNode):
log.error(f"Can't move {path}: {str(e)}")
continue
-def __json__(self):
+def asdict(self):
router_info = {
"name": self.name,

@@ -219,7 +219,7 @@ class IOUVM(BaseNode):
if not os.access(self._path, os.X_OK):
raise IOUError(f"IOU image '{self._path}' is not executable")
-def __json__(self):
+def asdict(self):
iou_vm_info = {
"name": self.name,

@@ -48,7 +48,7 @@ class NIOEthernet(NIO):
return "NIO Ethernet"
-def __json__(self):
+def asdict(self):
return {
"type": "nio_ethernet",

@@ -48,7 +48,7 @@ class NIOTAP(NIO):
return "NIO TAP"
-def __json__(self):
+def asdict(self):
return {
"type": "nio_tap",

@@ -72,7 +72,7 @@ class NIOUDP(NIO):
return "NIO UDP"
-def __json__(self):
+def asdict(self):
return {
"type": "nio_udp",

@@ -127,7 +127,7 @@ class PortManager:
cls._instance = cls()
return cls._instance
-def __json__(self):
+def asdict(self):
return {
"console_port_range": self._console_port_range,

@@ -77,7 +77,7 @@ class Project:
log.info(f"Project {self._id} with path '{self._path}' created")
-def __json__(self):
+def asdict(self):
return {
"name": self._name,

@@ -1112,7 +1112,7 @@ class QemuVM(BaseNode):
# In case user upload image manually we don't have md5 sums.
# We need generate hashes at this point, otherwise they will be generated
-# at __json__ but not on separate thread.
+# at asdict but not on separate thread.
await cancellable_wait_run_in_executor(md5sum, self._hda_disk_image)
await cancellable_wait_run_in_executor(md5sum, self._hdb_disk_image)
await cancellable_wait_run_in_executor(md5sum, self._hdc_disk_image)

@@ -2506,7 +2506,7 @@ class QemuVM(BaseNode):
raise QemuError(f"Invalid additional options: {additional_options} error {e}")
return command
-def __json__(self):
+def asdict(self):
answer = {"project_id": self.project.id, "node_id": self.id, "node_directory": self.working_path}
# Qemu has a long list of options. The JSON schema is the single source of information
for field in Qemu.schema()["properties"]:

@@ -86,7 +86,7 @@ class VirtualBoxVM(BaseNode):
self._ram = 0
self._adapter_type = "Intel PRO/1000 MT Desktop (82540EM)"
-def __json__(self):
+def asdict(self):
json = {
"name": self.name,

@@ -75,7 +75,7 @@ class VMwareVM(BaseNode):
def ethernet_adapters(self):
return self._ethernet_adapters
-def __json__(self):
+def asdict(self):
json = {
"name": self.name,

@@ -120,7 +120,7 @@ class VPCSVM(BaseNode):
await self._check_vpcs_version()
-def __json__(self):
+def asdict(self):
return {
"name": self.name,

@@ -355,12 +355,12 @@ class Controller:
if connect:
# call compute.connect() later to give time to the controller to be fully started
asyncio.get_event_loop().call_later(1, lambda: asyncio.ensure_future(compute.connect()))
-self.notification.controller_emit("compute.created", compute.__json__())
+self.notification.controller_emit("compute.created", compute.asdict())
return compute
else:
if connect:
await self._computes[compute_id].connect()
-self.notification.controller_emit("compute.updated", self._computes[compute_id].__json__())
+self.notification.controller_emit("compute.updated", self._computes[compute_id].asdict())
return self._computes[compute_id]
async def close_compute_projects(self, compute):

@@ -399,7 +399,7 @@ class Controller:
await compute.close()
del self._computes[compute_id]
# self.save()
-self.notification.controller_emit("compute.deleted", compute.__json__())
+self.notification.controller_emit("compute.deleted", compute.asdict())
@property
def notification(self):

@@ -60,7 +60,7 @@ class Appliance:
def symbol(self, new_symbol):
self._data["symbol"] = new_symbol
-def __json__(self):
+def asdict(self):
"""
Appliance data (a hash)
"""

@@ -104,7 +104,7 @@ class ApplianceManager:
try:
with open(path, encoding="utf-8") as f:
appliance = Appliance(appliance_id, json.load(f), builtin=builtin)
-json_data = appliance.__json__() # Check if loaded without error
+json_data = appliance.asdict() # Check if loaded without error
if appliance.status != "broken":
self._appliances[appliance.id] = appliance
if not appliance.symbol or appliance.symbol.startswith(":/symbols/"):

@@ -162,7 +162,7 @@ class Compute:
if self._http_session and not self._http_session.closed:
await self._http_session.close()
self._connected = False
-self._controller.notification.controller_emit("compute.updated", self.__json__())
+self._controller.notification.controller_emit("compute.updated", self.asdict())
self._controller.save()
async def close(self):

@@ -290,7 +290,7 @@ class Compute:
def disk_usage_percent(self):
return self._disk_usage_percent
-def __json__(self, topology_dump=False):
+def asdict(self, topology_dump=False):
"""
:param topology_dump: Filter to keep only properties require for saving on disk
"""

@@ -444,7 +444,7 @@ class Compute:
self._connected = True
self._connection_failure = 0
self._last_error = None
-self._controller.notification.controller_emit("compute.updated", self.__json__())
+self._controller.notification.controller_emit("compute.updated", self.asdict())
async def _connect_notification(self):
"""

@@ -466,7 +466,7 @@ class Compute:
self._memory_usage_percent = event["memory_usage_percent"]
self._disk_usage_percent = event["disk_usage_percent"]
# FIXME: slow down number of compute events
-self._controller.notification.controller_emit("compute.updated", self.__json__())
+self._controller.notification.controller_emit("compute.updated", self.asdict())
else:
await self._controller.notification.dispatch(
action, event, project_id=project_id, compute_id=self.id

@@ -494,7 +494,7 @@ class Compute:
self._cpu_usage_percent = None
self._memory_usage_percent = None
self._disk_usage_percent = None
-self._controller.notification.controller_emit("compute.updated", self.__json__())
+self._controller.notification.controller_emit("compute.updated", self.asdict())
def _getUrl(self, path):
host = self._host

@@ -523,8 +523,8 @@ class Compute:
if data == {}:
data = None
elif data is not None:
-if hasattr(data, "__json__"):
-data = json.dumps(data.__json__())
+if hasattr(data, "asdict"):
+data = json.dumps(data.asdict())
elif isinstance(data, aiohttp.streams.EmptyStreamReader):
data = None
# Stream the request
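Because callers such as the Compute request helper above now probe for asdict instead of __json__, any object passed as a request body only needs to expose an asdict() method returning JSON-serializable data. A small, self-contained illustration of that duck-typed check, with a hypothetical payload class (names are illustrative, not from the repository):

import json


class DemoPayload:
    """Hypothetical payload object exposing asdict(), as the helper above expects."""

    def asdict(self):
        return {"type": "nio_udp", "lport": 20000}


def serialize_body(data):
    # Mirrors the hunk above: objects with asdict() are dumped via that method,
    # anything else falls through unchanged.
    if hasattr(data, "asdict"):
        return json.dumps(data.asdict())
    return data


print(serialize_body(DemoPayload()))  # {"type": "nio_udp", "lport": 20000}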
@@ -193,13 +193,13 @@ class Drawing:
# To avoid spamming client with large data we don't send the svg if the SVG didn't change
svg_changed = True
setattr(self, prop, kwargs[prop])
-data = self.__json__()
+data = self.asdict()
if not svg_changed:
del data["svg"]
self._project.emit_notification("drawing.updated", data)
self._project.dump()
-def __json__(self, topology_dump=False):
+def asdict(self, topology_dump=False):
"""
:param topology_dump: Filter to keep only properties require for saving on disk
"""

@@ -257,7 +257,7 @@ class GNS3VM:
return self._engines["remote"]
raise NotImplementedError(f"The engine {engine} for the GNS3 VM is not supported")
-def __json__(self):
+def asdict(self):
return self._settings
@locking

@@ -160,14 +160,14 @@ class Link:
self._filters = new_filters
if self._created:
await self.update()
-self._project.emit_notification("link.updated", self.__json__())
+self._project.emit_notification("link.updated", self.asdict())
self._project.dump()
async def update_suspend(self, value):
if value != self._suspended:
self._suspended = value
await self.update()
-self._project.emit_notification("link.updated", self.__json__())
+self._project.emit_notification("link.updated", self.asdict())
self._project.dump()
@property

@@ -231,7 +231,7 @@ class Link:
n["node"].add_link(self)
n["port"].link = self
self._created = True
-self._project.emit_notification("link.created", self.__json__())
+self._project.emit_notification("link.created", self.asdict())
if dump:
self._project.dump()

@@ -244,7 +244,7 @@ class Link:
label = node_data.get("label")
if label:
port["label"] = label
-self._project.emit_notification("link.updated", self.__json__())
+self._project.emit_notification("link.updated", self.asdict())
self._project.dump()
async def create(self):

@@ -286,7 +286,7 @@ class Link:
self._capturing = True
self._capture_file_name = capture_file_name
-self._project.emit_notification("link.updated", self.__json__())
+self._project.emit_notification("link.updated", self.asdict())
async def stop_capture(self):
"""

@@ -294,7 +294,7 @@ class Link:
"""
self._capturing = False
-self._project.emit_notification("link.updated", self.__json__())
+self._project.emit_notification("link.updated", self.asdict())
def pcap_streaming_url(self):
"""

@@ -417,7 +417,7 @@ class Link:
def __hash__(self):
return hash(self._id)
-def __json__(self, topology_dump=False):
+def asdict(self, topology_dump=False):
"""
:param topology_dump: Filter to keep only properties require for saving on disk
"""

@@ -419,7 +419,7 @@ class Node:
# When updating properties used only on controller we don't need to call the compute
update_compute = False
-old_json = self.__json__()
+old_json = self.asdict()
compute_properties = None
# Update node properties with additional elements

@@ -449,9 +449,9 @@ class Node:
data = self._node_data(properties=compute_properties)
response = await self.put(None, data=data)
await self.parse_node_response(response.json)
-elif old_json != self.__json__():
+elif old_json != self.asdict():
# We send notif only if object has changed
-self.project.emit_notification("node.updated", self.__json__())
+self.project.emit_notification("node.updated", self.asdict())
self.project.dump()
async def parse_node_response(self, response):
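The controller objects also rely on the renamed method for change detection: Node.update() above (and Project.update() further down) snapshot asdict() before applying changes and emit a notification only when the dictionary differs afterwards. A small sketch of that pattern, with a hypothetical object and notifier callable (names are illustrative, not from the repository):

class DemoProject:
    """Hypothetical object using asdict() snapshots to detect changes."""

    def __init__(self):
        self.name = "untitled"

    def asdict(self):
        return {"name": self.name}

    def update(self, emit_notification, **kwargs):
        old_json = self.asdict()
        for prop, value in kwargs.items():
            setattr(self, prop, value)
        # Notify only if the serialized state actually changed,
        # mirroring the comparison in the hunks above.
        if old_json != self.asdict():
            emit_notification("project.updated", self.asdict())


DemoProject().update(print, name="lab-1")  # prints: project.updated {'name': 'lab-1'}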
@ -777,7 +777,7 @@ class Node:
|
||||
return False
|
||||
return self.id == other.id and other.project.id == self.project.id
|
||||
|
||||
def __json__(self, topology_dump=False):
|
||||
def asdict(self, topology_dump=False):
|
||||
"""
|
||||
:param topology_dump: Filter to keep only properties required for saving on disk
|
||||
"""
|
||||
@ -817,7 +817,7 @@ class Node:
|
||||
"status": self._status,
|
||||
"console_host": str(self._compute.console_host),
|
||||
"node_directory": self._node_directory,
|
||||
"ports": [port.__json__() for port in self.ports]
|
||||
"ports": [port.asdict() for port in self.ports]
|
||||
}
|
||||
topology.update(additional_data)
|
||||
return topology
|
||||
|
@ -98,7 +98,7 @@ class Notification:
project = self._controller.get_project(event["project_id"])
node = project.get_node(event["node_id"])
await node.parse_node_response(event)
self.project_emit("node.updated", node.__json__())
self.project_emit("node.updated", node.asdict())
except ControllerError: # Project closing
return
elif action == "ping":
@ -93,7 +93,7 @@ class Port:
def short_name(self, val):
self._short_name = val

def __json__(self):
def asdict(self):
info = {
"name": self._name,
"short_name": self.short_name,
@ -169,14 +169,14 @@ class Project:
:param kwargs: Project properties
"""

old_json = self.__json__()
old_json = self.asdict()

for prop in kwargs:
setattr(self, prop, kwargs[prop])

# We send notif only if object has changed
if old_json != self.__json__():
self.emit_notification("project.updated", self.__json__())
if old_json != self.asdict():
self.emit_notification("project.updated", self.asdict())
self.dump()

# update on computes

@ -589,7 +589,7 @@ class Project:
node = await self._create_node(compute, name, node_id, node_type, **kwargs)
else:
node = await self._create_node(compute, name, node_id, node_type, **kwargs)
self.emit_notification("node.created", node.__json__())
self.emit_notification("node.created", node.asdict())
if dump:
self.dump()
return node

@ -618,7 +618,7 @@ class Project:
# refresh the compute IDs list
self._computes = [n.compute.id for n in self.nodes.values()]
self.dump()
self.emit_notification("node.deleted", node.__json__())
self.emit_notification("node.deleted", node.asdict())

@open_required
def get_node(self, node_id):

@ -683,7 +683,7 @@ class Project:
if drawing_id not in self._drawings:
drawing = Drawing(self, drawing_id=drawing_id, **kwargs)
self._drawings[drawing.id] = drawing
self.emit_notification("drawing.created", drawing.__json__())
self.emit_notification("drawing.created", drawing.asdict())
if dump:
self.dump()
return drawing

@ -706,7 +706,7 @@ class Project:
raise ControllerError(f"Drawing ID {drawing_id} cannot be deleted because it is locked")
del self._drawings[drawing.id]
self.dump()
self.emit_notification("drawing.deleted", drawing.__json__())
self.emit_notification("drawing.deleted", drawing.asdict())

@open_required
async def add_link(self, link_id=None, dump=True):

@ -733,7 +733,7 @@ class Project:
if force_delete is False:
raise
self.dump()
self.emit_notification("link.deleted", link.__json__())
self.emit_notification("link.deleted", link.asdict())

@open_required
def get_link(self, link_id):

@ -810,7 +810,7 @@ class Project:
self._clean_pictures()
self._status = "closed"
if not ignore_notification:
self.emit_notification("project.closed", self.__json__())
self.emit_notification("project.closed", self.asdict())
self.reset()
self._closing = False

@ -1180,7 +1180,7 @@ class Project:
if node.status != "stopped" and not node.is_always_running():
raise ControllerError("Cannot duplicate node data while the node is running")

data = copy.deepcopy(node.__json__(topology_dump=True))
data = copy.deepcopy(node.asdict(topology_dump=True))
# Some properties like internal ID should not be duplicated
for unique_property in (
"node_id",

@ -1220,7 +1220,7 @@ class Project:
"snapshots": len(self._snapshots),
}

def __json__(self):
def asdict(self):
return {
"name": self._name,
"project_id": self._id,
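Project.update() keeps its change-detection approach under the new name: take a dict snapshot before applying properties, compare with a fresh snapshot afterwards, and emit project.updated only when something actually changed. Condensed from the hunk above (error handling and compute propagation omitted):

def update(self, **kwargs):
    old_json = self.asdict()              # snapshot before applying properties
    for prop in kwargs:
        setattr(self, prop, kwargs[prop])
    if old_json != self.asdict():         # emit only if the object really changed
        self.emit_notification("project.updated", self.asdict())
        self.dump()
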
@ -133,10 +133,10 @@ class Snapshot:
except (OSError, PermissionError) as e:
raise ControllerError(str(e))
await project.open()
self._project.emit_notification("snapshot.restored", self.__json__())
self._project.emit_notification("snapshot.restored", self.asdict())
return self._project

def __json__(self):
def asdict(self):
return {
"snapshot_id": self._id,
"name": self._name,
@ -30,6 +30,10 @@ from ..version import __version__
from ..utils.qt import qt_font_to_style
from ..compute.dynamips import PLATFORMS_DEFAULT_RAM
from .controller_error import ControllerError
from .compute import Compute
from .drawing import Drawing
from .node import Node
from .link import Link

from gns3server.schemas.controller.topology import Topology
from gns3server.schemas.compute.dynamips_nodes import DynamipsCreate

@ -89,23 +93,23 @@ def project_to_topology(project):
}

for node in project.nodes.values():
if hasattr(node, "__json__"):
data["topology"]["nodes"].append(node.__json__(topology_dump=True))
if isinstance(node, Node):
data["topology"]["nodes"].append(node.asdict(topology_dump=True))
else:
data["topology"]["nodes"].append(node)
for link in project.links.values():
if hasattr(link, "__json__"):
data["topology"]["links"].append(link.__json__(topology_dump=True))
if isinstance(link, Link):
data["topology"]["links"].append(link.asdict(topology_dump=True))
else:
data["topology"]["links"].append(link)
for drawing in project.drawings.values():
if hasattr(drawing, "__json__"):
data["topology"]["drawings"].append(drawing.__json__(topology_dump=True))
if isinstance(drawing, Drawing):
data["topology"]["drawings"].append(drawing.asdict(topology_dump=True))
else:
data["topology"]["drawings"].append(drawing)
for compute in project.computes:
if hasattr(compute, "__json__"):
compute = compute.__json__(topology_dump=True)
if isinstance(compute, Compute):
compute = compute.asdict(topology_dump=True)
if compute["compute_id"] not in (
"vm",
"local",
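In project_to_topology() the duck-typed hasattr(obj, "__json__") checks are replaced by isinstance() checks against the controller classes imported at the top of the file, so plain dicts loaded from a .gns3 file still pass through untouched. A condensed sketch of the new dispatch, assuming the same Node, Link, Drawing and Compute imports (the helper name is illustrative, not part of the commit):

def dump_topology_object(obj):
    # controller objects are serialized with asdict(); anything else is assumed
    # to already be a plain dict and is kept as-is
    if isinstance(obj, (Node, Link, Drawing, Compute)):
        return obj.asdict(topology_dump=True)
    return obj
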
@ -71,8 +71,8 @@ class NotificationQueue(asyncio.Queue):
Get a message as a JSON
"""
(action, msg, kwargs) = await self.get(timeout)
if hasattr(msg, "__json__"):
msg = {"action": action, "event": msg.__json__()}
if hasattr(msg, "asdict"):
msg = {"action": action, "event": msg.asdict()}
else:
msg = {"action": action, "event": msg}
msg.update(kwargs)
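NotificationQueue still duck-types its payloads, but now probes for an asdict attribute instead of __json__ before building the message dict. A standalone sketch of that serialization step (the function name is illustrative):

def build_message(action, msg, **kwargs):
    # objects exposing asdict() are converted; plain dicts pass through unchanged
    if hasattr(msg, "asdict"):
        message = {"action": action, "event": msg.asdict()}
    else:
        message = {"action": action, "event": msg}
    message.update(kwargs)
    return message
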
@ -51,7 +51,7 @@ async def test_json_with_ports(on_gns3vm, compute_project, manager):
}
]
cloud = Cloud("cloud1", str(uuid.uuid4()), compute_project, manager, ports=ports)
assert cloud.__json__() == {
assert cloud.asdict() == {
"name": "cloud1",
"usage": "",
"node_id": cloud.id,

@ -84,7 +84,7 @@ def test_json_without_ports(on_gns3vm, compute_project, manager):
"""

cloud = Cloud("cloud1", str(uuid.uuid4()), compute_project, manager, ports=None)
assert cloud.__json__() == {
assert cloud.asdict() == {
"name": "cloud1",
"usage": "",
"node_id": cloud.id,
@ -24,7 +24,7 @@ from gns3server.compute.builtin.nodes.nat import Nat
def test_json_gns3vm(on_gns3vm, compute_project):

nat = Nat("nat1", str(uuid.uuid4()), compute_project, MagicMock())
assert nat.__json__() == {
assert nat.asdict() == {
"name": "nat1",
"usage": "",
"node_id": nat.id,

@ -47,7 +47,7 @@ def test_json_darwin(darwin_platform, compute_project):
{"name": "eth0", "special": False, "type": "ethernet"},
{"name": "vmnet8", "special": True, "type": "ethernet"}]):
nat = Nat("nat1", str(uuid.uuid4()), compute_project, MagicMock())
assert nat.__json__() == {
assert nat.asdict() == {
"name": "nat1",
"usage": "",
"node_id": nat.id,

@ -68,7 +68,7 @@ def test_json_windows_with_full_name_of_interface(windows_platform, project):
with patch("gns3server.utils.interfaces.interfaces", return_value=[
{"name": "VMware Network Adapter VMnet8", "special": True, "type": "ethernet"}]):
nat = Nat("nat1", str(uuid.uuid4()), project, MagicMock())
assert nat.__json__() == {
assert nat.asdict() == {
"name": "nat1",
"usage": "",
"node_id": nat.id,
@ -53,7 +53,7 @@ async def vm(compute_project, manager):

def test_json(vm, compute_project):

assert vm.__json__() == {
assert vm.asdict() == {
'container_id': 'e90e34656842',
'image': 'ubuntu:latest',
'name': 'test',

@ -472,7 +472,7 @@ async def test_set_process_priority_normal(vm, fake_qemu_img_binary):

def test_json(vm, compute_project):

json = vm.__json__()
json = vm.asdict()
assert json["name"] == vm.name
assert json["project_id"] == compute_project.id
@ -112,7 +112,7 @@ async def test_variables():
async def test_json():

p = Project(project_id=str(uuid4()))
assert p.__json__() == {
assert p.asdict() == {
"name": p.name,
"project_id": p.id,
"variables": None

@ -124,7 +124,7 @@ async def test_json_with_variables():

variables = [{"name": "VAR1", "value": "VAL1"}]
p = Project(project_id=str(uuid4()), variables=variables)
assert p.__json__() == {
assert p.asdict() == {
"name": p.name,
"project_id": p.id,
"variables": variables
@ -107,10 +107,10 @@ async def test_vm_adapter_add_nio_binding_adapter_not_exist(vm, manager, free_co

def test_json(vm, tmpdir, project):

assert vm.__json__()["node_directory"] is None
assert vm.asdict()["node_directory"] is None
project._path = str(tmpdir)
vm._linked_clone = True
assert vm.__json__()["node_directory"] is not None
assert vm.asdict()["node_directory"] is not None


def test_patch_vm_uuid(vm):

@ -49,10 +49,10 @@ async def test_vm(vm):
@pytest.mark.asyncio
async def test_json(vm, tmpdir, compute_project):

assert vm.__json__()["node_directory"] is not None
assert vm.asdict()["node_directory"] is not None
compute_project._path = str(tmpdir)
vm._linked_clone = True
assert vm.__json__()["node_directory"] is not None
assert vm.asdict()["node_directory"] is not None


@pytest.mark.asyncio
@ -121,7 +121,7 @@ async def test_compute_httpQueryAuth(compute):
# mock.assert_any_call("POST", "https://example.com:84/v2/compute/projects", data=b'{"a": "b"}', headers={'content-type': 'application/json'}, auth=None, chunked=None, timeout=20)
# #assert compute._connected
# assert compute._capabilities["version"] == __version__
# controller.notification.controller_emit.assert_called_with("compute.updated", compute.__json__())
# controller.notification.controller_emit.assert_called_with("compute.updated", compute.asdict())
# await compute.close()

@ -148,7 +148,7 @@ async def test_compute_httpQueryAuth(compute):
# assert controller.gns3vm.start.called
# #assert compute._connected
# assert compute._capabilities["version"] == __version__
# controller.notification.controller_emit.assert_called_with("compute.updated", compute.__json__())
# controller.notification.controller_emit.assert_called_with("compute.updated", compute.asdict())
# await compute.close()

@ -226,7 +226,7 @@ async def test_compute_httpQuery_project(compute):
response.status = 200
project = Project(name="Test")
await compute.post("/projects", project)
mock.assert_called_with("POST", "https://example.com:84/v3/compute/projects", data=json.dumps(project.__json__()), headers={'content-type': 'application/json'}, auth=None, chunked=None, timeout=20)
mock.assert_called_with("POST", "https://example.com:84/v3/compute/projects", data=json.dumps(project.asdict()), headers={'content-type': 'application/json'}, auth=None, chunked=None, timeout=20)
await compute.close()

# FIXME: https://github.com/aio-libs/aiohttp/issues/2525

@ -298,7 +298,7 @@ async def test_compute_httpQuery_project(compute):
async def test_json(compute):

compute.user = "test"
assert compute.__json__() == {
assert compute.asdict() == {
"compute_id": "my_compute_id",
"name": compute.name,
"protocol": "https",

@ -319,7 +319,7 @@ async def test_json(compute):
"node_types": []
}
}
assert compute.__json__(topology_dump=True) == {
assert compute.asdict(topology_dump=True) == {
"compute_id": "my_compute_id",
"name": compute.name,
"protocol": "https",

@ -358,7 +358,7 @@ async def test_update(compute, controller):
await compute.update(name="Test 2")
assert compute.name == "Test 2"
assert compute.host == "example.org"
controller.notification.controller_emit.assert_called_with("compute.updated", compute.__json__())
controller.notification.controller_emit.assert_called_with("compute.updated", compute.asdict())
assert compute.connected is False
assert compute._controller.save.called
@ -36,7 +36,7 @@ from gns3server.version import __version__
# data = json.load(f)
# assert data["version"] == __version__
# assert data["iou_license"] == controller.iou_license
# assert data["gns3vm"] == controller.gns3vm.__json__()
# assert data["gns3vm"] == controller.gns3vm.asdict()
#
#
# def test_load_controller_settings(controller, controller_config_path):

@ -115,10 +115,10 @@ async def test_add_compute(controller):

controller._notification = MagicMock()
c = await controller.add_compute(compute_id="test1", connect=False)
controller._notification.controller_emit.assert_called_with("compute.created", c.__json__())
controller._notification.controller_emit.assert_called_with("compute.created", c.asdict())
assert len(controller.computes) == 1
await controller.add_compute(compute_id="test1", connect=False)
controller._notification.controller_emit.assert_called_with("compute.updated", c.__json__())
controller._notification.controller_emit.assert_called_with("compute.updated", c.asdict())
assert len(controller.computes) == 1
await controller.add_compute(compute_id="test2", connect=False)
assert len(controller.computes) == 2

@ -156,7 +156,7 @@ async def test_deleteComputeProjectOpened(controller, controller_config_path):
c._connected = True
await controller.delete_compute("test1")
assert len(controller.computes) == 0
controller._notification.controller_emit.assert_called_with("compute.deleted", c.__json__())
controller._notification.controller_emit.assert_called_with("compute.deleted", c.asdict())
assert c.connected is False

# Project 1 use this compute it should be close before deleting the compute

@ -383,12 +383,12 @@ def test_appliances(controller, config, tmpdir):
controller.appliance_manager.load_appliances()
assert len(controller.appliance_manager.appliances) > 0
for appliance in controller.appliance_manager.appliances.values():
assert appliance.__json__()["status"] != "broken"
assert "Alpine Linux" in [c.__json__()["name"] for c in controller.appliance_manager.appliances.values()]
assert "My Appliance" in [c.__json__()["name"] for c in controller.appliance_manager.appliances.values()]
assert appliance.asdict()["status"] != "broken"
assert "Alpine Linux" in [c.asdict()["name"] for c in controller.appliance_manager.appliances.values()]
assert "My Appliance" in [c.asdict()["name"] for c in controller.appliance_manager.appliances.values()]

for c in controller.appliance_manager.appliances.values():
j = c.__json__()
j = c.asdict()
if j["name"] == "Alpine Linux":
assert j["builtin"]
elif j["name"] == "My Appliance":
@ -47,7 +47,7 @@ def test_init_with_uuid(project):
def test_json(project):

i = Drawing(project, None, svg="<svg></svg>")
assert i.__json__() == {
assert i.asdict() == {
"drawing_id": i.id,
"project_id": project.id,
"x": i.x,

@ -57,7 +57,7 @@ def test_json(project):
"svg": i.svg,
"rotation": i.rotation
}
assert i.__json__(topology_dump=True) == {
assert i.asdict(topology_dump=True) == {
"drawing_id": i.id,
"x": i.x,
"y": i.y,

@ -66,7 +66,7 @@ async def test_list(controller):
async def test_json(controller):

vm = GNS3VM(controller)
assert vm.__json__() == vm._settings
assert vm.asdict() == vm._settings


@pytest.mark.skipif(sys.platform.startswith("win"), reason="Not working well on Windows")
@ -81,7 +81,7 @@ async def test_add_node(project, compute):
await link.add_node(node2, 0, 4)

assert link.create.called
link._project.emit_notification.assert_called_with("link.created", link.__json__())
link._project.emit_notification.assert_called_with("link.created", link.asdict())
assert link in node2.links

@ -197,7 +197,7 @@ async def test_json(project, compute):
link.create = AsyncioMagicMock()
await link.add_node(node1, 0, 4)
await link.add_node(node2, 1, 3)
assert link.__json__() == {
assert link.asdict() == {
"link_id": link.id,
"project_id": project.id,
"nodes": [

@ -228,7 +228,7 @@ async def test_json(project, compute):
"capture_file_path": None,
"capture_compute_id": None
}
assert link.__json__(topology_dump=True) == {
assert link.asdict(topology_dump=True) == {
"link_id": link.id,
"nodes": [
{

@ -267,7 +267,7 @@ async def test_json_serial_link(project, compute):
link.create = AsyncioMagicMock()
await link.add_node(node1, 0, 4)
await link.add_node(node2, 1, 3)
assert link.__json__()["link_type"] == "serial"
assert link.asdict()["link_type"] == "serial"


@pytest.mark.asyncio

@ -297,7 +297,7 @@ async def test_start_capture(link):
await link.start_capture(capture_file_name="test.pcap")
assert link._capturing
assert link._capture_file_name == "test.pcap"
link._project.emit_notification.assert_called_with("link.updated", link.__json__())
link._project.emit_notification.assert_called_with("link.updated", link.asdict())


@pytest.mark.asyncio

@ -307,7 +307,7 @@ async def test_stop_capture(link):
link._project.emit_notification = MagicMock()
await link.stop_capture()
assert link._capturing is False
link._project.emit_notification.assert_called_with("link.updated", link.__json__())
link._project.emit_notification.assert_called_with("link.updated", link.asdict())


@pytest.mark.asyncio
@ -112,7 +112,7 @@ async def test_eq(compute, project, node, controller):

def test_json(node, compute):

assert node.__json__() == {
assert node.asdict() == {
"compute_id": str(compute.id),
"project_id": node.project.id,
"node_id": node.id,

@ -153,7 +153,7 @@ def test_json(node, compute):
]
}

assert node.__json__(topology_dump=True) == {
assert node.asdict(topology_dump=True) == {
"compute_id": str(compute.id),
"node_id": node.id,
"template_id": None,

@ -327,7 +327,7 @@ async def test_update(node, compute, project, controller):
assert node._console == 2048
assert node.x == 42
assert node._properties == {"startup_script": "echo test"}
#controller._notification.emit.assert_called_with("node.updated", node.__json__())
#controller._notification.emit.assert_called_with("node.updated", node.asdict())
assert project.dump.called

@ -355,7 +355,7 @@ async def test_update_properties(node, compute, controller):

# The notif should contain the old properties because it's the compute that will emit
# the correct info
#node_notif = copy.deepcopy(node.__json__())
#node_notif = copy.deepcopy(node.asdict())
#node_notif["properties"]["startup_script"] = "echo test"
#controller._notification.emit.assert_called_with("node.updated", node_notif)

@ -373,7 +373,7 @@ async def test_update_only_controller(node, compute):
await node.update(x=42)
assert not compute.put.called
assert node.x == 42
node._project.emit_notification.assert_called_with("node.updated", node.__json__())
node._project.emit_notification.assert_called_with("node.updated", node.asdict())

# If nothing change a second notif should not be sent
node._project.emit_notification = AsyncioMagicMock()
@ -45,7 +45,7 @@ def test_list_ports(node):
List port by default
"""

assert node.__json__()["ports"] == [
assert node.asdict()["ports"] == [
{
"name": "Ethernet0",
"short_name": "e0",

@ -63,7 +63,7 @@ def test_list_ports_vpcs(node):
"""

node._node_type = "vpcs"
assert node.__json__()["ports"] == [
assert node.asdict()["ports"] == [
{
"name": "Ethernet0",
"short_name": "e0",

@ -82,7 +82,7 @@ def test_list_ports_docker(node):

node._node_type = "docker"
node._properties["adapters"] = 2
assert node.__json__()["ports"] == [
assert node.asdict()["ports"] == [
{
"name": "eth0",
"short_name": "eth0",

@ -110,26 +110,26 @@ def test_list_ports_port_name_format(node):
node._first_port_name = None
node._port_name_format = "eth{}"
node._list_ports()
assert node.__json__()["ports"][0]["name"] == "eth0"
assert node.asdict()["ports"][0]["name"] == "eth0"
node._port_name_format = "eth{port0}"
node._list_ports()
assert node.__json__()["ports"][0]["name"] == "eth0"
assert node.asdict()["ports"][0]["name"] == "eth0"
node._port_name_format = "eth{port1}"
node._list_ports()
assert node.__json__()["ports"][0]["name"] == "eth1"
assert node.asdict()["ports"][0]["name"] == "eth1"

node._first_port_name = ""
node._port_segment_size = 2
node._port_name_format = "eth{segment0}/{port0}"
node.properties["adapters"] = 8
node._list_ports()
assert node.__json__()["ports"][6]["name"] == "eth3/0"
assert node.__json__()["ports"][7]["name"] == "eth3/1"
assert node.asdict()["ports"][6]["name"] == "eth3/0"
assert node.asdict()["ports"][7]["name"] == "eth3/1"

node._first_port_name = "mgnt0"
node._list_ports()
assert node.__json__()["ports"][0]["name"] == "mgnt0"
assert node.__json__()["ports"][1]["name"] == "eth0/0"
assert node.asdict()["ports"][0]["name"] == "mgnt0"
assert node.asdict()["ports"][1]["name"] == "eth0/0"


def test_list_ports_adapters(node):

@ -138,7 +138,7 @@ def test_list_ports_adapters(node):
"""

node.properties["adapters"] = 2
assert node.__json__()["ports"] == [
assert node.asdict()["ports"] == [
{
"name": "Ethernet0",
"short_name": "e0",

@ -175,7 +175,7 @@ def test_list_ports_adapters_cloud(project, compute):
}
]

assert node.__json__()["ports"] == [
assert node.asdict()["ports"] == [
{
"name": "eth0",
"short_name": "eth0",

@ -206,7 +206,7 @@ def test_list_ports_ethernet_hub(project, compute):
}
]

assert node.__json__()["ports"] == [
assert node.asdict()["ports"] == [
{
"name": "Ethernet0",
"short_name": "e0",

@ -238,7 +238,7 @@ def test_list_ports_atm_switch(project, compute):
"1:0:100": "10:0:200"
}

assert node.__json__()["ports"] == [
assert node.asdict()["ports"] == [
{
"name": "1",
"short_name": "1",

@ -271,7 +271,7 @@ def test_list_ports_frame_relay_switch(project, compute):
"2:102": "11:203"
}

assert node.__json__()["ports"] == [
assert node.asdict()["ports"] == [
{
"name": "1",
"short_name": "1",

@ -317,7 +317,7 @@ def test_list_ports_iou(compute, project):
node_type="iou")
node.properties["serial_adapters"] = 2
node.properties["ethernet_adapters"] = 3
assert node.__json__()["ports"] == [
assert node.asdict()["ports"] == [
{
"name": "Ethernet0/0",
"short_name": "e0/0",

@ -526,7 +526,7 @@ def test_list_ports_dynamips(project, compute):
node.properties["wic0"] = "WIC-2T"
node.properties["wic1"] = "WIC-2T"

assert node.__json__()["ports"] == [
assert node.asdict()["ports"] == [
{
"name": "FastEthernet0/0",
"short_name": "f0/0",
@ -126,4 +126,4 @@ def test_various_notification(controller, node):
notif.project_emit("log.info", {"message": "Image uploaded"})
notif.project_emit("log.warning", {"message": "Warning ASA 8 is not officially supported by GNS3"})
notif.project_emit("log.error", {"message": "Permission denied on /tmp"})
notif.project_emit("node.updated", node.__json__())
notif.project_emit("node.updated", node.asdict())
@ -59,7 +59,7 @@ async def test_json():

p = Project(name="Test")

assert p.__json__() == {
assert p.asdict() == {
"name": "Test",
"project_id": p.id,
"path": p.path,

@ -90,7 +90,7 @@ async def test_update(controller):
assert project.name == "Hello"
await project.update(name="World")
assert project.name == "World"
project.emit_notification.assert_any_call("project.updated", project.__json__())
project.emit_notification.assert_any_call("project.updated", project.asdict())


@pytest.mark.asyncio

@ -180,7 +180,7 @@ async def test_add_node_local(controller):
'name': 'test'},
timeout=1200)
assert compute in project._project_created_on_compute
project.emit_notification.assert_any_call("node.created", node.__json__())
project.emit_notification.assert_any_call("node.created", node.asdict())


@pytest.mark.asyncio

@ -208,7 +208,7 @@ async def test_add_node_non_local(controller):
'startup_script': 'test.cfg',
'name': 'test'}, timeout=1200)
assert compute in project._project_created_on_compute
project.emit_notification.assert_any_call("node.created", node.__json__())
project.emit_notification.assert_any_call("node.created", node.asdict())


@pytest.mark.asyncio

@ -373,7 +373,7 @@ async def test_add_node_iou_no_id_available(controller):
# })
#
# assert compute in project._project_created_on_compute
# project.emit_notification.assert_any_call("node.created", node.__json__())
# project.emit_notification.assert_any_call("node.created", node.asdict())
#
#
# @pytest.mark.asyncio

@ -392,7 +392,7 @@ async def test_add_node_iou_no_id_available(controller):
# }, builtin=True)
#
# controller.template_manager.templates[template.id] = template
# template.__json__()
# template.asdict()
# controller._computes["local"] = compute
#
# response = MagicMock()

@ -407,7 +407,7 @@ async def test_add_node_iou_no_id_available(controller):
# })
#
# assert compute in project._project_created_on_compute
# project.emit_notification.assert_any_call("node.created", node.__json__())
# project.emit_notification.assert_any_call("node.created", node.asdict())


@pytest.mark.asyncio

@ -429,7 +429,7 @@ async def test_delete_node(controller):
assert node.id not in project._nodes

compute.delete.assert_any_call('/projects/{}/vpcs/nodes/{}'.format(project.id, node.id))
project.emit_notification.assert_any_call("node.deleted", node.__json__())
project.emit_notification.assert_any_call("node.deleted", node.asdict())


@pytest.mark.asyncio

@ -476,8 +476,8 @@ async def test_delete_node_delete_link(controller):
assert link.id not in project._links

compute.delete.assert_any_call('/projects/{}/vpcs/nodes/{}'.format(project.id, node.id))
project.emit_notification.assert_any_call("node.deleted", node.__json__())
project.emit_notification.assert_any_call("link.deleted", link.__json__())
project.emit_notification.assert_any_call("node.deleted", node.asdict())
project.emit_notification.assert_any_call("link.deleted", link.asdict())


@pytest.mark.asyncio

@ -541,7 +541,7 @@ async def test_add_link(project):
await link.add_node(vm2, 4, 2)
assert mock_udp_create.called
assert len(link._nodes) == 2
project.emit_notification.assert_any_call("link.created", link.__json__())
project.emit_notification.assert_any_call("link.created", link.asdict())


@pytest.mark.asyncio

@ -587,7 +587,7 @@ async def test_delete_link(project):
assert len(project._links) == 1
project.emit_notification = MagicMock()
await project.delete_link(link.id)
project.emit_notification.assert_any_call("link.deleted", link.__json__())
project.emit_notification.assert_any_call("link.deleted", link.asdict())
assert len(project._links) == 0


@ -597,7 +597,7 @@ async def test_add_drawing(project):
project.emit_notification = MagicMock()
drawing = await project.add_drawing(None, svg="<svg></svg>")
assert len(project._drawings) == 1
project.emit_notification.assert_any_call("drawing.created", drawing.__json__())
project.emit_notification.assert_any_call("drawing.created", drawing.asdict())


@pytest.mark.asyncio

@ -628,7 +628,7 @@ async def test_delete_drawing(project):
assert len(project._drawings) == 1
project.emit_notification = MagicMock()
await project.delete_drawing(drawing.id)
project.emit_notification.assert_any_call("drawing.deleted", drawing.__json__())
project.emit_notification.assert_any_call("drawing.deleted", drawing.asdict())
assert len(project._drawings) == 0


@ -703,7 +703,7 @@ async def test_open_close(controller):
project.emit_notification = MagicMock()
await project.close()
assert project.status == "closed"
project.emit_notification.assert_any_call("project.closed", project.__json__())
project.emit_notification.assert_any_call("project.closed", project.asdict())


@pytest.mark.asyncio
@ -62,7 +62,7 @@ def test_snapshot_filename(project):
def test_json(project):

snapshot = Snapshot(project, filename="test1_260716_100439.gns3project")
assert snapshot.__json__() == {
assert snapshot.asdict() == {
"snapshot_id": snapshot._id,
"name": "test1",
"project_id": project.id,
@ -82,10 +82,10 @@ async def test_basic_topology(controller):

topo = project_to_topology(project)
assert len(topo["topology"]["nodes"]) == 2
assert node1.__json__(topology_dump=True) in topo["topology"]["nodes"]
assert topo["topology"]["links"][0] == link.__json__(topology_dump=True)
assert topo["topology"]["computes"][0] == compute.__json__(topology_dump=True)
assert topo["topology"]["drawings"][0] == drawing.__json__(topology_dump=True)
assert node1.asdict(topology_dump=True) in topo["topology"]["nodes"]
assert topo["topology"]["links"][0] == link.asdict(topology_dump=True)
assert topo["topology"]["computes"][0] == compute.asdict(topology_dump=True)
assert topo["topology"]["drawings"][0] == drawing.asdict(topology_dump=True)


@pytest.mark.asyncio