mirror of https://github.com/GNS3/gns3-server (synced 2024-11-24 09:18:08 +00:00)

Fix link filters/suspend tests.

parent 8b91894fa4
commit 279b4d7b20
@@ -233,7 +233,7 @@ class Qemu(BaseManager):
     @asyncio.coroutine
     def create_disk(self, qemu_img, path, options):
         """
-        Create a qemu disk with qemu-img
+        Create a Qemu disk with qemu-img

         :param qemu_img: qemu-img binary path
         :param path: Image path
@@ -251,9 +251,9 @@ class Qemu(BaseManager):

         try:
             if os.path.exists(path):
-                raise QemuError("Could not create disk image {} already exist".format(path))
+                raise QemuError("Could not create disk image '{}', file already exists".format(path))
         except UnicodeEncodeError:
-            raise QemuError("Could not create disk image {}, "
+            raise QemuError("Could not create disk image '{}', "
                             "path contains characters not supported by filesystem".format(path))

         command = [qemu_img, "create", "-f", img_format]
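Note: the reworded create_disk errors above quote the path and complete the sentence, which keeps the message readable when the path itself contains spaces. A quick illustration of how the new wording renders (the path below is a placeholder, not taken from the commit):

    path = "/tmp/images/hda 1.qcow2"  # placeholder path
    print("Could not create disk image '{}', file already exists".format(path))
    # prints: Could not create disk image '/tmp/images/hda 1.qcow2', file already exists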
@@ -266,3 +266,22 @@ class Qemu(BaseManager):
             yield from process.wait()
         except (OSError, subprocess.SubprocessError) as e:
             raise QemuError("Could not create disk image {}:{}".format(path, e))
+
+    @asyncio.coroutine
+    def resize_disk(self, qemu_img, path, size):
+        """
+        Resize a Qemu disk with qemu-img
+
+        :param qemu_img: qemu-img binary path
+        :param path: Image path
+        :param size: size
+        """
+
+        try:
+            if not os.path.exists(path):
+                raise QemuError("Qemu image '{}' does not exist".format(path))
+            command = [qemu_img, "resize", path, "{}M".format(size)]
+            process = yield from asyncio.create_subprocess_exec(*command)
+            yield from process.wait()
+        except (OSError, subprocess.SubprocessError) as e:
+            raise QemuError("Could not create disk image {}:{}".format(path, e))
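Note: the new resize_disk coroutine above simply shells out to qemu-img resize. Below is a minimal, self-contained sketch of the same subprocess call written with modern async/await syntax; the binary name, image path, and size are placeholder assumptions, not values from the commit:

    import asyncio

    async def resize_image(qemu_img="qemu-img", path="hda.qcow2", size=100):
        # Equivalent shell command: qemu-img resize hda.qcow2 100M
        process = await asyncio.create_subprocess_exec(qemu_img, "resize", path, "{}M".format(size))
        await process.wait()
        return process.returncode

    # Example: asyncio.get_event_loop().run_until_complete(resize_image())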
@@ -26,7 +26,7 @@ from tests.utils import asyncio_patch

 @pytest.fixture
 def nio():
-    return NIOUDP(4242, "127.0.0.1", 4343, {})
+    return NIOUDP(4242, "127.0.0.1", 4343)


 @pytest.fixture
@@ -24,9 +24,9 @@ def test_mac_command(async_run):
     node = AsyncioMagicMock()
     node.name = "Test"
     node.nios = {}
-    node.nios[0] = NIOUDP(55, "127.0.0.1", 56, {})
+    node.nios[0] = NIOUDP(55, "127.0.0.1", 56)
     node.nios[0].name = "Ethernet0"
-    node.nios[1] = NIOUDP(55, "127.0.0.1", 56, {})
+    node.nios[1] = NIOUDP(55, "127.0.0.1", 56)
     node.nios[1].name = "Ethernet1"
     node._hypervisor.send = AsyncioMagicMock(return_value=["0050.7966.6801 1 Ethernet0", "0050.7966.6802 1 Ethernet1"])
     console = EthernetSwitchConsole(node)
@@ -130,8 +130,9 @@ def test_update_ubridge_udp_connection(node, async_run):
         "latency": [10]
     }

-    snio = NIOUDP(1245, "localhost", 1246, {})
-    dnio = NIOUDP(1245, "localhost", 1244, filters)
+    snio = NIOUDP(1245, "localhost", 1246)
+    dnio = NIOUDP(1245, "localhost", 1244)
+    dnio.filters = filters
     with asyncio_patch("gns3server.compute.base_node.BaseNode._ubridge_apply_filters") as mock:
         async_run(node.update_ubridge_udp_connection('VPCS-10', snio, dnio))
     mock.assert_called_with("VPCS-10", filters)
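Note: the test updates above track a signature change in NIOUDP: the constructor no longer takes a filters argument, and filters are now attached to the NIO after it is created. A small stand-in class showing that construct-then-assign pattern (this sketch is illustrative only, not the real gns3server NIOUDP):

    class SketchNIOUDP:
        # Illustrative only: mirrors the pattern used by the updated tests.
        def __init__(self, lport, rhost, rport):
            self.lport = lport
            self.rhost = rhost
            self.rport = rport
            self.filters = {}  # no filters at construction time

    dnio = SketchNIOUDP(1245, "localhost", 1244)
    dnio.filters = {"latency": [10]}  # filters are applied once the NIO exists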
@@ -85,14 +85,16 @@ def test_create(async_run, project):
         "rhost": "192.168.1.2",
         "rport": 2048,
         "type": "nio_udp",
-        "filters": {"latency": [10]}
+        "filters": {"latency": [10]},
+        "suspend": False,
     }, timeout=120)
     compute2.post.assert_any_call("/projects/{}/vpcs/nodes/{}/adapters/3/ports/1/nio".format(project.id, node2.id), data={
         "lport": 2048,
         "rhost": "192.168.1.1",
         "rport": 1024,
         "type": "nio_udp",
-        "filters": {}
+        "filters": {},
+        "suspend": False,
     }, timeout=120)


@@ -151,14 +153,16 @@ def test_create_one_side_failure(async_run, project):
         "rhost": "192.168.1.2",
         "rport": 2048,
         "type": "nio_udp",
-        "filters": {}
+        "filters": {},
+        "suspend": False,
     }, timeout=120)
     compute2.post.assert_any_call("/projects/{}/vpcs/nodes/{}/adapters/3/ports/1/nio".format(project.id, node2.id), data={
         "lport": 2048,
         "rhost": "192.168.1.1",
         "rport": 1024,
         "type": "nio_udp",
-        "filters": {}
+        "filters": {},
+        "suspend": False,
     }, timeout=120)
     # The link creation has failed we rollback the nio
     compute1.delete.assert_any_call("/projects/{}/vpcs/nodes/{}/adapters/0/ports/4/nio".format(project.id, node1.id), timeout=120)
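Note: both creation tests now assert that every UDP NIO posted to a compute carries a suspend flag next to its filters. Restated as a plain dict, the payload shape asserted for the second endpoint in test_create looks like this (values copied from the assertion above):

    expected_nio = {
        "lport": 2048,
        "rhost": "192.168.1.1",
        "rport": 1024,
        "type": "nio_udp",
        "filters": {},      # no filters configured on this side of the link
        "suspend": False,   # the link is not suspended at creation time
    }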
@@ -362,6 +366,7 @@ def test_update(async_run, project):
         "rhost": "192.168.1.2",
         "rport": 2048,
         "type": "nio_udp",
+        "suspend": False,
         "filters": {"latency": [10]}
     }, timeout=120)
     compute2.post.assert_any_call("/projects/{}/vpcs/nodes/{}/adapters/3/ports/1/nio".format(project.id, node2.id), data={
@@ -369,6 +374,7 @@ def test_update(async_run, project):
         "rhost": "192.168.1.1",
         "rport": 1024,
         "type": "nio_udp",
+        "suspend": False,
         "filters": {}
     }, timeout=120)

@@ -379,6 +385,7 @@ def test_update(async_run, project):
         "rhost": "192.168.1.2",
         "rport": 2048,
         "type": "nio_udp",
+        "suspend": False,
         "filters": {
             "drop": [5],
             "bpf": ["icmp[icmptype] == 8"]
@@ -440,12 +447,14 @@ def test_update_suspend(async_run, project):
         "rhost": "192.168.1.2",
         "rport": 2048,
         "type": "nio_udp",
-        "filters": {"frequency_drop": [-1]}
+        "filters": {"frequency_drop": [-1]},
+        "suspend": True
     }, timeout=120)
     compute2.post.assert_any_call("/projects/{}/vpcs/nodes/{}/adapters/3/ports/1/nio".format(project.id, node2.id), data={
         "lport": 2048,
         "rhost": "192.168.1.1",
         "rport": 1024,
         "type": "nio_udp",
-        "filters": {}
+        "filters": {},
+        "suspend": True
     }, timeout=120)
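Note: test_update_suspend now expects suspend: True together with a frequency_drop filter of -1 on the filtered side, which suggests that a suspended link is represented by dropping all traffic through the filter mechanism while the flag records the suspended state. The fields visible in that assertion, restated as data (lport is outside the hunk and therefore omitted):

    suspended_side = {
        "rhost": "192.168.1.2",
        "rport": 2048,
        "type": "nio_udp",
        "filters": {"frequency_drop": [-1]},  # appears to be the "drop everything" setting
        "suspend": True,
    }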