1
0
mirror of https://github.com/GNS3/gns3-server synced 2024-12-01 04:38:12 +00:00

Correctly clean up packet capture objects when closing server

Fix #592
This commit is contained in:
Julien Duponchelle 2016-08-19 11:05:54 +02:00
parent 8eab94f01e
commit f0fad5289c
No known key found for this signature in database
GPG Key ID: CE8B29639E07F5E8
5 changed files with 30 additions and 13 deletions

View File

@ -295,11 +295,24 @@ class Compute:
:returns: A file stream :returns: A file stream
""" """
# Due to Python 3.4 limitation we can't use with and asyncio
# https://www.python.org/dev/peps/pep-0492/
# that why we wrap the answer
class StreamResponse:
def __init__(self, response):
self._response = response
def __enter__(self):
return self._response.content
def __exit__(self):
self._response.close()
url = self._getUrl("/projects/{}/stream/{}".format(project.id, path)) url = self._getUrl("/projects/{}/stream/{}".format(project.id, path))
response = yield from self._session().request("GET", url, auth=self._auth) response = yield from self._session().request("GET", url, auth=self._auth)
if response.status == 404: if response.status == 404:
raise aiohttp.web.HTTPNotFound(text="{} not found on compute".format(path)) raise aiohttp.web.HTTPNotFound(text="{} not found on compute".format(path))
return response.content return StreamResponse(response)
@asyncio.coroutine @asyncio.coroutine
def http_query(self, method, path, data=None, **kwargs): def http_query(self, method, path, data=None, **kwargs):

View File

@ -116,17 +116,18 @@ class Link:
Dump a pcap file on disk Dump a pcap file on disk
""" """
stream = yield from self.read_pcap_from_source() stream_content = yield from self.read_pcap_from_source()
with open(self.capture_file_path, "wb+") as f: with stream_content as stream:
while self._capturing: with open(self.capture_file_path, "wb+") as f:
# We read 1 bytes by 1 otherwise the remaining data is not read if the traffic stops while self._capturing:
data = yield from stream.read(1) # We read 1 bytes by 1 otherwise the remaining data is not read if the traffic stops
if data: data = yield from stream.read(1)
f.write(data) if data:
# Flush to disk otherwise the live is not really live f.write(data)
f.flush() # Flush to disk otherwise the live is not really live
else: f.flush()
break else:
break
@asyncio.coroutine @asyncio.coroutine
def stop_capture(self): def stop_capture(self):

View File

@ -120,7 +120,7 @@ class UDPLink(Link):
:returns: Node where the capture should run :returns: Node where the capture should run
""" """
# use the local node first to save bandwidth # use the local node first to save bandwidth
for node in self._nodes: for node in self._nodes:
if node["node"].compute.id == "local" and node["node"].node_type not in [""]: # FIXME if node["node"].compute.id == "local" and node["node"].node_type not in [""]: # FIXME
return node return node

View File

@ -127,3 +127,4 @@ def wait_for_named_pipe_creation(pipe_path, timeout=60):
else: else:
return return
raise asyncio.TimeoutError() raise asyncio.TimeoutError()

View File

@ -67,3 +67,5 @@ def test_wait_for_process_termination(loop):
exec = wait_for_process_termination(process, timeout=0.5) exec = wait_for_process_termination(process, timeout=0.5)
with pytest.raises(asyncio.TimeoutError): with pytest.raises(asyncio.TimeoutError):
loop.run_until_complete(asyncio.async(exec)) loop.run_until_complete(asyncio.async(exec))