Correctly clean up packet capture objects when closing server

Fix #592
pull/638/head
Julien Duponchelle 8 years ago
parent 8eab94f01e
commit f0fad5289c
No known key found for this signature in database
GPG Key ID: CE8B29639E07F5E8

@@ -295,11 +295,24 @@ class Compute:
        :returns: A file stream
        """
        # Due to Python 3.4 limitation we can't use with and asyncio
        # https://www.python.org/dev/peps/pep-0492/
        # that why we wrap the answer
        class StreamResponse:
            def __init__(self, response):
                self._response = response

            def __enter__(self):
                return self._response.content

            def __exit__(self):
                self._response.close()

        url = self._getUrl("/projects/{}/stream/{}".format(project.id, path))
        response = yield from self._session().request("GET", url, auth=self._auth)
        if response.status == 404:
            raise aiohttp.web.HTTPNotFound(text="{} not found on compute".format(path))
        return response.content
        return StreamResponse(response)
    @asyncio.coroutine
    def http_query(self, method, path, data=None, **kwargs):

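The object returned above is a plain synchronous context manager around the aiohttp response: because the server still targets Python 3.4, it cannot use async with (PEP 492), so the wrapper hands the body out through __enter__ and closes the response in __exit__. Below is a minimal, self-contained sketch of the same pattern; FakeResponse and the sample payload are illustrative stand-ins rather than part of the commit, and the sketch's __exit__ is written with the standard exception arguments a with statement passes in.

class StreamResponse:

    def __init__(self, response):
        self._response = response

    def __enter__(self):
        # Hand the body (the pcap byte stream) to the with-block.
        return self._response.content

    def __exit__(self, exc_type, exc_value, traceback):
        # Close the HTTP response even if the with-block raised.
        self._response.close()


class FakeResponse:
    """Stand-in for an aiohttp response: a .content payload plus .close()."""

    def __init__(self, content):
        self.content = content
        self.closed = False

    def close(self):
        self.closed = True


response = FakeResponse(b"pcap bytes")
with StreamResponse(response) as content:
    print(content)        # b'pcap bytes'
print(response.closed)    # True: cleaned up as soon as the block exits
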
@@ -116,17 +116,18 @@ class Link:
        Dump a pcap file on disk
        """
        stream = yield from self.read_pcap_from_source()
        with open(self.capture_file_path, "wb+") as f:
            while self._capturing:
                # We read 1 bytes by 1 otherwise the remaining data is not read if the traffic stops
                data = yield from stream.read(1)
                if data:
                    f.write(data)
                    # Flush to disk otherwise the live is not really live
                    f.flush()
                else:
                    break
        stream_content = yield from self.read_pcap_from_source()
        with stream_content as stream:
            with open(self.capture_file_path, "wb+") as f:
                while self._capturing:
                    # We read 1 bytes by 1 otherwise the remaining data is not read if the traffic stops
                    data = yield from stream.read(1)
                    if data:
                        f.write(data)
                        # Flush to disk otherwise the live is not really live
                        f.flush()
                    else:
                        break
    @asyncio.coroutine
    def stop_capture(self):

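The controller-side change mirrors the wrapper: read_pcap_from_source() now yields the context manager, the nested with guarantees the HTTP stream is closed once capturing stops, and the 1-byte reads plus flush() keep the on-disk pcap usable for live viewing. A hedged, synchronous sketch of that copy loop follows; contextlib.closing stands in for StreamResponse, and the function name, path, and sample bytes are illustrative only.

import contextlib
import io


def dump_capture(source, path, still_capturing):
    # closing() plays the same role as StreamResponse here: source.close()
    # runs when the block exits, whether the loop breaks or an error is raised.
    with contextlib.closing(source) as stream:
        with open(path, "wb+") as f:
            while still_capturing():
                # Read 1 byte at a time so a quiet link does not leave
                # buffered data unread when the traffic stops.
                data = stream.read(1)
                if not data:
                    break
                f.write(data)
                # Flush so the capture file stays live for the viewer.
                f.flush()


source = io.BytesIO(b"\xd4\xc3\xb2\xa1")   # pcap magic number as sample data
dump_capture(source, "demo.pcap", still_capturing=lambda: True)
print(source.closed)                       # True: stream closed on exit
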
@@ -120,7 +120,7 @@ class UDPLink(Link):
        :returns: Node where the capture should run
        """
        # use the local node first to save bandwidth
        # use the local node first to save bandwidth
        for node in self._nodes:
            if node["node"].compute.id == "local" and node["node"].node_type not in [""]: # FIXME
                return node

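The selection logic touched above prefers a node hosted on the local compute so the pcap stream does not have to be relayed across the network. A rough sketch of that preference, with simplified stand-ins for the controller's node and compute objects (the real method iterates the link's node dicts and has further fallbacks):

from collections import namedtuple

Compute = namedtuple("Compute", ["id"])
Node = namedtuple("Node", ["name", "compute", "node_type"])


def capture_node(nodes):
    # Use the local node first to save bandwidth.
    for node in nodes:
        if node.compute.id == "local" and node.node_type not in [""]:
            return node
    # Otherwise fall back to the first node of the link.
    return nodes[0]


nodes = [Node("r1", Compute("remote-1"), "dynamips"),
         Node("r2", Compute("local"), "vpcs")]
print(capture_node(nodes).name)   # r2
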
@@ -127,3 +127,4 @@ def wait_for_named_pipe_creation(pipe_path, timeout=60):
        else:
            return
    raise asyncio.TimeoutError()

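The tail shown above belongs to a polling helper: it retries until the named pipe becomes available and raises asyncio.TimeoutError once the budget is spent. A hedged sketch of that shape, written in the same pre-await coroutine style the commit targets (Python 3.4-era asyncio), with a portable file-existence check standing in for the Windows named-pipe wait:

import asyncio
import os


@asyncio.coroutine
def wait_for_creation(path, timeout=60):
    while timeout > 0:
        if os.path.exists(path):
            return
        # Not there yet: sleep briefly and shrink the remaining budget.
        yield from asyncio.sleep(0.5)
        timeout -= 0.5
    raise asyncio.TimeoutError()
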
@@ -67,3 +67,5 @@ def test_wait_for_process_termination(loop):
    exec = wait_for_process_termination(process, timeout=0.5)
    with pytest.raises(asyncio.TimeoutError):
        loop.run_until_complete(asyncio.async(exec))

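The test fragment above checks that the helper really raises asyncio.TimeoutError when the deadline passes. The same pattern in a self-contained form; the slow coroutine and the test name here are illustrative, not part of the commit:

import asyncio

import pytest


def test_slow_operation_times_out():
    loop = asyncio.new_event_loop()
    try:
        # wait_for() turns any slow awaitable into a TimeoutError candidate.
        coro = asyncio.wait_for(asyncio.sleep(10), timeout=0.1)
        with pytest.raises(asyncio.TimeoutError):
            loop.run_until_complete(coro)
    finally:
        loop.close()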