mirror of https://github.com/searxng/searxng.git

[fix] searx.network.stream: fix memory leak

parent 2eab89b4ca
commit 29893cf816
searx/network/__init__.py
@@ -9,6 +9,7 @@ from types import MethodType
 from timeit import default_timer
 
 import httpx
+import anyio
 import h2.exceptions
 
 from .network import get_network, initialize
@@ -166,7 +167,7 @@ async def stream_chunk_to_queue(network, queue, method, url, **kwargs):
             async for chunk in response.aiter_raw(65536):
                 if len(chunk) > 0:
                     queue.put(chunk)
-    except httpx.StreamClosed:
+    except (httpx.StreamClosed, anyio.ClosedResourceError):
         # the response was queued before the exception.
         # the exception was raised on aiter_raw.
         # we do nothing here: in the finally block, None will be queued
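The producer/consumer contract this hunk depends on is worth spelling out: stream_chunk_to_queue is the producer, and its finally block always queues a None sentinel so the consumer loop can stop even when the stream dies mid-transfer. A minimal sketch of that sentinel-queue pattern, with EOFError standing in for httpx.StreamClosed / anyio.ClosedResourceError (illustrative names, not SearXNG code):

import queue

def producer(chunks, out: queue.SimpleQueue):
    try:
        for chunk in chunks:
            if len(chunk) > 0:   # skip empty chunks, as the diff does
                out.put(chunk)
    except EOFError:             # stand-in for httpx.StreamClosed / anyio.ClosedResourceError
        pass                     # nothing to do: the finally block queues the sentinel
    finally:
        out.put(None)            # sentinel: ends the consumer's queue.get() loop

q = queue.SimpleQueue()
producer([b"a", b"", b"b"], q)
assert q.get() == b"a" and q.get() == b"b" and q.get() is None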
@@ -183,11 +184,35 @@ async def stream_chunk_to_queue(network, queue, method, url, **kwargs):
         queue.put(None)
 
 
+def _stream_generator(method, url, **kwargs):
+    queue = SimpleQueue()
+    network = get_context_network()
+    future = asyncio.run_coroutine_threadsafe(
+        stream_chunk_to_queue(network, queue, method, url, **kwargs),
+        get_loop()
+    )
+
+    # yield chunks
+    obj_or_exception = queue.get()
+    while obj_or_exception is not None:
+        if isinstance(obj_or_exception, Exception):
+            raise obj_or_exception
+        yield obj_or_exception
+        obj_or_exception = queue.get()
+    future.result()
+
+
+def _close_response_method(self):
+    asyncio.run_coroutine_threadsafe(
+        self.aclose(),
+        get_loop()
+    )
+    # reach the end of self._generator (_stream_generator) to avoid a memory leak.
+    # it makes sure that:
+    # * the httpx response is closed (see the stream_chunk_to_queue function)
+    # * future.result() is called in _stream_generator
+    for _ in self._generator:  # pylint: disable=protected-access
+        continue
+
+
 def stream(method, url, **kwargs):
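The new _stream_generator is the consumer half of that contract: the coroutine runs on the asyncio loop in a background thread, the sync generator drains the thread-safe queue, and future.result() at the end re-raises anything the coroutine failed with. A self-contained sketch of the same bridge (loop, produce and sync_generator are illustrative stand-ins, not the SearXNG API):

import asyncio
import threading
from queue import SimpleQueue

# an asyncio loop running forever in a background thread,
# playing the role of SearXNG's get_loop()
loop = asyncio.new_event_loop()
threading.Thread(target=loop.run_forever, daemon=True).start()

async def produce(q: SimpleQueue):
    for chunk in (b"first", b"second"):
        q.put(chunk)      # SimpleQueue.put never blocks, so it is safe from a coroutine
    q.put(None)           # sentinel

def sync_generator():
    q = SimpleQueue()
    future = asyncio.run_coroutine_threadsafe(produce(q), loop)
    item = q.get()
    while item is not None:
        if isinstance(item, Exception):
            raise item
        yield item
        item = q.get()
    future.result()       # re-raise any exception produce() died with

print(list(sync_generator()))  # [b'first', b'second']

The leak fixed here comes from the old code never reaching future.result() when the caller abandoned the generator early; factoring the loop into _stream_generator lets _close_response_method drive it to completion.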
@@ -202,25 +227,15 @@ def stream(method, url, **kwargs):
     httpx.Client.stream requires writing the httpx.HTTPTransport version of
     the httpx.AsyncHTTPTransport declared above.
     """
-    queue = SimpleQueue()
-    network = get_context_network()
-    future = asyncio.run_coroutine_threadsafe(
-        stream_chunk_to_queue(network, queue, method, url, **kwargs),
-        get_loop()
-    )
+    generator = _stream_generator(method, url, **kwargs)
 
     # yield response
-    response = queue.get()
+    response = next(generator)  # pylint: disable=stop-iteration-return
     if isinstance(response, Exception):
         raise response
+
+    response._generator = generator  # pylint: disable=protected-access
+    response.close = MethodType(_close_response_method, response)
     yield response
 
-    # yield chunks
-    chunk_or_exception = queue.get()
-    while chunk_or_exception is not None:
-        if isinstance(chunk_or_exception, Exception):
-            raise chunk_or_exception
-        yield chunk_or_exception
-        chunk_or_exception = queue.get()
-    future.result()
+    yield from generator
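The fix hinges on the added lines before yield response: the generator is attached to the response, and close() is rebound per instance with types.MethodType, so even an abandoned response can still drain _stream_generator and let future.result() run. A toy illustration of that rebinding (FakeResponse is a hypothetical stand-in, not httpx.Response):

from types import MethodType

class FakeResponse:
    def close(self):
        print("original close")

def _close_response_method(self):
    # drain the attached generator to its end, mirroring the diff's cleanup
    for _ in self._generator:
        continue
    print("generator drained")

response = FakeResponse()
response._generator = iter([b"leftover-1", b"leftover-2"])
response.close = MethodType(_close_response_method, response)
response.close()  # the instance attribute shadows FakeResponse.close

MethodType binds the function to this one instance, so every other response object keeps its stock close().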
searx/network/network.py
@@ -6,6 +6,7 @@ import asyncio
 import logging
 import threading
 
+import anyio
 import httpcore
 import httpx
 from httpx_socks import AsyncProxyTransport
@@ -102,6 +103,9 @@ class AsyncProxyTransportFixed(AsyncProxyTransport):
             # then each new request creates a new stream and raise the same WriteError
             await close_connections_for_url(self, url)
             raise e
+        except anyio.ClosedResourceError as e:
+            await close_connections_for_url(self, url)
+            raise httpx.CloseError from e
         except httpx.RemoteProtocolError as e:
             # in case of httpx.RemoteProtocolError: Server disconnected
             await close_connections_for_url(self, url)
@@ -130,6 +134,9 @@ class AsyncHTTPTransportFixed(httpx.AsyncHTTPTransport):
             # then each new request creates a new stream and raise the same WriteError
             await close_connections_for_url(self._pool, url)
             raise e
+        except anyio.ClosedResourceError as e:
+            await close_connections_for_url(self._pool, url)
+            raise httpx.CloseError from e
         except httpx.RemoteProtocolError as e:
             # in case of httpx.RemoteProtocolError: Server disconnected
             await close_connections_for_url(self._pool, url)
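Both transport hunks apply the same pattern: drop the broken connections first, then translate the low-level anyio error into an httpx error with raise ... from, so callers keep handling only httpx exception types. A sketch of that translation with stand-in exception classes (anyio and httpx are not imported here):

class ClosedResourceError(Exception):  # stands in for anyio.ClosedResourceError
    pass

class CloseError(Exception):           # stands in for httpx.CloseError
    pass

def handle_request():
    try:
        raise ClosedResourceError("stream already closed")
    except ClosedResourceError as e:
        # cleanup first (close_connections_for_url in the diff), then re-raise
        # as the library's own error type; "from e" keeps the original
        # traceback reachable via __cause__
        raise CloseError from e

try:
    handle_request()
except CloseError as err:
    assert isinstance(err.__cause__, ClosedResourceError)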
searx/search/checker/impl.py
@@ -85,7 +85,10 @@ def _download_and_check_if_image(image_url: str) -> bool:
             })
             r = next(stream)
             r.close()
-            is_image = r.headers["content-type"].startswith('image/')
+            if r.status_code == 200:
+                is_image = r.headers.get('content-type', '').startswith('image/')
+            else:
+                is_image = False
             del r
             del stream
             return is_image
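This checker hunk closes two crash paths at once: a non-200 response, and a response with no Content-Type header (the old r.headers[...] lookup raised KeyError). The guard, reduced to a pure function with hypothetical arguments:

def is_image_response(status_code: int, headers: dict) -> bool:
    if status_code != 200:
        return False
    # .get() with a default avoids the KeyError the old r.headers[...] could raise
    return headers.get('content-type', '').startswith('image/')

assert is_image_response(200, {'content-type': 'image/png'})
assert not is_image_response(200, {})   # missing header no longer raises
assert not is_image_response(404, {'content-type': 'image/png'})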