[Scraped GitHub pull-request diff page — unrelated injected banner removed]
Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .github/workflows/integration.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ jobs:
name: Python ${{ matrix.python-version }}
runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
os: [ubuntu-20.04]
python-version: [3.6, 3.7, 3.8, 3.9]
Expand Down
5 changes: 3 additions & 2 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,11 @@ lxml==4.6.3
pygments==2.10.0
python-dateutil==2.8.2
pyyaml==5.4.1
httpx[http2]==0.17.1
httpx==0.17.1
aiohttp==3.7.4.post0
aiohttp-socks==0.6.0
Brotli==1.0.9
uvloop==0.16.0; python_version >= '3.7'
uvloop==0.14.0; python_version < '3.7'
httpx-socks[asyncio]==0.3.1
langdetect==1.0.9
setproctitle==1.2.2
4 changes: 2 additions & 2 deletions searx/autocomplete.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
from json import loads
from urllib.parse import urlencode

from httpx import HTTPError
from aiohttp import ClientError


from searx import settings
Expand Down Expand Up @@ -137,5 +137,5 @@ def search_autocomplete(backend_name, query, lang):

try:
return backend(query, lang)
except (HTTPError, SearxEngineResponseException):
except (ClientError, SearxEngineResponseException):
return []
59 changes: 22 additions & 37 deletions searx/network/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,11 +8,11 @@
from types import MethodType
from timeit import default_timer

import httpx
import h2.exceptions
import aiohttp

from .network import get_network, initialize
from .client import get_loop
from .response import Response
from .raise_for_httperror import raise_for_httperror

# queue.SimpleQueue: Support Python 3.6
Expand Down Expand Up @@ -73,12 +73,12 @@ def get_context_network():
return THREADLOCAL.__dict__.get('network') or get_network()


def request(method, url, **kwargs):
def request(method, url, **kwargs) -> Response:
"""same as requests/requests/api.py request(...)"""
global THREADLOCAL
time_before_request = default_timer()

# timeout (httpx)
# timeout (aiohttp)
if 'timeout' in kwargs:
timeout = kwargs['timeout']
else:
Expand Down Expand Up @@ -108,16 +108,15 @@ def request(method, url, **kwargs):
# network
network = get_context_network()

#
# kwargs['compress'] = True

# do request
future = asyncio.run_coroutine_threadsafe(network.request(method, url, **kwargs), get_loop())
try:
response = future.result(timeout)
except concurrent.futures.TimeoutError as e:
raise httpx.TimeoutException('Timeout', request=None) from e

# requests compatibility
# see also https://www.python-httpx.org/compatibility/#checking-for-4xx5xx-responses
response.ok = not response.is_error
raise asyncio.TimeoutError() from e

# update total_time.
# See get_time_for_thread() and reset_time_for_thread()
Expand All @@ -132,73 +131,60 @@ def request(method, url, **kwargs):
return response


def get(url, **kwargs):
def get(url, **kwargs) -> Response:
kwargs.setdefault('allow_redirects', True)
return request('get', url, **kwargs)


def options(url, **kwargs):
def options(url, **kwargs) -> Response:
kwargs.setdefault('allow_redirects', True)
return request('options', url, **kwargs)


def head(url, **kwargs):
def head(url, **kwargs) -> Response:
kwargs.setdefault('allow_redirects', False)
return request('head', url, **kwargs)


def post(url, data=None, **kwargs):
def post(url, data=None, **kwargs) -> Response:
return request('post', url, data=data, **kwargs)


def put(url, data=None, **kwargs):
def put(url, data=None, **kwargs) -> Response:
return request('put', url, data=data, **kwargs)


def patch(url, data=None, **kwargs):
def patch(url, data=None, **kwargs) -> Response:
return request('patch', url, data=data, **kwargs)


def delete(url, **kwargs):
def delete(url, **kwargs) -> Response:
return request('delete', url, **kwargs)


async def stream_chunk_to_queue(network, queue, method, url, **kwargs):
try:
async with network.stream(method, url, **kwargs) as response:
async with await network.request(method, url, stream=True, **kwargs) as response:
queue.put(response)
# aiter_raw: access the raw bytes on the response without applying any HTTP content decoding
# https://www.python-httpx.org/quickstart/#streaming-responses
async for chunk in response.aiter_raw(65536):
if len(chunk) > 0:
queue.put(chunk)
except httpx.ResponseClosed:
# the response was closed
pass
except (httpx.HTTPError, OSError, h2.exceptions.ProtocolError) as e:
chunk = await response.iter_content(65536)
while chunk:
queue.put(chunk)
chunk = await response.iter_content(65536)
except aiohttp.client.ClientError as e:
queue.put(e)
finally:
queue.put(None)


def _close_response_method(self):
asyncio.run_coroutine_threadsafe(
self.aclose(),
get_loop()
)


def stream(method, url, **kwargs):
"""Replace httpx.stream.
"""Stream Response in sync world

Usage:
stream = poolrequests.stream(...)
response = next(stream)
for chunk in stream:
...

httpx.Client.stream requires to write the httpx.HTTPTransport version of the
the httpx.AsyncHTTPTransport declared above.
"""
queue = SimpleQueue()
future = asyncio.run_coroutine_threadsafe(
Expand All @@ -210,7 +196,6 @@ def stream(method, url, **kwargs):
response = queue.get()
if isinstance(response, Exception):
raise response
response.close = MethodType(_close_response_method, response)
yield response

# yield chunks
Expand Down
Loading