Merged — this change replaces the pycodestyle (pep8) checks with black: it adds a format.python target for reformatting, swaps test.pep8 for test.black in the ci.test and test suites, and reformats the touched Python sources to the new style.
7 changes: 4 additions & 3 deletions Makefile
@@ -54,8 +54,8 @@ search.checker.%: install
$(Q)./manage pyenv.cmd searx-checker -v "$(subst _, ,$(patsubst search.checker.%,%,$@))"

PHONY += test ci.test test.shell
ci.test: test.yamllint test.pep8 test.pylint test.unit test.robot
test: test.yamllint test.pep8 test.pylint test.unit test.robot test.shell
ci.test: test.yamllint test.black test.pylint test.unit test.robot
test: test.yamllint test.black test.pylint test.unit test.robot test.shell
test.shell:
$(Q)shellcheck -x -s dash \
dockerfiles/docker-entrypoint.sh
@@ -88,7 +88,8 @@ MANAGE += node.env node.clean
MANAGE += py.build py.clean
MANAGE += pyenv pyenv.install pyenv.uninstall
MANAGE += pypi.upload pypi.upload.test
MANAGE += test.yamllint test.pylint test.pep8 test.unit test.coverage test.robot test.clean
MANAGE += format.python
MANAGE += test.yamllint test.pylint test.black test.unit test.coverage test.robot test.clean
MANAGE += themes.all themes.oscar themes.simple themes.simple.test pygments.less
MANAGE += static.build.commit static.build.drop static.build.restore
MANAGE += nvm.install nvm.clean nvm.status nvm.nodejs
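In practice, the Makefile change means the pep8 check disappears from both suites and two black-based entry points take its place. A usage sketch, assuming the MANAGE targets listed above are exposed through ./manage and make in the usual way for this repo:

    # reformat the Python tree in place
    ./manage format.python

    # formatting check only -- this is what ci.test and test now run instead of test.pep8
    ./manage test.black

    # full local suite (includes test.black plus test.shell)
    make test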
27 changes: 15 additions & 12 deletions manage
@@ -24,15 +24,16 @@ PY_SETUP_EXTRAS='[test]'
GECKODRIVER_VERSION="v0.30.0"
export NODE_MINIMUM_VERSION="16.13.0"
# SPHINXOPTS=
BLACK_OPTIONS=("--target-version" "py37" "--line-length" "120" "--skip-string-normalization")
BLACK_TARGETS=("--exclude" "searx/static,searx/languages.py" "searx" "searxng_extra" "tests")

pylint.FILES() {

# List files tagged by comment:
#
# # lint: pylint
#
# These py files are linted by test.pylint(), all other files are linted by
# test.pep8()
# These py files are linted by test.pylint()

grep -l -r --include \*.py '^#[[:blank:]]*lint:[[:blank:]]*pylint' searx searxng_extra tests
}
@@ -89,10 +90,12 @@ pyenv.:
OK : test if virtualenv is OK
pypi.upload:
Upload python packages to PyPi (to test use pypi.upload.test)
format.:
python : format Python code source using black
test.:
yamllint : lint YAML files (YAMLLINT_FILES)
pylint : lint PYLINT_FILES, searx/engines, searx & tests
pep8 : pycodestyle (pep8) for all files except PYLINT_FILES
black : check black code format
unit : run unit tests
coverage : run unit tests with coverage
robot : run robot test
@@ -617,6 +620,12 @@ pypi.upload.test() {
pyenv.cmd twine upload -r testpypi "${PYDIST}"/*
}

format.python() {
build_msg TEST "[format.python] black \$BLACK_TARGETS"
pyenv.cmd black "${BLACK_OPTIONS[@]}" "${BLACK_TARGETS[@]}"
dump_return $?
}

test.yamllint() {
build_msg TEST "[yamllint] \$YAMLLINT_FILES"
pyenv.cmd yamllint --format parsable "${YAMLLINT_FILES[@]}"
@@ -646,15 +655,9 @@ test.pylint() {
dump_return $?
}

test.pep8() {
build_msg TEST 'pycodestyle (formerly pep8)'
local _exclude=""
printf -v _exclude '%s, ' "${PYLINT_FILES[@]}"
pyenv.cmd pycodestyle \
--exclude="searx/static, searx/languages.py, $_exclude " \
--max-line-length=120 \
--ignore "E117,E252,E402,E722,E741,W503,W504,W605" \
searx tests
test.black() {
build_msg TEST "[black] \$BLACK_TARGETS"
pyenv.cmd black --check --diff "${BLACK_OPTIONS[@]}" "${BLACK_TARGETS[@]}"
dump_return $?
}

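Expanded out, the two new manage functions reduce to the following black invocations (a sketch assembled from BLACK_OPTIONS and BLACK_TARGETS above; the real functions run black through pyenv.cmd so the virtualenv's pinned copy is used):

    # test.black -- fail when a file would be reformatted, and print the diff
    black --check --diff \
          --target-version py37 --line-length 120 --skip-string-normalization \
          --exclude 'searx/static,searx/languages.py' \
          searx searxng_extra tests

    # format.python -- same options, but rewrite the files in place
    black --target-version py37 --line-length 120 --skip-string-normalization \
          --exclude 'searx/static,searx/languages.py' \
          searx searxng_extra tests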
1 change: 1 addition & 0 deletions requirements-dev.txt
@@ -1,6 +1,7 @@
mock==4.0.3
nose2[coverage_plugin]==0.10.0
cov-core==1.15.0
black==21.12b0
pycodestyle==2.8.0
pylint==2.12.2
splinter==0.17.0
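Black is pinned to an exact release because its output can change between versions, so contributors and CI must format with the same one. A sketch of pulling the pinned tool into a local environment (the project's own pyenv.install bootstrap should normally take care of this):

    pip install -r requirements-dev.txt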
10 changes: 3 additions & 7 deletions searx/__init__.py
@@ -29,6 +29,7 @@

_unset = object()


def get_setting(name, default=_unset):
"""Returns the value to which ``name`` point. If there is no such name in the
settings and the ``default`` is unset, a :py:obj:`KeyError` is raised.
@@ -80,14 +81,9 @@ def logging_config_debug():
'levelname': {'color': 8},
'name': {'color': 8},
'programname': {'color': 'cyan'},
'username': {'color': 'yellow'}
'username': {'color': 'yellow'},
}
coloredlogs.install(
level=log_level,
level_styles=level_styles,
field_styles=field_styles,
fmt=LOG_FORMAT_DEBUG
)
coloredlogs.install(level=log_level, level_styles=level_styles, field_styles=field_styles, fmt=LOG_FORMAT_DEBUG)
else:
logging.basicConfig(level=logging.getLevelName(log_level), format=LOG_FORMAT_DEBUG)

25 changes: 14 additions & 11 deletions searx/answerers/random/answerer.py
@@ -8,13 +8,12 @@
# specifies which search query keywords triggers this answerer
keywords = ('random',)

random_int_max = 2**31
random_int_max = 2 ** 31
random_string_letters = string.ascii_lowercase + string.digits + string.ascii_uppercase


def random_characters():
return [random.choice(random_string_letters)
for _ in range(random.randint(8, 32))]
return [random.choice(random_string_letters) for _ in range(random.randint(8, 32))]


def random_string():
@@ -39,11 +38,13 @@ def random_uuid():
return str(uuid.uuid4())


random_types = {'string': random_string,
'int': random_int,
'float': random_float,
'sha256': random_sha256,
'uuid': random_uuid}
random_types = {
'string': random_string,
'int': random_int,
'float': random_float,
'sha256': random_sha256,
'uuid': random_uuid,
}


# required answerer function
@@ -62,6 +63,8 @@ def answer(query):
# required answerer function
# returns information about the answerer
def self_info():
return {'name': gettext('Random value generator'),
'description': gettext('Generate different random values'),
'examples': ['random {}'.format(x) for x in random_types]}
return {
'name': gettext('Random value generator'),
'description': gettext('Generate different random values'),
'examples': ['random {}'.format(x) for x in random_types],
}
14 changes: 6 additions & 8 deletions searx/answerers/statistics/answerer.py
@@ -4,11 +4,7 @@
from flask_babel import gettext


keywords = ('min',
'max',
'avg',
'sum',
'prod')
keywords = ('min', 'max', 'avg', 'sum', 'prod')


# required answerer function
@@ -47,6 +43,8 @@ def answer(query):
# required answerer function
# returns information about the answerer
def self_info():
return {'name': gettext('Statistics functions'),
'description': gettext('Compute {functions} of the arguments').format(functions='/'.join(keywords)),
'examples': ['avg 123 548 2.04 24.2']}
return {
'name': gettext('Statistics functions'),
'description': gettext('Compute {functions} of the arguments').format(functions='/'.join(keywords)),
'examples': ['avg 123 548 2.04 24.2'],
}
17 changes: 9 additions & 8 deletions searx/autocomplete.py
@@ -120,14 +120,15 @@ def wikipedia(query, lang):
return []


backends = {'dbpedia': dbpedia,
'duckduckgo': duckduckgo,
'google': google,
'startpage': startpage,
'swisscows': swisscows,
'qwant': qwant,
'wikipedia': wikipedia
}
backends = {
'dbpedia': dbpedia,
'duckduckgo': duckduckgo,
'google': google,
'startpage': startpage,
'swisscows': swisscows,
'qwant': qwant,
'wikipedia': wikipedia,
}


def search_autocomplete(backend_name, query, lang):
3 changes: 3 additions & 0 deletions searx/data/__init__.py
@@ -23,10 +23,12 @@

data_dir = Path(__file__).parent


def _load(filename):
with open(data_dir / filename, encoding='utf-8') as f:
return json.load(f)


def ahmia_blacklist_loader():
"""Load data from `ahmia_blacklist.txt` and return a list of MD5 values of onion
names. The MD5 values are fetched by::
@@ -39,6 +41,7 @@ def ahmia_blacklist_loader():
with open(str(data_dir / 'ahmia_blacklist.txt'), encoding='utf-8') as f:
return f.read().split()


ENGINES_LANGUAGES = _load('engines_languages.json')
CURRENCIES = _load('currencies.json')
USER_AGENTS = _load('useragents.json')
16 changes: 10 additions & 6 deletions searx/engines/1337x.py
@@ -43,11 +43,15 @@ def response(resp):
filesize, filesize_multiplier = filesize_info.split()
filesize = get_torrent_size(filesize, filesize_multiplier)

results.append({'url': href,
'title': title,
'seed': seed,
'leech': leech,
'filesize': filesize,
'template': 'torrent.html'})
results.append(
{
'url': href,
'title': title,
'seed': seed,
'leech': leech,
'filesize': filesize,
'template': 'torrent.html',
}
)

return results
28 changes: 11 additions & 17 deletions searx/engines/__init__.py
@@ -57,6 +57,7 @@

"""


def load_engine(engine_data):
"""Load engine from ``engine_data``.

@@ -166,20 +167,19 @@ def set_language_attributes(engine):
# settings.yml
if engine.language not in engine.supported_languages:
raise ValueError(
"settings.yml - engine: '%s' / language: '%s' not supported" % (
engine.name, engine.language ))
"settings.yml - engine: '%s' / language: '%s' not supported" % (engine.name, engine.language)
)

if isinstance(engine.supported_languages, dict):
engine.supported_languages = {
engine.language : engine.supported_languages[engine.language]
}
engine.supported_languages = {engine.language: engine.supported_languages[engine.language]}
else:
engine.supported_languages = [engine.language]

# find custom aliases for non standard language codes
for engine_lang in engine.supported_languages:
iso_lang = match_language(engine_lang, BABEL_LANGS, fallback=None)
if (iso_lang
if (
iso_lang
and iso_lang != engine_lang
and not engine_lang.startswith(iso_lang)
and iso_lang not in engine.supported_languages
@@ -197,14 +197,12 @@ def set_language_attributes(engine):
}
engine.fetch_supported_languages = (
# pylint: disable=protected-access
lambda: engine._fetch_supported_languages(
get(engine.supported_languages_url, headers=headers))
lambda: engine._fetch_supported_languages(get(engine.supported_languages_url, headers=headers))
)


def update_attributes_for_tor(engine):
if (settings['outgoing'].get('using_tor_proxy')
and hasattr(engine, 'onion_url') ):
if settings['outgoing'].get('using_tor_proxy') and hasattr(engine, 'onion_url'):
engine.search_url = engine.onion_url + getattr(engine, 'search_path', '')
engine.timeout += settings['outgoing'].get('extra_proxy_timeout', 0)

@@ -217,9 +215,7 @@ def is_missing_required_attributes(engine):
missing = False
for engine_attr in dir(engine):
if not engine_attr.startswith('_') and getattr(engine, engine_attr) is None:
logger.error(
'Missing engine config attribute: "{0}.{1}"'
.format(engine.name, engine_attr))
logger.error('Missing engine config attribute: "{0}.{1}"'.format(engine.name, engine_attr))
missing = True
return missing

@@ -230,8 +226,7 @@ def is_engine_active(engine):
return False

# exclude onion engines if not using tor
if ('onions' in engine.categories
and not settings['outgoing'].get('using_tor_proxy') ):
if 'onions' in engine.categories and not settings['outgoing'].get('using_tor_proxy'):
return False

return True
@@ -253,8 +248,7 @@ def register_engine(engine):


def load_engines(engine_list):
"""usage: ``engine_list = settings['engines']``
"""
"""usage: ``engine_list = settings['engines']``"""
engines.clear()
engine_shortcuts.clear()
categories.clear()
11 changes: 3 additions & 8 deletions searx/engines/ahmia.py
@@ -25,9 +25,7 @@
# search url
search_url = 'http://juhanurmihxlp77nkq76byazcldy2hlmovfu2epvl5ankdibsot4csyd.onion/search/?{query}'
time_range_support = True
time_range_dict = {'day': 1,
'week': 7,
'month': 30}
time_range_dict = {'day': 1, 'week': 7, 'month': 30}

# xpaths
results_xpath = '//li[@class="result"]'
@@ -54,7 +52,7 @@ def response(resp):
# trim results so there's not way too many at once
first_result_index = page_size * (resp.search_params.get('pageno', 1) - 1)
all_results = eval_xpath_list(dom, results_xpath)
trimmed_results = all_results[first_result_index:first_result_index + page_size]
trimmed_results = all_results[first_result_index : first_result_index + page_size]

# get results
for result in trimmed_results:
@@ -65,10 +63,7 @@ def response(resp):
title = extract_text(eval_xpath(result, title_xpath))
content = extract_text(eval_xpath(result, content_xpath))

results.append({'url': cleaned_url,
'title': title,
'content': content,
'is_onion': True})
results.append({'url': cleaned_url, 'title': title, 'content': content, 'is_onion': True})

# get spelling corrections
for correction in eval_xpath_list(dom, correction_xpath):
10 changes: 3 additions & 7 deletions searx/engines/apkmirror.py
@@ -35,8 +35,8 @@

def request(query, params):
params['url'] = search_url.format(
pageno = params['pageno'],
query = urlencode({'s': query}),
pageno=params['pageno'],
query=urlencode({'s': query}),
)
logger.debug("query_url --> %s", params['url'])
return params
@@ -55,11 +55,7 @@ def response(resp):
url = base_url + link.attrib.get('href') + '#downloads'
title = extract_text(link)
img_src = base_url + eval_xpath_getindex(result, './/img/@src', 0)
res = {
'url': url,
'title': title,
'img_src': img_src
}
res = {'url': url, 'title': title, 'img_src': img_src}

results.append(res)
