
[fix] issues reported by pylint

Fix pylint issues from commit (3d96a983)

    [format.python] initial formatting of the python code

Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
Markus Heiser 2021-12-27 10:16:20 +01:00
parent 3d96a9839a
commit d84226bf63
9 changed files with 55 additions and 21 deletions
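
Two patterns account for most of the hunks below. First, the inline `# pylint: disable=unused-import` comment moves from the closing parenthesis of each multi-line import onto the line that opens it: the pragma only applies to the line it sits on, and unused-import is reported against the first line of the statement, so after the black reformatting the old placement evidently no longer silenced the warning. Second, the single-line URL templates built from adjacent string literals are wrapped in parentheses and fenced with `# fmt: off` / `# fmt: on`, so black keeps one fragment per line. A minimal sketch of both patterns, using os.path names as stand-ins for the real searx.engines imports:

    # Before: black moved the trailing comment onto the closing parenthesis,
    # where it evidently no longer silenced unused-import, which pylint
    # reports against the first line of the statement.
    from os.path import (
        join,
        split,
    )  # NOQA # pylint: disable=unused-import

    # After: the inline disable sits on the line pylint reports against.
    from os.path import (  # pylint: disable=unused-import
        basename,
        dirname,
    )

    # After: '# fmt: off' / '# fmt: on' keep black from collapsing the
    # adjacent string literals back onto one line; the parser joins them
    # into a single string either way.
    search_string = (
        # fmt: off
        'images/search'
        '?{query}'
        '&count={count}'
        # fmt: on
    )

The os.path names and the shortened search_string are illustrative only; the actual changes touch the searx.engines modules shown in the hunks below.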


@@ -6,13 +6,13 @@
 from urllib.parse import urlencode
 from lxml import html
 from json import loads
-from searx.utils import match_language
 
+from searx.utils import match_language
 from searx.engines.bing import language_aliases
-from searx.engines.bing import (
+from searx.engines.bing import (  # pylint: disable=unused-import
     _fetch_supported_languages,
     supported_languages_url,
-)  # NOQA # pylint: disable=unused-import
+)
 
 # about
 about = {
@@ -34,7 +34,15 @@ number_of_results = 28
 
 # search-url
 base_url = 'https://www.bing.com/'
-search_string = 'images/search' '?{query}' '&count={count}' '&first={first}' '&tsc=ImageHoverTitle'
+search_string = (
+    # fmt: off
+    'images/search'
+    '?{query}'
+    '&count={count}'
+    '&first={first}'
+    '&tsc=ImageHoverTitle'
+    # fmt: on
+)
 time_range_string = '&qft=+filterui:age-lt{interval}'
 time_range_dict = {'day': '1440', 'week': '10080', 'month': '43200', 'year': '525600'}


@@ -6,15 +6,15 @@
 from json import loads
 from lxml import html
 from urllib.parse import urlencode
-from searx.utils import match_language
 
+from searx.utils import match_language
 from searx.engines.bing import language_aliases
-from searx.engines.bing import (
+from searx.engines.bing import (  # pylint: disable=unused-import
     _fetch_supported_languages,
     supported_languages_url,
-)  # NOQA # pylint: disable=unused-import
+)
 
+# about
 about = {
     "website": 'https://www.bing.com/videos',
     "wikidata_id": 'Q4914152',
@@ -31,7 +31,16 @@ time_range_support = True
 number_of_results = 28
 
 base_url = 'https://www.bing.com/'
-search_string = 'videos/search' '?{query}' '&count={count}' '&first={first}' '&scope=video' '&FORM=QBLH'
+search_string = (
+    # fmt: off
+    'videos/search'
+    '?{query}'
+    '&count={count}'
+    '&first={first}'
+    '&scope=video'
+    '&FORM=QBLH'
+    # fmt: on
+)
 time_range_string = '&qft=+filterui:videoage-lt{interval}'
 time_range_dict = {'day': '1440', 'week': '10080', 'month': '43200', 'year': '525600'}


@@ -25,9 +25,14 @@ number_of_results = 5
 
 # search-url
 # Doku is OpenSearch compatible
 base_url = 'http://localhost:8090'
-search_url = '/?do=search' '&{query}'
-# TODO '&startRecord={offset}'\
-# TODO '&maximumRecords={limit}'\
+search_url = (
+    # fmt: off
+    '/?do=search'
+    '&{query}'
+    # fmt: on
+)
+# TODO '&startRecord={offset}'
+# TODO '&maximumRecords={limit}'
 
 # do search-request


@@ -10,10 +10,10 @@ from lxml import html
 
 from searx.data import WIKIDATA_UNITS
 from searx.engines.duckduckgo import language_aliases
-from searx.engines.duckduckgo import (
+from searx.engines.duckduckgo import (  # pylint: disable=unused-import
     _fetch_supported_languages,
     supported_languages_url,
-)  # NOQA # pylint: disable=unused-import
+)
 from searx.utils import extract_text, html_to_text, match_language, get_string_replaces_function
 from searx.external_urls import get_external_url, get_earth_coordinates_url, area_to_osm_zoom


@@ -7,10 +7,10 @@ from json import loads
 from urllib.parse import urlencode
 from searx.exceptions import SearxEngineAPIException
 from searx.engines.duckduckgo import get_region_code
-from searx.engines.duckduckgo import (
+from searx.engines.duckduckgo import (  # pylint: disable=unused-import
     _fetch_supported_languages,
     supported_languages_url,
-)  # NOQA # pylint: disable=unused-import
+)
 from searx.network import get
 
 # about


@@ -22,7 +22,14 @@ paging = False
 safesearch = True
 
 base_url = 'https://www.etools.ch'
-search_path = '/searchAdvancedSubmit.do' '?query={search_term}' '&pageResults=20' '&safeSearch={safesearch}'
+search_path = (
+    # fmt: off
+    '/searchAdvancedSubmit.do'
+    '?query={search_term}'
+    '&pageResults=20'
+    '&safeSearch={safesearch}'
+    # fmt: on
+)
 
 
 def request(query, params):


@@ -14,10 +14,10 @@ from searx.data import WIKIDATA_UNITS
 from searx.network import post, get
 from searx.utils import match_language, searx_useragent, get_string_replaces_function
 from searx.external_urls import get_external_url, get_earth_coordinates_url, area_to_osm_zoom
-from searx.engines.wikipedia import (
+from searx.engines.wikipedia import (  # pylint: disable=unused-import
     _fetch_supported_languages,
     supported_languages_url,
-)  # NOQA # pylint: disable=unused-import
+)
 
 # about
 about = {


@@ -39,7 +39,12 @@ paging = True
 categories = ['news']
 
 # search-url
-search_url = 'https://news.search.yahoo.com/search' '?{query}&b={offset}'
+search_url = (
+    # fmt: off
+    'https://news.search.yahoo.com/search'
+    '?{query}&b={offset}'
+    # fmt: on
+)
 
 AGO_RE = re.compile(r'([0-9]+)\s*(year|month|week|day|minute|hour)')
 AGO_TIMEDELTA = {


@@ -22,7 +22,7 @@ class TestWebUtils(SearxTestCase):
         self.assertEqual(webutils.highlight_content('', None), None)
         self.assertEqual(webutils.highlight_content(False, None), None)
-        contents = ['<html></html>' 'not<']
+        contents = ['<html></html>not<']
         for content in contents:
            self.assertEqual(webutils.highlight_content(content, None), content)