
Merge pull request #695 from return42/fix-sp

[fix] startpage engine / modified API
Alexandre Flament 2022-01-16 20:27:36 +01:00 committed by GitHub
commit e07417848f

searx/engines/startpage.py

@@ -1,16 +1,28 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-"""
-Startpage (Web)
+# lint: pylint
+"""Startpage (Web)
 """
-from lxml import html
-from dateutil import parser
-from datetime import datetime, timedelta
 import re
+from time import time
+from urllib.parse import urlencode
 from unicodedata import normalize, combining
+from datetime import datetime, timedelta
+from dateutil import parser
+from lxml import html
 from babel import Locale
 from babel.localedata import locale_identifiers
+from searx.network import get
 from searx.utils import extract_text, eval_xpath, match_language
+from searx.exceptions import (
+    SearxEngineResponseException,
+    SearxEngineCaptchaException,
+)
 # about
 about = {
@@ -33,7 +45,7 @@ supported_languages_url = 'https://www.startpage.com/do/settings'
 # search-url
 base_url = 'https://startpage.com/'
-search_url = base_url + 'do/search'
+search_url = base_url + 'sp/search?'
 # specific xpath variables
 # ads xpath //div[@id="results"]/div[@id="sponsored"]//div[@class="result"]
@@ -42,18 +54,74 @@ results_xpath = '//div[@class="w-gl__result__main"]'
 link_xpath = './/a[@class="w-gl__result-title result-link"]'
 content_xpath = './/p[@class="w-gl__description"]'
+# timestamp of the last fetch of 'sc' code
+sc_code_ts = 0
+sc_code = ''
+
+
+def raise_captcha(resp):
+    if str(resp.url).startswith('https://www.startpage.com/sp/captcha'):
+        # suspend CAPTCHA for 7 days
+        raise SearxEngineCaptchaException(suspended_time=7 * 24 * 3600)
+
+
+def get_sc_code(headers):
+    """Get an actual `sc` argument from startpage's home page.
+
+    Startpage puts a `sc` argument on every link.  Without this argument
+    startpage considers the request is from a bot.  We do not know what is
+    encoded in the value of the `sc` argument, but it seems to be a kind of a
+    *time-stamp*.  This *time-stamp* is valid for a few hours.
+
+    This function scrap a new *time-stamp* from startpage's home page every hour
+    (3000 sec).
+    """
+    global sc_code_ts, sc_code  # pylint: disable=global-statement
+
+    if time() > (sc_code_ts + 3000):
+        logger.debug("query new sc time-stamp ...")
+
+        resp = get(base_url, headers=headers)
+        raise_captcha(resp)
+        dom = html.fromstring(resp.text)
+
+        try:
+            # href --> '/?sc=adrKJMgF8xwp20'
+            href = eval_xpath(dom, '//a[@class="footer-home__logo"]')[0].get('href')
+        except IndexError as exc:
+            # suspend startpage API --> https://github.com/searxng/searxng/pull/695
+            raise SearxEngineResponseException(
+                suspended_time=7 * 24 * 3600, message="PR-695: query new sc time-stamp failed!"
+            ) from exc
+
+        sc_code = href[5:]
+        sc_code_ts = time()
+        logger.debug("new value is: %s", sc_code)
+
+    return sc_code
+
+
 # do search-request
 def request(query, params):
-    params['url'] = search_url
-    params['method'] = 'POST'
-    params['data'] = {
+    # pylint: disable=line-too-long
+    # The format string from Startpage's FFox add-on [1]::
+    #
+    #     https://www.startpage.com/do/dsearch?query={searchTerms}&cat=web&pl=ext-ff&language=__MSG_extensionUrlLanguage__&extVersion=1.3.0
+    #
+    # [1] https://addons.mozilla.org/en-US/firefox/addon/startpage-private-search/
+
+    args = {
         'query': query,
         'page': params['pageno'],
         'cat': 'web',
-        'cmd': 'process_search',
-        'engine0': 'v1all',
+        # 'pl': 'ext-ff',
+        # 'extVersion': '1.3.0',
+        # 'abp': "-1",
+        'sc': get_sc_code(params['headers']),
     }
     # set language if specified
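In short, the new get_sc_code() helper fetches Startpage's home page at most once per 3000 seconds, pulls the sc token out of the footer-logo link and caches it at module level. Below is a minimal standalone sketch of the same idea; the helper name fetch_sc_code, the requests session and the _sc_cache dict are illustrative assumptions, while the XPath, the '/?sc=' prefix and the 3000-second refresh interval are taken from the patch above:

import time
import requests
from lxml import html

BASE_URL = 'https://startpage.com/'
_sc_cache = {'ts': 0.0, 'value': ''}  # hypothetical cache, mirrors sc_code_ts / sc_code

def fetch_sc_code(session: requests.Session) -> str:
    """Return a cached `sc` token, refreshing it at most every 3000 seconds."""
    if time.time() > _sc_cache['ts'] + 3000:
        resp = session.get(BASE_URL, timeout=10)
        dom = html.fromstring(resp.text)
        # the footer logo links to something like '/?sc=adrKJMgF8xwp20'
        href = dom.xpath('//a[@class="footer-home__logo"]')[0].get('href')
        _sc_cache['value'] = href[len('/?sc='):]
        _sc_cache['ts'] = time.time()
    return _sc_cache['value']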
@@ -61,9 +129,10 @@ def request(query, params):
     lang_code = match_language(params['language'], supported_languages, fallback=None)
     if lang_code:
         language_name = supported_languages[lang_code]['alias']
-        params['data']['language'] = language_name
-        params['data']['lui'] = language_name
+        args['language'] = language_name
+        args['lui'] = language_name
+
+    params['url'] = search_url + urlencode(args)
     return params
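Taken together, the request() changes replace the old POST with form data against do/search by a plain GET against sp/search?. A rough sketch of the URL the new code ends up building; the query, page and sc values below are made-up examples:

from urllib.parse import urlencode

# illustrative values only -- the real 'sc' token comes from get_sc_code()
args = {'query': 'free software', 'page': 1, 'cat': 'web', 'sc': 'adrKJMgF8xwp20'}
url = 'https://startpage.com/sp/search?' + urlencode(args)
print(url)
# https://startpage.com/sp/search?query=free+software&page=1&cat=web&sc=adrKJMgF8xwp20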
@@ -134,10 +203,11 @@ def response(resp):
 # get supported languages from their site
 def _fetch_supported_languages(resp):
-    # startpage's language selector is a mess
-    # each option has a displayed name and a value, either of which may represent the language name
-    # in the native script, the language name in English, an English transliteration of the native name,
-    # the English name of the writing script used by the language, or occasionally something else entirely.
+    # startpage's language selector is a mess each option has a displayed name
+    # and a value, either of which may represent the language name in the native
+    # script, the language name in English, an English transliteration of the
+    # native name, the English name of the writing script used by the language,
+    # or occasionally something else entirely.
     # this cases are so special they need to be hardcoded, a couple of them are mispellings
     language_names = {
@@ -151,7 +221,15 @@ def _fetch_supported_languages(resp):
     }
     # get the English name of every language known by babel
-    language_names.update({name.lower(): lang_code for lang_code, name in Locale('en')._data['languages'].items()})
+    language_names.update(
+        {
+            # fmt: off
+            name.lower(): lang_code
+            # pylint: disable=protected-access
+            for lang_code, name in Locale('en')._data['languages'].items()
+            # fmt: on
+        }
+    )
     # get the native name of every language known by babel
     for lang_code in filter(lambda lang_code: lang_code.find('_') == -1, locale_identifiers()):
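The reflowed comprehension above only inverts babel's code-to-English-name table so that lowercase English names can be looked up later. For illustration, the same mapping can also be built through babel's public Locale.languages property rather than the private _data access used in the engine (a sketch, not part of the patch):

from babel import Locale

# e.g. {'german': 'de', 'french': 'fr', 'japanese': 'ja', ...}
english_names = {name.lower(): code for code, name in Locale('en').languages.items()}

assert english_names['german'] == 'de'
assert english_names['french'] == 'fr'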
@@ -176,8 +254,8 @@ def _fetch_supported_languages(resp):
         if isinstance(lang_code, str):
             supported_languages[lang_code] = {'alias': sp_option_value}
         elif isinstance(lang_code, list):
-            for lc in lang_code:
-                supported_languages[lc] = {'alias': sp_option_value}
+            for _lc in lang_code:
+                supported_languages[_lc] = {'alias': sp_option_value}
         else:
             print('Unknown language option in Startpage: {} ({})'.format(sp_option_value, sp_option_text))