diff --git a/searx/engines/google.py b/searx/engines/google.py
index 92c9e1399..841212e09 100644
--- a/searx/engines/google.py
+++ b/searx/engines/google.py
@@ -133,56 +133,121 @@ suggestion_xpath = '//div[contains(@class, "card-section")]//a'
 spelling_suggestion_xpath = '//div[@class="med"]/p/a'
 
 
-def get_lang_info(params, lang_list, custom_aliases):
-    ret_val = {}
+def get_lang_info(params, lang_list, custom_aliases, supported_any_language):
+    """Composes various language properties for the google engines.
+
+    This function is called by the various google engines (google itself,
+    google-images, -news, -scholar, -videos).
+
+    :param dict params: request parameters of the engine
+
+    :param list lang_list: list of supported languages of the engine
+        :py:obj:`ENGINES_LANGUAGES[engine-name]`
+
+    :param dict custom_aliases: custom aliases for non-standard language codes
+        (used when calling :py:func:`searx.utils.match_language`)
+
+    :param bool supported_any_language: When a language is not specified and
+        this argument is ``True``, the language interpretation is left up to
+        Google to decide how the search results should be delivered.  It is
+        ``True`` for the google engine and ``False`` for the other engines
+        (google-images, -news, -scholar, -videos).
+
+    :rtype: dict
+    :returns:
+        Dictionary with the following key/value pairs:
+
+        language:
+            Return value from :py:func:`searx.utils.match_language`
+
+        country:
+            The country code (e.g. US, AT, CA, FR, DE ...)
+
+        subdomain:
+            Google subdomain :py:obj:`google_domains` that matches the
+            country code.
+
+        params:
+            Dictionary with additional request arguments (can be passed to
+            :py:func:`urllib.parse.urlencode`).
+
+        headers:
+            Dictionary with additional HTTP headers (can be merged into the
+            request's headers).
+    """
+    ret_val = {
+        'language' : None,
+        'country' : None,
+        'subdomain' : None,
+        'params' : {},
+        'headers' : {},
+    }
+
+    # language ...
+
     _lang = params['language']
-    if _lang.lower() == 'all':
+    _any_language = _lang.lower() == 'all'
+    if _any_language:
         _lang = 'en-US'
-
     language = match_language(_lang, lang_list, custom_aliases)
     ret_val['language'] = language
 
-    # the requested language from params (en, en-US, de, de-AT, fr, fr-CA, ...)
-    _l = _lang.split('-')
+    # country ...
 
-    # the country code (US, AT, CA)
+    _l = _lang.split('-')
     if len(_l) == 2:
         country = _l[1]
     else:
         country = _l[0].upper()
         if country == 'EN':
             country = 'US'
-
     ret_val['country'] = country
 
-    # the combination (en-US, en-EN, de-DE, de-AU, fr-FR, fr-FR)
-    lang_country = '%s-%s' % (language, country)
+    # subdomain ...
 
-    # Accept-Language: fr-CH, fr;q=0.8, en;q=0.6, *;q=0.5
-    ret_val['Accept-Language'] = ','.join([
-        lang_country,
-        language + ';q=0.8,',
-        'en;q=0.6',
-        '*;q=0.5',
-    ])
-
-    # subdomain
     ret_val['subdomain'] = 'www.' + google_domains.get(country.upper(), 'google.com')
 
+    # params & headers
+
+    lang_country = '%s-%s' % (language, country)  # (en-US, en-EN, de-DE, de-AU, fr-FR ..)
+
     # hl parameter:
     # https://developers.google.com/custom-search/docs/xml_results#hlsp The
     # Interface Language:
     # https://developers.google.com/custom-search/docs/xml_results_appendices#interfaceLanguages
 
-    ret_val['hl'] = lang_list.get(lang_country, language)
+    ret_val['params']['hl'] = lang_list.get(lang_country, language)
 
     # lr parameter:
+    # The lr (language restrict) parameter restricts search results to
+    # documents written in a particular language.
     # https://developers.google.com/custom-search/docs/xml_results#lrsp
-    # Language Collection Values: 
+    # Language Collection Values:
     # https://developers.google.com/custom-search/docs/xml_results_appendices#languageCollections
-    ret_val['lr'] = "lang_" + lang_list.get(lang_country, language)
+    if _any_language and supported_any_language:
+
+        # interpretation is left up to Google (based on whoogle)
+        #
+        # - add parameter ``source=lnt``
+        # - don't use parameter ``lr``
+        # - don't add an ``Accept-Language`` HTTP header.
+
+        ret_val['params']['source'] = 'lnt'
+
+    else:
+
+        # restrict search results to documents written in a particular
+        # language
+        ret_val['params']['lr'] = "lang_" + lang_list.get(lang_country, language)
+
+        # Accept-Language: fr-CH, fr;q=0.8, en;q=0.6, *;q=0.5
+        ret_val['headers']['Accept-Language'] = ','.join([
+            lang_country,
+            language + ';q=0.8',
+            'en;q=0.6',
+            '*;q=0.5',
+        ])
 
     return ret_val
 
@@ -198,14 +263,13 @@ def request(query, params):
 
     lang_info = get_lang_info(
         # pylint: disable=undefined-variable
-        params, supported_languages, language_aliases
+        params, supported_languages, language_aliases, True
     )
 
     # https://www.google.de/search?q=corona&hl=de&lr=lang_de&start=0&tbs=qdr%3Ad&safe=medium
     query_url = 'https://' + lang_info['subdomain'] + '/search' + "?" + urlencode({
         'q': query,
-        'hl': lang_info['hl'],
-        'lr': lang_info['lr'],
+        **lang_info['params'],
         'ie': "utf8",
         'oe': "utf8",
         'start': offset,
@@ -215,12 +279,9 @@ def request(query, params):
         query_url += '&' + urlencode({'tbs': 'qdr:' + time_range_dict[params['time_range']]})
     if params['safesearch']:
         query_url += '&' + urlencode({'safe': filter_mapping[params['safesearch']]})
-
-    logger.debug("query_url --> %s", query_url)
     params['url'] = query_url
 
-    logger.debug("HTTP header Accept-Language --> %s", lang_info['Accept-Language'])
-    params['headers']['Accept-Language'] = lang_info['Accept-Language']
+    params['headers'].update(lang_info['headers'])
     params['headers']['Accept'] = (
         'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
     )
diff --git a/searx/engines/google_images.py b/searx/engines/google_images.py
index 919c99c63..e7382a6fe 100644
--- a/searx/engines/google_images.py
+++ b/searx/engines/google_images.py
@@ -101,14 +101,15 @@ def request(query, params):
 
     lang_info = get_lang_info(
         # pylint: disable=undefined-variable
-        params, supported_languages, language_aliases
+        params, supported_languages, language_aliases, False
     )
+    logger.debug(
+        "HTTP header Accept-Language --> %s", lang_info['headers']['Accept-Language'])
 
     query_url = 'https://' + lang_info['subdomain'] + '/search' + "?" + urlencode({
         'q': query,
         'tbm': "isch",
-        'hl': lang_info['hl'],
-        'lr': lang_info['lr'],
+        **lang_info['params'],
         'ie': "utf8",
         'oe': "utf8",
         'num': 30,
@@ -118,12 +119,9 @@ def request(query, params):
         query_url += '&' + urlencode({'tbs': 'qdr:' + time_range_dict[params['time_range']]})
     if params['safesearch']:
         query_url += '&' + urlencode({'safe': filter_mapping[params['safesearch']]})
-
-    logger.debug("query_url --> %s", query_url)
     params['url'] = query_url
 
-    logger.debug("HTTP header Accept-Language --> %s", lang_info['Accept-Language'])
-    params['headers']['Accept-Language'] = lang_info['Accept-Language']
+    params['headers'].update(lang_info['headers'])
     params['headers']['Accept'] = (
         'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
     )
diff --git a/searx/engines/google_news.py b/searx/engines/google_news.py
index 355155de8..485d602bc 100644
--- a/searx/engines/google_news.py
+++ b/searx/engines/google_news.py
@@ -83,8 +83,10 @@ def request(query, params):
 
     lang_info = get_lang_info(
         # pylint: disable=undefined-variable
-        params, supported_languages, language_aliases
+        params, supported_languages, language_aliases, False
     )
+    logger.debug(
+        "HTTP header Accept-Language --> %s", lang_info['headers']['Accept-Language'])
 
     # google news has only one domain
     lang_info['subdomain'] = 'news.google.com'
@@ -102,18 +104,14 @@ def request(query, params):
 
     query_url = 'https://' + lang_info['subdomain'] + '/search' + "?" + urlencode({
         'q': query,
-        'hl': lang_info['hl'],
-        'lr': lang_info['lr'],
+        **lang_info['params'],
         'ie': "utf8",
         'oe': "utf8",
         'gl': lang_info['country'],
     }) + ('&ceid=%s' % ceid)  # ceid includes a ':' character which must not be urlencoded
-
-    logger.debug("query_url --> %s", query_url)
     params['url'] = query_url
 
-    logger.debug("HTTP header Accept-Language --> %s", lang_info['Accept-Language'])
-    params['headers']['Accept-Language'] = lang_info['Accept-Language']
+    params['headers'].update(lang_info['headers'])
     params['headers']['Accept'] = (
         'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
     )
diff --git a/searx/engines/google_scholar.py b/searx/engines/google_scholar.py
index a0ca70e53..8442a7bfa 100644
--- a/searx/engines/google_scholar.py
+++ b/searx/engines/google_scholar.py
@@ -77,31 +77,26 @@ def request(query, params):
     offset = (params['pageno'] - 1) * 10
     lang_info = get_lang_info(
         # pylint: disable=undefined-variable
-
-
-        # params, {}, language_aliases
-
-        params, supported_languages, language_aliases
+        params, supported_languages, language_aliases, False
     )
+    logger.debug(
+        "HTTP header Accept-Language --> %s", lang_info['headers']['Accept-Language'])
 
+    # subdomain is: scholar.google.xy
     lang_info['subdomain'] = lang_info['subdomain'].replace("www.", "scholar.")
 
     query_url = 'https://'+ lang_info['subdomain'] + '/scholar' + "?" + urlencode({
         'q': query,
-        'hl': lang_info['hl'],
-        'lr': lang_info['lr'],
+        **lang_info['params'],
         'ie': "utf8",
         'oe': "utf8",
         'start' : offset,
     })
 
     query_url += time_range_url(params)
-
-    logger.debug("query_url --> %s", query_url)
     params['url'] = query_url
 
-    logger.debug("HTTP header Accept-Language --> %s", lang_info['Accept-Language'])
-    params['headers']['Accept-Language'] = lang_info['Accept-Language']
+    params['headers'].update(lang_info['headers'])
     params['headers']['Accept'] = (
         'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
     )
diff --git a/searx/engines/google_videos.py b/searx/engines/google_videos.py
index 3e3f5d6f1..c57db4e63 100644
--- a/searx/engines/google_videos.py
+++ b/searx/engines/google_videos.py
@@ -119,14 +119,15 @@ def request(query, params):
 
     lang_info = get_lang_info(
         # pylint: disable=undefined-variable
-        params, supported_languages, language_aliases
+        params, supported_languages, language_aliases, False
     )
+    logger.debug(
+        "HTTP header Accept-Language --> %s", lang_info['headers']['Accept-Language'])
 
     query_url = 'https://' + lang_info['subdomain'] + '/search' + "?" + urlencode({
         'q': query,
         'tbm': "vid",
-        'hl': lang_info['hl'],
-        'lr': lang_info['lr'],
+        **lang_info['params'],
         'ie': "utf8",
         'oe': "utf8",
     })
@@ -135,12 +136,9 @@ def request(query, params):
         query_url += '&' + urlencode({'tbs': 'qdr:' + time_range_dict[params['time_range']]})
     if params['safesearch']:
         query_url += '&' + urlencode({'safe': filter_mapping[params['safesearch']]})
-
-    logger.debug("query_url --> %s", query_url)
     params['url'] = query_url
 
-    logger.debug("HTTP header Accept-Language --> %s", lang_info['Accept-Language'])
-    params['headers']['Accept-Language'] = lang_info['Accept-Language']
+    params['headers'].update(lang_info['headers'])
     params['headers']['Accept'] = (
         'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
    )
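
A minimal sketch of the new calling convention (not part of the patch). The
lang_info value below is a hypothetical example of what get_lang_info() could
return for params['language'] == 'de-AT' with supported_any_language=False;
the real values depend on supported_languages and language_aliases. It shows
how the request() functions now merge the 'params' sub-dict into the query
string and the 'headers' sub-dict into the HTTP headers:

    from urllib.parse import urlencode

    # hypothetical return value of get_lang_info() -- dict shape as per this patch
    lang_info = {
        'language': 'de',
        'country': 'AT',
        'subdomain': 'www.google.at',
        'params': {'hl': 'de', 'lr': 'lang_de'},
        'headers': {'Accept-Language': 'de-AT,de;q=0.8,en;q=0.6,*;q=0.5'},
    }

    params = {'headers': {}}  # stand-in for the engine's request parameters

    # URL arguments: engine-specific keys plus the language-related keys.
    # For 'all' with supported_any_language=True, lang_info['params'] would
    # instead contain {'source': 'lnt'} and lang_info['headers'] would be {}.
    params['url'] = 'https://' + lang_info['subdomain'] + '/search?' + urlencode({
        'q': 'corona',
        **lang_info['params'],
        'ie': 'utf8',
        'oe': 'utf8',
    })

    # HTTP headers: merged in one step instead of assigned key by key
    params['headers'].update(lang_info['headers'])

    print(params['url'])
    # -> https://www.google.at/search?q=corona&hl=de&lr=lang_de&ie=utf8&oe=utf8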