# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""This module implements the Wikipedia engine. Some of this implementations
|
|
|
|
|
are shared by other engines:
|
|
|
|
|
|
|
|
|
|
- :ref:`wikidata engine`
|
|
|
|
|
|
2023-04-04 15:17:12 +02:00
|
|
|
|
The list of supported languages is :py:obj:`fetched <fetch_wikimedia_traits>` from
|
|
|
|
|
the article linked by :py:obj:`list_of_wikipedias`.

Unlike traditional search engines, Wikipedia does not offer one wiki for all
languages; there is a separate Wikipedia for each supported language. Some of
these Wikipedias have a LanguageConverter_ enabled
(:py:obj:`rest_v1_summary_url`).

A LanguageConverter_ (LC) is a system based on language variants that
automatically converts the content of a page into a different variant. A
variant is mostly the same language in a different script.

- `Wikipedias in multiple writing systems`_
- `Automatic conversion between traditional and simplified Chinese characters`_

PR-2554_:
  The Wikipedia link returned by the API is still the same in all cases
  (`https://zh.wikipedia.org/wiki/出租車`_), but if your browser's
  ``Accept-Language`` header is set to any of ``zh``, ``zh-CN``, ``zh-TW``,
  ``zh-HK`` or one of the other variants, Wikipedia's LC automatically returns
  the desired script in its web page.

  - You can test the API here: https://reqbin.com/gesg2kvx

.. _https://zh.wikipedia.org/wiki/出租車:
   https://zh.wikipedia.org/wiki/%E5%87%BA%E7%A7%9F%E8%BB%8A

To support Wikipedia's LanguageConverter_, a SearXNG request to Wikipedia uses
:py:obj:`get_wiki_params` and :py:obj:`wiki_lc_locale_variants` in the
:py:obj:`fetch_wikimedia_traits` function.

To test in SearXNG, query for ``!wp 出租車`` with each of the available Chinese
options:

- ``!wp 出租車 :zh``    should show 出租車
- ``!wp 出租車 :zh-CN`` should show 出租车
- ``!wp 出租車 :zh-TW`` should show 計程車
- ``!wp 出租車 :zh-HK`` should show 的士
- ``!wp 出租車 :zh-SG`` should show 德士
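
Outside of SearXNG the same behavior can be reproduced with a few lines of
Python.  A minimal sketch (httpx is just one possible HTTP client; the sketch
reads the ``titles.display`` field that :py:obj:`response` also uses):

.. code:: python

   import urllib.parse
   import httpx

   url = 'https://zh.wikipedia.org/api/rest_v1/page/summary/'
   url += urllib.parse.quote('出租車')

   for variant in ('zh-CN', 'zh-TW', 'zh-HK', 'zh-SG'):
       resp = httpx.get(url, headers={'Accept-Language': variant})
       print(variant, '-->', resp.json()['titles']['display'])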

.. _LanguageConverter:
   https://www.mediawiki.org/wiki/Writing_systems#LanguageConverter
.. _Wikipedias in multiple writing systems:
   https://meta.wikimedia.org/wiki/Wikipedias_in_multiple_writing_systems
.. _Automatic conversion between traditional and simplified Chinese characters:
   https://en.wikipedia.org/wiki/Chinese_Wikipedia#Automatic_conversion_between_traditional_and_simplified_Chinese_characters
.. _PR-2554: https://github.com/searx/searx/pull/2554

"""

import urllib.parse
import babel

from lxml import html

from searx import utils
from searx import network as _network
from searx import locales
from searx.enginelib.traits import EngineTraits

traits: EngineTraits

# about
about = {
    "website": 'https://www.wikipedia.org/',
    "wikidata_id": 'Q52',
    "official_api_documentation": 'https://en.wikipedia.org/api/',
    "use_official_api": True,
    "require_api_key": False,
    "results": 'JSON',
}

display_type = ["infobox"]
"""A list of display types composed from ``infobox`` and ``list``. The latter
one will add a hit to the result list. The first one will show a hit in the
info box. Both values can be set, or only one of the two."""
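
# For example, an instance that wants a hit in the infobox *and* in the result
# list would configure (a sketch; compare the handling in response() below):
#
#     display_type = ["infobox", "list"]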

send_accept_language_header = True
"""The HTTP ``Accept-Language`` header is needed for wikis where
LanguageConverter_ is enabled."""

list_of_wikipedias = 'https://meta.wikimedia.org/wiki/List_of_Wikipedias'
"""`List of all wikipedias <https://meta.wikimedia.org/wiki/List_of_Wikipedias>`_
"""

wikipedia_article_depth = 'https://meta.wikimedia.org/wiki/Wikipedia_article_depth'
"""The *editing depth* of Wikipedia is one of several possible rough indicators
of the encyclopedia's collaborative quality, showing how frequently its articles
are updated. The measurement of depth was introduced after some limitations of
the classic measurement of article count were realized.
"""

rest_v1_summary_url = 'https://{wiki_netloc}/api/rest_v1/page/summary/{title}'
"""
`wikipedia rest_v1 summary API`_:
  The summary response includes an extract of the first paragraph of the page
  in plain text and HTML as well as the type of page. This is useful for page
  previews (fka. Hovercards, aka. Popups) on the web and link previews in the
  apps.

HTTP ``Accept-Language`` header (:py:obj:`send_accept_language_header`):
  The desired language variant code for wikis where LanguageConverter_ is
  enabled.
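
Example of a summary URL built from this pattern (the title is URL-quoted,
compare :py:obj:`request`):

.. code:: python

   import urllib.parse

   title = urllib.parse.quote('出租車')
   rest_v1_summary_url.format(wiki_netloc='zh.wikipedia.org', title=title)
   # --> 'https://zh.wikipedia.org/api/rest_v1/page/summary/%E5%87%BA%E7%A7%9F%E8%BB%8A'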

.. _wikipedia rest_v1 summary API:
   https://en.wikipedia.org/api/rest_v1/#/Page%20content/get_page_summary__title_

"""

wiki_lc_locale_variants = {
    "zh": (
        "zh-CN",
        "zh-HK",
        "zh-MO",
        "zh-MY",
        "zh-SG",
        "zh-TW",
    ),
    "zh-classical": ("zh-classical",),
}
"""Mapping rule of the LanguageConverter_ to map a language and its variants to
a Locale (used in the HTTP ``Accept-Language`` header). For example see `LC
Chinese`_.
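
In :py:obj:`fetch_wikimedia_traits` each of these variants is registered as a
region alias of its parent language.  A sketch of the resulting traits:

.. code:: python

   engine_traits.regions['zh-TW']  # --> 'zh'
   engine_traits.regions['zh-SG']  # --> 'zh'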

.. _LC Chinese:
   https://meta.wikimedia.org/wiki/Wikipedias_in_multiple_writing_systems#Chinese
"""

wikipedia_script_variants = {
    "zh": (
        "zh_Hant",
        "zh_Hans",
    )
}


def get_wiki_params(sxng_locale, eng_traits):
    """Returns the Wikipedia language tag and the netloc that fits to the
    ``sxng_locale``. To support LanguageConverter_ this function rates a locale
    (region) higher than a language (compare :py:obj:`wiki_lc_locale_variants`).
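
    A hypothetical session, assuming the engine's traits have been fetched:

    .. code:: python

       >>> get_wiki_params('zh-TW', traits)
       ('zh', 'zh.wikipedia.org')
       >>> get_wiki_params('nl', traits)
       ('nl', 'nl.wikipedia.org')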

    """
    eng_tag = eng_traits.get_region(sxng_locale, eng_traits.get_language(sxng_locale, 'en'))
    wiki_netloc = eng_traits.custom['wiki_netloc'].get(eng_tag, 'en.wikipedia.org')
    return eng_tag, wiki_netloc


def request(query, params):
    """Assemble a request (`wikipedia rest_v1 summary API`_)."""
    if query.islower():
        # Wikipedia titles are (usually) capitalized; title-case all-lowercase
        # queries to improve the chance of a direct hit.
        query = query.title()

    _eng_tag, wiki_netloc = get_wiki_params(params['searxng_locale'], traits)
    title = urllib.parse.quote(query)
    params['url'] = rest_v1_summary_url.format(wiki_netloc=wiki_netloc, title=title)

    params['raise_for_httperror'] = False
    params['soft_max_redirects'] = 2

    return params
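
# A sketch of what request() assembles for the query "Taxicab" with the
# SearXNG locale ``en`` (hypothetical values, assuming the fetched traits map
# en to en.wikipedia.org):
#
#     params['url']  --> 'https://en.wikipedia.org/api/rest_v1/page/summary/Taxicab'
#     params['raise_for_httperror'] --> False
#     params['soft_max_redirects']  --> 2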


# get response from search-request
def response(resp):

    results = []
    if resp.status_code == 404:
        return []
    if resp.status_code == 400:
        try:
            api_result = resp.json()
        except Exception:  # pylint: disable=broad-except
            pass
        else:
            if (
                api_result['type'] == 'https://mediawiki.org/wiki/HyperSwitch/errors/bad_request'
                and api_result['detail'] == 'title-invalid-characters'
            ):
                # a title with invalid characters can never match an article;
                # treat it as "no results" instead of raising an error
                return []

    _network.raise_for_httperror(resp)

    api_result = resp.json()
    title = utils.html_to_text(api_result.get('titles', {}).get('display') or api_result.get('title'))
    wikipedia_link = api_result['content_urls']['desktop']['page']

    if "list" in display_type or api_result.get('type') != 'standard':
        # show the item in the result list if 'list' is in the display options
        # or if it is an item that can't be displayed in an infobox.
        results.append({'url': wikipedia_link, 'title': title, 'content': api_result.get('description', '')})

    if "infobox" in display_type:
        if api_result.get('type') == 'standard':
            results.append(
                {
                    'infobox': title,
                    'id': wikipedia_link,
                    'content': api_result.get('extract', ''),
                    'img_src': api_result.get('thumbnail', {}).get('source'),
                    'urls': [{'title': 'Wikipedia', 'url': wikipedia_link}],
                }
            )

    return results
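
# A reduced sketch of the summary JSON consumed by response(): only the fields
# read above, with illustrative (not real) values:
#
#     {
#         "type": "standard",
#         "title": "Taxicab",
#         "titles": {"display": "Taxicab"},
#         "description": "vehicle for hire with a driver",
#         "extract": "A taxicab [...] is a type of vehicle for hire ...",
#         "thumbnail": {"source": "https://upload.wikimedia.org/..."},
#         "content_urls": {"desktop": {"page": "https://en.wikipedia.org/wiki/Taxicab"}},
#     }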


# Nonstandard language codes
#
# These Wikipedias use language codes that do not conform to the ISO 639
# standard (which is how wiki subdomains are chosen nowadays).

lang_map = locales.LOCALE_BEST_MATCH.copy()
lang_map.update(
    {
        'be-tarask': 'bel',
        'ak': 'aka',
        'als': 'gsw',
        'bat-smg': 'sgs',
        'cbk-zam': 'cbk',
        'fiu-vro': 'vro',
        'map-bms': 'map',
        'no': 'nb-NO',
        'nrm': 'nrf',
        'roa-rup': 'rup',
        'nds-nl': 'nds',
        # 'simple': invented code used for the Simple English Wikipedia (not
        # the official IETF code en-simple)
        'zh-min-nan': 'nan',
        'zh-yue': 'yue',
        'an': 'arg',
    }
)
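
# Example: the Alemannic Wikipedia lives under https://als.wikipedia.org, but
# its content language is Alemannic (gsw), so lang_map maps 'als' --> 'gsw'
# before the code below parses the tag with babel.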


def fetch_traits(engine_traits: EngineTraits):
    fetch_wikimedia_traits(engine_traits)
    print("WIKIPEDIA_LANGUAGES: %s" % len(engine_traits.custom['WIKIPEDIA_LANGUAGES']))


def fetch_wikimedia_traits(engine_traits: EngineTraits):
    """Fetch languages from Wikipedia. Not all languages from the
    :py:obj:`list_of_wikipedias` are supported by SearXNG locales; only those
    known from :py:obj:`searx.locales.LOCALE_NAMES` or those with a minimal
    :py:obj:`editing depth <wikipedia_article_depth>` are used.

    The location of the Wikipedia address of a language is mapped in a
    :py:obj:`custom field <searx.enginelib.traits.EngineTraits.custom>`
    (``wiki_netloc``). Here is a reduced example:

    .. code:: python

       traits.custom['wiki_netloc'] = {
           "en": "en.wikipedia.org",
           ..
           "gsw": "als.wikipedia.org",
           ..
           "zh": "zh.wikipedia.org",
           "zh-classical": "zh-classical.wikipedia.org"
       }
    """
    # pylint: disable=too-many-branches
    engine_traits.custom['wiki_netloc'] = {}
    engine_traits.custom['WIKIPEDIA_LANGUAGES'] = []

    # insert alias to map from a script or region to a wikipedia variant

    for eng_tag, sxng_tag_list in wikipedia_script_variants.items():
        for sxng_tag in sxng_tag_list:
            engine_traits.languages[sxng_tag] = eng_tag
    for eng_tag, sxng_tag_list in wiki_lc_locale_variants.items():
        for sxng_tag in sxng_tag_list:
            engine_traits.regions[sxng_tag] = eng_tag

    resp = _network.get(list_of_wikipedias)
    if not resp.ok:
        print("ERROR: response from Wikipedia is not OK.")

    dom = html.fromstring(resp.text)
    for row in dom.xpath('//table[contains(@class,"sortable")]//tbody/tr'):

        cols = row.xpath('./td')
        if not cols:
            continue
        cols = [c.text_content().strip() for c in cols]

        depth = float(cols[11].replace('-', '0').replace(',', ''))
        articles = int(cols[4].replace(',', ''))

        eng_tag = cols[3]
        wiki_url = row.xpath('./td[4]/a/@href')[0]
        wiki_url = urllib.parse.urlparse(wiki_url)

        try:
            sxng_tag = locales.language_tag(babel.Locale.parse(lang_map.get(eng_tag, eng_tag), sep='-'))
        except babel.UnknownLocaleError:
            # print("ERROR: %s [%s] is unknown by babel" % (cols[0], eng_tag))
            continue
        finally:
            engine_traits.custom['WIKIPEDIA_LANGUAGES'].append(eng_tag)

        if sxng_tag not in locales.LOCALE_NAMES:

            if articles < 10000:
                # exclude languages with too few articles
                continue

            if int(depth) < 20:
                # Rough indicator of a Wikipedia’s quality, showing how
                # frequently its articles are updated.
                continue

        conflict = engine_traits.languages.get(sxng_tag)
        if conflict:
            if conflict != eng_tag:
                print("CONFLICT: babel %s --> %s, %s" % (sxng_tag, conflict, eng_tag))
            continue

        engine_traits.languages[sxng_tag] = eng_tag
        engine_traits.custom['wiki_netloc'][eng_tag] = wiki_url.netloc

    engine_traits.custom['WIKIPEDIA_LANGUAGES'].sort()