#!/usr/bin/env python
# lint: pylint
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Fetch website descriptions from the websites themselves and from the
:origin:`searx/engines/wikidata.py` engine.

Output file: :origin:`searx/data/engine_descriptions.json`.

"""

# pylint: disable=invalid-name, global-statement
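
# Usage sketch: this script is meant to be run offline, e.g. from the
# repository root (the path below is an assumption based on the usual
# SearXNG repository layout):
#
#   python searxng_extra/update/update_engine_descriptions.py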

import json
from urllib.parse import urlparse
from os.path import join

from lxml.html import fromstring

from searx.engines import wikidata, set_loggers
from searx.utils import extract_text, gen_useragent, detect_language, searx_useragent
from searx.locales import LOCALE_NAMES, locales_initialize, match_locale
from searx import searx_dir
import searx.search
import searx.network

set_loggers(wikidata, 'wikidata')
locales_initialize()

# You can test the queries below at https://query.wikidata.org
# %IDS% is replaced by Wikidata entities separated by spaces, each with the
# prefix wd:, for example: wd:Q182496 wd:Q1540899
# %LANGUAGES_SPARQL% is replaced by a comma-separated list of quoted language codes
SPARQL_WIKIPEDIA_ARTICLE = """
SELECT DISTINCT ?item ?name ?article ?lang
WHERE {
  hint:Query hint:optimizer "None".
  VALUES ?item { %IDS% }
  ?article schema:about ?item ;
           schema:inLanguage ?lang ;
           schema:name ?name ;
           schema:isPartOf [ wikibase:wikiGroup "wikipedia" ] .
  FILTER(?lang in (%LANGUAGES_SPARQL%)) .
  FILTER (!CONTAINS(?name, ':')) .
}
ORDER BY ?item ?lang
"""

SPARQL_DESCRIPTION = """
SELECT DISTINCT ?item ?itemDescription
WHERE {
  VALUES ?item { %IDS% }
  ?item schema:description ?itemDescription .
  FILTER (lang(?itemDescription) in (%LANGUAGES_SPARQL%))
}
ORDER BY ?item
"""

NOT_A_DESCRIPTION = [
    'web site',
    'site web',
    'komputa serĉilo',
    'interreta serĉilo',
    'bilaketa motor',
    'web search engine',
    'wikimedia täpsustuslehekülg',
]

SKIP_ENGINE_SOURCE = [
    # fmt: off
    ('gitlab', 'wikidata')  # descriptions are about wikipedia disambiguation pages
    # fmt: on
]

WIKIPEDIA_LANGUAGES = {}
LANGUAGES_SPARQL = ''
IDS = None
WIKIPEDIA_LANGUAGE_VARIANTS = {'zh_Hant': 'zh-tw'}


descriptions = {}
wd_to_engine_name = {}


def normalize_description(description):
    # strip ASCII control characters (C0 range 0x00..0x1F), then collapse whitespace
    for c in [chr(c) for c in range(0, 32)]:
        description = description.replace(c, ' ')
    description = ' '.join(description.strip().split())
    return description
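
# For example:
#
#   >>> normalize_description('a  web\tsearch\nengine')
#   'a web search engine'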


def update_description(engine_name, lang, description, source, replace=True):
    if not isinstance(description, str):
        return
    description = normalize_description(description)
    if description.lower() == engine_name.lower():
        return
    if description.lower() in NOT_A_DESCRIPTION:
        return
    if (engine_name, source) in SKIP_ENGINE_SOURCE:
        return
    if ' ' not in description:
        # skip single-word descriptions (like "website")
        return
    if replace or lang not in descriptions[engine_name]:
        descriptions[engine_name][lang] = [description, source]


def get_wikipedia_summary(wikipedia_url, searxng_locale):
    # derive the REST API URL from the HTML URL and fetch the page summary

    headers = {'User-Agent': searx_useragent()}
    if searxng_locale in WIKIPEDIA_LANGUAGE_VARIANTS:
        headers['Accept-Language'] = WIKIPEDIA_LANGUAGE_VARIANTS.get(searxng_locale)

    # URL path: from HTML URL to REST API URL
    parsed_url = urlparse(wikipedia_url)
    # remove the /wiki/ prefix
    article_name = parsed_url.path.split('/wiki/')[1]
    # article_name is already URL-encoded, except for the / characters, which
    # must be encoded for the REST API call
    encoded_article_name = article_name.replace('/', '%2F')
    path = '/api/rest_v1/page/summary/' + encoded_article_name
    wikipedia_rest_url = parsed_url._replace(path=path).geturl()
    try:
        response = searx.network.get(wikipedia_rest_url, headers=headers, timeout=10)
        response.raise_for_status()
    except Exception as e:  # pylint: disable=broad-except
        print("  ", wikipedia_url, e)
        return None
    api_result = json.loads(response.text)
    return api_result.get('extract')
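
# For example, the HTML URL https://de.wikipedia.org/wiki/PubMed maps to the
# REST API URL https://de.wikipedia.org/api/rest_v1/page/summary/PubMed, whose
# JSON answer carries the page summary in the 'extract' field.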


def get_website_description(url, lang1, lang2=None):
    headers = {
        'User-Agent': gen_useragent(),
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        'DNT': '1',
        'Upgrade-Insecure-Requests': '1',
        'Sec-GPC': '1',
        'Cache-Control': 'max-age=0',
    }
    if lang1 is not None:
        lang_list = [lang1]
        if lang2 is not None:
            lang_list.append(lang2)
        headers['Accept-Language'] = f'{",".join(lang_list)};q=0.8'
    try:
        response = searx.network.get(url, headers=headers, timeout=10)
        response.raise_for_status()
    except Exception:  # pylint: disable=broad-except
        return (None, None)

    try:
        html = fromstring(response.text)
    except ValueError:
        html = fromstring(response.content)

    description = extract_text(html.xpath('/html/head/meta[@name="description"]/@content'))
    if not description:
        description = extract_text(html.xpath('/html/head/meta[@property="og:description"]/@content'))
    if not description:
        description = extract_text(html.xpath('/html/head/title'))
    lang = extract_text(html.xpath('/html/@lang'))
    # lang1 can be None: guard before falling back to it
    if lang is None and lang1:
        lang = lang1
    lang = detect_language(description) or lang or 'en'
    lang = lang.split('_')[0]
    lang = lang.split('-')[0]
    return (lang, description)
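
# Illustration (hypothetical page): for a front page whose <head> contains only
# <title>Example search</title>, the fallback chain ends at the <title> element,
# so the returned tuple would be (<detected or requested lang>, 'Example search').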


def initialize():
    global IDS, LANGUAGES_SPARQL
    searx.search.initialize()
    wikipedia_engine = searx.engines.engines['wikipedia']

    # map SearXNG UI locales to Wikipedia languages; some locales need an alias
    locale2lang = {'nl-BE': 'nl'}
    for sxng_ui_lang in LOCALE_NAMES:

        sxng_ui_alias = locale2lang.get(sxng_ui_lang, sxng_ui_lang)
        wiki_lang = None

        if sxng_ui_alias in wikipedia_engine.traits.custom['WIKIPEDIA_LANGUAGES']:
            wiki_lang = sxng_ui_alias
        if not wiki_lang:
            wiki_lang = wikipedia_engine.traits.get_language(sxng_ui_alias)
        if not wiki_lang:
            print(f"WIKIPEDIA_LANGUAGES missing {sxng_ui_lang}")
            continue
        WIKIPEDIA_LANGUAGES[sxng_ui_lang] = wiki_lang

    LANGUAGES_SPARQL = ', '.join(f"'{l}'" for l in set(WIKIPEDIA_LANGUAGES.values()))
    for engine_name, engine in searx.engines.engines.items():
        descriptions[engine_name] = {}
        wikidata_id = getattr(engine, "about", {}).get('wikidata_id')
        if wikidata_id is not None:
            wd_to_engine_name.setdefault(wikidata_id, set()).add(engine_name)

    IDS = ' '.join('wd:' + wd_id for wd_id in wd_to_engine_name)
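
# After initialize() the substitution values have shapes like (illustrative
# values only):
#
#   IDS              = 'wd:Q182496 wd:Q1540899'
#   LANGUAGES_SPARQL = "'en', 'de', 'fr'"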


def fetch_wikidata_descriptions():
    print('Fetching wikidata descriptions')
    searx.network.set_timeout_for_thread(60)
    result = wikidata.send_wikidata_query(
        SPARQL_DESCRIPTION.replace('%IDS%', IDS).replace('%LANGUAGES_SPARQL%', LANGUAGES_SPARQL)
    )
    if result is not None:
        for binding in result['results']['bindings']:
            wikidata_id = binding['item']['value'].replace('http://www.wikidata.org/entity/', '')
            wikidata_lang = binding['itemDescription']['xml:lang']
            desc = binding['itemDescription']['value']
            for engine_name in wd_to_engine_name[wikidata_id]:
                for searxng_locale in LOCALE_NAMES:
                    if WIKIPEDIA_LANGUAGES[searxng_locale] != wikidata_lang:
                        continue
                    print(
                        f"  engine: {engine_name:20} / wikidata_lang: {wikidata_lang:5}",
                        f"/ len(wikidata_desc): {len(desc)}",
                    )
                    update_description(engine_name, searxng_locale, desc, 'wikidata')
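
# The bindings follow the standard SPARQL 1.1 JSON results format; a single
# binding looks roughly like this (values are illustrative):
#
#   {
#    'item': {'type': 'uri', 'value': 'http://www.wikidata.org/entity/Q182496'},
#    'itemDescription': {'type': 'literal', 'value': '...', 'xml:lang': 'en'},
#   }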


def fetch_wikipedia_descriptions():
    print('Fetching wikipedia descriptions')
    result = wikidata.send_wikidata_query(
        SPARQL_WIKIPEDIA_ARTICLE.replace('%IDS%', IDS).replace('%LANGUAGES_SPARQL%', LANGUAGES_SPARQL)
    )
    if result is not None:
        for binding in result['results']['bindings']:
            wikidata_id = binding['item']['value'].replace('http://www.wikidata.org/entity/', '')
            wikidata_lang = binding['name']['xml:lang']
            wikipedia_url = binding['article']['value']  # for example: https://de.wikipedia.org/wiki/PubMed
            for engine_name in wd_to_engine_name[wikidata_id]:
                for searxng_locale in LOCALE_NAMES:
                    if WIKIPEDIA_LANGUAGES[searxng_locale] != wikidata_lang:
                        continue
                    desc = get_wikipedia_summary(wikipedia_url, searxng_locale)
                    if not desc:
                        continue
                    print(
                        f"  engine: {engine_name:20} / wikidata_lang: {wikidata_lang:5}",
                        f"/ len(wikipedia_desc): {len(desc)}",
                    )
                    update_description(engine_name, searxng_locale, desc, 'wikipedia')


def normalize_url(url):
    url = url.replace('{language}', 'en')
    url = urlparse(url)._replace(path='/', params='', query='', fragment='').geturl()
    url = url.replace('https://api.', 'https://')
    return url
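
# A minimal sketch of the normalization (hypothetical URL):
#
#   >>> normalize_url('https://api.example.com/{language}/search?q=test')
#   'https://example.com/'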


def fetch_website_description(engine_name, website):
    print(f"- fetch website descr: {engine_name} / {website}")
    default_lang, default_description = get_website_description(website, None, None)

    if default_lang is None or default_description is None:
        # the front page can't be fetched: skip this engine
        return

    # order the language list so that the most common languages are tried first
    languages = ['en', 'es', 'pt', 'ru', 'tr', 'fr']
    languages = languages + [l for l in LOCALE_NAMES if l not in languages]

    previous_matched_lang = None
    previous_count = 0

    for lang in languages:

        if lang in descriptions[engine_name]:
            continue

        fetched_lang, desc = get_website_description(website, lang, WIKIPEDIA_LANGUAGES[lang])
        if fetched_lang is None or desc is None:
            continue

        # check whether desc changes with the different lang values

        if fetched_lang == previous_matched_lang:
            previous_count += 1
            if previous_count == 6:
                # the website has returned the same description for 6 different
                # languages in the Accept-Language header: stop now
                break
        else:
            previous_matched_lang = fetched_lang
            previous_count = 0

        # Don't blindly trust the value of fetched_lang; some websites return
        # inappropriate values, for example bing-images::
        #
        #   requested lang: zh-Hans-CN / fetched lang: ceb / desc: 查看根据您的兴趣量身定制的提要
        #
        # The lang ceb is "Cebuano", but the description is given in zh-Hans-CN

        print(
            f"  engine: {engine_name:20} / requested lang:{lang:7}"
            f" / fetched lang: {fetched_lang:7} / len(desc): {len(desc)}"
        )

        matched_lang = match_locale(fetched_lang, LOCALE_NAMES.keys(), fallback=lang)
        update_description(engine_name, matched_lang, desc, website, replace=False)


def fetch_website_descriptions():
    print('Fetching website descriptions')
    for engine_name, engine in searx.engines.engines.items():
        website = getattr(engine, "about", {}).get('website')
        if website is None and hasattr(engine, "search_url"):
            website = normalize_url(getattr(engine, "search_url"))
        if website is None and hasattr(engine, "base_url"):
            website = normalize_url(getattr(engine, "base_url"))
        if website is not None:
            fetch_website_description(engine_name, website)


def get_engine_descriptions_filename():
    return join(searx_dir, "data", "engine_descriptions.json")


def get_output():
    """
    From descriptions[engine][language] = [description, source]
    to

    * output[language][engine] = description_and_source
    * description_and_source can be:
      * [description, source]
      * description (if source = "wikipedia")
      * [f"{engine}:{language}", "ref"] (a reference to another existing description)
    """
    output = {locale: {} for locale in LOCALE_NAMES}

    seen_descriptions = {}

    for engine_name, lang_descriptions in descriptions.items():
        for language, description in lang_descriptions.items():
            if description[0] in seen_descriptions:
                # deduplicate: replace a repeated description text by a reference
                ref = seen_descriptions[description[0]]
                description = [f'{ref[0]}:{ref[1]}', 'ref']
            else:
                seen_descriptions[description[0]] = (engine_name, language)
                if description[1] == 'wikipedia':
                    description = description[0]
            output.setdefault(language, {}).setdefault(engine_name, description)

    return output
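
# Illustrative output shape (hypothetical engines, descriptions and sources):
#
#   {
#    "en": {
#     "ddg": "DuckDuckGo is a search engine",
#     "qwant": ["Qwant is a search engine", "wikidata"]
#    },
#    "fr": {
#     "qwant": ["qwant:en", "ref"]
#    }
#   }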


def main():
    initialize()
    fetch_wikidata_descriptions()
    fetch_wikipedia_descriptions()
    fetch_website_descriptions()

    output = get_output()
    with open(get_engine_descriptions_filename(), 'w', encoding='utf8') as f:
        f.write(json.dumps(output, indent=1, separators=(',', ':'), ensure_ascii=False))


if __name__ == "__main__":
    main()