searxng/searx/engines/mediawiki.py

"""
general mediawiki-engine (Web)
@website websites built on mediawiki (https://www.mediawiki.org)
@provide-api yes (http://www.mediawiki.org/wiki/API:Search)
@using-api yes
@results JSON
@stable yes
@parse url, title
@todo content
"""
from json import loads
from string import Formatter
from searx.url_utils import urlencode, quote
# engine dependent config
categories = ['general']
language_support = True
paging = True
number_of_results = 1
search_type = 'nearmatch' # possible values: title, text, nearmatch
# search-url
base_url = 'https://{language}.wikipedia.org/'
search_postfix = 'w/api.php?action=query'\
    '&list=search'\
    '&{query}'\
    '&format=json'\
    '&sroffset={offset}'\
    '&srlimit={limit}'\
    '&srwhat={searchtype}'
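# Example (illustrative, assuming the defaults above, language 'en' and the
# made-up query "test"): the first result page is requested from a URL like
#   https://en.wikipedia.org/w/api.php?action=query&list=search
#     &srsearch=test&format=json&sroffset=0&srlimit=1&srwhat=nearmatch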
# do search-request
def request(query, params):
    offset = (params['pageno'] - 1) * number_of_results

    string_args = dict(query=urlencode({'srsearch': query}),
                       offset=offset,
                       limit=number_of_results,
                       searchtype=search_type)

    format_strings = list(Formatter().parse(base_url))

    if params['language'] == 'all':
        language = 'en'
    else:
        language = params['language'].split('-')[0]

    # format_strings is e.g. [('https://', 'language', '', None), ('.wikipedia.org/', None, None, None)]
    if any(x[1] == 'language' for x in format_strings):
        string_args['language'] = language

    # write search-language back to params, required in response
    params['language'] = language

    search_url = base_url + search_postfix
    params['url'] = search_url.format(**string_args)

    return params
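# Minimal usage sketch (illustrative; the searx core supplies a richer params
# dict when it calls request()). The engine only reads 'pageno' and 'language'
# and fills in 'url':
#
#   params = {'pageno': 1, 'language': 'en-US'}
#   request('test', params)
#   # -> params['language'] == 'en', params['url'] holds the formatted api.php query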
# get response from search-request
def response(resp):
    results = []

    search_results = loads(resp.text)

    # return empty array if there are no results
    if not search_results.get('query', {}).get('search'):
        return []

    # parse results
    for result in search_results['query']['search']:
        if result.get('snippet', '').startswith('#REDIRECT'):
            continue

        url = base_url.format(language=resp.search_params['language']) +\
            'wiki/' + quote(result['title'].replace(' ', '_').encode('utf-8'))

        # append result
        results.append({'url': url,
                        'title': result['title'],
                        'content': ''})

    # return results
    return results
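# For reference (trimmed, illustrative): the JSON parsed by response() follows
# the shape of MediaWiki's list=search API, roughly
#
#   {"query": {"search": [
#       {"title": "Test", "snippet": "...", ...},
#       ...
#   ]}}
#
# Only 'title' is kept (plus 'snippet', to skip #REDIRECT stubs); the article
# URL is rebuilt from base_url and the title.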