mirror of https://github.com/searxng/searxng.git, synced 2024-11-05 12:50:11 +01:00
Merge pull request #90 from pointhi/engines
using generic mediawiki engines
commit c38a743c6f
@@ -1,22 +1,67 @@
+## general mediawiki-engine (Web)
+#
+# @website     websites built on mediawiki (https://www.mediawiki.org)
+# @provide-api yes (http://www.mediawiki.org/wiki/API:Search)
+#
+# @using-api   yes
+# @results     JSON
+# @stable      yes
+# @parse       url, title
+#
+# @todo        content
+
 from json import loads
 from urllib import urlencode, quote

-url = 'https://en.wikipedia.org/'
-
-search_url = url + 'w/api.php?action=query&list=search&{query}&srprop=timestamp&format=json&sroffset={offset}'  # noqa
-
-number_of_results = 10
+# engine dependent config
+categories = ['general']
+language_support = True
+paging = True
+number_of_results = 1
+
+# search-url
+base_url = 'https://{language}.wikipedia.org/'
+search_url = base_url + 'w/api.php?action=query&list=search&{query}&srprop=timestamp&format=json&sroffset={offset}&srlimit={limit}'  # noqa


+# do search-request
 def request(query, params):
-    offset = (params['pageno'] - 1) * 10
+    offset = (params['pageno'] - 1) * number_of_results
+
+    if params['language'] == 'all':
+        language = 'en'
+    else:
+        language = params['language'].split('_')[0]
+
+    # write search-language back to params, required in response
+    params['language'] = language
+
     params['url'] = search_url.format(query=urlencode({'srsearch': query}),
-                                      offset=offset)
+                                      offset=offset,
+                                      limit=number_of_results,
+                                      language=language)
+
     return params


+# get response from search-request
 def response(resp):
+    results = []
+
     search_results = loads(resp.text)
-    res = search_results.get('query', {}).get('search', [])
-    return [{'url': url + 'wiki/' + quote(result['title'].replace(' ', '_').encode('utf-8')),  # noqa
-             'title': result['title']} for result in res[:int(number_of_results)]]
+
+    # return empty array if there are no results
+    if not search_results.get('query', {}).get('search'):
+        return []
+
+    # parse results
+    for result in search_results['query']['search']:
+        url = base_url.format(language=resp.search_params['language']) + 'wiki/' + quote(result['title'].replace(' ', '_').encode('utf-8'))
+
+        # append result
+        results.append({'url': url,
+                        'title': result['title'],
+                        'content': ''})
+
+    # return results
+    return results
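Note (not part of this commit): a hypothetical driver sketch of how the engine above is exercised. Searx fills in params before issuing the HTTP request and then passes the fetched response to response(). The import path searx.engines.mediawiki, the FakeResponse stand-in and the sample values are assumptions for illustration only; the code is Python 2 style, matching the engine.

# Hypothetical driver for the generic mediawiki engine above.
from searx.engines import mediawiki  # assumed module path for the engine in this commit

# build the outgoing request parameters, as the searx core would
params = {'pageno': 1, 'language': 'de_DE'}
params = mediawiki.request('test query', params)
print params['url']
# e.g. https://de.wikipedia.org/w/api.php?action=query&list=search&srsearch=test+query&srprop=timestamp&format=json&sroffset=0&srlimit=1


class FakeResponse(object):
    # minimal stand-in for the HTTP response object searx would pass in
    text = '{"query": {"search": [{"title": "Test article"}]}}'
    search_params = params

# parse the (fake) API response, as the searx core would after fetching params['url']
for result in mediawiki.response(FakeResponse()):
    print result['url'], result['title']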
@@ -1,67 +0,0 @@
-## Wikipedia (Web)
-#
-# @website     http://www.wikipedia.org
-# @provide-api yes (http://www.mediawiki.org/wiki/API:Search)
-#
-# @using-api   yes
-# @results     JSON
-# @stable      yes
-# @parse       url, title
-#
-# @todo        content
-
-from json import loads
-from urllib import urlencode, quote
-
-# engine dependent config
-categories = ['general']
-language_support = True
-paging = True
-number_of_results = 1
-
-# search-url
-url = 'https://{language}.wikipedia.org/'
-search_url = url + 'w/api.php?action=query&list=search&{query}&srprop=timestamp&format=json&sroffset={offset}&srlimit={limit}'  # noqa
-
-
-# do search-request
-def request(query, params):
-    offset = (params['pageno'] - 1) * number_of_results
-
-    if params['language'] == 'all':
-        language = 'en'
-    else:
-        language = params['language'].split('_')[0]
-
-    # write search-language back to params, required in response
-    params['language'] = language
-
-    params['url'] = search_url.format(query=urlencode({'srsearch': query}),
-                                      offset=offset,
-                                      limit=number_of_results,
-                                      language=language)
-
-    return params
-
-
-# get response from search-request
-def response(resp):
-    results = []
-
-    search_results = loads(resp.text)
-
-    # return empty array if there are no results
-    if not search_results.get('query', {}).get('search'):
-        return []
-
-    # parse results
-    for result in search_results['query']['search']:
-        res_url = url.format(language=resp.search_params['language']) + 'wiki/' + quote(result['title'].replace(' ', '_').encode('utf-8'))
-
-        # append result
-        results.append({'url': res_url,
-                        'title': result['title'],
-                        'content': ''})
-
-    # return results
-    return results
@@ -10,9 +10,10 @@ server:

 engines:
   - name : wikipedia
-    engine : wikipedia
+    engine : mediawiki
     shortcut : wp
-#    number_of_results : 1 # default is 1
+    base_url : 'https://{language}.wikipedia.org/'
+    number_of_results : 1

   - name : bing
     engine : bing
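Note on how these keys take effect (roughly, and hedged: this is a simplified sketch, not searx's actual loader code): each per-engine setting is assigned as an attribute of the engine module, so base_url and number_of_results above replace the defaults defined in the generic mediawiki engine. The helper name configure_engine below is hypothetical.

# Simplified sketch of the assumed settings mechanism; configure_engine is not part of searx.
from searx.engines import mediawiki  # assumed module path


def configure_engine(engine_module, engine_settings):
    # bookkeeping keys stay with the searx core; everything else overrides
    # module-level defaults such as base_url and number_of_results
    for key, value in engine_settings.items():
        if key not in ('name', 'engine', 'shortcut'):
            setattr(engine_module, key, value)
    # note: values already derived at import time (e.g. search_url built from
    # base_url) are not recomputed by this sketch


configure_engine(mediawiki, {'name': 'wikipedia',
                             'engine': 'mediawiki',
                             'shortcut': 'wp',
                             'base_url': 'https://{language}.wikipedia.org/',
                             'number_of_results': 1})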
@@ -108,9 +109,9 @@ engines:
 # maybe in a fun category
 #  - name : uncyclopedia
 #    engine : mediawiki
 #    categories : general
 #    shortcut : unc
-#    url : https://uncyclopedia.wikia.com/
+#    base_url : https://uncyclopedia.wikia.com/
+#    number_of_results : 5

 # tmp suspended - too slow, too many errors
 #  - name : urbandictionary
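For illustration only (not part of this commit): the commented-out uncyclopedia entry above relies on the same MediaWiki API:Search endpoint. The snippet below simply reproduces the URL the engine's search_url template would build for that base_url, using 'kitten' as a sample query.

# Illustration of the MediaWiki API:Search request built from the engine's
# search_url template, with the base_url from the commented block above
# (Python 2, matching the engine code).
from urllib import urlencode

base_url = 'https://uncyclopedia.wikia.com/'
search_url = base_url + 'w/api.php?action=query&list=search&{query}&srprop=timestamp&format=json&sroffset={offset}&srlimit={limit}'

print search_url.format(query=urlencode({'srsearch': 'kitten'}),
                        offset=0,
                        limit=5)
# https://uncyclopedia.wikia.com/w/api.php?action=query&list=search&srsearch=kitten&srprop=timestamp&format=json&sroffset=0&srlimit=5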