1
0
mirror of https://github.com/searxng/searxng.git synced 2024-11-22 12:10:11 +01:00

Remove trailing whitespace

This commit is contained in:
stepshal 2016-07-08 23:43:28 +07:00
parent fbbb307f2e
commit 2f83750619
2 changed files with 25 additions and 26 deletions

View File

@ -22,4 +22,3 @@ def response(resp):
resp: requests response object resp: requests response object
''' '''
return [{'url': '', 'title': '', 'content': ''}] return [{'url': '', 'title': '', 'content': ''}]

View File

@ -5,17 +5,17 @@ import unicodedata
import string import string
from urllib import urlencode from urllib import urlencode
from requests import get from requests import get
languages = {'de', 'en', 'es', 'fr', 'hu', 'it', 'nl', 'jp'} languages = {'de', 'en', 'es', 'fr', 'hu', 'it', 'nl', 'jp'}
url_template = 'https://www.wikidata.org/w/api.php?action=wbgetentities&format=json&{query}&props=labels%7Cdatatype%7Cclaims%7Caliases&languages=' + '|'.join(languages) url_template = 'https://www.wikidata.org/w/api.php?action=wbgetentities&format=json&{query}&props=labels%7Cdatatype%7Cclaims%7Caliases&languages=' + '|'.join(languages)
url_wmflabs_template = 'http://wdq.wmflabs.org/api?q=' url_wmflabs_template = 'http://wdq.wmflabs.org/api?q='
url_wikidata_search_template='http://www.wikidata.org/w/api.php?action=query&list=search&format=json&srnamespace=0&srprop=sectiontitle&{query}' url_wikidata_search_template='http://www.wikidata.org/w/api.php?action=query&list=search&format=json&srnamespace=0&srprop=sectiontitle&{query}'
wmflabs_queries = [ wmflabs_queries = [
'CLAIM[31:8142]', # all devise 'CLAIM[31:8142]', # all devise
] ]
db = { db = {
'iso4217' : { 'iso4217' : {
}, },
@ -26,7 +26,7 @@ db = {
def remove_accents(data):
    """Return *data* lower-cased with accented characters decomposed (NFKD)."""
    decomposed = unicodedata.normalize('NFKD', data)
    return decomposed.lower()
def normalize_name(name):
    """Normalize a currency name for lookup: strip accents, lower-case,
    turn dashes into spaces and collapse runs of spaces to a single one."""
    flattened = remove_accents(name.lower()).replace('-', ' ')
    return re.sub(' +', ' ', flattened)
@ -66,22 +66,22 @@ def get_property_value(data, name):
prop = data.get('claims', {}).get(name, {}) prop = data.get('claims', {}).get(name, {})
if len(prop) == 0: if len(prop) == 0:
return None return None
value = prop[0].get('mainsnak', {}).get('datavalue', {}).get('value', '') value = prop[0].get('mainsnak', {}).get('datavalue', {}).get('value', '')
if value == '': if value == '':
return None return None
return value return value
def parse_currency(data): def parse_currency(data):
iso4217 = get_property_value(data, 'P498') iso4217 = get_property_value(data, 'P498')
if iso4217 is not None: if iso4217 is not None:
unit = get_property_value(data, 'P558') unit = get_property_value(data, 'P558')
if unit is not None: if unit is not None:
add_currency_name(unit, iso4217) add_currency_name(unit, iso4217)
labels = data.get('labels', {}) labels = data.get('labels', {})
for language in languages: for language in languages:
name = labels.get(language, {}).get('value', None) name = labels.get(language, {}).get('value', None)
@ -95,22 +95,22 @@ def parse_currency(data):
alias = aliases[language][i].get('value', None) alias = aliases[language][i].get('value', None)
add_currency_name(alias, iso4217) add_currency_name(alias, iso4217)
def fetch_data(wikidata_ids):
    """Download the wikidata entities named in *wikidata_ids* and hand each
    entity to parse_currency() so its names get registered."""
    ids_query = urlencode({'ids' : '|'.join(wikidata_ids)})
    response = get(url_template.format(query=ids_query))
    entities = json.loads(response.content).get('entities', {})
    for entity_name in entities:
        parse_currency(entities.get(entity_name))
def add_q(i):
    """Build a wikidata entity id from a bare number, e.g. 4916 -> 'Q4916'."""
    return 'Q{0}'.format(i)
def fetch_data_batch(wikidata_ids): def fetch_data_batch(wikidata_ids):
while len(wikidata_ids) > 0: while len(wikidata_ids) > 0:
if len(wikidata_ids) > 50: if len(wikidata_ids) > 50:
@ -119,8 +119,8 @@ def fetch_data_batch(wikidata_ids):
else: else:
fetch_data(wikidata_ids) fetch_data(wikidata_ids)
wikidata_ids = [] wikidata_ids = []
def wdq_query(query): def wdq_query(query):
url = url_wmflabs_template + query url = url_wmflabs_template + query
htmlresponse = get(url) htmlresponse = get(url)
@ -131,23 +131,23 @@ def wdq_query(query):
print "error for query '" + query + "' :" + error print "error for query '" + query + "' :" + error
fetch_data_batch(qlist) fetch_data_batch(qlist)
def wd_query(query, offset=0):
    """Run a full-text search on wikidata.org (50 results starting at
    *offset*) and fetch the matching entities in batches."""
    search_params = urlencode({'srsearch': query, 'srlimit': 50, 'sroffset': offset})
    response = get(url_wikidata_search_template.format(query=search_params))
    hits = json.loads(response.content).get('query', {}).get('search', {})
    qlist = [hit.get('title', '') for hit in hits]
    fetch_data_batch(qlist)
# -- fetch --
# Pull every currency matched by the wmflabs queries from wikidata.
for wmflabs_query in wmflabs_queries:
    wdq_query(wmflabs_query)

# -- static entries not covered by the queries above --
add_currency_name(u"euro", 'EUR')
add_currency_name(u"euros", 'EUR')
add_currency_name(u"dollar", 'USD')