2013-10-15 19:11:43 +02:00
|
|
|
from json import loads
|
2013-10-23 23:55:37 +02:00
|
|
|
from urllib import urlencode
|
2013-11-09 18:39:20 +01:00
|
|
|
from searx.utils import html_to_text
|
2013-10-14 23:09:13 +02:00
|
|
|
|
2013-10-23 23:55:37 +02:00
|
|
|
# --- engine configuration -------------------------------------------------
# Base endpoint and the d.js JSON search URL; {query} is a pre-encoded
# query string and {offset} is the zero-based result offset.
url = 'https://duckduckgo.com/'
search_url = url + 'd.js?{query}&p=1&s={offset}'

# Region/language sent as the 'l' parameter with every query.
locale = 'us-en'

# This engine supports result pagination (searx reads this flag).
paging = True
|
|
|
|
|
2014-01-20 02:31:20 +01:00
|
|
|
|
2013-10-14 23:09:13 +02:00
|
|
|
def request(query, params):
    """Fill in ``params['url']`` with the DuckDuckGo d.js request URL.

    ``params['pageno']`` (1-based) is translated into a result offset of
    30 results per page; the query and locale are URL-encoded into the
    query string.  Returns the mutated ``params`` dict.
    """
    # 30 results per page; page 1 starts at offset 0.
    result_offset = 30 * (params['pageno'] - 1)
    encoded_query = urlencode({'q': query, 'l': locale})
    params['url'] = search_url.format(query=encoded_query,
                                      offset=result_offset)
    return params
|
|
|
|
|
|
|
|
|
|
|
|
def response(resp):
    """Parse a DuckDuckGo d.js payload into a list of result dicts.

    The payload is a JS call wrapping a JSON array; the array is located
    by searching for ``'[{'`` and the trailing ``');'`` is stripped.  The
    array's last element (a navigation stub) is discarded, as is any
    entry without a title (``'t'``).  Each kept entry yields a dict with
    ``title``, ``content`` (HTML stripped) and ``url`` keys.
    """
    raw = resp.text
    # Slice out the JSON array, then drop the final navigation entry.
    entries = loads(raw[raw.find('[{'):-2])[:-1]

    results = []
    for entry in entries:
        title = entry.get('t')
        # Entries without a title are not real results; skip them.
        if not title:
            continue
        results.append({'title': title,
                        'content': html_to_text(entry['a']),
                        'url': entry['u']})
    return results
|