1
0
mirror of https://github.com/searxng/searxng.git synced 2024-11-05 12:50:11 +01:00
searxng/searx/engines/google_images.py

69 lines
1.8 KiB
Python
Raw Normal View History

2014-09-01 15:10:05 +02:00
## Google (Images)
#
2014-09-01 15:10:05 +02:00
# @website https://www.google.com
# @provide-api yes (https://developers.google.com/web-search/docs/),
# deprecated!
#
2014-09-01 15:10:05 +02:00
# @using-api yes
# @results JSON
# @stable yes (but deprecated)
# @parse url, title, img_src
2013-10-19 22:19:14 +02:00
2014-12-16 17:26:16 +01:00
from urllib import urlencode, unquote
2013-10-19 23:12:18 +02:00
from json import loads
2013-10-19 22:19:14 +02:00
2014-09-01 15:10:05 +02:00
# engine dependent config
categories = ['images']
paging = True
safesearch = True

# search-url
# deprecated Google AJAX image-search endpoint; v=1.0, 8 large results
# per page, Google's own duplicate filter disabled (filter=off)
url = 'https://ajax.googleapis.com/'
search_url = url + 'ajax/services/search/images?v=1.0&start={offset}&rsz=large&safe={safesearch}&filter=off&{query}'
2014-01-20 02:31:20 +01:00
2013-10-19 22:19:14 +02:00
2014-09-01 15:10:05 +02:00
# do search-request
def request(query, params):
    """Fill in ``params['url']`` with the Google AJAX image-search URL.

    ``params['pageno']`` selects the page (8 results each) and
    ``params['safesearch']`` maps to Google's ``safe`` flag; ``params``
    is returned with its ``url`` key set.
    """
    # the API pages in steps of 8 results
    offset = (params['pageno'] - 1) * 8

    # only the strict setting (2) turns Google's safe mode on
    safe_flag = 'on' if params['safesearch'] == 2 else 'off'

    params['url'] = search_url.format(query=urlencode({'q': query}),
                                      offset=offset,
                                      safesearch=safe_flag)

    return params
2014-01-20 02:31:20 +01:00
2014-09-01 15:10:05 +02:00
# get response from search-request
def response(resp):
    """Parse a Google AJAX image-search JSON response.

    :param resp: HTTP response object whose ``.text`` holds the JSON body
    :returns: list of result dicts with the keys ``url``, ``title``,
              ``content``, ``thumbnail_src``, ``img_src`` and
              ``template`` ('images.html'); empty list when the payload
              carries no results
    """
    results = []

    search_res = loads(resp.text)

    # return empty array if there are no results
    if not search_res.get('responseData', {}).get('results'):
        return []

    # parse results
    for result in search_res['responseData']['results']:
        # check for the image url *before* touching any other key, so a
        # single malformed entry is skipped instead of raising KeyError
        # and dropping the whole page of results
        if 'url' not in result:
            continue

        # append result; 'content' may be absent on some entries
        results.append({'url': result['originalContextUrl'],
                        'title': result['title'],
                        'content': result.get('content', ''),
                        'thumbnail_src': result['tbUrl'],
                        # the API percent-encodes the image url
                        'img_src': unquote(result['url']),
                        'template': 'images.html'})

    # return results
    return results