# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""Bing (News)
"""

from urllib.parse import (
    urlencode,
    urlparse,
    parse_qsl,
    quote,
)
from datetime import datetime
from dateutil import parser
from lxml import etree
from lxml.etree import XPath
from searx.utils import match_language, eval_xpath_getindex
from searx.engines.bing import (  # pylint: disable=unused-import
    language_aliases,
    _fetch_supported_languages,
    fetch_traits,
    supported_languages_url,
)

# about
about = {
    "website": 'https://www.bing.com/news',
    "wikidata_id": 'Q2878637',
    "official_api_documentation": 'https://www.microsoft.com/en-us/bing/apis/bing-news-search-api',
    "use_official_api": False,
    "require_api_key": False,
    "results": 'RSS',
}

# engine dependent config
categories = ['news']
paging = True
time_range_support = True
send_accept_language_header = True

# search-url
base_url = 'https://www.bing.com/'
search_string = 'news/search?{query}&first={offset}&format=RSS'
search_string_with_time = 'news/search?{query}&first={offset}&qft=interval%3d"{interval}"&format=RSS'
time_range_dict = {'day': '7', 'week': '8', 'month': '9'}
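# The values above are the codes substituted into the ``qft=interval`` filter of
# search_string_with_time (e.g. 'week' -> interval "8").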


def url_cleanup(url_string):
    """Remove Bing's click-tracking redirect (``apiclick.aspx``) and return the target URL."""

    parsed_url = urlparse(url_string)
    if parsed_url.netloc == 'www.bing.com' and parsed_url.path == '/news/apiclick.aspx':
        query = dict(parse_qsl(parsed_url.query))
        url_string = query.get('url', None)
    return url_string
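
# A minimal sketch of what url_cleanup() does; the wrapped URL below is made up, only
# the host and path match what the function checks:
#
#   >>> url_cleanup('https://www.bing.com/news/apiclick.aspx?url=https%3A%2F%2Fexample.org%2Fstory')
#   'https://example.org/story'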


def image_url_cleanup(url_string):
    """replace the http://*bing.com/th?id=... by https://www.bing.com/th?id=..."""

    parsed_url = urlparse(url_string)
    if parsed_url.netloc.endswith('bing.com') and parsed_url.path == '/th':
        query = dict(parse_qsl(parsed_url.query))
        url_string = "https://www.bing.com/th?id=" + quote(query.get('id'))
    return url_string
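
# A minimal sketch of what image_url_cleanup() does; the thumbnail id is made up:
#
#   >>> image_url_cleanup('http://www.bing.com/th?id=OVFT.abc123&pid=News')
#   'https://www.bing.com/th?id=OVFT.abc123'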


def _get_url(query, language, offset, time_range):
    if time_range in time_range_dict:
        search_path = search_string_with_time.format(
            # fmt: off
            query = urlencode({
                'q': query,
                'setmkt': language
            }),
            offset = offset,
            interval = time_range_dict[time_range]
            # fmt: on
        )
    else:
        # e.g. setmkt=de-de&setlang=de
        search_path = search_string.format(
            # fmt: off
            query = urlencode({
                'q': query,
                'setmkt': language
            }),
            offset = offset
            # fmt: on
        )
    return base_url + search_path
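
# A minimal sketch of a URL built by _get_url(); query and market are illustrative:
#
#   >>> _get_url('searx', 'en-US', 1, 'week')
#   'https://www.bing.com/news/search?q=searx&setmkt=en-US&first=1&qft=interval%3d"8"&format=RSS'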


def request(query, params):

    if params['time_range'] and params['time_range'] not in time_range_dict:
        return params

    offset = (params['pageno'] - 1) * 10 + 1
    if params['language'] == 'all':
        language = 'en-US'
    else:
        language = match_language(params['language'], supported_languages, language_aliases)
    params['url'] = _get_url(query, language, offset, params['time_range'])

    return params
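
# A minimal sketch of the request() contract; the params dict is normally filled in by
# searx's engine framework, the values below are illustrative:
#
#   >>> request('searx', {'time_range': None, 'pageno': 2, 'language': 'all'})['url']
#   'https://www.bing.com/news/search?q=searx&setmkt=en-US&first=11&format=RSS'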


def response(resp):

    results = []
    rss = etree.fromstring(resp.content)
    namespaces = rss.nsmap

    for item in rss.xpath('./channel/item'):
        # url / title / content
        url = url_cleanup(eval_xpath_getindex(item, './link/text()', 0, default=None))
        title = eval_xpath_getindex(item, './title/text()', 0, default=url)
        content = eval_xpath_getindex(item, './description/text()', 0, default='')

        # publishedDate
        publishedDate = eval_xpath_getindex(item, './pubDate/text()', 0, default=None)
        try:
            publishedDate = parser.parse(publishedDate, dayfirst=False)
        except (TypeError, ValueError):
            publishedDate = datetime.now()

        # thumbnail
        thumbnail = eval_xpath_getindex(item, XPath('./News:Image/text()', namespaces=namespaces), 0, default=None)
        if thumbnail is not None:
            thumbnail = image_url_cleanup(thumbnail)

        # append result
        if thumbnail is not None:
            results.append(
                {'url': url, 'title': title, 'publishedDate': publishedDate, 'content': content, 'img_src': thumbnail}
            )
        else:
            results.append({'url': url, 'title': title, 'publishedDate': publishedDate, 'content': content})

    return results
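
# A rough sketch of the feed item layout response() expects; the sample below is a
# made-up stand-in (the News namespace URI in particular is a placeholder), only the
# element names and the ``News`` prefix match the XPath expressions used above:
#
#   <rss xmlns:News="https://example.org/news-namespace" version="2.0">
#     <channel>
#       <item>
#         <title>Example headline</title>
#         <link>https://www.bing.com/news/apiclick.aspx?url=https%3A%2F%2Fexample.org%2Fstory</link>
#         <description>Example teaser text</description>
#         <pubDate>Mon, 17 Jan 2022 12:00:00 GMT</pubDate>
#         <News:Image>http://www.bing.com/th?id=OVFT.abc123&amp;pid=News</News:Image>
#       </item>
#     </channel>
#   </rss>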