# SPDX-License-Identifier: AGPL-3.0-or-later
"""
Microsoft Academic (Science)
"""

from json import dumps, loads

from searx.utils import html_to_text

# about
about = {
    "website": 'https://academic.microsoft.com',
    "wikidata_id": 'Q28136779',
    "official_api_documentation": 'http://ma-graph.org/',
    "use_official_api": False,
    "require_api_key": False,
    "results": 'JSON',
}

categories = ['images']
paging = True

search_url = 'https://academic.microsoft.com/api/search'
_paper_url = 'https://academic.microsoft.com/paper/{id}/reference'


def request(query, params):
    # build a JSON POST request against the academic.microsoft.com search endpoint
    params['url'] = search_url
    params['method'] = 'POST'
    params['headers']['content-type'] = 'application/json; charset=utf-8'
    params['data'] = dumps(
        {
            'query': query,
            'queryExpression': '',
            'filters': [],
            'orderBy': 0,
            # 10 results per page; 'skip' is the paging offset
            'skip': (params['pageno'] - 1) * 10,
            'sortAscending': True,
            'take': 10,
            'includeCitationContexts': False,
            'profileId': '',
        }
    )

    return params


def response(resp):
    results = []
    response_data = loads(resp.text)
    if not response_data:
        return results

    # each entry in 'pr' carries a 'paper' dict; 'dn' (title) and 'id' are used below
    for result in response_data.get('pr', {}):
        if 'dn' not in result['paper']:
            continue

        title = result['paper']['dn']
        content = _get_content(result['paper'])
        url = _paper_url.format(id=result['paper']['id'])
        results.append(
            {
                'url': url,
                'title': html_to_text(title),
                'content': html_to_text(content),
            }
        )

    return results


def _get_content(result):
    if 'd' in result:
        content = result['d']
        # truncate long snippets to 300 characters
        if len(content) > 300:
            return content[:300] + '...'
        return content

    return ''
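

# The sketch below is illustrative only and not part of the engine interface:
# SearXNG's core normally constructs the `params` dict, performs the HTTP
# request and feeds the reply into response(). The minimal params dict here is
# an assumption, kept just large enough for request() to run.
if __name__ == '__main__':
    _params = {'url': '', 'method': '', 'headers': {}, 'data': '', 'pageno': 1}
    request('graphene', _params)
    print(_params['method'], _params['url'])
    print(_params['data'])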