1
0
Fork 0

Compare commits

...

5 Commits

Author SHA1 Message Date
Markus Heiser 8762863ebf [mod] presearch: set WEB timeout to 4sec & single network for all request
timeout: 4.0
  The timeout of presearch-WEB is left up from the default of 3sec to 4sec.  The
  engine has to send two HTTP requests, they often exceed the default timeout of
  3sec. Since all other presearch categories (images, videos, news) also have a
  timeout of 4 sec, the WEB search should also have the same timeout.

network: presearch
  Place all HTTP requests in the same network, named ``presearch``.

Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
2024-01-15 19:23:26 +01:00
Markus Heiser e560d7e373 [mod] presearch: add language & region support
In Presearch there are languages for the UI and regions for narrowing down the
search.  With this change the SearXNG engine supports a search by region.  The
details can be found in the documentation of the source code.

To test, you can search terms like::

   !presearch bmw :zh-TW
   !presearch bmw :en-CA

1. You should get results corresponding to the region (Taiwan, Canada)
2. and in the language (Chinese, English).
3. The context in info box content is in the same language.

Exceptions:

1. Region or language is not supported by Presearch or
2. SearXNG user did not select a region tag, for example::

    !presearch bmw :en

Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
2024-01-15 19:23:26 +01:00
Markus Heiser a2c269bbac [mod] presearch: hardening engine's response against KeyErrors
Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
2024-01-15 19:23:26 +01:00
Bnyro 0a78f59aba [fix] presearch: safesearch, time ranges, crash when no result found 2024-01-15 19:23:26 +01:00
Markus Heiser 87f18b98ec [fix] SyntaxWarning: invalid escape sequence '\>'
This patch fixes issue reported by ``make test.unit``::

   searx/search/checker/impl.py:39: SyntaxWarning: invalid escape sequence '\>'
      rep = ['<' + tag + '[^\>]*>' for tag in HTML_TAGS]

Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
2024-01-15 18:27:21 +01:00
4 changed files with 132 additions and 29 deletions

View File

@ -0,0 +1,13 @@
.. _engine presearch:
================
Presearch Engine
================
.. contents::
:depth: 2
:local:
:backlinks: entry
.. automodule:: searx.engines.presearch
:members:

View File

@ -1,23 +1,72 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""Presearch (general, images, videos, news)
"""Presearch supports the search types listed in :py:obj:`search_type` (general,
images, videos, news).
Configured ``presearch`` engines:
.. code:: yaml
- name: presearch
engine: presearch
search_type: search
categories: [general, web]
- name: presearch images
...
search_type: images
categories: [images, web]
- name: presearch videos
...
search_type: videos
categories: [general, web]
- name: presearch news
...
search_type: news
categories: [news, web]
.. hint::
The results in the video category are most often links to pages that contain
a video, for instance many links from Presearch's video category link
content from facebook (aka Meta) or Twitter (aka X). Since these are not
real links to video streams SearXNG can't use the video template for this and
if SearXNG can't use this template, then the user doesn't want to see these
hits in the videos category.
TL;DR; by default presearch's video category is placed into categories::
By default Presearch's video category is intentionally placed into::
categories: [general, web]
Search type ``video``
=====================
The results in the video category are most often links to pages that contain a
video, for instance many links from Presearch's video category link content
from facebook (aka Meta) or Twitter (aka X). Since these are not real links to
video streams SearXNG can't use the video template for this and if SearXNG can't
use this template, then the user doesn't want to see these hits in the videos
category.
Languages & Regions
===================
In Presearch there are languages for the UI and regions for narrowing down the
search. If we set "auto" for the region in the WEB-UI of Presearch and cookie
``use_local_search_results=false``, then the defaults are set for both (the
language and the region) from the ``Accept-Language`` header.
Since the region is already "auto" by default, we only need to set the
``use_local_search_results`` cookie and send the ``Accept-Language`` header. We
have to set these values in both requests we send to Presearch; in the first
request to get the request-ID from Presearch and in the final request to get the
result list (see ``send_accept_language_header``).
Implementations
===============
"""
from urllib.parse import urlencode
from searx import locales
from searx.network import get
from searx.utils import gen_useragent, html_to_text
@ -30,7 +79,9 @@ about = {
"results": "JSON",
}
paging = True
safesearch = True
time_range_support = True
send_accept_language_header = True
categories = ["general", "web"] # general, images, videos, news
search_type = "search"
@ -45,19 +96,43 @@ def init(_):
raise ValueError(f'presearch search_type: {search_type}')
def _get_request_id(query, page, time_range, safesearch):
def _get_request_id(query, params):
args = {
"q": query,
"page": page,
"page": params["pageno"],
}
if time_range:
args["time_range"] = time_range
if params["time_range"]:
args["time"] = params["time_range"]
url = f"{base_url}/{search_type}?{urlencode(args)}"
headers = {
'User-Agent': gen_useragent(),
'Cookie': f"b=1;presearch_session=;use_safe_search={safesearch_map[safesearch]}",
'Cookie': (
f"b=1;"
f" presearch_session=;"
f" use_local_search_results=false;"
f" use_safe_search={safesearch_map[params['safesearch']]}"
),
}
if params['searxng_locale'] != 'all':
l = locales.get_locale(params['searxng_locale'])
# Presearch narrows down the search by region. In SearXNG when the user
# does not set a region (e.g. 'en-CA' / canada) we cannot hand over a
# region.
# We could possibly use searx.locales.get_official_locales to determine
# in which regions this language is an official one, but then we still
# wouldn't know which region should be given more weight / Presearch
# performs an IP-based geolocation of the user, we don't want that in
# SearXNG ;-)
if l.territory:
headers['Accept-Language'] = f"{l.language}-{l.territory},{l.language};" "q=0.9,*;" "q=0.5"
resp_text = get(url, headers=headers).text # type: ignore
for line in resp_text.split("\n"):
@ -68,8 +143,7 @@ def _get_request_id(query, page, time_range, safesearch):
def request(query, params):
request_id = _get_request_id(query, params["pageno"], params["time_range"], params["safesearch"])
request_id = _get_request_id(query, params)
params["headers"]["Accept"] = "application/json"
params["url"] = f"{base_url}/results?id={request_id}"
@ -108,11 +182,23 @@ def parse_search_query(json_results):
if info:
attributes = []
for item in info.get('about', []):
label, value = html_to_text(item).split(':', 1)
text = html_to_text(item)
if ':' in text:
# split text into key / value
label, value = text.split(':', 1)
else:
# In other languages (tested with zh-TW) a colon is represented
# by a different symbol --> then we split at the first space.
label, value = text.split(' ', 1)
label = label[:-1]
value = _strip_leading_strings(value)
attributes.append({'label': label, 'value': value})
content = []
for item in [info['subtitle'], info['description']]:
for item in [info.get('subtitle'), info.get('description')]:
if not item:
continue
item = _strip_leading_strings(html_to_text(item))
if item:
content.append(item)
@ -134,17 +220,17 @@ def response(resp):
json_resp = resp.json()
if search_type == 'search':
results = parse_search_query(json_resp['results'])
results = parse_search_query(json_resp.get('results'))
elif search_type == 'images':
for item in json_resp['images']:
for item in json_resp.get('images', []):
results.append(
{
'template': 'images.html',
'title': item['title'],
'url': item['link'],
'img_src': item['image'],
'thumbnail_src': item['thumbnail'],
'url': item.get('link'),
'img_src': item.get('image'),
'thumbnail_src': item.get('thumbnail'),
}
)
@ -152,12 +238,12 @@ def response(resp):
# The results in the video category are most often links to pages that contain
# a video and not to a video stream --> SearXNG can't use the video template.
for item in json_resp['videos']:
for item in json_resp.get('videos', []):
metadata = [x for x in [item.get('description'), item.get('duration')] if x]
results.append(
{
'title': item['title'],
'url': item['link'],
'url': item.get('link'),
'content': '',
'metadata': ' / '.join(metadata),
'img_src': item.get('image'),
@ -165,13 +251,13 @@ def response(resp):
)
elif search_type == 'news':
for item in json_resp['news']:
for item in json_resp.get('news', []):
metadata = [x for x in [item.get('source'), item.get('time')] if x]
results.append(
{
'title': item['title'],
'url': item['link'],
'content': item['description'],
'url': item.get('link'),
'content': item.get('description', ''),
'metadata': ' / '.join(metadata),
'img_src': item.get('image'),
}

View File

@ -36,7 +36,7 @@ HTML_TAGS = [
def get_check_no_html():
rep = ['<' + tag + '[^\>]*>' for tag in HTML_TAGS]
rep = ['<' + tag + r'[^\>]*>' for tag in HTML_TAGS]
rep += ['</' + tag + '>' for tag in HTML_TAGS]
pattern = re.compile('|'.join(rep))

View File

@ -1347,10 +1347,12 @@ engines:
search_type: search
categories: [general, web]
shortcut: ps
timeout: 4.0
disabled: true
- name: presearch images
engine: presearch
network: presearch
search_type: images
categories: [images, web]
timeout: 4.0
@ -1359,6 +1361,7 @@ engines:
- name: presearch videos
engine: presearch
network: presearch
search_type: videos
categories: [general, web]
timeout: 4.0
@ -1367,6 +1370,7 @@ engines:
- name: presearch news
engine: presearch
network: presearch
search_type: news
categories: [news, web]
timeout: 4.0