Mirror of https://github.com/searxng/searxng.git, synced 2024-11-15 01:00:12 +01:00
[refactor] unit tests to utilize parameterized and break down monolithic tests
- For tests that perform the same arrange/act/assert pattern with different data, the data has been moved into ``parameterized.expand`` fields.
- Monolithic tests that performed multiple arrange/act/assert cycles have been broken up into separate unit tests.
- Where possible, generic assert statements have been replaced with more concise asserts (e.g. ``assertIsNone``).

This work is ultimately focused on creating smaller and more concise tests. While parameterized may make it easier to add new configurations to existing tests, that is just a beneficial side effect. The main benefit is that smaller tests are easier to reason about, which makes them easier to debug when they start failing. This improves the developer experience when refactoring the project. The total number of tests went from 192 to 259; put another way, the larger tests were broken apart into 69 more concise ones.
This commit is contained in:
parent 042c7190e6
commit 44a06190bb
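As a rough illustration of the pattern applied throughout this commit, the sketch below shows a loop-based test converted to ``parameterized.expand``. It is a minimal, hypothetical example: the ``double()`` function and ``DoubleTest`` class are made up for the illustration and are not part of SearXNG.

```python
from unittest import TestCase

from parameterized import parameterized


def double(x):
    # Hypothetical function under test (not part of SearXNG).
    return x * 2


class DoubleTest(TestCase):
    # Before: one monolithic test loops over its own data; the first failing
    # value aborts the loop and the report shows only a single test name.
    def test_double_loop(self):
        for value, expected in [(1, 2), (2, 4), (3, 6)]:
            self.assertEqual(double(value), expected)

    # After: the data moves into ``parameterized.expand`` and each tuple
    # becomes its own generated test (test_double_0, test_double_1, ...),
    # so a failing input is identified directly in the test report.
    @parameterized.expand(
        [
            (1, 2),
            (2, 4),
            (3, 6),
        ]
    )
    def test_double(self, value, expected):
        self.assertEqual(double(value), expected)
```

Each generated test runs independently, which is what makes the smaller tests easier to reason about when one of them starts failing.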
@@ -1,6 +1,7 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=invalid-name, missing-module-docstring, missing-class-docstring

from __future__ import annotations
from abc import abstractmethod, ABC
import re

@@ -258,7 +259,7 @@ class RawTextQuery:
FeelingLuckyParser, # redirect to the first link in the results list
]

def __init__(self, query, disabled_engines):
def __init__(self, query: str, disabled_engines: list):
assert isinstance(query, str)
# input parameters
self.query = query
@@ -2,15 +2,16 @@
# pylint: disable=missing-module-docstring

from mock import Mock
from parameterized import parameterized

from searx.answerers import answerers
from tests import SearxTestCase


class AnswererTest(SearxTestCase): # pylint: disable=missing-class-docstring
def test_unicode_input(self):
@parameterized.expand(answerers)
def test_unicode_input(self, answerer):
query = Mock()
unicode_payload = 'árvíztűrő tükörfúrógép'
for answerer in answerers:
query.query = '{} {}'.format(answerer.keywords[0], unicode_payload)
self.assertTrue(isinstance(answerer.answer(query), list))
self.assertIsInstance(answerer.answer(query), list)
@@ -1,42 +1,36 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring

from parameterized import parameterized
from tests import SearxTestCase
import searx.exceptions
from searx import get_setting


class TestExceptions(SearxTestCase): # pylint: disable=missing-class-docstring
def test_default_suspend_time(self):
with self.assertRaises(searx.exceptions.SearxEngineAccessDeniedException) as e:
raise searx.exceptions.SearxEngineAccessDeniedException()
@parameterized.expand(
[
searx.exceptions.SearxEngineAccessDeniedException,
searx.exceptions.SearxEngineCaptchaException,
searx.exceptions.SearxEngineTooManyRequestsException,
]
)
def test_default_suspend_time(self, exception):
with self.assertRaises(exception) as e:
raise exception()
self.assertEqual(
e.exception.suspended_time,
get_setting(searx.exceptions.SearxEngineAccessDeniedException.SUSPEND_TIME_SETTING),
get_setting(exception.SUSPEND_TIME_SETTING),
)

with self.assertRaises(searx.exceptions.SearxEngineCaptchaException) as e:
raise searx.exceptions.SearxEngineCaptchaException()
self.assertEqual(
e.exception.suspended_time, get_setting(searx.exceptions.SearxEngineCaptchaException.SUSPEND_TIME_SETTING)
@parameterized.expand(
[
searx.exceptions.SearxEngineAccessDeniedException,
searx.exceptions.SearxEngineCaptchaException,
searx.exceptions.SearxEngineTooManyRequestsException,
]
)

with self.assertRaises(searx.exceptions.SearxEngineTooManyRequestsException) as e:
raise searx.exceptions.SearxEngineTooManyRequestsException()
self.assertEqual(
e.exception.suspended_time,
get_setting(searx.exceptions.SearxEngineTooManyRequestsException.SUSPEND_TIME_SETTING),
)

def test_custom_suspend_time(self):
with self.assertRaises(searx.exceptions.SearxEngineAccessDeniedException) as e:
raise searx.exceptions.SearxEngineAccessDeniedException(suspended_time=1337)
def test_custom_suspend_time(self, exception):
with self.assertRaises(exception) as e:
raise exception(suspended_time=1337)
self.assertEqual(e.exception.suspended_time, 1337)

with self.assertRaises(searx.exceptions.SearxEngineCaptchaException) as e:
raise searx.exceptions.SearxEngineCaptchaException(suspended_time=1409)
self.assertEqual(e.exception.suspended_time, 1409)

with self.assertRaises(searx.exceptions.SearxEngineTooManyRequestsException) as e:
raise searx.exceptions.SearxEngineTooManyRequestsException(suspended_time=1543)
self.assertEqual(e.exception.suspended_time, 1543)
@@ -90,7 +90,7 @@ class TestGetBangDefinitionAndAutocomplete(SearxTestCase): # pylint:disable=mis

def test_partial(self):
bang_definition, new_autocomplete = get_bang_definition_and_autocomplete('examp', external_bangs_db=TEST_DB)
self.assertEqual(bang_definition, None)
self.assertIsNone(bang_definition)
self.assertEqual(new_autocomplete, ['example'])

def test_partial2(self):
@@ -100,7 +100,7 @@ class TestGetBangDefinitionAndAutocomplete(SearxTestCase): # pylint:disable=mis

def test_error(self):
bang_definition, new_autocomplete = get_bang_definition_and_autocomplete('error', external_bangs_db=TEST_DB)
self.assertEqual(bang_definition, None)
self.assertIsNone(bang_definition)
self.assertEqual(new_autocomplete, [])

def test_actual_data(self):
@@ -112,7 +112,7 @@ class TestGetBangDefinitionAndAutocomplete(SearxTestCase): # pylint:disable=mis
class TestExternalBangJson(SearxTestCase): # pylint:disable=missing-class-docstring
def test_no_external_bang_query(self):
result = get_bang_url(SearchQuery('test', engineref_list=[EngineRef('wikipedia', 'general')]))
self.assertEqual(result, None)
self.assertIsNone(result)

def test_get_bang_url(self):
url = get_bang_url(SearchQuery('test', engineref_list=[], external_bang='example'), external_bangs_db=TEST_DB)
@@ -2,6 +2,8 @@
# pylint: disable=missing-module-docstring
"""Test some code from module :py:obj:`searx.locales`"""

from __future__ import annotations
from parameterized import parameterized
from searx import locales
from searx.sxng_locales import sxng_locales
from tests import SearxTestCase
@@ -13,98 +15,104 @@ class TestLocales(SearxTestCase):
- :py:obj:`searx.locales.match_locale`
"""

def test_match_locale(self):

locale_tag_list = [x[0] for x in sxng_locales]
@classmethod
def setUpClass(cls):
cls.locale_tag_list = [x[0] for x in sxng_locales]

@parameterized.expand(
[
'de',
'fr',
'zh',
]
)
def test_locale_languages(self, locale: str):
# Test SearXNG search languages
self.assertEqual(locales.match_locale(locale, self.locale_tag_list), locale)

self.assertEqual(locales.match_locale('de', locale_tag_list), 'de')
self.assertEqual(locales.match_locale('fr', locale_tag_list), 'fr')
self.assertEqual(locales.match_locale('zh', locale_tag_list), 'zh')

@parameterized.expand(
[
('ca-es', 'ca-ES'),
('de-at', 'de-AT'),
('de-de', 'de-DE'),
('en-UK', 'en-GB'),
('fr-be', 'fr-BE'),
('fr-be', 'fr-BE'),
('fr-ca', 'fr-CA'),
('fr-ch', 'fr-CH'),
('zh-cn', 'zh-CN'),
('zh-tw', 'zh-TW'),
('zh-hk', 'zh-HK'),
]
)
def test_match_region(self, locale: str, expected_locale: str):
# Test SearXNG search regions
self.assertEqual(locales.match_locale(locale, self.locale_tag_list), expected_locale)

self.assertEqual(locales.match_locale('ca-es', locale_tag_list), 'ca-ES')
self.assertEqual(locales.match_locale('de-at', locale_tag_list), 'de-AT')
self.assertEqual(locales.match_locale('de-de', locale_tag_list), 'de-DE')
self.assertEqual(locales.match_locale('en-UK', locale_tag_list), 'en-GB')
self.assertEqual(locales.match_locale('fr-be', locale_tag_list), 'fr-BE')
self.assertEqual(locales.match_locale('fr-be', locale_tag_list), 'fr-BE')
self.assertEqual(locales.match_locale('fr-ca', locale_tag_list), 'fr-CA')
self.assertEqual(locales.match_locale('fr-ch', locale_tag_list), 'fr-CH')
self.assertEqual(locales.match_locale('zh-cn', locale_tag_list), 'zh-CN')
self.assertEqual(locales.match_locale('zh-tw', locale_tag_list), 'zh-TW')
self.assertEqual(locales.match_locale('zh-hk', locale_tag_list), 'zh-HK')

@parameterized.expand(
[
('zh-hans', 'zh-CN'),
('zh-hans-cn', 'zh-CN'),
('zh-hant', 'zh-TW'),
('zh-hant-tw', 'zh-TW'),
]
)
def test_match_lang_script_code(self, locale: str, expected_locale: str):
# Test language script code
self.assertEqual(locales.match_locale(locale, self.locale_tag_list), expected_locale)

self.assertEqual(locales.match_locale('zh-hans', locale_tag_list), 'zh-CN')
self.assertEqual(locales.match_locale('zh-hans-cn', locale_tag_list), 'zh-CN')
self.assertEqual(locales.match_locale('zh-hant', locale_tag_list), 'zh-TW')
self.assertEqual(locales.match_locale('zh-hant-tw', locale_tag_list), 'zh-TW')

# Test individual locale lists
def test_locale_de(self):
self.assertEqual(locales.match_locale('de', ['de-CH', 'de-DE']), 'de-DE')
self.assertEqual(locales.match_locale('de', ['de-CH', 'de-DE']), 'de-DE')

def test_locale_es(self):
self.assertEqual(locales.match_locale('es', [], fallback='fallback'), 'fallback')

self.assertEqual(locales.match_locale('de', ['de-CH', 'de-DE']), 'de-DE')
self.assertEqual(locales.match_locale('de', ['de-CH', 'de-DE']), 'de-DE')
self.assertEqual(locales.match_locale('es', ['ES']), 'ES')
self.assertEqual(locales.match_locale('es', ['es-AR', 'es-ES', 'es-MX']), 'es-ES')
self.assertEqual(locales.match_locale('es-AR', ['es-AR', 'es-ES', 'es-MX']), 'es-AR')
self.assertEqual(locales.match_locale('es-CO', ['es-AR', 'es-ES']), 'es-ES')
self.assertEqual(locales.match_locale('es-CO', ['es-AR']), 'es-AR')

# Tests from the commit message of 9ae409a05a

# Assumption:
# A. When a user selects a language the results should be optimized according to
# the selected language.
#
# B. When user selects a language and a territory the results should be
# optimized with first priority on territory and second on language.

# Assume we have an engine that supports the following locales:
locale_tag_list = ['zh-CN', 'zh-HK', 'nl-BE', 'fr-CA']

# Examples (Assumption A.)
# ------------------------

# A user selects region 'zh-TW' which should end in zh_HK.
@parameterized.expand(
[
('zh-TW', ['zh-HK'], 'zh-HK'), # A user selects region 'zh-TW' which should end in zh_HK.
# hint: CN is 'Hans' and HK ('Hant') fits better to TW ('Hant')
self.assertEqual(locales.match_locale('zh-TW', locale_tag_list), 'zh-HK')
('zh', ['zh-CN'], 'zh-CN'), # A user selects only the language 'zh' which should end in CN
('fr', ['fr-CA'], 'fr-CA'), # A user selects only the language 'fr' which should end in fr_CA
('nl', ['nl-BE'], 'nl-BE'), # A user selects only the language 'fr' which should end in fr_CA
# Territory tests
('en', ['en-GB'], 'en-GB'), # A user selects only a language
(
'fr',
['fr-FR', 'fr-CA'],
'fr-FR',
), # the engine supports fr_FR and fr_CA since no territory is given, fr_FR takes priority
]
)
def test_locale_optimized_selected(self, locale: str, locale_list: list[str], expected_locale: str):
"""
Tests from the commit message of 9ae409a05a

# A user selects only the language 'zh' which should end in CN
self.assertEqual(locales.match_locale('zh', locale_tag_list), 'zh-CN')
Assumption:
A. When a user selects a language the results should be optimized according to
the selected language.
"""
self.assertEqual(locales.match_locale(locale, locale_list), expected_locale)

# A user selects only the language 'fr' which should end in fr_CA
self.assertEqual(locales.match_locale('fr', locale_tag_list), 'fr-CA')

# The difference in priority on the territory is best shown with a
# engine that supports the following locales:
locale_tag_list = ['fr-FR', 'fr-CA', 'en-GB', 'nl-BE']

# A user selects only a language
self.assertEqual(locales.match_locale('en', locale_tag_list), 'en-GB')

# hint: the engine supports fr_FR and fr_CA since no territory is given,
# fr_FR takes priority ..
self.assertEqual(locales.match_locale('fr', locale_tag_list), 'fr-FR')

# Examples (Assumption B.)
# ------------------------

# A user selects region 'fr-BE' which should end in nl-BE
self.assertEqual(locales.match_locale('fr-BE', locale_tag_list), 'nl-BE')

# If the user selects a language and there are two locales like the
# following:

locale_tag_list = ['fr-BE', 'fr-CH']

# The get_engine_locale selects the locale by looking at the "population
@parameterized.expand(
[
('fr-BE', ['fr-FR', 'fr-CA', 'nl-BE'], 'nl-BE'), # A user selects region 'fr-BE' which should end in nl-BE
('fr', ['fr-BE', 'fr-CH'], 'fr-BE'), # A user selects fr with 2 locales,
# the get_engine_locale selects the locale by looking at the "population
# percent" and this percentage has an higher amount in BE (68.%)
# compared to CH (21%)
]
)
def test_locale_optimized_territory(self, locale: str, locale_list: list[str], expected_locale: str):
"""
Tests from the commit message of 9ae409a05a

self.assertEqual(locales.match_locale('fr', locale_tag_list), 'fr-BE')
B. When user selects a language and a territory the results should be
optimized with first priority on territory and second on language.
"""
self.assertEqual(locales.match_locale(locale, locale_list), expected_locale)
@@ -2,6 +2,7 @@
# pylint: disable=missing-module-docstring

from mock import Mock
from parameterized.parameterized import parameterized

from searx import (
plugins,
@@ -23,143 +24,125 @@ class PluginMock: # pylint: disable=missing-class-docstring, too-few-public-met


class PluginStoreTest(SearxTestCase): # pylint: disable=missing-class-docstring
def test_PluginStore_init(self):
store = plugins.PluginStore()
self.assertTrue(isinstance(store.plugins, list) and len(store.plugins) == 0)
def setUp(self):
self.store = plugins.PluginStore()

def test_PluginStore_register(self):
store = plugins.PluginStore()
def test_init(self):
self.assertEqual(0, len(self.store.plugins))
self.assertIsInstance(self.store.plugins, list)

def test_register(self):
testplugin = PluginMock()
store.register(testplugin)
self.store.register(testplugin)
self.assertEqual(1, len(self.store.plugins))

self.assertTrue(len(store.plugins) == 1)
def test_call_empty(self):
testplugin = PluginMock()
self.store.register(testplugin)
setattr(testplugin, 'asdf', Mock())
request = Mock()
self.store.call([], 'asdf', request, Mock())
self.assertFalse(getattr(testplugin, 'asdf').called) # pylint: disable=E1101

def test_PluginStore_call(self):
def test_call_with_plugin(self):
store = plugins.PluginStore()
testplugin = PluginMock()
store.register(testplugin)
setattr(testplugin, 'asdf', Mock())
request = Mock()
store.call([], 'asdf', request, Mock())

self.assertFalse(testplugin.asdf.called) # pylint: disable=E1101

store.call([testplugin], 'asdf', request, Mock())
self.assertTrue(testplugin.asdf.called) # pylint: disable=E1101
self.assertTrue(getattr(testplugin, 'asdf').called) # pylint: disable=E1101


class SelfIPTest(SearxTestCase): # pylint: disable=missing-class-docstring
def test_PluginStore_init(self):
class PluginIPSelfInfo(SearxTestCase): # pylint: disable=missing-class-docstring
def setUp(self):
plugin = plugins.load_and_initialize_plugin('searx.plugins.self_info', False, (None, {}))
store = plugins.PluginStore()
store.register(plugin)
self.store = plugins.PluginStore()
self.store.register(plugin)
cfg = limiter.get_cfg()
botdetection.init(cfg, None)

self.assertTrue(len(store.plugins) == 1)
def test_plugin_store_init(self):
self.assertEqual(1, len(self.store.plugins))

# IP test
def test_ip_in_answer(self):
request = Mock()
request.remote_addr = '127.0.0.1'
request.headers = {'X-Forwarded-For': '1.2.3.4, 127.0.0.1', 'X-Real-IP': '127.0.0.1'}
search = get_search_mock(
query='ip',
pageno=1,
)
store.call(store.plugins, 'post_search', request, search)
self.assertTrue('127.0.0.1' in search.result_container.answers["ip"]["answer"])
search = get_search_mock(query='ip', pageno=1)
self.store.call(self.store.plugins, 'post_search', request, search)
self.assertIn('127.0.0.1', search.result_container.answers["ip"]["answer"])

def test_ip_not_in_answer(self):
request = Mock()
request.remote_addr = '127.0.0.1'
request.headers = {'X-Forwarded-For': '1.2.3.4, 127.0.0.1', 'X-Real-IP': '127.0.0.1'}
search = get_search_mock(query='ip', pageno=2)
store.call(store.plugins, 'post_search', request, search)
self.assertFalse('ip' in search.result_container.answers)
self.store.call(self.store.plugins, 'post_search', request, search)
self.assertNotIn('ip', search.result_container.answers)

# User agent test
@parameterized.expand(
[
'user-agent',
'What is my User-Agent?',
]
)
def test_user_agent_in_answer(self, query: str):
request = Mock(user_agent=Mock(string='Mock'))
search = get_search_mock(query=query, pageno=1)
self.store.call(self.store.plugins, 'post_search', request, search)
self.assertIn('Mock', search.result_container.answers["user-agent"]["answer"])

search = get_search_mock(query='user-agent', pageno=1)
store.call(store.plugins, 'post_search', request, search)
self.assertTrue('Mock' in search.result_container.answers["user-agent"]["answer"])

search = get_search_mock(query='user-agent', pageno=2)
store.call(store.plugins, 'post_search', request, search)
self.assertFalse('user-agent' in search.result_container.answers)

search = get_search_mock(query='user-agent', pageno=1)
store.call(store.plugins, 'post_search', request, search)
self.assertTrue('Mock' in search.result_container.answers["user-agent"]["answer"])

search = get_search_mock(query='user-agent', pageno=2)
store.call(store.plugins, 'post_search', request, search)
self.assertFalse('user-agent' in search.result_container.answers)

search = get_search_mock(query='What is my User-Agent?', pageno=1)
store.call(store.plugins, 'post_search', request, search)
self.assertTrue('Mock' in search.result_container.answers["user-agent"]["answer"])

search = get_search_mock(query='What is my User-Agent?', pageno=2)
store.call(store.plugins, 'post_search', request, search)
self.assertFalse('user-agent' in search.result_container.answers)
@parameterized.expand(
[
'user-agent',
'What is my User-Agent?',
]
)
def test_user_agent_not_in_answer(self, query: str):
request = Mock(user_agent=Mock(string='Mock'))
search = get_search_mock(query=query, pageno=2)
self.store.call(self.store.plugins, 'post_search', request, search)
self.assertNotIn('user-agent', search.result_container.answers)


class HashPluginTest(SearxTestCase): # pylint: disable=missing-class-docstring
def test_PluginStore_init(self):
store = plugins.PluginStore()
class PluginHashTest(SearxTestCase): # pylint: disable=missing-class-docstring
def setUp(self):
self.store = plugins.PluginStore()
plugin = plugins.load_and_initialize_plugin('searx.plugins.hash_plugin', False, (None, {}))
store.register(plugin)
self.store.register(plugin)

self.assertTrue(len(store.plugins) == 1)
def test_plugin_store_init(self):
self.assertEqual(1, len(self.store.plugins))

request = Mock(remote_addr='127.0.0.1')

# MD5
search = get_search_mock(query='md5 test', pageno=1)
store.call(store.plugins, 'post_search', request, search)
self.assertTrue(
'md5 hash digest: 098f6bcd4621d373cade4e832627b4f6' in search.result_container.answers['hash']['answer']
)

search = get_search_mock(query=b'md5 test', pageno=2)
store.call(store.plugins, 'post_search', request, search)
self.assertFalse('hash' in search.result_container.answers)

# SHA1
search = get_search_mock(query='sha1 test', pageno=1)
store.call(store.plugins, 'post_search', request, search)
self.assertTrue(
'sha1 hash digest: a94a8fe5ccb19ba61c4c0873d391e9879'
'82fbbd3' in search.result_container.answers['hash']['answer']
)

# SHA224
search = get_search_mock(query='sha224 test', pageno=1)
store.call(store.plugins, 'post_search', request, search)
self.assertTrue(
'sha224 hash digest: 90a3ed9e32b2aaf4c61c410eb9254261'
'19e1a9dc53d4286ade99a809' in search.result_container.answers['hash']['answer']
)

# SHA256
search = get_search_mock(query='sha256 test', pageno=1)
store.call(store.plugins, 'post_search', request, search)
self.assertTrue(
'sha256 hash digest: 9f86d081884c7d659a2feaa0c55ad015a'
'3bf4f1b2b0b822cd15d6c15b0f00a08' in search.result_container.answers['hash']['answer']
)

# SHA384
search = get_search_mock(query='sha384 test', pageno=1)
store.call(store.plugins, 'post_search', request, search)
self.assertTrue(
@parameterized.expand(
[
('md5 test', 'md5 hash digest: 098f6bcd4621d373cade4e832627b4f6'),
('sha1 test', 'sha1 hash digest: a94a8fe5ccb19ba61c4c0873d391e987982fbbd3'),
('sha224 test', 'sha224 hash digest: 90a3ed9e32b2aaf4c61c410eb925426119e1a9dc53d4286ade99a809'),
('sha256 test', 'sha256 hash digest: 9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08'),
(
'sha384 test',
'sha384 hash digest: 768412320f7b0aa5812fce428dc4706b3c'
'ae50e02a64caa16a782249bfe8efc4b7ef1ccb126255d196047dfedf1'
'7a0a9' in search.result_container.answers['hash']['answer']
)

# SHA512
search = get_search_mock(query='sha512 test', pageno=1)
store.call(store.plugins, 'post_search', request, search)
self.assertTrue(
'7a0a9',
),
(
'sha512 test',
'sha512 hash digest: ee26b0dd4af7e749aa1a8ee3c10ae9923f6'
'18980772e473f8819a5d4940e0db27ac185f8a0e1d5f84f88bc887fd67b143732c304cc5'
'fa9ad8e6f57f50028a8ff' in search.result_container.answers['hash']['answer']
'fa9ad8e6f57f50028a8ff',
),
]
)
def test_hash_digest_new(self, query: str, hash_str: str):
request = Mock(remote_addr='127.0.0.1')
search = get_search_mock(query=query, pageno=1)
self.store.call(self.store.plugins, 'post_search', request, search)
self.assertIn(hash_str, search.result_container.answers['hash']['answer'])

def test_md5_bytes_no_answer(self):
request = Mock(remote_addr='127.0.0.1')
search = get_search_mock(query=b'md5 test', pageno=2)
self.store.call(self.store.plugins, 'post_search', request, search)
self.assertNotIn('hash', search.result_container.answers)
@@ -1,6 +1,7 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring, invalid-name

from tests import SearxTestCase
from searx.locales import locales_initialize
from searx.preferences import (
EnumStringSetting,
@@ -10,12 +11,12 @@ from searx.preferences import (
PluginsSetting,
ValidationException,
)
from tests import SearxTestCase
from searx.plugins import Plugin

locales_initialize()


class PluginStub: # pylint: disable=missing-class-docstring, too-few-public-methods
class PluginStub(Plugin): # pylint: disable=missing-class-docstring, too-few-public-methods
def __init__(self, plugin_id, default_on):
self.id = plugin_id
self.default_on = default_on
@@ -47,22 +48,22 @@ class TestSettings(SearxTestCase): # pylint: disable=missing-class-docstring

def test_enum_setting_invalid_default_value(self):
with self.assertRaises(ValidationException):
EnumStringSetting(3, choices=[0, 1, 2])
EnumStringSetting('3', choices=['0', '1', '2'])

def test_enum_setting_invalid_choice(self):
setting = EnumStringSetting(0, choices=[0, 1, 2])
setting = EnumStringSetting('0', choices=['0', '1', '2'])
with self.assertRaises(ValidationException):
setting.parse(3)
setting.parse('3')

def test_enum_setting_valid_default(self):
setting = EnumStringSetting(3, choices=[1, 2, 3])
self.assertEqual(setting.get_value(), 3)
setting = EnumStringSetting('3', choices=['1', '2', '3'])
self.assertEqual(setting.get_value(), '3')

def test_enum_setting_valid_choice(self):
setting = EnumStringSetting(3, choices=[1, 2, 3])
self.assertEqual(setting.get_value(), 3)
setting.parse(2)
self.assertEqual(setting.get_value(), 2)
setting = EnumStringSetting('3', choices=['1', '2', '3'])
self.assertEqual(setting.get_value(), '3')
setting.parse('2')
self.assertEqual(setting.get_value(), '2')

# multiple choice settings
@@ -1,6 +1,7 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring

from parameterized.parameterized import parameterized
from searx.engines import load_engines
from searx.query import RawTextQuery
from tests import SearxTestCase
@@ -129,49 +130,32 @@ class TestLanguageParser(SearxTestCase): # pylint:disable=missing-class-docstri
query = RawTextQuery(query_text, [])
self.assertEqual(query.autocomplete_list, [":en", ":en_us", ":english", ":united_kingdom"])

def test_autocomplete(self):
query = RawTextQuery(':englis', [])
self.assertEqual(query.autocomplete_list, [":english"])

query = RawTextQuery(':deutschla', [])
self.assertEqual(query.autocomplete_list, [":deutschland"])

query = RawTextQuery(':new_zea', [])
self.assertEqual(query.autocomplete_list, [":new_zealand"])

query = RawTextQuery(':hu-H', [])
self.assertEqual(query.autocomplete_list, [":hu-hu"])

query = RawTextQuery(':zh-', [])
self.assertEqual(query.autocomplete_list, [':zh-cn', ':zh-hk', ':zh-tw'])
@parameterized.expand(
[
(':englis', [":english"]),
(':deutschla', [":deutschland"]),
(':new_zea', [":new_zealand"]),
(':zh-', [':zh-cn', ':zh-hk', ':zh-tw']),
]
)
def test_autocomplete(self, query: str, autocomplete_list: list):
query = RawTextQuery(query, [])
self.assertEqual(query.autocomplete_list, autocomplete_list)


class TestTimeoutParser(SearxTestCase): # pylint:disable=missing-class-docstring
def test_timeout_below100(self):
query_text = '<3 the query'
@parameterized.expand(
[
('<3 the query', 3),
('<350 the query', 0.35),
('<3500 the query', 3.5),
]
)
def test_timeout_limit(self, query_text: str, timeout_limit: float):
query = RawTextQuery(query_text, [])

self.assertEqual(query.getFullQuery(), query_text)
self.assertEqual(len(query.query_parts), 1)
self.assertEqual(query.timeout_limit, 3)
self.assertFalse(query.specific)

def test_timeout_above100(self):
query_text = '<350 the query'
query = RawTextQuery(query_text, [])

self.assertEqual(query.getFullQuery(), query_text)
self.assertEqual(len(query.query_parts), 1)
self.assertEqual(query.timeout_limit, 0.35)
self.assertFalse(query.specific)

def test_timeout_above1000(self):
query_text = '<3500 the query'
query = RawTextQuery(query_text, [])

self.assertEqual(query.getFullQuery(), query_text)
self.assertEqual(len(query.query_parts), 1)
self.assertEqual(query.timeout_limit, 3.5)
self.assertEqual(query.timeout_limit, timeout_limit)
self.assertFalse(query.specific)

def test_timeout_invalid(self):
@@ -182,7 +166,7 @@ class TestTimeoutParser(SearxTestCase): # pylint:disable=missing-class-docstrin
self.assertEqual(query.getFullQuery(), query_text)
self.assertEqual(len(query.query_parts), 0)
self.assertEqual(query.getQuery(), query_text)
self.assertEqual(query.timeout_limit, None)
self.assertIsNone(query.timeout_limit)
self.assertFalse(query.specific)

def test_timeout_autocomplete(self):
@@ -193,7 +177,7 @@ class TestTimeoutParser(SearxTestCase): # pylint:disable=missing-class-docstrin
self.assertEqual(query.getFullQuery(), query_text)
self.assertEqual(len(query.query_parts), 0)
self.assertEqual(query.getQuery(), query_text)
self.assertEqual(query.timeout_limit, None)
self.assertIsNone(query.timeout_limit)
self.assertFalse(query.specific)
self.assertEqual(query.autocomplete_list, ['<3', '<850'])

@@ -212,7 +196,7 @@ class TestExternalBangParser(SearxTestCase): # pylint:disable=missing-class-doc
query = RawTextQuery(query_text, [])

self.assertEqual(query.getFullQuery(), query_text)
self.assertEqual(query.external_bang, None)
self.assertIsNone(query.external_bang)
self.assertFalse(query.specific)

def test_external_bang_autocomplete(self):
@@ -239,9 +223,8 @@ class TestBang(SearxTestCase): # pylint:disable=missing-class-docstring
def tearDown(self):
load_engines([])

def test_bang(self):

for bang in TestBang.SPECIFIC_BANGS:
@parameterized.expand(SPECIFIC_BANGS)
def test_bang(self, bang: str):
with self.subTest(msg="Check bang", bang=bang):
query_text = TestBang.THE_QUERY + ' ' + bang
query = RawTextQuery(query_text, [])
@@ -250,8 +233,8 @@ class TestBang(SearxTestCase): # pylint:disable=missing-class-docstring
self.assertEqual(query.query_parts, [bang])
self.assertEqual(query.user_query_parts, TestBang.THE_QUERY.split(' '))

def test_specific(self):
for bang in TestBang.SPECIFIC_BANGS:
@parameterized.expand(SPECIFIC_BANGS)
def test_specific(self, bang: str):
with self.subTest(msg="Check bang is specific", bang=bang):
query_text = TestBang.THE_QUERY + ' ' + bang
query = RawTextQuery(query_text, [])
@@ -110,7 +110,7 @@ class SearchTestCase(SearxTestCase): # pylint: disable=missing-class-docstring
search.search()
self.assertEqual(search.actual_timeout, 10.0)

def test_external_bang(self):
def test_external_bang_valid(self):
search_query = SearchQuery(
'yes yes',
[EngineRef(PUBLIC_ENGINE_NAME, 'general')],
@@ -124,8 +124,9 @@ class SearchTestCase(SearxTestCase): # pylint: disable=missing-class-docstring
search = searx.search.Search(search_query)
results = search.search()
# For checking if the user redirected with the youtube external bang
self.assertTrue(results.redirect_url is not None)
self.assertIsNotNone(results.redirect_url)

def test_external_bang_none(self):
search_query = SearchQuery(
'youtube never gonna give you up',
[EngineRef(PUBLIC_ENGINE_NAME, 'general')],
@@ -140,4 +141,4 @@ class SearchTestCase(SearxTestCase): # pylint: disable=missing-class-docstring
with self.app.test_request_context('/search'):
results = search.search()
# This should not redirect
self.assertTrue(results.redirect_url is None)
self.assertIsNone(results.redirect_url)
@@ -6,6 +6,8 @@ from pathlib import Path
import os
from unittest.mock import patch

from parameterized import parameterized

from searx.exceptions import SearxSettingsException
from searx import settings_loader
from tests import SearxTestCase
@@ -31,13 +33,13 @@ class TestDefaultSettings(SearxTestCase): # pylint: disable=missing-class-docst
settings, msg = settings_loader.load_settings(load_user_settings=False)
self.assertTrue(msg.startswith('load the default settings from'))
self.assertFalse(settings['general']['debug'])
self.assertTrue(isinstance(settings['general']['instance_name'], str))
self.assertIsInstance(settings['general']['instance_name'], str)
self.assertEqual(settings['server']['secret_key'], "ultrasecretkey")
self.assertTrue(isinstance(settings['server']['port'], int))
self.assertTrue(isinstance(settings['server']['bind_address'], str))
self.assertTrue(isinstance(settings['engines'], list))
self.assertTrue(isinstance(settings['doi_resolvers'], dict))
self.assertTrue(isinstance(settings['default_doi_resolver'], str))
self.assertIsInstance(settings['server']['port'], int)
self.assertIsInstance(settings['server']['bind_address'], str)
self.assertIsInstance(settings['engines'], list)
self.assertIsInstance(settings['doi_resolvers'], dict)
self.assertIsInstance(settings['default_doi_resolver'], str)


class TestUserSettings(SearxTestCase): # pylint: disable=missing-class-docstring
@@ -50,11 +52,14 @@ class TestUserSettings(SearxTestCase): # pylint: disable=missing-class-docstrin
with self.assertRaises(ValueError):
self.assertFalse(settings_loader.is_use_default_settings({'use_default_settings': 0}))

def test_user_settings_not_found(self):
with patch.dict(os.environ, {'SEARXNG_SETTINGS_PATH': _settings("not_exists.yml")}):
with self.assertRaises(EnvironmentError):
_s, _m = settings_loader.load_settings()
with patch.dict(os.environ, {'SEARXNG_SETTINGS_PATH': "/folder/not/exists"}):
@parameterized.expand(
[
_settings("not_exists.yml"),
"/folder/not/exists",
]
)
def test_user_settings_not_found(self, path: str):
with patch.dict(os.environ, {'SEARXNG_SETTINGS_PATH': path}):
with self.assertRaises(EnvironmentError):
_s, _m = settings_loader.load_settings()
@@ -3,6 +3,7 @@

import lxml.etree
from lxml import html
from parameterized.parameterized import parameterized

from searx.exceptions import SearxXPathSyntaxException, SearxEngineXPathException
from searx import utils
@@ -66,9 +67,15 @@ class TestUtils(SearxTestCase): # pylint: disable=missing-class-docstring
self.assertEqual(utils.extract_text(dom.xpath('boolean(//span)')), 'True')
self.assertEqual(utils.extract_text(dom.xpath('//img/@src')), 'test.jpg')
self.assertEqual(utils.extract_text(dom.xpath('//unexistingtag')), '')

def test_extract_text_allow_none(self):
self.assertEqual(utils.extract_text(None, allow_none=True), None)

def test_extract_text_error_none(self):
with self.assertRaises(ValueError):
utils.extract_text(None)

def test_extract_text_error_empty(self):
with self.assertRaises(ValueError):
utils.extract_text({})

@@ -103,14 +110,16 @@ class TestHTMLTextExtractor(SearxTestCase): # pylint: disable=missing-class-doc
def test__init__(self):
self.assertEqual(self.html_text_extractor.result, [])

def test_handle_charref(self):
self.html_text_extractor.handle_charref('xF')
self.assertIn('\x0f', self.html_text_extractor.result)
self.html_text_extractor.handle_charref('XF')
self.assertIn('\x0f', self.html_text_extractor.result)

self.html_text_extractor.handle_charref('97')
self.assertIn('a', self.html_text_extractor.result)
@parameterized.expand(
[
('xF', '\x0f'),
('XF', '\x0f'),
('97', 'a'),
]
)
def test_handle_charref(self, charref: str, expected: str):
self.html_text_extractor.handle_charref(charref)
self.assertIn(expected, self.html_text_extractor.result)

def test_handle_entityref(self):
entity = 'test'
@@ -191,7 +200,7 @@ class TestXPathUtils(SearxTestCase): # pylint: disable=missing-class-docstring
self.assertEqual(utils.eval_xpath_getindex(doc, '//i/text()', 1, default='something'), 'something')

# default is None
self.assertEqual(utils.eval_xpath_getindex(doc, '//i/text()', 1, default=None), None)
self.assertIsNone(utils.eval_xpath_getindex(doc, '//i/text()', 1, default=None))

# index not found
with self.assertRaises(SearxEngineXPathException) as context:
@@ -2,52 +2,59 @@
# pylint: disable=missing-module-docstring

import mock
from parameterized.parameterized import parameterized
from searx import webutils
from tests import SearxTestCase


class TestWebUtils(SearxTestCase): # pylint: disable=missing-class-docstring
def test_prettify_url(self):
data = (

@parameterized.expand(
[
('https://searx.me/', 'https://searx.me/'),
('https://searx.me/ű', 'https://searx.me/ű'),
('https://searx.me/' + (100 * 'a'), 'https://searx.me/[...]aaaaaaaaaaaaaaaaa'),
('https://searx.me/' + (100 * 'ű'), 'https://searx.me/[...]űűűűűűűűűűűűűűűűű'),
]
)

for test_url, expected in data:
def test_prettify_url(self, test_url: str, expected: str):
self.assertEqual(webutils.prettify_url(test_url, max_length=32), expected)

def test_highlight_content(self):
self.assertEqual(webutils.highlight_content(0, None), None)
self.assertEqual(webutils.highlight_content(None, None), None)
self.assertEqual(webutils.highlight_content('', None), None)
self.assertEqual(webutils.highlight_content(False, None), None)
@parameterized.expand(
[
(0, None, None),
(None, None, None),
('', None, None),
(False, None, None),
]
)
def test_highlight_content_none(self, content, query, expected):
self.assertEqual(webutils.highlight_content(content, query), expected)

contents = ['<html></html>not<']
for content in contents:
def test_highlight_content_same(self):
content = '<html></html>not<'
self.assertEqual(webutils.highlight_content(content, None), content)

content = 'a'
query = 'test'
self.assertEqual(webutils.highlight_content(content, query), 'a')
query = 'a test'
self.assertEqual(webutils.highlight_content(content, query), '<span class="highlight">a</span>')

# pylint: disable=line-too-long
data = (
@parameterized.expand(
[
('test', 'a', 'a'),
('a test', 'a', '<span class="highlight">a</span>'),
('" test "', 'a test string', 'a <span class="highlight">test</span> string'),
('"a"', 'this is a test string', 'this is <span class="highlight">a</span> test string'),
(
'a test',
'this is a test string that matches entire query',
'this is <span class="highlight">a</span> <span class="highlight">test</span> string that matches entire query',
'this is <span class="highlight">a</span>'
' <span class="highlight">test</span>'
' string that matches entire query',
),
(
'this a test',
'this is a string to test.',
(
'<span class="highlight">this</span> is <span class="highlight">a</span> string to <span class="highlight">test</span>.'
'<span class="highlight">this</span>'
' is <span class="highlight">a</span>'
' string to <span class="highlight">test</span>.'
),
),
(
@@ -65,8 +72,9 @@ class TestWebUtils(SearxTestCase): # pylint: disable=missing-class-docstring
'a string with class.',
'<span class="highlight">a</span> string with <span class="highlight">class</span>.',
),
]
)
for query, content, expected in data:
def test_highlight_content_equal(self, query: str, content: str, expected: str):
self.assertEqual(webutils.highlight_content(content, query), expected)


@@ -76,7 +84,7 @@ class TestUnicodeWriter(SearxTestCase): # pylint: disable=missing-class-docstri

def test_write_row(self):
row = [1, 2, 3]
self.assertEqual(self.unicode_writer.writerow(row), None)
self.assertIsNone(self.unicode_writer.writerow(row))

def test_write_rows(self):
self.unicode_writer.writerow = mock.MagicMock()
@@ -86,13 +94,18 @@ class TestUnicodeWriter(SearxTestCase): # pylint: disable=missing-class-docstri


class TestNewHmac(SearxTestCase): # pylint: disable=missing-class-docstring
def test_bytes(self):
@parameterized.expand(
[
b'secret',
1,
]
)
def test_attribute_error(self, secret_key):
data = b'http://example.com'
with self.assertRaises(AttributeError):
webutils.new_hmac(b'secret', data)

with self.assertRaises(AttributeError):
webutils.new_hmac(1, data)
webutils.new_hmac(secret_key, data)

def test_bytes(self):
data = b'http://example.com'
res = webutils.new_hmac('secret', data)
self.assertEqual(res, '23e2baa2404012a5cc8e4a18b4aabf0dde4cb9b56f679ddc0fd6d7c24339d819')