From e75af78d7aacdd545825ebb2aee902276e9d55a6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Natal=20Ng=C3=A9tal?=
Date: Wed, 5 Dec 2018 18:52:36 +0100
Subject: [PATCH 01/14] [Scripts] Force upgrade requirements.

Force the upgrade of requirements with pip. At the moment, if the version
in the requirement file is changed, there will be no effect, because the
dependencies are already present.
---
 manage.sh | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/manage.sh b/manage.sh
index a352ccc6e..20d2cba3d 100755
--- a/manage.sh
+++ b/manage.sh
@@ -18,12 +18,12 @@ ACTION="$1"
 update_packages() {
     pip install --upgrade pip
     pip install --upgrade setuptools
-    pip install -r "$BASE_DIR/requirements.txt"
+    pip install -Ur "$BASE_DIR/requirements.txt"
 }
 
 update_dev_packages() {
     update_packages
-    pip install -r "$BASE_DIR/requirements-dev.txt"
+    pip install -Ur "$BASE_DIR/requirements-dev.txt"
 }
 
 install_geckodriver() {

From 8850036ded3af2ba7455cef53a8134022e1b544d Mon Sep 17 00:00:00 2001
From: Adam Tauber
Date: Sat, 21 Dec 2019 20:25:39 +0100
Subject: [PATCH 02/14] [fix] add explicit useragent header to requests - closes #1459

---
 searx/engines/qwant.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/searx/engines/qwant.py b/searx/engines/qwant.py
index de12955c6..54e9dafad 100644
--- a/searx/engines/qwant.py
+++ b/searx/engines/qwant.py
@@ -50,6 +50,7 @@ def request(query, params):
         language = match_language(params['language'], supported_languages, language_aliases)
         params['url'] += '&locale=' + language.replace('-', '_').lower()
 
+    params['headers']['User-Agent'] = 'Mozilla/5.0 (X11; Linux x86_64; rv:69.0) Gecko/20100101 Firefox/69.0'
     return params
 
 

From e5305f886c0d7d5fb3f34d1fbd7f9a545c14c284 Mon Sep 17 00:00:00 2001
From: Adam Tauber
Date: Sat, 21 Dec 2019 20:51:30 +0100
Subject: [PATCH 03/14] [fix] fetch extra search param of gigablast - fixes #1293

---
 searx/engines/gigablast.py | 41 ++++++++++++++++++++++++++------------
 1 file changed, 28 insertions(+), 13 deletions(-)

diff --git a/searx/engines/gigablast.py b/searx/engines/gigablast.py
index a84f3f69d..2a5067bc3 100644
--- a/searx/engines/gigablast.py
+++ b/searx/engines/gigablast.py
@@ -14,6 +14,7 @@ import random
 from json import loads
 from time import time
 from lxml.html import fromstring
+from searx.poolrequests import get
 
 from searx.url_utils import urlencode
 from searx.utils import eval_xpath
@@ -31,13 +32,9 @@ search_string = 'search?{query}'\
     '&c=main'\
     '&s={offset}'\
     '&format=json'\
-    '&qh=0'\
-    '&qlang={lang}'\
+    '&langcountry={lang}'\
     '&ff={safesearch}'\
-    '&rxiec={rxieu}'\
-    '&ulse={ulse}'\
-    '&rand={rxikd}'\
-    '&dbez={dbez}'
+    '&rand={rxikd}'
 # specific xpath variables
 results_xpath = '//response//result'
 url_xpath = './/url'
 content_xpath = './/sum'
 
 supported_languages_url = 'https://gigablast.com/search?&rxikd=1'
 
+extra_param = '' # gigablast requires a random extra parameter
+# which can be extracted from the source code of the search page
+
+
+def parse_extra_param(text):
+    global extra_param
+    param_lines = [x for x in text.splitlines() if x.startswith('var url=') or x.startswith('url=url+')]
+    extra_param = ''
+    for l in param_lines:
+        extra_param += l.split("'")[1]
+    extra_param = extra_param.split('&')[-1]
+
+
+def init(engine_settings=None):
+    parse_extra_param(get('http://gigablast.com/search?c=main&qlangcountry=en-us&q=south&s=10').text)
+
+
 # do search-request
 def request(query, params):
+    print("EXTRAPARAM:", extra_param)
     offset = (params['pageno'] - 1) * number_of_results
 
     if params['language'] == 'all':
@@ -67,14 +81,11 @@ def request(query, params):
 
     search_path = search_string.format(query=urlencode({'q': query}),
                                        offset=offset,
                                        number_of_results=number_of_results,
-                                       rxikd=int(time() * 1000),
-                                       rxieu=random.randint(1000000000, 9999999999),
-                                       ulse=random.randint(100000000, 999999999),
                                        lang=language,
-                                       safesearch=safesearch,
-                                       dbez=random.randint(100000000, 999999999))
+                                       rxikd=int(time() * 1000),
+                                       safesearch=safesearch)
 
-    params['url'] = base_url + search_path
+    params['url'] = base_url + search_path + '&' + extra_param
 
     return params
 
@@ -84,7 +95,11 @@ def response(resp):
     results = []
 
     # parse results
-    response_json = loads(resp.text)
+    try:
+        response_json = loads(resp.text)
+    except:
+        parse_extra_param(resp.text)
+        return results
 
     for result in response_json['results']:
         # append result

From f8713512bedf19d4495e0b9a0fd86679daaf7f79 Mon Sep 17 00:00:00 2001
From: Adam Tauber
Date: Sat, 21 Dec 2019 20:56:38 +0100
Subject: [PATCH 04/14] [fix] convert byte query to string in osm engine - fixes #1220

---
 searx/engines/openstreetmap.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/searx/engines/openstreetmap.py b/searx/engines/openstreetmap.py
index 733ba6203..cec10a3c7 100644
--- a/searx/engines/openstreetmap.py
+++ b/searx/engines/openstreetmap.py
@@ -24,7 +24,7 @@ result_base_url = 'https://openstreetmap.org/{osm_type}/{osm_id}'
 
 # do search-request
 def request(query, params):
-    params['url'] = base_url + search_string.format(query=query)
+    params['url'] = base_url + search_string.format(query=query.decode('utf-8'))
 
     return params
 

From 00512e36c133312eb74a82f6a2dec6d06214c42b Mon Sep 17 00:00:00 2001
From: Adam Tauber
Date: Sat, 21 Dec 2019 21:01:08 +0100
Subject: [PATCH 05/14] [fix] handle empty response from wikipedia engine - closes #1114

---
 searx/engines/wikipedia.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/searx/engines/wikipedia.py b/searx/engines/wikipedia.py
index 4dae735d1..690da72fe 100644
--- a/searx/engines/wikipedia.py
+++ b/searx/engines/wikipedia.py
@@ -79,6 +79,9 @@ def response(resp):
 
     # wikipedia article's unique id
     # first valid id is assumed to be the requested article
+    if 'pages' not in search_result['query']:
+        return results
+
     for article_id in search_result['query']['pages']:
         page = search_result['query']['pages'][article_id]
         if int(article_id) > 0:

From fc457569f757dd10ff55393f472ea9ed49a42374 Mon Sep 17 00:00:00 2001
From: Adam Tauber
Date: Sat, 21 Dec 2019 21:13:43 +0100
Subject: [PATCH 06/14] [fix] pep8

---
 searx/engines/gigablast.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/searx/engines/gigablast.py b/searx/engines/gigablast.py
index 2a5067bc3..5af593e36 100644
--- a/searx/engines/gigablast.py
+++ b/searx/engines/gigablast.py
@@ -43,7 +43,7 @@ content_xpath = './/sum'
 
 supported_languages_url = 'https://gigablast.com/search?&rxikd=1'
 
-extra_param = '' # gigablast requires a random extra parameter
+extra_param = ''  # gigablast requires a random extra parameter
 # which can be extracted from the source code of the search page
 
 

From 52ccaa7acc0eeeb452938a2a8758906430ece077 Mon Sep 17 00:00:00 2001
From: Adam Tauber
Date: Sat, 21 Dec 2019 21:15:09 +0100
Subject: [PATCH 07/14] [mod] remove useless engine unit tests

These tests are not able to detect engine errors if the upstream site
changes.
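
All of the removed tests follow the same pattern: each engine is fed a
hand-written fixture through a mocked response object, and only the parsing
of that frozen fixture is asserted on, so the tests keep passing even after
the upstream site changes its markup. A condensed sketch of that pattern,
with an abbreviated, illustrative fixture (not one of the original fixtures):

    from collections import defaultdict
    import mock
    from searx.engines import bing
    from searx.testing import SearxTestCase

    class TestBingEngine(SearxTestCase):
        def test_response(self):
            search_params = defaultdict(dict)
            search_params['pageno'] = 1
            search_params['language'] = 'fr-FR'
            # frozen copy of the upstream markup, captured when the test was written
            html = '<html><body><ol id="b_results"></ol></body></html>'
            response = mock.Mock(text=html)
            response.search_params = search_params
            results = bing.response(response)
            # only the parser is exercised; a change on bing.com itself can never make this fail
            self.assertEqual(type(results), list)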
--- tests/unit/engines/__init__.py | 0 tests/unit/engines/pubmed.py | 37 -- tests/unit/engines/seedpeer_fixture.html | 110 ---- tests/unit/engines/test_acgsou.py | 78 --- tests/unit/engines/test_archlinux.py | 111 ---- tests/unit/engines/test_arxiv.py | 58 -- tests/unit/engines/test_base.py | 91 ---- tests/unit/engines/test_bing.py | 178 ------ tests/unit/engines/test_bing_images.py | 132 ----- tests/unit/engines/test_bing_news.py | 147 ----- tests/unit/engines/test_bing_videos.py | 72 --- tests/unit/engines/test_btdigg.py | 112 ---- tests/unit/engines/test_currency_convert.py | 56 -- tests/unit/engines/test_dailymotion.py | 112 ---- tests/unit/engines/test_deezer.py | 57 -- tests/unit/engines/test_deviantart.py | 24 - tests/unit/engines/test_digbt.py | 61 --- tests/unit/engines/test_digg.py | 16 - tests/unit/engines/test_doku.py | 79 --- tests/unit/engines/test_duckduckgo.py | 106 ---- .../engines/test_duckduckgo_definitions.py | 255 --------- tests/unit/engines/test_duckduckgo_images.py | 75 --- tests/unit/engines/test_duden.py | 47 -- tests/unit/engines/test_dummy.py | 26 - tests/unit/engines/test_faroo.py | 113 ---- tests/unit/engines/test_fdroid.py | 60 -- tests/unit/engines/test_flickr.py | 142 ----- tests/unit/engines/test_flickr_noapi.py | 357 ------------ tests/unit/engines/test_framalibre.py | 103 ---- tests/unit/engines/test_frinkiac.py | 50 -- tests/unit/engines/test_genius.py | 231 -------- tests/unit/engines/test_gigablast.py | 119 ---- tests/unit/engines/test_github.py | 61 --- tests/unit/engines/test_google.py | 194 ------- tests/unit/engines/test_google_images.py | 27 - tests/unit/engines/test_google_news.py | 102 ---- tests/unit/engines/test_google_videos.py | 79 --- tests/unit/engines/test_ina.py | 64 --- tests/unit/engines/test_kickass.py | 397 -------------- tests/unit/engines/test_mediawiki.py | 130 ----- tests/unit/engines/test_mixcloud.py | 67 --- tests/unit/engines/test_nyaa.py | 124 ----- tests/unit/engines/test_openstreetmap.py | 199 ------- tests/unit/engines/test_pdbe.py | 109 ---- tests/unit/engines/test_photon.py | 166 ------ tests/unit/engines/test_piratebay.py | 166 ------ tests/unit/engines/test_qwant.py | 339 ------------ tests/unit/engines/test_reddit.py | 71 --- tests/unit/engines/test_scanr_structures.py | 175 ------ tests/unit/engines/test_searchcode_code.py | 75 --- tests/unit/engines/test_searchcode_doc.py | 70 --- tests/unit/engines/test_seedpeer.py | 66 --- tests/unit/engines/test_soundcloud.py | 192 ------- tests/unit/engines/test_spotify.py | 124 ----- tests/unit/engines/test_stackoverflow.py | 106 ---- tests/unit/engines/test_startpage.py | 67 --- tests/unit/engines/test_tokyotoshokan.py | 110 ---- tests/unit/engines/test_torrentz.py | 87 --- tests/unit/engines/test_twitter.py | 502 ----------------- tests/unit/engines/test_unsplash.py | 38 -- tests/unit/engines/test_vimeo.py | 36 -- tests/unit/engines/test_wikidata.py | 514 ------------------ tests/unit/engines/test_wikipedia.py | 263 --------- tests/unit/engines/test_wolframalpha_api.py | 166 ------ tests/unit/engines/test_wolframalpha_noapi.py | 224 -------- tests/unit/engines/test_www1x.py | 14 - tests/unit/engines/test_yacy.py | 96 ---- tests/unit/engines/test_yahoo.py | 190 ------- tests/unit/engines/test_yahoo_news.py | 150 ----- tests/unit/engines/test_youtube_api.py | 111 ---- tests/unit/engines/test_youtube_noapi.py | 124 ----- tests/unit/engines/unsplash_fixture.json | 241 -------- 72 files changed, 9251 deletions(-) delete mode 100644 tests/unit/engines/__init__.py delete mode 
100644 tests/unit/engines/pubmed.py delete mode 100644 tests/unit/engines/seedpeer_fixture.html delete mode 100644 tests/unit/engines/test_acgsou.py delete mode 100644 tests/unit/engines/test_archlinux.py delete mode 100644 tests/unit/engines/test_arxiv.py delete mode 100644 tests/unit/engines/test_base.py delete mode 100644 tests/unit/engines/test_bing.py delete mode 100644 tests/unit/engines/test_bing_images.py delete mode 100644 tests/unit/engines/test_bing_news.py delete mode 100644 tests/unit/engines/test_bing_videos.py delete mode 100644 tests/unit/engines/test_btdigg.py delete mode 100644 tests/unit/engines/test_currency_convert.py delete mode 100644 tests/unit/engines/test_dailymotion.py delete mode 100644 tests/unit/engines/test_deezer.py delete mode 100644 tests/unit/engines/test_deviantart.py delete mode 100644 tests/unit/engines/test_digbt.py delete mode 100644 tests/unit/engines/test_digg.py delete mode 100644 tests/unit/engines/test_doku.py delete mode 100644 tests/unit/engines/test_duckduckgo.py delete mode 100644 tests/unit/engines/test_duckduckgo_definitions.py delete mode 100644 tests/unit/engines/test_duckduckgo_images.py delete mode 100644 tests/unit/engines/test_duden.py delete mode 100644 tests/unit/engines/test_dummy.py delete mode 100644 tests/unit/engines/test_faroo.py delete mode 100644 tests/unit/engines/test_fdroid.py delete mode 100644 tests/unit/engines/test_flickr.py delete mode 100644 tests/unit/engines/test_flickr_noapi.py delete mode 100644 tests/unit/engines/test_framalibre.py delete mode 100644 tests/unit/engines/test_frinkiac.py delete mode 100644 tests/unit/engines/test_genius.py delete mode 100644 tests/unit/engines/test_gigablast.py delete mode 100644 tests/unit/engines/test_github.py delete mode 100644 tests/unit/engines/test_google.py delete mode 100644 tests/unit/engines/test_google_images.py delete mode 100644 tests/unit/engines/test_google_news.py delete mode 100644 tests/unit/engines/test_google_videos.py delete mode 100644 tests/unit/engines/test_ina.py delete mode 100644 tests/unit/engines/test_kickass.py delete mode 100644 tests/unit/engines/test_mediawiki.py delete mode 100644 tests/unit/engines/test_mixcloud.py delete mode 100644 tests/unit/engines/test_nyaa.py delete mode 100644 tests/unit/engines/test_openstreetmap.py delete mode 100644 tests/unit/engines/test_pdbe.py delete mode 100644 tests/unit/engines/test_photon.py delete mode 100644 tests/unit/engines/test_piratebay.py delete mode 100644 tests/unit/engines/test_qwant.py delete mode 100644 tests/unit/engines/test_reddit.py delete mode 100644 tests/unit/engines/test_scanr_structures.py delete mode 100644 tests/unit/engines/test_searchcode_code.py delete mode 100644 tests/unit/engines/test_searchcode_doc.py delete mode 100644 tests/unit/engines/test_seedpeer.py delete mode 100644 tests/unit/engines/test_soundcloud.py delete mode 100644 tests/unit/engines/test_spotify.py delete mode 100644 tests/unit/engines/test_stackoverflow.py delete mode 100644 tests/unit/engines/test_startpage.py delete mode 100644 tests/unit/engines/test_tokyotoshokan.py delete mode 100644 tests/unit/engines/test_torrentz.py delete mode 100644 tests/unit/engines/test_twitter.py delete mode 100644 tests/unit/engines/test_unsplash.py delete mode 100644 tests/unit/engines/test_vimeo.py delete mode 100644 tests/unit/engines/test_wikidata.py delete mode 100644 tests/unit/engines/test_wikipedia.py delete mode 100644 tests/unit/engines/test_wolframalpha_api.py delete mode 100644 
tests/unit/engines/test_wolframalpha_noapi.py delete mode 100644 tests/unit/engines/test_www1x.py delete mode 100644 tests/unit/engines/test_yacy.py delete mode 100644 tests/unit/engines/test_yahoo.py delete mode 100644 tests/unit/engines/test_yahoo_news.py delete mode 100644 tests/unit/engines/test_youtube_api.py delete mode 100644 tests/unit/engines/test_youtube_noapi.py delete mode 100644 tests/unit/engines/unsplash_fixture.json diff --git a/tests/unit/engines/__init__.py b/tests/unit/engines/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/unit/engines/pubmed.py b/tests/unit/engines/pubmed.py deleted file mode 100644 index 370efe067..000000000 --- a/tests/unit/engines/pubmed.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import pubmed -from searx.testing import SearxTestCase - - -class TestPubmedEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - params = pubmed.request(query, dicto) - self.assertIn('url', params) - self.assertIn('eutils.ncbi.nlm.nih.gov/', params['url']) - self.assertIn('term', params['url']) - - def test_response(self): - self.assertRaises(AttributeError, pubmed.response, None) - self.assertRaises(AttributeError, pubmed.response, []) - self.assertRaises(AttributeError, pubmed.response, '') - self.assertRaises(AttributeError, pubmed.response, '[]') - - response = mock.Mock(text='') - self.assertEqual(pubmed.response(response), []) - - xml_mock = """110 -1 - -""" - - response = mock.Mock(text=xml_mock.encode('utf-8')) - results = pubmed.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['content'], 'No abstract is available for this publication.') diff --git a/tests/unit/engines/seedpeer_fixture.html b/tests/unit/engines/seedpeer_fixture.html deleted file mode 100644 index 28207bfad..000000000 --- a/tests/unit/engines/seedpeer_fixture.html +++ /dev/null @@ -1,110 +0,0 @@ - - - - - - - - - - \ No newline at end of file diff --git a/tests/unit/engines/test_acgsou.py b/tests/unit/engines/test_acgsou.py deleted file mode 100644 index c01acf5de..000000000 --- a/tests/unit/engines/test_acgsou.py +++ /dev/null @@ -1,78 +0,0 @@ -# coding=utf-8 -from collections import defaultdict -import mock -from searx.engines import acgsou -from searx.testing import SearxTestCase - - -class TestAcgsouEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dic = defaultdict(dict) - dic['pageno'] = 1 - params = acgsou.request(query, dic) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertTrue('acgsou.com' in params['url']) - - def test_response(self): - resp = mock.Mock(text='') - self.assertEqual(acgsou.response(resp), []) - - html = u""" - - - - - - - - - - - - - - - - - - - - - - - - - - -
testtesttesttesttesttesttesttest
datetestcategory テスト - torrentname テスト - 1MB - - 29 - - - - 211 - - - - 168 - - user
- - """ - - resp = mock.Mock(text=html) - results = acgsou.response(resp) - - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - - r = results[0] - self.assertEqual(r['url'], 'http://www.acgsou.com/show-torrentid.html') - self.assertEqual(r['content'], u'Category: "testcategory テスト".') - self.assertEqual(r['title'], u'torrentname テスト') - self.assertEqual(r['filesize'], 1048576) diff --git a/tests/unit/engines/test_archlinux.py b/tests/unit/engines/test_archlinux.py deleted file mode 100644 index 062f023bd..000000000 --- a/tests/unit/engines/test_archlinux.py +++ /dev/null @@ -1,111 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import archlinux -from searx.testing import SearxTestCase - -domains = { - 'all': 'https://wiki.archlinux.org', - 'de': 'https://wiki.archlinux.de', - 'fr': 'https://wiki.archlinux.fr', - 'ja': 'https://wiki.archlinuxjp.org', - 'ro': 'http://wiki.archlinux.ro', - 'tr': 'http://archtr.org/wiki' -} - - -class TestArchLinuxEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dic = defaultdict(dict) - dic['pageno'] = 1 - dic['language'] = 'en-US' - params = archlinux.request(query, dic) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertTrue('wiki.archlinux.org' in params['url']) - - for lang, name in archlinux.main_langs: - dic['language'] = lang - params = archlinux.request(query, dic) - self.assertTrue(name in params['url']) - - for lang, domain in domains.items(): - dic['language'] = lang - params = archlinux.request(query, dic) - self.assertTrue(domain in params['url']) - - def test_response(self): - response = mock.Mock(text='', - search_params={'language': 'en_US'}) - self.assertEqual(archlinux.response(response), []) - - html = """ -
    -
  • -
    - ATI -
    -
    - Lorem ipsum dolor sit amet -
    -
    - 30 KB (4,630 words) - 19:04, 17 March 2016
    -
  • -
  • - -
    - CPUs with AMDs instruction set "AMD64" -
    -
    - 17 KB (2,722 words) - 20:13, 21 March 2016 -
    -
  • -
  • - -
    - ondemand for AMD and older Intel CPU -
    -
    - 15 KB (2,319 words) - 23:46, 16 March 2016 -
    -
  • -
- """ - - expected = [ - { - 'title': 'ATI', - 'url': 'https://wiki.archlinux.org/index.php/ATI' - }, - { - 'title': 'Frequently asked questions', - 'url': 'https://wiki.archlinux.org/index.php/Frequently_asked_questions' - }, - { - 'title': 'CPU frequency scaling', - 'url': 'https://wiki.archlinux.org/index.php/CPU_frequency_scaling' - } - ] - - response = mock.Mock(text=html) - response.search_params = { - 'language': 'en_US' - } - results = archlinux.response(response) - - self.assertEqual(type(results), list) - self.assertEqual(len(results), len(expected)) - - i = 0 - for exp in expected: - res = results[i] - i += 1 - for key, value in exp.items(): - self.assertEqual(res[key], value) diff --git a/tests/unit/engines/test_arxiv.py b/tests/unit/engines/test_arxiv.py deleted file mode 100644 index 83c4f8595..000000000 --- a/tests/unit/engines/test_arxiv.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import arxiv -from searx.testing import SearxTestCase - - -class TestBaseEngine(SearxTestCase): - - def test_request(self): - query = 'test_query'.encode('utf-8') - dicto = defaultdict(dict) - dicto['pageno'] = 1 - params = arxiv.request(query, dicto) - self.assertIn('url', params) - self.assertIn('export.arxiv.org/api/', params['url']) - - def test_response(self): - self.assertRaises(AttributeError, arxiv.response, None) - self.assertRaises(AttributeError, arxiv.response, []) - self.assertRaises(AttributeError, arxiv.response, '') - self.assertRaises(AttributeError, arxiv.response, '[]') - - response = mock.Mock(content=b''' -''') - self.assertEqual(arxiv.response(response), []) - - xml_mock = b''' - - ArXiv Query: search_query=all:test_query&id_list=&start=0&max_results=1 - http://arxiv.org/api/1 - 2000-01-21T00:00:00-01:00 - 1 - 0 - 1 - - http://arxiv.org/1 - 2000-01-01T00:00:01Z - 2000-01-01T00:00:01Z - Mathematical proof. - Mathematical formula. - - A. B. - - - - - - - -''' - - response = mock.Mock(content=xml_mock) - results = arxiv.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'Mathematical proof.') - self.assertEqual(results[0]['content'], 'Mathematical formula.') diff --git a/tests/unit/engines/test_base.py b/tests/unit/engines/test_base.py deleted file mode 100644 index b5da5bde7..000000000 --- a/tests/unit/engines/test_base.py +++ /dev/null @@ -1,91 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import base -from searx.testing import SearxTestCase - - -class TestBaseEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - params = base.request(query, dicto) - self.assertIn('url', params) - self.assertIn('base-search.net', params['url']) - - def test_response(self): - self.assertRaises(AttributeError, base.response, None) - self.assertRaises(AttributeError, base.response, []) - self.assertRaises(AttributeError, base.response, '') - self.assertRaises(AttributeError, base.response, '[]') - - response = mock.Mock(content=b'') - self.assertEqual(base.response(response), []) - - xml_mock = b""" - - - 0 - 1 - - - - 2000-01-01T01:01:01Z - 1 - cna - us - ftciteseerx - CiteSeerX - Science and more - - Someone - - - Someone - - - Science and more - - Science, and even more. 
- - The neighbour - - 2001 - 2001 - - text - - - 1 - - - application/pdf - - - application/pdf - - - http://example.org/ - - http://example.org - http://example.org - - en - - Under the example.org licence - 1 - - eng - - - -""" - - response = mock.Mock(content=xml_mock) - results = base.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'Science and more') - self.assertEqual(results[0]['content'], 'Science, and even more.') diff --git a/tests/unit/engines/test_bing.py b/tests/unit/engines/test_bing.py deleted file mode 100644 index 387034735..000000000 --- a/tests/unit/engines/test_bing.py +++ /dev/null @@ -1,178 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import bing -from searx.testing import SearxTestCase - - -class TestBingEngine(SearxTestCase): - - def test_request(self): - bing.supported_languages = ['en', 'fr', 'zh-CHS', 'zh-CHT', 'pt-PT', 'pt-BR'] - query = u'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - dicto['language'] = 'fr-FR' - params = bing.request(query.encode('utf-8'), dicto) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertTrue('language%3AFR' in params['url']) - self.assertTrue('bing.com' in params['url']) - - dicto['language'] = 'all' - params = bing.request(query.encode('utf-8'), dicto) - self.assertTrue('language' in params['url']) - - def test_response(self): - dicto = defaultdict(dict) - dicto['pageno'] = 1 - dicto['language'] = 'fr-FR' - self.assertRaises(AttributeError, bing.response, None) - self.assertRaises(AttributeError, bing.response, []) - self.assertRaises(AttributeError, bing.response, '') - self.assertRaises(AttributeError, bing.response, '[]') - - response = mock.Mock(text='') - response.search_params = dicto - self.assertEqual(bing.response(response), []) - - response = mock.Mock(text='') - response.search_params = dicto - self.assertEqual(bing.response(response), []) - - html = """ -
-
- 23 900 000 résultats -
-
    -
    -
    - -
    this.meta.com - - - - -
    -

    This should be the content.

    -
    -
    -
-
- """ - response = mock.Mock(text=html) - response.search_params = dicto - results = bing.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 2) - self.assertEqual(results[0]['title'], 'This should be the title') - self.assertEqual(results[0]['url'], 'http://this.should.be.the.link/') - self.assertEqual(results[0]['content'], 'This should be the content.') - self.assertEqual(results[-1]['number_of_results'], 23900000) - - html = """ -
-
- 9-18 résultats sur 23 900 000 -
-
    -
  1. -
    - -
    this.meta.com - - - - -
    -

    This should be the content.

    -
    -
  2. -
-
- """ - dicto['pageno'] = 2 - response = mock.Mock(text=html) - response.search_params = dicto - results = bing.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 2) - self.assertEqual(results[0]['title'], 'This should be the title') - self.assertEqual(results[0]['url'], 'http://this.should.be.the.link/') - self.assertEqual(results[0]['content'], 'This should be the content.') - self.assertEqual(results[-1]['number_of_results'], 23900000) - - html = """ -
-
- 23 900 000 résultats -
-
    -
  1. -
    - -
    this.meta.com - - - - -
    -

    This should be the content.

    -
    -
  2. -
-
- """ - dicto['pageno'] = 33900000 - response = mock.Mock(text=html) - response.search_params = dicto - results = bing.response(response) - self.assertEqual(bing.response(response), []) - - def test_fetch_supported_languages(self): - html = """""" - response = mock.Mock(text=html) - results = bing._fetch_supported_languages(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) - - html = """ - - -
-
-
-
-
-
-
-
-
-
-
- - - """ - response = mock.Mock(text=html) - languages = bing._fetch_supported_languages(response) - self.assertEqual(type(languages), list) - self.assertEqual(len(languages), 3) - self.assertIn('es', languages) - self.assertIn('pt-BR', languages) - self.assertIn('pt-PT', languages) diff --git a/tests/unit/engines/test_bing_images.py b/tests/unit/engines/test_bing_images.py deleted file mode 100644 index a4efcab58..000000000 --- a/tests/unit/engines/test_bing_images.py +++ /dev/null @@ -1,132 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import bing_images -from searx.testing import SearxTestCase - - -class TestBingImagesEngine(SearxTestCase): - - def test_request(self): - bing_images.supported_languages = ['fr-FR', 'en-US'] - bing_images.language_aliases = {} - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - dicto['language'] = 'fr-FR' - dicto['safesearch'] = 1 - dicto['time_range'] = '' - params = bing_images.request(query, dicto) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertTrue('bing.com' in params['url']) - self.assertTrue('SRCHHPGUSR' in params['cookies']) - self.assertTrue('DEMOTE' in params['cookies']['SRCHHPGUSR']) - self.assertTrue('_EDGE_S' in params['cookies']) - self.assertTrue('fr-fr' in params['cookies']['_EDGE_S']) - - dicto['language'] = 'fr' - params = bing_images.request(query, dicto) - self.assertTrue('_EDGE_S' in params['cookies']) - self.assertTrue('fr-fr' in params['cookies']['_EDGE_S']) - - dicto['language'] = 'all' - params = bing_images.request(query, dicto) - self.assertTrue('_EDGE_S' in params['cookies']) - self.assertTrue('en-us' in params['cookies']['_EDGE_S']) - - def test_response(self): - self.assertRaises(AttributeError, bing_images.response, None) - self.assertRaises(AttributeError, bing_images.response, []) - self.assertRaises(AttributeError, bing_images.response, '') - self.assertRaises(AttributeError, bing_images.response, '[]') - - response = mock.Mock(text='') - self.assertEqual(bing_images.response(response), []) - - response = mock.Mock(text='') - self.assertEqual(bing_images.response(response), []) - - html = """ -
- - -
- """ - html = html.replace('\r\n', '').replace('\n', '').replace('\r', '') - response = mock.Mock(text=html) - results = bing_images.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 3) - self.assertEqual(results[0]['title'], 'Page 1 title') - self.assertEqual(results[0]['url'], 'page_url') - self.assertEqual(results[0]['content'], '') - self.assertEqual(results[0]['thumbnail_src'], 'thumb_url') - self.assertEqual(results[0]['img_src'], 'img_url') - self.assertEqual(results[0]['img_format'], '1 x 1 - jpeg') - self.assertEqual(results[0]['source'], '1.example.org') - - def test_fetch_supported_languages(self): - html = """ -
-
- - -
-
- """ - response = mock.Mock(text=html) - languages = list(bing_images._fetch_supported_languages(response)) - self.assertEqual(len(languages), 3) - self.assertIn('de-DE', languages) - self.assertIn('no-NO', languages) - self.assertIn('es-AR', languages) diff --git a/tests/unit/engines/test_bing_news.py b/tests/unit/engines/test_bing_news.py deleted file mode 100644 index 1155e79c4..000000000 --- a/tests/unit/engines/test_bing_news.py +++ /dev/null @@ -1,147 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import bing_news -from searx.testing import SearxTestCase -import lxml - - -class TestBingNewsEngine(SearxTestCase): - - def test_request(self): - bing_news.supported_languages = ['en', 'fr'] - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - dicto['language'] = 'fr-FR' - dicto['time_range'] = '' - params = bing_news.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('bing.com', params['url']) - self.assertIn('fr', params['url']) - - dicto['language'] = 'all' - params = bing_news.request(query, dicto) - self.assertIn('en', params['url']) - - def test_no_url_in_request_year_time_range(self): - dicto = defaultdict(dict) - query = 'test_query' - dicto['time_range'] = 'year' - params = bing_news.request(query, dicto) - self.assertEqual({}, params['url']) - - def test_response(self): - self.assertRaises(AttributeError, bing_news.response, None) - self.assertRaises(AttributeError, bing_news.response, []) - self.assertRaises(AttributeError, bing_news.response, '') - self.assertRaises(AttributeError, bing_news.response, '[]') - - response = mock.Mock(content='') - self.assertEqual(bing_news.response(response), []) - - response = mock.Mock(content='') - self.assertEqual(bing_news.response(response), []) - - html = """ - - - python - Bing News - https://www.bing.com:443/news/search?q=python&setmkt=en-US&first=1&format=RSS - Search results - - http://10.53.64.9/rsslogo.gif - test - https://www.bing.com:443/news/search?q=test&setmkt=en-US&first=1&format=RSS - - Copyright - - Title - https://www.bing.com/news/apiclick.aspx?ref=FexRss&aid=&tid=c237eccc50bd4758b106a5e3c94fce09&url=http%3a%2f%2furl.of.article%2f&c=xxxxxxxxx&mkt=en-us - Article Content - Tue, 02 Jun 2015 13:37:00 GMT - Infoworld - http://a1.bing4.com/th?id=ON.13371337133713371337133713371337&pid=News - w={0}&h={1}&c=7 - - 620 - 413 - - - Another Title - https://www.bing.com/news/apiclick.aspx?ref=FexRss&aid=&tid=c237eccc50bd4758b106a5e3c94fce09&url=http%3a%2f%2fanother.url.of.article%2f&c=xxxxxxxxx&mkt=en-us - Another Article Content - Tue, 02 Jun 2015 13:37:00 GMT - - -""" # noqa - response = mock.Mock(content=html.encode('utf-8')) - results = bing_news.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 2) - self.assertEqual(results[0]['title'], 'Title') - self.assertEqual(results[0]['url'], 'http://url.of.article/') - self.assertEqual(results[0]['content'], 'Article Content') - self.assertEqual(results[0]['img_src'], 'https://www.bing.com/th?id=ON.13371337133713371337133713371337') - self.assertEqual(results[1]['title'], 'Another Title') - self.assertEqual(results[1]['url'], 'http://another.url.of.article/') - self.assertEqual(results[1]['content'], 'Another Article Content') - self.assertNotIn('img_src', results[1]) - - html = """ - - - python - Bing News - https://www.bing.com:443/news/search?q=python&setmkt=en-US&first=1&format=RSS - Search results - - http://10.53.64.9/rsslogo.gif - 
test - https://www.bing.com:443/news/search?q=test&setmkt=en-US&first=1&format=RSS - - Copyright - - Title - http://another.url.of.article/ - Article Content - garbage - Infoworld - http://another.bing.com/image - w={0}&h={1}&c=7 - - 620 - 413 - - -""" # noqa - response = mock.Mock(content=html.encode('utf-8')) - results = bing_news.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'Title') - self.assertEqual(results[0]['url'], 'http://another.url.of.article/') - self.assertEqual(results[0]['content'], 'Article Content') - self.assertEqual(results[0]['img_src'], 'http://another.bing.com/image') - - html = """ - - - python - Bing News - https://www.bing.com:443/news/search?q=python&setmkt=en-US&first=1&format=RSS - Search results - - http://10.53.64.9/rsslogo.gif - test - https://www.bing.com:443/news/search?q=test&setmkt=en-US&first=1&format=RSS - - -""" # noqa - - response = mock.Mock(content=html.encode('utf-8')) - results = bing_news.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) - - html = """gabarge""" - response = mock.Mock(content=html.encode('utf-8')) - self.assertRaises(lxml.etree.XMLSyntaxError, bing_news.response, response) diff --git a/tests/unit/engines/test_bing_videos.py b/tests/unit/engines/test_bing_videos.py deleted file mode 100644 index 5e171eb53..000000000 --- a/tests/unit/engines/test_bing_videos.py +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import bing_videos -from searx.testing import SearxTestCase - - -class TestBingVideosEngine(SearxTestCase): - - def test_request(self): - bing_videos.supported_languages = ['fr-FR', 'en-US'] - bing_videos.language_aliases = {} - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - dicto['language'] = 'fr-FR' - dicto['safesearch'] = 0 - dicto['time_range'] = '' - params = bing_videos.request(query, dicto) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertTrue('bing.com' in params['url']) - self.assertTrue('SRCHHPGUSR' in params['cookies']) - self.assertTrue('OFF' in params['cookies']['SRCHHPGUSR']) - self.assertTrue('_EDGE_S' in params['cookies']) - self.assertTrue('fr-fr' in params['cookies']['_EDGE_S']) - - dicto['pageno'] = 2 - dicto['time_range'] = 'day' - dicto['safesearch'] = 2 - params = bing_videos.request(query, dicto) - self.assertTrue('first=29' in params['url']) - self.assertTrue('1440' in params['url']) - self.assertIn('SRCHHPGUSR', params['cookies']) - self.assertTrue('STRICT' in params['cookies']['SRCHHPGUSR']) - - def test_response(self): - self.assertRaises(AttributeError, bing_videos.response, None) - self.assertRaises(AttributeError, bing_videos.response, []) - self.assertRaises(AttributeError, bing_videos.response, '') - self.assertRaises(AttributeError, bing_videos.response, '[]') - - response = mock.Mock(text='') - self.assertEqual(bing_videos.response(response), []) - - response = mock.Mock(text='') - self.assertEqual(bing_videos.response(response), []) - - html = """ - - """ - response = mock.Mock(text=html) - results = bing_videos.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'Title 1') - self.assertEqual(results[0]['url'], 'https://www.example.com/watch?v=DEADBEEF') - self.assertEqual(results[0]['content'], '01:11 - 100 views - 1 year ago - 
ExampleTube - Channel 1') - self.assertEqual(results[0]['thumbnail'], 'https://www.bing.com/th?id=OVP.BINGTHUMB1') diff --git a/tests/unit/engines/test_btdigg.py b/tests/unit/engines/test_btdigg.py deleted file mode 100644 index 45ddaa6e3..000000000 --- a/tests/unit/engines/test_btdigg.py +++ /dev/null @@ -1,112 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import btdigg -from searx.testing import SearxTestCase - - -class TestBtdiggEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 0 - params = btdigg.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('btdig.com', params['url']) - - def test_response(self): - self.assertRaises(AttributeError, btdigg.response, None) - self.assertRaises(AttributeError, btdigg.response, []) - self.assertRaises(AttributeError, btdigg.response, '') - self.assertRaises(AttributeError, btdigg.response, '[]') - - response = mock.Mock(text='') - self.assertEqual(btdigg.response(response), []) - - html = u""" -
-
- -
-
-
- 4217 files 1 GBfound 3 years ago -
-
-
-
-
- -
- found 3 years ago -
-
-
-
-
3.9GBdeLibrosByHuasoFromHell(3de4)

-
Libros H-Z

-
H

H.H. Hollis - El truco de la espada-pdf.zip
17 KB
-
Hagakure - El Libro del Samurai-pdf.zip
95 KB
-
Hamsun, Knut (1859-1952)

-
Hamsun, Knut - Hambre-pdf.zip
786 KB
- -
-
-
- """ - response = mock.Mock(text=html.encode('utf-8')) - results = btdigg.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], '3.9GBdeLibrosByHuasoFromHell(3de4)') - self.assertEqual(results[0]['url'], - 'http://btdig.com/a72f35b7ee3a10928f02bb799e40ae5db701ed1c/pdf?q=pdf&p=1&order=0') - self.assertEqual(results[0]['content'], - '3.9GBdeLibrosByHuasoFromHell(3de4) | ' + - 'Libros H-Z | ' + - 'H H.H. Hollis - El truco de la espada-pdf.zip17 KB | ' + - 'Hagakure - El Libro del Samurai-pdf.zip95 KB | ' + - 'Hamsun, Knut (1859-1952) | Hamsun, Knut - Hambre-pdf.zip786 KB | ' + - '4214 hidden files1 GB') - self.assertEqual(results[0]['filesize'], 1 * 1024 * 1024 * 1024) - self.assertEqual(results[0]['files'], 4217) - self.assertEqual(results[0]['magnetlink'], - 'magnet:?xt=urn:btih:a72f35b7ee3a10928f02bb799e40ae5db701ed1c&dn=3.9GBdeLibrosBy...') - - html = """ -
- -
- """ - response = mock.Mock(text=html.encode('utf-8')) - results = btdigg.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) diff --git a/tests/unit/engines/test_currency_convert.py b/tests/unit/engines/test_currency_convert.py deleted file mode 100644 index fec194103..000000000 --- a/tests/unit/engines/test_currency_convert.py +++ /dev/null @@ -1,56 +0,0 @@ -from collections import defaultdict -from datetime import datetime -import mock -from searx.engines import currency_convert -from searx.testing import SearxTestCase - - -class TestCurrencyConvertEngine(SearxTestCase): - - def test_request(self): - query = b'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - params = currency_convert.request(query, dicto) - self.assertNotIn('url', params) - - query = b'convert 10 Pound Sterlings to United States Dollars' - params = currency_convert.request(query, dicto) - self.assertIn('url', params) - self.assertIn('duckduckgo.com', params['url']) - self.assertIn('GBP', params['url']) - self.assertIn('USD', params['url']) - - def test_response(self): - dicto = defaultdict(dict) - dicto['amount'] = float(10) - dicto['from'] = "GBP" - dicto['to'] = "USD" - dicto['from_name'] = "pound sterling" - dicto['to_name'] = "United States dollar" - response = mock.Mock(text='a,b,c,d', search_params=dicto) - self.assertEqual(currency_convert.response(response), []) - body = """ddg_spice_currency( - { - "conversion":{ - "converted-amount": "0.5" - }, - "topConversions":[ - { - }, - { - } - ] - } - ); - """ - response = mock.Mock(text=body, search_params=dicto) - results = currency_convert.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['answer'], '10.0 GBP = 5.0 USD, 1 GBP (pound sterling)' + - ' = 0.5 USD (United States dollar)') - - target_url = 'https://duckduckgo.com/js/spice/currency/1/{}/{}'.format( - dicto['from'], dicto['to']) - self.assertEqual(results[0]['url'], target_url) diff --git a/tests/unit/engines/test_dailymotion.py b/tests/unit/engines/test_dailymotion.py deleted file mode 100644 index ad7f3d283..000000000 --- a/tests/unit/engines/test_dailymotion.py +++ /dev/null @@ -1,112 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import dailymotion -from searx.testing import SearxTestCase - - -class TestDailymotionEngine(SearxTestCase): - - def test_request(self): - dailymotion.supported_languages = ['en', 'fr'] - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 0 - dicto['language'] = 'fr-FR' - params = dailymotion.request(query, dicto) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertTrue('dailymotion.com' in params['url']) - self.assertTrue('fr' in params['url']) - - dicto['language'] = 'all' - params = dailymotion.request(query, dicto) - self.assertTrue('en' in params['url']) - - def test_response(self): - self.assertRaises(AttributeError, dailymotion.response, None) - self.assertRaises(AttributeError, dailymotion.response, []) - self.assertRaises(AttributeError, dailymotion.response, '') - self.assertRaises(AttributeError, dailymotion.response, '[]') - - response = mock.Mock(text='{}') - self.assertEqual(dailymotion.response(response), []) - - response = mock.Mock(text='{"data": []}') - self.assertEqual(dailymotion.response(response), []) - - json = """ - { - "page": 1, - "limit": 5, - "explicit": false, - "total": 289487, - "has_more": true, - "list": 
[ - { - "created_time": 1422173451, - "title": "Title", - "description": "Description", - "duration": 81, - "url": "http://www.url", - "thumbnail_360_url": "http://thumbnail", - "id": "x2fit7q" - } - ] - } - """ - response = mock.Mock(text=json) - results = dailymotion.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'Title') - self.assertEqual(results[0]['url'], 'http://www.url') - self.assertEqual(results[0]['content'], 'Description') - self.assertIn('x2fit7q', results[0]['embedded']) - - json = r""" - {"toto":[ - {"id":200,"name":"Artist Name", - "link":"http:\/\/www.dailymotion.com\/artist\/1217","type":"artist"} - ]} - """ - response = mock.Mock(text=json) - results = dailymotion.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) - - def test_fetch_supported_languages(self): - json = r""" - {"list":[{"code":"af","name":"Afrikaans","native_name":"Afrikaans", - "localized_name":"Afrikaans","display_name":"Afrikaans"}, - {"code":"ar","name":"Arabic","native_name":"\u0627\u0644\u0639\u0631\u0628\u064a\u0629", - "localized_name":"Arabic","display_name":"Arabic"}, - {"code":"la","name":"Latin","native_name":null, - "localized_name":"Latin","display_name":"Latin"} - ]} - """ - response = mock.Mock(text=json) - languages = dailymotion._fetch_supported_languages(response) - self.assertEqual(type(languages), dict) - self.assertEqual(len(languages), 3) - self.assertIn('af', languages) - self.assertIn('ar', languages) - self.assertIn('la', languages) - - self.assertEqual(type(languages['af']), dict) - self.assertEqual(type(languages['ar']), dict) - self.assertEqual(type(languages['la']), dict) - - self.assertIn('name', languages['af']) - self.assertIn('name', languages['ar']) - self.assertNotIn('name', languages['la']) - - self.assertIn('english_name', languages['af']) - self.assertIn('english_name', languages['ar']) - self.assertIn('english_name', languages['la']) - - self.assertEqual(languages['af']['name'], 'Afrikaans') - self.assertEqual(languages['af']['english_name'], 'Afrikaans') - self.assertEqual(languages['ar']['name'], u'العربية') - self.assertEqual(languages['ar']['english_name'], 'Arabic') - self.assertEqual(languages['la']['english_name'], 'Latin') diff --git a/tests/unit/engines/test_deezer.py b/tests/unit/engines/test_deezer.py deleted file mode 100644 index 5b9f55c33..000000000 --- a/tests/unit/engines/test_deezer.py +++ /dev/null @@ -1,57 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import deezer -from searx.testing import SearxTestCase - - -class TestDeezerEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 0 - params = deezer.request(query, dicto) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertTrue('deezer.com' in params['url']) - - def test_response(self): - self.assertRaises(AttributeError, deezer.response, None) - self.assertRaises(AttributeError, deezer.response, []) - self.assertRaises(AttributeError, deezer.response, '') - self.assertRaises(AttributeError, deezer.response, '[]') - - response = mock.Mock(text='{}') - self.assertEqual(deezer.response(response), []) - - response = mock.Mock(text='{"data": []}') - self.assertEqual(deezer.response(response), []) - - json = r""" - {"data":[ - {"id":100, "title":"Title of track", - "link":"https:\/\/www.deezer.com\/track\/1094042","duration":232, - 
"artist":{"id":200,"name":"Artist Name", - "link":"https:\/\/www.deezer.com\/artist\/1217","type":"artist"}, - "album":{"id":118106,"title":"Album Title","type":"album"},"type":"track"} - ]} - """ - response = mock.Mock(text=json) - results = deezer.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'Title of track') - self.assertEqual(results[0]['url'], 'https://www.deezer.com/track/1094042') - self.assertEqual(results[0]['content'], 'Artist Name - Album Title - Title of track') - self.assertTrue('100' in results[0]['embedded']) - - json = r""" - {"data":[ - {"id":200,"name":"Artist Name", - "link":"https:\/\/www.deezer.com\/artist\/1217","type":"artist"} - ]} - """ - response = mock.Mock(text=json) - results = deezer.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) diff --git a/tests/unit/engines/test_deviantart.py b/tests/unit/engines/test_deviantart.py deleted file mode 100644 index a31151037..000000000 --- a/tests/unit/engines/test_deviantart.py +++ /dev/null @@ -1,24 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import deviantart -from searx.testing import SearxTestCase - - -class TestDeviantartEngine(SearxTestCase): - - def test_request(self): - dicto = defaultdict(dict) - query = 'test_query' - dicto['pageno'] = 0 - dicto['time_range'] = '' - params = deviantart.request(query, dicto) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertTrue('deviantart.com' in params['url']) - - def test_no_url_in_request_year_time_range(self): - dicto = defaultdict(dict) - query = 'test_query' - dicto['time_range'] = 'year' - params = deviantart.request(query, dicto) - self.assertEqual({}, params['url']) diff --git a/tests/unit/engines/test_digbt.py b/tests/unit/engines/test_digbt.py deleted file mode 100644 index 31c2ecabb..000000000 --- a/tests/unit/engines/test_digbt.py +++ /dev/null @@ -1,61 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import digbt -from searx.testing import SearxTestCase - - -class TestDigBTEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 0 - params = digbt.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('digbt.org', params['url']) - - def test_response(self): - self.assertRaises(AttributeError, digbt.response, None) - self.assertRaises(AttributeError, digbt.response, []) - self.assertRaises(AttributeError, digbt.response, '') - self.assertRaises(AttributeError, digbt.response, '[]') - - response = mock.Mock(text='') - self.assertEqual(digbt.response(response), []) - - html = """ - - -
-
- - The Big Bang Theory - - 4 hours ago -
-
-
    -
  • The Big Bang Theory 2.9 GB
  • -
  • ....
  • -
-
-
- Files: 1 Size: 2.9 GB Downloads: 1 Updated: 4 hours ago -     - - magnet-link - -     -
-
- """ - response = mock.Mock(text=html.encode('utf-8')) - results = digbt.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'The Big Bang Theory') - self.assertEqual(results[0]['url'], 'https://digbt.org/The-Big-Bang-Theory-d2.html') - self.assertEqual(results[0]['content'], 'The Big Bang Theory 2.9 GB ....') - self.assertEqual(results[0]['filesize'], 3113851289) - self.assertEqual(results[0]['magnetlink'], 'magnet:?xt=urn:btih:a&dn=The+Big+Bang+Theory') diff --git a/tests/unit/engines/test_digg.py b/tests/unit/engines/test_digg.py deleted file mode 100644 index 8bc4c67c2..000000000 --- a/tests/unit/engines/test_digg.py +++ /dev/null @@ -1,16 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import digg -from searx.testing import SearxTestCase - - -class TestDiggEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - params = digg.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('digg.com', params['url']) diff --git a/tests/unit/engines/test_doku.py b/tests/unit/engines/test_doku.py deleted file mode 100644 index 22ddb7a7f..000000000 --- a/tests/unit/engines/test_doku.py +++ /dev/null @@ -1,79 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import doku -from searx.testing import SearxTestCase - - -class TestDokuEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - params = doku.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - - def test_response(self): - self.assertRaises(AttributeError, doku.response, None) - self.assertRaises(AttributeError, doku.response, []) - self.assertRaises(AttributeError, doku.response, '') - self.assertRaises(AttributeError, doku.response, '[]') - - response = mock.Mock(text='') - self.assertEqual(doku.response(response), []) - - html = u""" -
-

Pages trouvées :

- -
-
- """ - response = mock.Mock(text=html) - results = doku.response(response) - expected = [{'content': '', 'title': 'xfconf-query', 'url': 'http://localhost:8090/xfconf-query'}] - self.assertEqual(doku.response(response), expected) - - html = u""" -
-
xvnc: 40 Occurrences trouvées
-
er = /usr/bin/Xvnc - server_args = -inetd -query localhost -geometry 640x480 ... er = /usr/bin/Xvnc - server_args = -inetd -query localhost -geometry 800x600 ... er = /usr/bin/Xvnc - server_args = -inetd -query localhost -geometry 1024x768 ... er = /usr/bin/Xvnc - server_args = -inetd -query localhost -geometry 1280x1024 -depth 8 -Sec
-
postfix_mysql_tls_sasl_1404: 14 Occurrences trouvées
-
tdepasse - hosts = 127.0.0.1 - dbname = postfix - query = SELECT goto FROM alias WHERE address='%s' AND a... tdepasse - hosts = 127.0.0.1 - dbname = postfix - query = SELECT domain FROM domain WHERE domain='%s' - #optional query to use when relaying for backup MX - #query = SELECT domain FROM domain WHERE domain='%s' and backupmx =
-
bind9: 12 Occurrences trouvées
-
printcmd -;; Got answer: -;; ->>HEADER<<- opcode: QUERY, status: NOERROR, id: 13427 -;; flags: qr aa rd ra; QUERY: 1, ANSWER: 1, AUTHORITY: 1, ADDITIONAL: 1 - -[...] - -;; Query time: 1 msec -;; SERVER: 127.0.0.1#53(127.0.0.1) -;... par la requête (Query time) , entre la première et la deuxième requête.
-
- """ - response = mock.Mock(text=html) - results = doku.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 3) - self.assertEqual(results[0]['title'], 'xvnc') -# FIXME self.assertEqual(results[0]['url'], u'http://this.should.be.the.link/ű') -# FIXME self.assertEqual(results[0]['content'], 'This should be the content.') diff --git a/tests/unit/engines/test_duckduckgo.py b/tests/unit/engines/test_duckduckgo.py deleted file mode 100644 index eb316a404..000000000 --- a/tests/unit/engines/test_duckduckgo.py +++ /dev/null @@ -1,106 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import load_engine, duckduckgo -from searx.testing import SearxTestCase - - -class TestDuckduckgoEngine(SearxTestCase): - - def test_request(self): - duckduckgo = load_engine({'engine': 'duckduckgo', 'name': 'duckduckgo'}) - - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - dicto['time_range'] = '' - - dicto['language'] = 'de-CH' - params = duckduckgo.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('duckduckgo.com', params['url']) - self.assertIn('ch-de', params['url']) - self.assertIn('s=0', params['url']) - - # when ddg uses non standard codes - dicto['language'] = 'zh-HK' - params = duckduckgo.request(query, dicto) - self.assertIn('hk-tzh', params['url']) - - dicto['language'] = 'en-GB' - params = duckduckgo.request(query, dicto) - self.assertIn('uk-en', params['url']) - - # no country given - dicto['language'] = 'en' - params = duckduckgo.request(query, dicto) - self.assertIn('us-en', params['url']) - - def test_no_url_in_request_year_time_range(self): - dicto = defaultdict(dict) - query = 'test_query' - dicto['time_range'] = 'year' - params = duckduckgo.request(query, dicto) - self.assertEqual({}, params['url']) - - def test_response(self): - self.assertRaises(AttributeError, duckduckgo.response, None) - self.assertRaises(AttributeError, duckduckgo.response, []) - self.assertRaises(AttributeError, duckduckgo.response, '') - self.assertRaises(AttributeError, duckduckgo.response, '[]') - - response = mock.Mock(text='') - self.assertEqual(duckduckgo.response(response), []) - - html = u""" - - """ - response = mock.Mock(text=html) - results = duckduckgo.response(response) - self.assertEqual(duckduckgo.response(response), []) - - html = u""" - - """ - response = mock.Mock(text=html) - results = duckduckgo.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'This is the title') - self.assertEqual(results[0]['url'], u'http://this.should.be.the.link/ű') - self.assertEqual(results[0]['content'], 'This should be the content.') - - def test_fetch_supported_languages(self): - js = """some code...regions:{ - "wt-wt":"All Results","ar-es":"Argentina","au-en":"Australia","at-de":"Austria","be-fr":"Belgium (fr)" - }some more code...""" - response = mock.Mock(text=js) - languages = list(duckduckgo._fetch_supported_languages(response)) - self.assertEqual(len(languages), 5) - self.assertIn('wt-WT', languages) - self.assertIn('es-AR', languages) - self.assertIn('en-AU', languages) - self.assertIn('de-AT', languages) - self.assertIn('fr-BE', languages) diff --git a/tests/unit/engines/test_duckduckgo_definitions.py b/tests/unit/engines/test_duckduckgo_definitions.py deleted file mode 100644 index 37587ed8d..000000000 --- a/tests/unit/engines/test_duckduckgo_definitions.py +++ 
/dev/null @@ -1,255 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import duckduckgo_definitions -from searx.testing import SearxTestCase - - -class TestDDGDefinitionsEngine(SearxTestCase): - - def test_result_to_text(self): - url = '' - text = 'Text' - html_result = 'Html' - result = duckduckgo_definitions.result_to_text(url, text, html_result) - self.assertEqual(result, text) - - html_result = 'Text in link' - result = duckduckgo_definitions.result_to_text(url, text, html_result) - self.assertEqual(result, 'Text in link') - - def test_request(self): - duckduckgo_definitions.supported_languages = ['en-US', 'es-ES'] - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - dicto['language'] = 'es' - params = duckduckgo_definitions.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('duckduckgo.com', params['url']) - self.assertIn('headers', params) - self.assertIn('Accept-Language', params['headers']) - self.assertIn('es', params['headers']['Accept-Language']) - - def test_response(self): - self.assertRaises(AttributeError, duckduckgo_definitions.response, None) - self.assertRaises(AttributeError, duckduckgo_definitions.response, []) - self.assertRaises(AttributeError, duckduckgo_definitions.response, '') - self.assertRaises(AttributeError, duckduckgo_definitions.response, '[]') - - response = mock.Mock(text='{}') - self.assertEqual(duckduckgo_definitions.response(response), []) - - response = mock.Mock(text='{"data": []}') - self.assertEqual(duckduckgo_definitions.response(response), []) - - json = """ - { - "DefinitionSource": "definition source", - "Heading": "heading", - "ImageWidth": 0, - "RelatedTopics": [ - { - "Result": "Top-level domains", - "Icon": { - "URL": "", - "Height": "", - "Width": "" - }, - "FirstURL": "https://first.url", - "Text": "text" - }, - { - "Topics": [ - { - "Result": "result topic", - "Icon": { - "URL": "", - "Height": "", - "Width": "" - }, - "FirstURL": "https://duckduckgo.com/?q=2%2F2", - "Text": "result topic text" - } - ], - "Name": "name" - } - ], - "Entity": "Entity", - "Type": "A", - "Redirect": "", - "DefinitionURL": "http://definition.url", - "AbstractURL": "https://abstract.url", - "Definition": "this is the definition", - "AbstractSource": "abstract source", - "Infobox": { - "content": [ - { - "data_type": "string", - "value": "1999", - "label": "Introduced", - "wiki_order": 0 - } - ], - "meta": [ - { - "data_type": "string", - "value": ".test", - "label": "article_title" - } - ] - }, - "Image": "image.png", - "ImageIsLogo": 0, - "Abstract": "abstract", - "AbstractText": "abstract text", - "AnswerType": "", - "ImageHeight": 0, - "Results": [{ - "Result" : "result title", - "Icon" : { - "URL" : "result url", - "Height" : 16, - "Width" : 16 - }, - "FirstURL" : "result first url", - "Text" : "result text" - } - ], - "Answer": "answer" - } - """ - response = mock.Mock(text=json) - results = duckduckgo_definitions.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 4) - self.assertEqual(results[0]['answer'], 'answer') - self.assertEqual(results[1]['title'], 'heading') - self.assertEqual(results[1]['url'], 'result first url') - self.assertEqual(results[2]['suggestion'], 'text') - self.assertEqual(results[3]['infobox'], 'heading') - self.assertEqual(results[3]['id'], 'https://definition.url') - self.assertEqual(results[3]['entity'], 'Entity') - self.assertIn('abstract', results[3]['content']) - self.assertIn('this 
is the definition', results[3]['content']) - self.assertEqual(results[3]['img_src'], 'image.png') - self.assertIn('Introduced', results[3]['attributes'][0]['label']) - self.assertIn('1999', results[3]['attributes'][0]['value']) - self.assertIn({'url': 'https://abstract.url', 'title': 'abstract source'}, results[3]['urls']) - self.assertIn({'url': 'http://definition.url', 'title': 'definition source'}, results[3]['urls']) - self.assertIn({'name': 'name', 'suggestions': ['result topic text']}, results[3]['relatedTopics']) - - json = """ - { - "DefinitionSource": "definition source", - "Heading": "heading", - "ImageWidth": 0, - "RelatedTopics": [], - "Entity": "Entity", - "Type": "A", - "Redirect": "", - "DefinitionURL": "", - "AbstractURL": "https://abstract.url", - "Definition": "", - "AbstractSource": "abstract source", - "Image": "", - "ImageIsLogo": 0, - "Abstract": "", - "AbstractText": "abstract text", - "AnswerType": "", - "ImageHeight": 0, - "Results": [], - "Answer": "" - } - """ - response = mock.Mock(text=json) - results = duckduckgo_definitions.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['url'], 'https://abstract.url') - self.assertEqual(results[0]['title'], 'heading') - self.assertEqual(results[0]['content'], '') - - json = """ - { - "DefinitionSource": "definition source", - "Heading": "heading", - "ImageWidth": 0, - "RelatedTopics": [ - { - "Result": "Top-level domains", - "Icon": { - "URL": "", - "Height": "", - "Width": "" - }, - "FirstURL": "https://first.url", - "Text": "heading" - }, - { - "Name": "name" - }, - { - "Topics": [ - { - "Result": "result topic", - "Icon": { - "URL": "", - "Height": "", - "Width": "" - }, - "FirstURL": "https://duckduckgo.com/?q=2%2F2", - "Text": "heading" - } - ], - "Name": "name" - } - ], - "Entity": "Entity", - "Type": "A", - "Redirect": "", - "DefinitionURL": "http://definition.url", - "AbstractURL": "https://abstract.url", - "Definition": "this is the definition", - "AbstractSource": "abstract source", - "Infobox": { - "meta": [ - { - "data_type": "string", - "value": ".test", - "label": "article_title" - } - ] - }, - "Image": "image.png", - "ImageIsLogo": 0, - "Abstract": "abstract", - "AbstractText": "abstract text", - "AnswerType": "", - "ImageHeight": 0, - "Results": [{ - "Result" : "result title", - "Icon" : { - "URL" : "result url", - "Height" : 16, - "Width" : 16 - }, - "Text" : "result text" - } - ], - "Answer": "" - } - """ - response = mock.Mock(text=json) - results = duckduckgo_definitions.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['infobox'], 'heading') - self.assertEqual(results[0]['id'], 'https://definition.url') - self.assertEqual(results[0]['entity'], 'Entity') - self.assertIn('abstract', results[0]['content']) - self.assertIn('this is the definition', results[0]['content']) - self.assertEqual(results[0]['img_src'], 'image.png') - self.assertIn({'url': 'https://abstract.url', 'title': 'abstract source'}, results[0]['urls']) - self.assertIn({'url': 'http://definition.url', 'title': 'definition source'}, results[0]['urls']) - self.assertIn({'name': 'name', 'suggestions': []}, results[0]['relatedTopics']) diff --git a/tests/unit/engines/test_duckduckgo_images.py b/tests/unit/engines/test_duckduckgo_images.py deleted file mode 100644 index 0d152bec1..000000000 --- a/tests/unit/engines/test_duckduckgo_images.py +++ /dev/null @@ -1,75 +0,0 @@ -# -*- coding: utf-8 -*- 
-from collections import defaultdict -import mock -from searx.engines import duckduckgo_images -from searx.testing import SearxTestCase - - -class TestDuckduckgoImagesEngine(SearxTestCase): - - def test_request(self): - duckduckgo_images.supported_languages = ['de-CH', 'en-US'] - query = 'test_query' - dicto = defaultdict(dict) - dicto['is_test'] = True - dicto['pageno'] = 1 - dicto['safesearch'] = 0 - dicto['language'] = 'all' - params = duckduckgo_images.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('duckduckgo.com', params['url']) - self.assertIn('s=0', params['url']) - self.assertIn('p=-1', params['url']) - self.assertIn('vqd=12345', params['url']) - - # test paging, safe search and language - dicto['pageno'] = 2 - dicto['safesearch'] = 2 - dicto['language'] = 'de' - params = duckduckgo_images.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('s=50', params['url']) - self.assertIn('p=1', params['url']) - self.assertIn('ch-de', params['url']) - - def test_response(self): - self.assertRaises(AttributeError, duckduckgo_images.response, None) - self.assertRaises(AttributeError, duckduckgo_images.response, []) - self.assertRaises(AttributeError, duckduckgo_images.response, '') - self.assertRaises(AttributeError, duckduckgo_images.response, '[]') - - response = mock.Mock(text='If this error persists, please let us know: ops@duckduckgo.com') - self.assertRaises(Exception, duckduckgo_images.response, response) - - json = u""" - { - "query": "test_query", - "results": [ - { - "title": "Result 1", - "url": "https://site1.url", - "thumbnail": "https://thumb1.nail", - "image": "https://image1" - }, - { - "title": "Result 2", - "url": "https://site2.url", - "thumbnail": "https://thumb2.nail", - "image": "https://image2" - } - ] - } - """ - response = mock.Mock(text=json) - results = duckduckgo_images.response(response) - self.assertEqual(len(results), 2) - self.assertEqual(results[0]['title'], 'Result 1') - self.assertEqual(results[0]['url'], 'https://site1.url') - self.assertEqual(results[0]['thumbnail_src'], 'https://thumb1.nail') - self.assertEqual(results[0]['img_src'], 'https://image1') - self.assertEqual(results[1]['title'], 'Result 2') - self.assertEqual(results[1]['url'], 'https://site2.url') - self.assertEqual(results[1]['thumbnail_src'], 'https://thumb2.nail') - self.assertEqual(results[1]['img_src'], 'https://image2') diff --git a/tests/unit/engines/test_duden.py b/tests/unit/engines/test_duden.py deleted file mode 100644 index 52fc513d0..000000000 --- a/tests/unit/engines/test_duden.py +++ /dev/null @@ -1,47 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import duden -from searx.testing import SearxTestCase -from datetime import datetime - - -class TestDudenEngine(SearxTestCase): - - def test_request(self): - query = 'Haus' - dic = defaultdict(dict) - data = [ - [1, 'https://www.duden.de/suchen/dudenonline/Haus'], - [2, 'https://www.duden.de/suchen/dudenonline/Haus?search_api_fulltext=&page=1'] - ] - for page_no, exp_res in data: - dic['pageno'] = page_no - params = duden.request(query, dic) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertTrue('duden.de' in params['url']) - self.assertEqual(params['url'], exp_res) - - def test_response(self): - resp = mock.Mock(text='') - self.assertEqual(duden.response(resp), []) - - html = """ -
-        This is the title also here
-        This is the content
- """ - resp = mock.Mock(text=html) - results = duden.response(resp) - - self.assertEqual(len(results), 1) - self.assertEqual(type(results), list) - - # testing result (dictionary entry) - r = results[0] - self.assertEqual(r['url'], 'https://www.duden.de/rechtschreibung/Haus') - self.assertEqual(r['title'], 'This is the title also here') - self.assertEqual(r['content'], 'This is the content') diff --git a/tests/unit/engines/test_dummy.py b/tests/unit/engines/test_dummy.py deleted file mode 100644 index 9399beaaf..000000000 --- a/tests/unit/engines/test_dummy.py +++ /dev/null @@ -1,26 +0,0 @@ -from searx.engines import dummy -from searx.testing import SearxTestCase - - -class TestDummyEngine(SearxTestCase): - - def test_request(self): - test_params = [ - [1, 2, 3], - ['a'], - [], - 1 - ] - for params in test_params: - self.assertEqual(dummy.request(None, params), params) - - def test_response(self): - responses = [ - None, - [], - True, - dict(), - tuple() - ] - for response in responses: - self.assertEqual(dummy.response(response), []) diff --git a/tests/unit/engines/test_faroo.py b/tests/unit/engines/test_faroo.py deleted file mode 100644 index 1bd9f51c3..000000000 --- a/tests/unit/engines/test_faroo.py +++ /dev/null @@ -1,113 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import faroo -from searx.testing import SearxTestCase - - -class TestFarooEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - dicto['language'] = 'fr-FR' - dicto['category'] = 'general' - params = faroo.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('faroo.com', params['url']) - self.assertIn('en', params['url']) - self.assertIn('web', params['url']) - - dicto['language'] = 'all' - params = faroo.request(query, dicto) - self.assertIn('en', params['url']) - - dicto['language'] = 'de-DE' - params = faroo.request(query, dicto) - self.assertIn('de', params['url']) - - def test_response(self): - self.assertRaises(AttributeError, faroo.response, None) - self.assertRaises(AttributeError, faroo.response, []) - self.assertRaises(AttributeError, faroo.response, '') - self.assertRaises(AttributeError, faroo.response, '[]') - - response = mock.Mock(text='{}') - self.assertEqual(faroo.response(response), []) - - response = mock.Mock(text='{"data": []}') - self.assertEqual(faroo.response(response), []) - - response = mock.Mock(text='{"data": []}', status_code=429) - self.assertRaises(Exception, faroo.response, response) - - json = """ - { - "results": [ - { - "title": "This is the title", - "kwic": "This is the content", - "content": "", - "url": "http://this.is.the.url/", - "iurl": "", - "domain": "css3test.com", - "author": "Jim Dalrymple", - "news": true, - "votes": "10", - "date": 1360622563000, - "related": [] - }, - { - "title": "This is the title2", - "kwic": "This is the content2", - "content": "", - "url": "http://this.is.the.url2/", - "iurl": "", - "domain": "css3test.com", - "author": "Jim Dalrymple", - "news": false, - "votes": "10", - "related": [] - }, - { - "title": "This is the title3", - "kwic": "This is the content3", - "content": "", - "url": "http://this.is.the.url3/", - "iurl": "http://upload.wikimedia.org/optimized.jpg", - "domain": "css3test.com", - "author": "Jim Dalrymple", - "news": false, - "votes": "10", - "related": [] - } - ], - "query": "test", - "suggestions": [], - "count": 100, - "start": 1, - "length": 10, - 
"time": "15" - } - """ - response = mock.Mock(text=json) - results = faroo.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 3) - self.assertEqual(results[0]['title'], 'This is the title') - self.assertEqual(results[0]['url'], 'http://this.is.the.url/') - self.assertEqual(results[0]['content'], 'This is the content') - self.assertEqual(results[1]['title'], 'This is the title2') - self.assertEqual(results[1]['url'], 'http://this.is.the.url2/') - self.assertEqual(results[1]['content'], 'This is the content2') - self.assertEqual(results[2]['thumbnail'], 'http://upload.wikimedia.org/optimized.jpg') - - json = """ - {} - """ - response = mock.Mock(text=json) - results = faroo.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) diff --git a/tests/unit/engines/test_fdroid.py b/tests/unit/engines/test_fdroid.py deleted file mode 100644 index 42a0a7148..000000000 --- a/tests/unit/engines/test_fdroid.py +++ /dev/null @@ -1,60 +0,0 @@ -import mock -from collections import defaultdict -from searx.engines import fdroid -from searx.testing import SearxTestCase - - -class TestFdroidEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dic = defaultdict(dict) - dic['pageno'] = 1 - params = fdroid.request(query, dic) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertTrue('search.f-droid.org' in params['url']) - - def test_response_empty(self): - resp = mock.Mock(text='') - self.assertEqual(fdroid.response(resp), []) - - def test_response_oneresult(self): - html = """ - - - - test - - - - - - """ - - resp = mock.Mock(text=html) - results = fdroid.response(resp) - - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['url'], 'https://example.com/app.url') - self.assertEqual(results[0]['title'], 'App Example 1') - self.assertEqual(results[0]['content'], 'Description App Example 1 - GPL-3.0-only') - self.assertEqual(results[0]['img_src'], 'https://example.com/appexample.logo.png') diff --git a/tests/unit/engines/test_flickr.py b/tests/unit/engines/test_flickr.py deleted file mode 100644 index be97647ce..000000000 --- a/tests/unit/engines/test_flickr.py +++ /dev/null @@ -1,142 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import flickr -from searx.testing import SearxTestCase - - -class TestFlickrEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 0 - params = flickr.request(query, dicto) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertTrue('flickr.com' in params['url']) - - def test_response(self): - self.assertRaises(AttributeError, flickr.response, None) - self.assertRaises(AttributeError, flickr.response, []) - self.assertRaises(AttributeError, flickr.response, '') - self.assertRaises(AttributeError, flickr.response, '[]') - - response = mock.Mock(text='{}') - self.assertEqual(flickr.response(response), []) - - response = mock.Mock(text='{"data": []}') - self.assertEqual(flickr.response(response), []) - - json = r""" - { "photos": { "page": 1, "pages": "41001", "perpage": 100, "total": "4100032", - "photo": [ - { "id": "15751017054", "owner": "66847915@N08", - "secret": "69c22afc40", "server": "7285", "farm": 8, - "title": "Photo title", "ispublic": 1, - "isfriend": 0, "isfamily": 0, - "description": { "_content": "Description" }, - "ownername": "Owner", - "url_o": 
"https:\/\/farm8.staticflickr.com\/7285\/15751017054_9178e0f963_o.jpg", - "height_o": "2100", "width_o": "2653", - "url_n": "https:\/\/farm8.staticflickr.com\/7285\/15751017054_69c22afc40_n.jpg", - "height_n": "253", "width_n": "320", - "url_z": "https:\/\/farm8.staticflickr.com\/7285\/15751017054_69c22afc40_z.jpg", - "height_z": "507", "width_z": "640" } - ] }, "stat": "ok" } - """ - response = mock.Mock(text=json) - results = flickr.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'Photo title') - self.assertEqual(results[0]['url'], 'https://www.flickr.com/photos/66847915@N08/15751017054') - self.assertTrue('o.jpg' in results[0]['img_src']) - self.assertTrue('n.jpg' in results[0]['thumbnail_src']) - self.assertTrue('Owner' in results[0]['author']) - self.assertTrue('Description' in results[0]['content']) - - json = r""" - { "photos": { "page": 1, "pages": "41001", "perpage": 100, "total": "4100032", - "photo": [ - { "id": "15751017054", "owner": "66847915@N08", - "secret": "69c22afc40", "server": "7285", "farm": 8, - "title": "Photo title", "ispublic": 1, - "isfriend": 0, "isfamily": 0, - "description": { "_content": "Description" }, - "ownername": "Owner", - "url_z": "https:\/\/farm8.staticflickr.com\/7285\/15751017054_69c22afc40_z.jpg", - "height_z": "507", "width_z": "640" } - ] }, "stat": "ok" } - """ - response = mock.Mock(text=json) - results = flickr.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'Photo title') - self.assertEqual(results[0]['url'], 'https://www.flickr.com/photos/66847915@N08/15751017054') - self.assertTrue('z.jpg' in results[0]['img_src']) - self.assertTrue('z.jpg' in results[0]['thumbnail_src']) - self.assertTrue('Owner' in results[0]['author']) - self.assertTrue('Description' in results[0]['content']) - - json = r""" - { "photos": { "page": 1, "pages": "41001", "perpage": 100, "total": "4100032", - "photo": [ - { "id": "15751017054", "owner": "66847915@N08", - "secret": "69c22afc40", "server": "7285", "farm": 8, - "title": "Photo title", "ispublic": 1, - "isfriend": 0, "isfamily": 0, - "description": { "_content": "Description" }, - "ownername": "Owner", - "url_o": "https:\/\/farm8.staticflickr.com\/7285\/15751017054_9178e0f963_o.jpg", - "height_o": "2100", "width_o": "2653" } - ] }, "stat": "ok" } - """ - response = mock.Mock(text=json) - results = flickr.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'Photo title') - self.assertEqual(results[0]['url'], 'https://www.flickr.com/photos/66847915@N08/15751017054') - self.assertTrue('o.jpg' in results[0]['img_src']) - self.assertTrue('o.jpg' in results[0]['thumbnail_src']) - self.assertTrue('Owner' in results[0]['author']) - self.assertTrue('Description' in results[0]['content']) - - json = r""" - { "photos": { "page": 1, "pages": "41001", "perpage": 100, "total": "4100032", - "photo": [ - { "id": "15751017054", "owner": "66847915@N08", - "secret": "69c22afc40", "server": "7285", "farm": 8, - "title": "Photo title", "ispublic": 1, - "isfriend": 0, "isfamily": 0, - "description": { "_content": "Description" }, - "ownername": "Owner", - "url_n": "https:\/\/farm8.staticflickr.com\/7285\/15751017054_69c22afc40_n.jpg", - "height_n": "253", "width_n": "320" } - ] }, "stat": "ok" } - """ - response = mock.Mock(text=json) - results = flickr.response(response) - 
self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) - - json = """ - { "photos": { "page": 1, "pages": "41001", "perpage": 100, "total": "4100032", - "toto": [] }, "stat": "ok" } - """ - response = mock.Mock(text=json) - results = flickr.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) - - json = r""" - {"toto":[ - {"id":200,"name":"Artist Name", - "link":"http:\/\/www.flickr.com\/artist\/1217","type":"artist"} - ]} - """ - response = mock.Mock(text=json) - results = flickr.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) diff --git a/tests/unit/engines/test_flickr_noapi.py b/tests/unit/engines/test_flickr_noapi.py deleted file mode 100644 index 67699f2f0..000000000 --- a/tests/unit/engines/test_flickr_noapi.py +++ /dev/null @@ -1,357 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import flickr_noapi -from searx.testing import SearxTestCase - - -class TestFlickrNoapiEngine(SearxTestCase): - - def test_build_flickr_url(self): - url = flickr_noapi.build_flickr_url("uid", "pid") - self.assertIn("uid", url) - self.assertIn("pid", url) - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - dicto['time_range'] = '' - params = flickr_noapi.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('flickr.com', params['url']) - - def test_response(self): - self.assertRaises(AttributeError, flickr_noapi.response, None) - self.assertRaises(AttributeError, flickr_noapi.response, []) - self.assertRaises(AttributeError, flickr_noapi.response, '') - self.assertRaises(AttributeError, flickr_noapi.response, '[]') - - response = mock.Mock(text='"modelExport:{"legend":[],"main":{"search-photos-lite-models":[{"photos":{}}]}}') - self.assertEqual(flickr_noapi.response(response), []) - - response = \ - mock.Mock(text='"modelExport:{"legend":[],"main":{"search-photos-lite-models":[{"photos":{"_data":[]}}]}}') - self.assertEqual(flickr_noapi.response(response), []) - - # everthing is ok test - json = """ - modelExport: { - "legend": [ - [ - "search-photos-lite-models", - "0", - "photos", - "_data", - "0" - ] - ], - "main": { - "search-photos-lite-models": [ - { - "photos": { - "_data": [ - { - "_flickrModelRegistry": "photo-lite-models", - "title": "This%20is%20the%20title", - "username": "Owner", - "pathAlias": "klink692", - "realname": "Owner", - "license": 0, - "ownerNsid": "59729010@N00", - "canComment": false, - "commentCount": 14, - "faveCount": 21, - "id": "14001294434", - "sizes": { - "c": { - "displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_c.jpg", - "width": 541, - "height": 800, - "url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_c.jpg", - "key": "c" - }, - "h": { - "displayUrl": "//farm8.staticflickr.com/7246/14001294434_761d32237a_h.jpg", - "width": 1081, - "height": 1600, - "url": "//c4.staticflickr.com/8/7246/14001294434_761d32237a_h.jpg", - "key": "h" - }, - "k": { - "displayUrl": "//farm8.staticflickr.com/7246/14001294434_f145a2c11a_k.jpg", - "width": 1383, - "height": 2048, - "url": "//c4.staticflickr.com/8/7246/14001294434_f145a2c11a_k.jpg", - "key": "k" - }, - "l": { - "displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_b.jpg", - "width": 692, - "height": 1024, - "url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_b.jpg", - "key": "l" - }, - "m": { - "displayUrl": 
"//farm8.staticflickr.com/7246/14001294434_410f653777.jpg", - "width": 338, - "height": 500, - "url": "//c4.staticflickr.com/8/7246/14001294434_410f653777.jpg", - "key": "m" - }, - "n": { - "displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_n.jpg", - "width": 216, - "height": 320, - "url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_n.jpg", - "key": "n" - }, - "q": { - "displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_q.jpg", - "width": 150, - "height": 150, - "url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_q.jpg", - "key": "q" - }, - "s": { - "displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_m.jpg", - "width": 162, - "height": 240, - "url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_m.jpg", - "key": "s" - }, - "sq": { - "displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_s.jpg", - "width": 75, - "height": 75, - "url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_s.jpg", - "key": "sq" - }, - "t": { - "displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_t.jpg", - "width": 68, - "height": 100, - "url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_t.jpg", - "key": "t" - }, - "z": { - "displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_z.jpg", - "width": 433, - "height": 640, - "url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_z.jpg", - "key": "z" - } - } - } - ] - } - } - ] - } - } - """ - # Flickr serves search results in a json block named 'modelExport' buried inside a script tag, - # this json is served as a single line terminating with a comma. - json = ''.join(json.split()) + ',\n' - response = mock.Mock(text=json) - results = flickr_noapi.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'This is the title') - self.assertEqual(results[0]['url'], 'https://www.flickr.com/photos/59729010@N00/14001294434') - self.assertIn('k.jpg', results[0]['img_src']) - self.assertIn('n.jpg', results[0]['thumbnail_src']) - self.assertIn('Owner', results[0]['author']) - - # no n size, only the z size - json = """ - modelExport: { - "legend": [ - [ - "search-photos-lite-models", - "0", - "photos", - "_data", - "0" - ] - ], - "main": { - "search-photos-lite-models": [ - { - "photos": { - "_data": [ - { - "_flickrModelRegistry": "photo-lite-models", - "title": "This%20is%20the%20title", - "username": "Owner", - "pathAlias": "klink692", - "realname": "Owner", - "license": 0, - "ownerNsid": "59729010@N00", - "canComment": false, - "commentCount": 14, - "faveCount": 21, - "id": "14001294434", - "sizes": { - "z": { - "displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_z.jpg", - "width": 433, - "height": 640, - "url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_z.jpg", - "key": "z" - } - } - } - ] - } - } - ] - } - } - """ - json = ''.join(json.split()) + ',\n' - response = mock.Mock(text=json) - results = flickr_noapi.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'This is the title') - self.assertEqual(results[0]['url'], 'https://www.flickr.com/photos/59729010@N00/14001294434') - self.assertIn('z.jpg', results[0]['img_src']) - self.assertIn('z.jpg', results[0]['thumbnail_src']) - self.assertIn('Owner', results[0]['author']) - - # no z or n size - json = """ - modelExport: { - "legend": [ - [ - "search-photos-lite-models", - "0", - "photos", - "_data", - "0" - ] - 
], - "main": { - "search-photos-lite-models": [ - { - "photos": { - "_data": [ - { - "_flickrModelRegistry": "photo-lite-models", - "title": "This%20is%20the%20title", - "username": "Owner", - "pathAlias": "klink692", - "realname": "Owner", - "license": 0, - "ownerNsid": "59729010@N00", - "canComment": false, - "commentCount": 14, - "faveCount": 21, - "id": "14001294434", - "sizes": { - "o": { - "displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_o.jpg", - "width": 433, - "height": 640, - "url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_o.jpg", - "key": "o" - } - } - } - ] - } - } - ] - } - } - """ - json = ''.join(json.split()) + ',\n' - response = mock.Mock(text=json) - results = flickr_noapi.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'This is the title') - self.assertEqual(results[0]['url'], 'https://www.flickr.com/photos/59729010@N00/14001294434') - self.assertIn('o.jpg', results[0]['img_src']) - self.assertIn('o.jpg', results[0]['thumbnail_src']) - self.assertIn('Owner', results[0]['author']) - - # no image test - json = """ - modelExport: { - "legend": [ - [ - "search-photos-lite-models", - "0", - "photos", - "_data", - "0" - ] - ], - "main": { - "search-photos-lite-models": [ - { - "photos": { - "_data": [ - { - "_flickrModelRegistry": "photo-lite-models", - "title": "This is the title", - "username": "Owner", - "pathAlias": "klink692", - "realname": "Owner", - "license": 0, - "ownerNsid": "59729010@N00", - "canComment": false, - "commentCount": 14, - "faveCount": 21, - "id": "14001294434", - "sizes": { - } - } - ] - } - } - ] - } - } - """ - json = ''.join(json.split()) + ',\n' - response = mock.Mock(text=json) - results = flickr_noapi.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) - - # null test - json = """ - modelExport: { - "legend": [null], - "main": { - "search-photos-lite-models": [ - { - "photos": { - "_data": [null] - } - } - ] - } - } - """ - json = ''.join(json.split()) + ',\n' - response = mock.Mock(text=json) - results = flickr_noapi.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) - - # garbage test - json = r""" - {"toto":[ - {"id":200,"name":"Artist Name", - "link":"http:\/\/www.flickr.com\/artist\/1217","type":"artist"} - ]} - """ - json = ''.join(json.split()) + ',\n' - response = mock.Mock(text=json) - results = flickr_noapi.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) diff --git a/tests/unit/engines/test_framalibre.py b/tests/unit/engines/test_framalibre.py deleted file mode 100644 index 850996372..000000000 --- a/tests/unit/engines/test_framalibre.py +++ /dev/null @@ -1,103 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import framalibre -from searx.testing import SearxTestCase - - -class TestFramalibreEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 0 - params = framalibre.request(query, dicto) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertTrue('framalibre.org' in params['url']) - - def test_response(self): - self.assertRaises(AttributeError, framalibre.response, None) - self.assertRaises(AttributeError, framalibre.response, []) - self.assertRaises(AttributeError, framalibre.response, '') - self.assertRaises(AttributeError, 
framalibre.response, '[]') - - response = mock.Mock(text='{}') - self.assertEqual(framalibre.response(response), []) - - response = mock.Mock(text='{"data": []}') - self.assertEqual(framalibre.response(response), []) - - html = u""" -
-        Gogs
-        Gogs est une interface web basée sur git et une bonne alternative à GitHub.
- """ - response = mock.Mock(text=html) - results = framalibre.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'Gogs') - self.assertEqual(results[0]['url'], - 'https://framalibre.org/content/gogs') - self.assertEqual(results[0]['content'], - u"Gogs est une interface web basée sur git et une bonne alternative à GitHub.") diff --git a/tests/unit/engines/test_frinkiac.py b/tests/unit/engines/test_frinkiac.py deleted file mode 100644 index 5ea220cd3..000000000 --- a/tests/unit/engines/test_frinkiac.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import frinkiac -from searx.testing import SearxTestCase - - -class TestFrinkiacEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - request_dict = defaultdict(dict) - params = frinkiac.request(query, request_dict) - self.assertTrue('url' in params) - - def test_response(self): - self.assertRaises(AttributeError, frinkiac.response, None) - self.assertRaises(AttributeError, frinkiac.response, []) - self.assertRaises(AttributeError, frinkiac.response, '') - self.assertRaises(AttributeError, frinkiac.response, '[]') - - text = """ -[{"Id":770931, - "Episode":"S06E18", - "Timestamp":534616, - "Filename":""}, - {"Id":1657080, - "Episode":"S12E14", - "Timestamp":910868, - "Filename":""}, - {"Id":1943753, - "Episode":"S14E21", - "Timestamp":773439, - "Filename":""}, - {"Id":107835, - "Episode":"S02E03", - "Timestamp":531709, - "Filename":""}] - """ - - response = mock.Mock(text=text) - results = frinkiac.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 4) - self.assertEqual(results[0]['title'], u'S06E18') - self.assertIn('p=caption', results[0]['url']) - self.assertIn('e=S06E18', results[0]['url']) - self.assertIn('t=534616', results[0]['url']) - self.assertEqual(results[0]['thumbnail_src'], 'https://frinkiac.com/img/S06E18/534616/medium.jpg') - self.assertEqual(results[0]['img_src'], 'https://frinkiac.com/img/S06E18/534616.jpg') diff --git a/tests/unit/engines/test_genius.py b/tests/unit/engines/test_genius.py deleted file mode 100644 index ea721943a..000000000 --- a/tests/unit/engines/test_genius.py +++ /dev/null @@ -1,231 +0,0 @@ -from collections import defaultdict -import mock -from datetime import datetime -from searx.engines import genius -from searx.testing import SearxTestCase - - -class TestGeniusEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - params = genius.request(query, dicto) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertTrue('genius.com' in params['url']) - - def test_response(self): - - json_empty = """ - { - "meta": { - "status": 200 - }, - "response": { - "sections": [ - { - "type": "top_hit", - "hits": [] - }, - { - "type": "song", - "hits": [] - }, - { - "type": "lyric", - "hits": [] - }, - { - "type": "artist", - "hits": [] - }, - { - "type": "album", - "hits": [] - }, - { - "type": "tag", - "hits": [] - }, - { - "type": "video", - "hits": [] - }, - { - "type": "article", - "hits": [] - }, - { - "type": "user", - "hits": [] - } - ] - } - } - """ - - resp = mock.Mock(text=json_empty) - self.assertEqual(genius.response(resp), []) - - json = """ - { - "meta": { - "status": 200 - }, - "response": { - "sections": [ - { - "type": "lyric", - "hits": [ - { - "highlights": [ - { - "property": 
"lyrics", - "value": "Sample lyrics", - "snippet": true, - "ranges": [] - } - ], - "index": "lyric", - "type": "song", - "result": { - "_type": "song", - "annotation_count": 45, - "api_path": "/songs/52916", - "full_title": "J't'emmerde by MC Jean Gab'1", - "header_image_thumbnail_url": "https://images.genius.com/xxx.300x300x1.jpg", - "header_image_url": "https://images.genius.com/ef9f736a86df3c3b1772f3fb7fbdb21c.1000x1000x1.jpg", - "id": 52916, - "instrumental": false, - "lyrics_owner_id": 15586, - "lyrics_state": "complete", - "lyrics_updated_at": 1498744545, - "path": "/Mc-jean-gab1-jtemmerde-lyrics", - "pyongs_count": 4, - "song_art_image_thumbnail_url": "https://images.genius.com/xxx.300x300x1.jpg", - "stats": { - "hot": false, - "unreviewed_annotations": 0, - "pageviews": 62490 - }, - "title": "J't'emmerde", - "title_with_featured": "J't'emmerde", - "updated_by_human_at": 1498744546, - "url": "https://genius.com/Mc-jean-gab1-jtemmerde-lyrics", - "primary_artist": { - "_type": "artist", - "api_path": "/artists/12691", - "header_image_url": "https://images.genius.com/c7847662a58f8c2b0f02a6e217d60907.960x657x1.jpg", - "id": 12691, - "image_url": "https://s3.amazonaws.com/rapgenius/Mc-jean-gab1.jpg", - "index_character": "m", - "is_meme_verified": false, - "is_verified": false, - "name": "MC Jean Gab'1", - "slug": "Mc-jean-gab1", - "url": "https://genius.com/artists/Mc-jean-gab1" - } - } - } - ] - }, - { - "type": "artist", - "hits": [ - { - "highlights": [], - "index": "artist", - "type": "artist", - "result": { - "_type": "artist", - "api_path": "/artists/191580", - "header_image_url": "https://assets.genius.com/images/default_avatar_300.png?1503090542", - "id": 191580, - "image_url": "https://assets.genius.com/images/default_avatar_300.png?1503090542", - "index_character": "a", - "is_meme_verified": false, - "is_verified": false, - "name": "ASDF Guy", - "slug": "Asdf-guy", - "url": "https://genius.com/artists/Asdf-guy" - } - } - ] - }, - { - "type": "album", - "hits": [ - { - "highlights": [], - "index": "album", - "type": "album", - "result": { - "_type": "album", - "api_path": "/albums/132332", - "cover_art_thumbnail_url": "https://images.genius.com/xxx.300x300x1.jpg", - "cover_art_url": "https://images.genius.com/xxx.600x600x1.jpg", - "full_title": "ASD by A Skylit Drive", - "id": 132332, - "name": "ASD", - "name_with_artist": "ASD (artist: A Skylit Drive)", - "release_date_components": { - "year": 2015, - "month": null, - "day": null - }, - "url": "https://genius.com/albums/A-skylit-drive/Asd", - "artist": { - "_type": "artist", - "api_path": "/artists/48712", - "header_image_url": "https://images.genius.com/814c1551293172c56306d0e310c6aa89.620x400x1.jpg", - "id": 48712, - "image_url": "https://images.genius.com/814c1551293172c56306d0e310c6aa89.620x400x1.jpg", - "index_character": "s", - "is_meme_verified": false, - "is_verified": false, - "name": "A Skylit Drive", - "slug": "A-skylit-drive", - "url": "https://genius.com/artists/A-skylit-drive" - } - } - } - ] - } - ] - } - } - """ - - resp = mock.Mock(text=json) - results = genius.response(resp) - - self.assertEqual(len(results), 3) - self.assertEqual(type(results), list) - - # check lyric parsing - r = results[0] - self.assertEqual(r['url'], 'https://genius.com/Mc-jean-gab1-jtemmerde-lyrics') - self.assertEqual(r['title'], "J't'emmerde by MC Jean Gab'1") - self.assertEqual(r['content'], "Sample lyrics") - self.assertEqual(r['template'], 'videos.html') - self.assertEqual(r['thumbnail'], 
'https://images.genius.com/xxx.300x300x1.jpg') - created = datetime.fromtimestamp(1498744545) - self.assertEqual(r['publishedDate'], created) - - # check artist parsing - r = results[1] - self.assertEqual(r['url'], 'https://genius.com/artists/Asdf-guy') - self.assertEqual(r['title'], "ASDF Guy") - self.assertEqual(r['content'], None) - self.assertEqual(r['template'], 'videos.html') - self.assertEqual(r['thumbnail'], 'https://assets.genius.com/images/default_avatar_300.png?1503090542') - - # check album parsing - r = results[2] - self.assertEqual(r['url'], 'https://genius.com/albums/A-skylit-drive/Asd') - self.assertEqual(r['title'], "ASD by A Skylit Drive") - self.assertEqual(r['content'], "Released: 2015") - self.assertEqual(r['template'], 'videos.html') - self.assertEqual(r['thumbnail'], 'https://images.genius.com/xxx.600x600x1.jpg') diff --git a/tests/unit/engines/test_gigablast.py b/tests/unit/engines/test_gigablast.py deleted file mode 100644 index 6b2d26458..000000000 --- a/tests/unit/engines/test_gigablast.py +++ /dev/null @@ -1,119 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import gigablast -from searx.testing import SearxTestCase - - -class TestGigablastEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 0 - dicto['safesearch'] = 0 - dicto['language'] = 'all' - params = gigablast.request(query, dicto) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertTrue('gigablast.com' in params['url']) - self.assertTrue('xx' in params['url']) - - dicto['language'] = 'en-US' - params = gigablast.request(query, dicto) - self.assertTrue('en' in params['url']) - self.assertFalse('en-US' in params['url']) - - def test_response(self): - self.assertRaises(AttributeError, gigablast.response, None) - self.assertRaises(AttributeError, gigablast.response, []) - self.assertRaises(AttributeError, gigablast.response, '') - self.assertRaises(AttributeError, gigablast.response, '[]') - - response = mock.Mock(text='{"results": []}') - self.assertEqual(gigablast.response(response), []) - - json = """{"results": [ - { - "title":"South by Southwest 2016", - "dmozEntry":{ - "dmozCatId":1041152, - "directCatId":1, - "dmozCatStr":"Top: Regional: North America: United States", - "dmozTitle":"South by Southwest (SXSW)", - "dmozSum":"Annual music, film, and interactive conference.", - "dmozAnchor":"" - }, - "dmozEntry":{ - "dmozCatId":763945, - "directCatId":1, - "dmozCatStr":"Top: Regional: North America: United States", - "dmozTitle":"South by Southwest (SXSW)", - "dmozSum":"", - "dmozAnchor":"www.sxsw.com" - }, - "dmozEntry":{ - "dmozCatId":761446, - "directCatId":1, - "dmozCatStr":"Top: Regional: North America: United States", - "dmozTitle":"South by Southwest (SXSW)", - "dmozSum":"Music, film, and interactive conference and festival.", - "dmozAnchor":"" - }, - "indirectDmozCatId":1041152, - "indirectDmozCatId":763945, - "indirectDmozCatId":761446, - "contentType":"html", - "sum":"This should be the content.", - "url":"www.sxsw.com", - "hopCount":0, - "size":" 102k", - "sizeInBytes":104306, - "bytesUsedToComputeSummary":70000, - "docId":269411794364, - "docScore":586571136.000000, - "summaryGenTimeMS":12, - "summaryTagdbLookupTimeMS":0, - "summaryTitleRecLoadTimeMS":1, - "site":"www.sxsw.com", - "spidered":1452203608, - "firstIndexedDateUTC":1444167123, - "contentHash32":2170650347, - "language":"English", - "langAbbr":"en" - } -]} - """ - response = mock.Mock(text=json) - 
results = gigablast.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'South by Southwest 2016') - self.assertEqual(results[0]['url'], 'www.sxsw.com') - self.assertEqual(results[0]['content'], 'This should be the content.') - - def test_fetch_supported_languages(self): - html = """""" - response = mock.Mock(text=html) - results = gigablast._fetch_supported_languages(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) - - html = """ - - - - - - - - - - - - """ - response = mock.Mock(text=html) - languages = gigablast._fetch_supported_languages(response) - self.assertEqual(type(languages), list) - self.assertEqual(len(languages), 2) - self.assertIn('en', languages) - self.assertIn('zh-TW', languages) diff --git a/tests/unit/engines/test_github.py b/tests/unit/engines/test_github.py deleted file mode 100644 index 460be8c3d..000000000 --- a/tests/unit/engines/test_github.py +++ /dev/null @@ -1,61 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import github -from searx.testing import SearxTestCase - - -class TestGitHubEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - params = github.request(query, defaultdict(dict)) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertTrue('github.com' in params['url']) - self.assertEqual(params['headers']['Accept'], github.accept_header) - - def test_response(self): - self.assertRaises(AttributeError, github.response, None) - self.assertRaises(AttributeError, github.response, []) - self.assertRaises(AttributeError, github.response, '') - self.assertRaises(AttributeError, github.response, '[]') - - response = mock.Mock(text='{}') - self.assertEqual(github.response(response), []) - - response = mock.Mock(text='{"items": []}') - self.assertEqual(github.response(response), []) - - json = """ - { - "items": [ - { - "name": "title", - "html_url": "url", - "description": "" - } - ] - } - """ - response = mock.Mock(text=json) - results = github.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'title') - self.assertEqual(results[0]['url'], 'url') - self.assertEqual(results[0]['content'], '') - - json = """ - { - "items": [ - { - "name": "title", - "html_url": "url", - "description": "desc" - } - ] - } - """ - response = mock.Mock(text=json) - results = github.response(response) - self.assertEqual(results[0]['content'], "desc") diff --git a/tests/unit/engines/test_google.py b/tests/unit/engines/test_google.py deleted file mode 100644 index 9d0edd439..000000000 --- a/tests/unit/engines/test_google.py +++ /dev/null @@ -1,194 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -import lxml -from searx.engines import google -from searx.testing import SearxTestCase - - -class TestGoogleEngine(SearxTestCase): - - def mock_response(self, text): - response = mock.Mock(text=text, url='https://www.google.com/search?q=test&start=0&gbv=1&gws_rd=cr') - response.search_params = mock.Mock() - response.search_params.get = mock.Mock(return_value='www.google.com') - return response - - def test_request(self): - google.supported_languages = ['en', 'fr', 'zh-CN', 'iw'] - google.language_aliases = {'he': 'iw'} - - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - dicto['language'] = 'fr-FR' - dicto['time_range'] = '' - params = 
google.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('google.fr', params['url']) - self.assertIn('fr', params['url']) - self.assertIn('fr', params['headers']['Accept-Language']) - - dicto['language'] = 'en-US' - params = google.request(query, dicto) - self.assertIn('google.com', params['url']) - self.assertIn('en', params['url']) - self.assertIn('en', params['headers']['Accept-Language']) - - dicto['language'] = 'zh' - params = google.request(query, dicto) - self.assertIn('google.com', params['url']) - self.assertIn('zh-CN', params['url']) - self.assertIn('zh-CN', params['headers']['Accept-Language']) - - dicto['language'] = 'he' - params = google.request(query, dicto) - self.assertIn('google.com', params['url']) - self.assertIn('iw', params['url']) - self.assertIn('iw', params['headers']['Accept-Language']) - - def test_response(self): - self.assertRaises(AttributeError, google.response, None) - self.assertRaises(AttributeError, google.response, []) - self.assertRaises(AttributeError, google.response, '') - self.assertRaises(AttributeError, google.response, '[]') - - response = self.mock_response('') - self.assertEqual(google.response(response), []) - - html = """ -
-        This should be the content.
-        Related searches

- """ - response = self.mock_response(html) - results = google.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 2) - self.assertEqual(results[0]['title'], 'This is the title') - self.assertEqual(results[0]['url'], 'http://this.should.be.the.link/') - self.assertEqual(results[0]['content'], 'This should be the content.') - self.assertEqual(results[1]['suggestion'], 'suggestion title') - - html = """ -
  • - """ - response = self.mock_response(html) - results = google.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) - - response = mock.Mock(text='', url='https://sorry.google.com') - response.search_params = mock.Mock() - response.search_params.get = mock.Mock(return_value='www.google.com') - self.assertRaises(RuntimeWarning, google.response, response) - - response = mock.Mock(text='', url='https://www.google.com/sorry/IndexRedirect') - response.search_params = mock.Mock() - response.search_params.get = mock.Mock(return_value='www.google.com') - self.assertRaises(RuntimeWarning, google.response, response) - - def test_parse_images(self): - html = """ -
  • - """ - dom = lxml.html.fromstring(html) - results = google.parse_images(dom, 'www.google.com') - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['url'], 'http://this.is.the.url/') - self.assertEqual(results[0]['title'], '') - self.assertEqual(results[0]['content'], '') - self.assertEqual(results[0]['img_src'], 'https://this.is.the.image/image.jpg') - - def test_fetch_supported_languages(self): - html = """""" - response = mock.Mock(text=html) - languages = google._fetch_supported_languages(response) - self.assertEqual(type(languages), dict) - self.assertEqual(len(languages), 0) - - html = u""" - - -
    - - - """ - response = mock.Mock(text=html) - languages = google._fetch_supported_languages(response) - self.assertEqual(type(languages), dict) - self.assertEqual(len(languages), 3) - - self.assertIn('en', languages) - self.assertIn('zh-CN', languages) - self.assertIn('zh-TW', languages) - - self.assertEquals(type(languages['en']), dict) - self.assertEquals(type(languages['zh-CN']), dict) - self.assertEquals(type(languages['zh-TW']), dict) - - self.assertIn('name', languages['en']) - self.assertIn('name', languages['zh-CN']) - self.assertIn('name', languages['zh-TW']) - - self.assertEquals(languages['en']['name'], 'English') - self.assertEquals(languages['zh-CN']['name'], u'中文 (简体)') - self.assertEquals(languages['zh-TW']['name'], u'中文 (繁體)') diff --git a/tests/unit/engines/test_google_images.py b/tests/unit/engines/test_google_images.py deleted file mode 100644 index 8366e1b08..000000000 --- a/tests/unit/engines/test_google_images.py +++ /dev/null @@ -1,27 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import google_images -from searx.testing import SearxTestCase - - -class TestGoogleImagesEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - dicto['safesearch'] = 1 - dicto['time_range'] = '' - params = google_images.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - - dicto['safesearch'] = 0 - params = google_images.request(query, dicto) - self.assertNotIn('safe', params['url']) - - def test_response(self): - self.assertRaises(AttributeError, google_images.response, None) - self.assertRaises(AttributeError, google_images.response, []) - self.assertRaises(AttributeError, google_images.response, '') - self.assertRaises(AttributeError, google_images.response, '[]') diff --git a/tests/unit/engines/test_google_news.py b/tests/unit/engines/test_google_news.py deleted file mode 100644 index 0a122ca6d..000000000 --- a/tests/unit/engines/test_google_news.py +++ /dev/null @@ -1,102 +0,0 @@ -# -*- coding: utf-8 -*- - -from collections import defaultdict -import mock -from searx.engines import google_news -from searx.testing import SearxTestCase - - -class TestGoogleNewsEngine(SearxTestCase): - - def test_request(self): - google_news.supported_languages = ['en-US', 'fr-FR'] - google_news.language_aliases = {} - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - dicto['language'] = 'fr-FR' - dicto['time_range'] = 'w' - params = google_news.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('fr', params['url']) - - dicto['language'] = 'all' - params = google_news.request(query, dicto) - self.assertIn('url', params) - self.assertNotIn('fr', params['url']) - - def test_response(self): - self.assertRaises(AttributeError, google_news.response, None) - self.assertRaises(AttributeError, google_news.response, []) - self.assertRaises(AttributeError, google_news.response, '') - self.assertRaises(AttributeError, google_news.response, '[]') - - response = mock.Mock(text='{}') - self.assertEqual(google_news.response(response), []) - - response = mock.Mock(text='{"data": []}') - self.assertEqual(google_news.response(response), []) - - html = u""" -

-        Search Results
-        Example title
-        Mac & i
-        Mar 21, 2016
-        Example description
-        Story image for searx from Golem.de
-        Example title 2
-        Golem.de
-        Oct 4, 2016
-        Example description 2
    - - - """ # noqa - response = mock.Mock(text=html) - results = google_news.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 2) - self.assertEqual(results[0]['title'], u'Example title') - self.assertEqual(results[0]['url'], 'https://example.com/') - self.assertEqual(results[0]['content'], 'Example description') - self.assertEqual(results[1]['title'], u'Example title 2') - self.assertEqual(results[1]['url'], 'https://example2.com/') - self.assertEqual(results[1]['content'], 'Example description 2') - self.assertEqual(results[1]['img_src'], 'https://example2.com/image.jpg') diff --git a/tests/unit/engines/test_google_videos.py b/tests/unit/engines/test_google_videos.py deleted file mode 100644 index 3b7edf373..000000000 --- a/tests/unit/engines/test_google_videos.py +++ /dev/null @@ -1,79 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import google_videos -from searx.testing import SearxTestCase - - -class TestGoogleVideosEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - dicto['safesearch'] = 1 - dicto['time_range'] = '' - params = google_videos.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - - dicto['safesearch'] = 0 - params = google_videos.request(query, dicto) - self.assertNotIn('safe', params['url']) - - def test_response(self): - self.assertRaises(AttributeError, google_videos.response, None) - self.assertRaises(AttributeError, google_videos.response, []) - self.assertRaises(AttributeError, google_videos.response, '') - self.assertRaises(AttributeError, google_videos.response, '[]') - - html = r""" -
-        Title 1
-        Content 1
-        Title 2
-        Content 2
    - - """ - response = mock.Mock(text=html) - results = google_videos.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 2) - self.assertEqual(results[0]['url'], u'url_1') - self.assertEqual(results[0]['title'], u'Title 1') - self.assertEqual(results[0]['content'], u'Content 1') - self.assertEqual(results[1]['url'], u'url_2') - self.assertEqual(results[1]['title'], u'Title 2') - self.assertEqual(results[1]['content'], u'Content 2') diff --git a/tests/unit/engines/test_ina.py b/tests/unit/engines/test_ina.py deleted file mode 100644 index 109a9592d..000000000 --- a/tests/unit/engines/test_ina.py +++ /dev/null @@ -1,64 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import ina -from searx.testing import SearxTestCase - - -class TestInaEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 0 - params = ina.request(query, dicto) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertTrue('ina.fr' in params['url']) - - def test_response(self): - self.assertRaises(AttributeError, ina.response, None) - self.assertRaises(AttributeError, ina.response, []) - self.assertRaises(AttributeError, ina.response, '') - self.assertRaises(AttributeError, ina.response, '[]') - - response = mock.Mock(text='{}') - self.assertEqual(ina.response(response), []) - - response = mock.Mock(text='{"data": []}') - self.assertEqual(ina.response(response), []) - - json = """ - {"content":"\\t
    \\n\\t\\n\ - \\n
    \\n\ -
    \\n\ - \\t\\t\\t\\t\\n\ - \\"Conf\\u00e9rence\\n\ - \\t\\t\\t\\t\\t<\\/a>\\n\ - \\t\\t\\t\\t\\t
    \\n\\t\\t\\t\\t\\t\\t

    \\n\ - \\t\\t\\t\\t\\t\\t\\t\ - Conf\\u00e9rence de presse du G\\u00e9n\\u00e9ral de Gaulle <\\/a>\\n\ - <\\/h3>\\n\ -
    \\n27\\/11\\/1967<\\/span>\\n\ - 29321 vues<\\/span>\\n\ - 01h 33m 07s<\\/span>\\n\ - <\\/div>\\n\ -

    VERSION INTEGRALE DE LA CONFERENCE DE PRESSE DU GENERAL DE GAULLE . \ - - PA le Pr\\u00e9sident DE GAULLE : il ouvre les bras et s'assied. DP journalis...<\\/p>\\n\ - <\\/div>\\n<\\/div>\\n" - } - """ - response = mock.Mock(text=json) - results = ina.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], u'Conf\xe9rence de presse du G\xe9n\xe9ral de Gaulle') - self.assertEqual(results[0]['url'], - 'https://www.ina.fr/video/CAF89035682/conference-de-presse-du-general-de-gaulle-video.html') - self.assertEqual(results[0]['content'], - u"VERSION INTEGRALE DE LA CONFERENCE DE PRESSE DU GENERAL DE GAULLE ." - u" - PA le Pr\u00e9sident DE GAULLE : il ouvre les bras et s'assied. DP journalis...") diff --git a/tests/unit/engines/test_kickass.py b/tests/unit/engines/test_kickass.py deleted file mode 100644 index 3a75c6697..000000000 --- a/tests/unit/engines/test_kickass.py +++ /dev/null @@ -1,397 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import kickass -from searx.testing import SearxTestCase - - -class TestKickassEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - params = kickass.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('kickass.cd', params['url']) - self.assertFalse(params['verify']) - - def test_response(self): - self.assertRaises(AttributeError, kickass.response, None) - self.assertRaises(AttributeError, kickass.response, []) - self.assertRaises(AttributeError, kickass.response, '') - self.assertRaises(AttributeError, kickass.response, '[]') - - response = mock.Mock(text='') - self.assertEqual(kickass.response(response), []) - - html = """ - - - - - - - - - - - - - - - - - -
-        torrent name | size | files | age | seed | leech
-        This should be the title
-        Posted by riri in Other > Unsorted
-        449 bytes | 4 | 2 years | 10 | 1
    - """ - response = mock.Mock(text=html) - results = kickass.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'This should be the title') - self.assertEqual(results[0]['url'], 'https://kickass.cd/url.html') - self.assertEqual(results[0]['content'], 'Posted by riri in Other > Unsorted') - self.assertEqual(results[0]['seed'], 10) - self.assertEqual(results[0]['leech'], 1) - self.assertEqual(results[0]['filesize'], 449) - self.assertEqual(results[0]['files'], 4) - self.assertEqual(results[0]['magnetlink'], 'magnet:?xt=urn:btih:MAGNETURL&dn=test') - self.assertEqual(results[0]['torrentfile'], 'http://torcache.net/torrent/53917.torrent?title=test') - - html = """ - - - - - - - - - -
-        torrent name | size | files | age | seed | leech
    - """ - response = mock.Mock(text=html) - results = kickass.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) - - html = """ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-        torrent name | size | files | age | seed | leech
-        This should be the title
-        Posted by riri in Other > Unsorted
-        1 KiB | 4 | 2 years | 10 | 1
-        This should be the title
-        Posted by riri in Other > Unsorted
-        1 MiB | 4 | 2 years | 9 | 1
-        This should be the title
-        Posted by riri in Other > Unsorted
-        1 GiB | 4 | 2 years | 8 | 1
-        This should be the title
-        Posted by riri in Other > Unsorted
-        1 TiB | 4 | 2 years | 7 | 1
-        This should be the title
-        Posted by riri in Other > Unsorted
-        z bytes | r | 2 years | a | t
    - """ - response = mock.Mock(text=html) - results = kickass.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 5) - self.assertEqual(results[0]['title'], 'This should be the title') - self.assertEqual(results[0]['url'], 'https://kickass.cd/url.html') - self.assertEqual(results[0]['content'], 'Posted by riri in Other > Unsorted') - self.assertEqual(results[0]['seed'], 10) - self.assertEqual(results[0]['leech'], 1) - self.assertEqual(results[0]['files'], 4) - self.assertEqual(results[0]['magnetlink'], 'magnet:?xt=urn:btih:MAGNETURL&dn=test') - self.assertEqual(results[0]['torrentfile'], 'http://torcache.net/torrent/53917.torrent?title=test') - self.assertEqual(results[0]['filesize'], 1000) - self.assertEqual(results[1]['filesize'], 1000000) - self.assertEqual(results[2]['filesize'], 1000000000) - self.assertEqual(results[3]['filesize'], 1000000000000) - self.assertEqual(results[4]['seed'], 0) - self.assertEqual(results[4]['leech'], 0) - self.assertEqual(results[4]['files'], None) - self.assertEqual(results[4]['filesize'], None) diff --git a/tests/unit/engines/test_mediawiki.py b/tests/unit/engines/test_mediawiki.py deleted file mode 100644 index b86372700..000000000 --- a/tests/unit/engines/test_mediawiki.py +++ /dev/null @@ -1,130 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import mediawiki -from searx.testing import SearxTestCase - - -class TestMediawikiEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - dicto['language'] = 'fr_FR' - params = mediawiki.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('wikipedia.org', params['url']) - self.assertIn('fr', params['url']) - - dicto['language'] = 'all' - params = mediawiki.request(query, dicto) - self.assertIn('en', params['url']) - - mediawiki.base_url = "http://test.url/" - mediawiki.search_url = mediawiki.base_url +\ - 'w/api.php?action=query'\ - '&list=search'\ - '&{query}'\ - '&srprop=timestamp'\ - '&format=json'\ - '&sroffset={offset}'\ - '&srlimit={limit}' # noqa - params = mediawiki.request(query, dicto) - self.assertIn('test.url', params['url']) - - def test_response(self): - dicto = defaultdict(dict) - dicto['language'] = 'fr' - mediawiki.base_url = "https://{language}.wikipedia.org/" - - self.assertRaises(AttributeError, mediawiki.response, None) - self.assertRaises(AttributeError, mediawiki.response, []) - self.assertRaises(AttributeError, mediawiki.response, '') - self.assertRaises(AttributeError, mediawiki.response, '[]') - - response = mock.Mock(text='{}', search_params=dicto) - self.assertEqual(mediawiki.response(response), []) - - response = mock.Mock(text='{"data": []}', search_params=dicto) - self.assertEqual(mediawiki.response(response), []) - - json = """ - { - "query-continue": { - "search": { - "sroffset": 1 - } - }, - "query": { - "searchinfo": { - "totalhits": 29721 - }, - "search": [ - { - "ns": 0, - "title": "This is the title étude", - "timestamp": "2014-12-19T17:42:52Z" - } - ] - } - } - """ - response = mock.Mock(text=json, search_params=dicto) - results = mediawiki.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], u'This is the title étude') - self.assertIn('fr.wikipedia.org', results[0]['url']) - self.assertIn('This_is_the_title', results[0]['url']) - self.assertIn('%C3%A9tude', results[0]['url']) 
- self.assertEqual(results[0]['content'], '') - - json = """ - { - "query-continue": { - "search": { - "sroffset": 1 - } - }, - "query": { - "searchinfo": { - "totalhits": 29721 - }, - "search": [ - ] - } - } - """ - response = mock.Mock(text=json, search_params=dicto) - results = mediawiki.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) - - json = """ - { - "query-continue": { - "search": { - "sroffset": 1 - } - }, - "query": { - } - } - """ - response = mock.Mock(text=json, search_params=dicto) - results = mediawiki.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) - - json = r""" - {"toto":[ - {"id":200,"name":"Artist Name", - "link":"http:\/\/www.mediawiki.com\/artist\/1217","type":"artist"} - ]} - """ - response = mock.Mock(text=json, search_params=dicto) - results = mediawiki.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) diff --git a/tests/unit/engines/test_mixcloud.py b/tests/unit/engines/test_mixcloud.py deleted file mode 100644 index 9c79a478e..000000000 --- a/tests/unit/engines/test_mixcloud.py +++ /dev/null @@ -1,67 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import mixcloud -from searx.testing import SearxTestCase - - -class TestMixcloudEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 0 - params = mixcloud.request(query, dicto) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertTrue('mixcloud.com' in params['url']) - - def test_response(self): - self.assertRaises(AttributeError, mixcloud.response, None) - self.assertRaises(AttributeError, mixcloud.response, []) - self.assertRaises(AttributeError, mixcloud.response, '') - self.assertRaises(AttributeError, mixcloud.response, '[]') - - response = mock.Mock(text='{}') - self.assertEqual(mixcloud.response(response), []) - - response = mock.Mock(text='{"data": []}') - self.assertEqual(mixcloud.response(response), []) - - json = """ - {"data":[ - { - "user": { - "url": "http://www.mixcloud.com/user/", - "username": "user", - "name": "User", - "key": "/user/" - }, - "key": "/user/this-is-the-url/", - "created_time": "2014-11-14T13:30:02Z", - "audio_length": 3728, - "slug": "this-is-the-url", - "name": "Title of track", - "url": "http://www.mixcloud.com/user/this-is-the-url/", - "updated_time": "2014-11-14T13:14:10Z" - } - ]} - """ - response = mock.Mock(text=json) - results = mixcloud.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'Title of track') - self.assertEqual(results[0]['url'], 'http://www.mixcloud.com/user/this-is-the-url/') - self.assertEqual(results[0]['content'], 'User') - self.assertTrue('http://www.mixcloud.com/user/this-is-the-url/' in results[0]['embedded']) - - json = r""" - {"toto":[ - {"id":200,"name":"Artist Name", - "link":"http:\/\/www.mixcloud.com\/artist\/1217","type":"artist"} - ]} - """ - response = mock.Mock(text=json) - results = mixcloud.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) diff --git a/tests/unit/engines/test_nyaa.py b/tests/unit/engines/test_nyaa.py deleted file mode 100644 index 6dcafc6b7..000000000 --- a/tests/unit/engines/test_nyaa.py +++ /dev/null @@ -1,124 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import nyaa -from searx.testing import 
SearxTestCase - - -class TestNyaaEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dic = defaultdict(dict) - dic['pageno'] = 1 - params = nyaa.request(query, dic) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertTrue('nyaa.si' in params['url']) - - def test_response(self): - resp = mock.Mock(text='') - self.assertEqual(nyaa.response(resp), []) - - html = """ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- Category | Name | Link | Size | Date
- Anime - English-translated | Sample title 1 | 723.7 MiB | 2017-08-21 11:24 | 1 seed | 3 leech | 12 downloads
- Anime - English-translated | Sample title 2 | 8.2 GiB | 2017-04-08 01:40 | 101206
    - """ - - resp = mock.Mock(text=html) - results = nyaa.response(resp) - - self.assertEqual(type(results), list) - self.assertEqual(len(results), 2) - - r = results[0] - self.assertTrue(r['url'].find('1') >= 0) - self.assertTrue(r['torrentfile'].find('1.torrent') >= 0) - self.assertTrue(r['content'].find('Anime - English-translated') >= 0) - self.assertTrue(r['content'].find('Downloaded 12 times.') >= 0) - - self.assertEqual(r['title'], 'Sample title 1') - self.assertEqual(r['seed'], 1) - self.assertEqual(r['leech'], 3) - self.assertEqual(r['filesize'], 723700000) - - r = results[1] - self.assertTrue(r['url'].find('2') >= 0) - self.assertTrue(r['magnetlink'].find('magnet:') >= 0) diff --git a/tests/unit/engines/test_openstreetmap.py b/tests/unit/engines/test_openstreetmap.py deleted file mode 100644 index 7b7783f04..000000000 --- a/tests/unit/engines/test_openstreetmap.py +++ /dev/null @@ -1,199 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import openstreetmap -from searx.testing import SearxTestCase - - -class TestOpenstreetmapEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - params = openstreetmap.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('openstreetmap.org', params['url']) - - def test_response(self): - self.assertRaises(AttributeError, openstreetmap.response, None) - self.assertRaises(AttributeError, openstreetmap.response, []) - self.assertRaises(AttributeError, openstreetmap.response, '') - self.assertRaises(AttributeError, openstreetmap.response, '[]') - - response = mock.Mock(text='{}') - self.assertEqual(openstreetmap.response(response), []) - - response = mock.Mock(text='{"data": []}') - self.assertEqual(openstreetmap.response(response), []) - - json = """ - [ - { - "place_id": "127732055", - "licence": "Data © OpenStreetMap contributors, ODbL 1.0. http://www.openstreetmap.org/copyright", - "osm_type": "relation", - "osm_id": "7444", - "boundingbox": [ - "48.8155755", - "48.902156", - "2.224122", - "2.4697602" - ], - "lat": "48.8565056", - "lon": "2.3521334", - "display_name": "This is the title", - "class": "place", - "type": "city", - "importance": 0.96893459932191, - "icon": "https://nominatim.openstreetmap.org/images/mapicons/poi_place_city.p.20.png", - "address": { - "city": "Paris", - "county": "Paris", - "state": "Île-de-France", - "country": "France", - "country_code": "fr" - }, - "geojson": { - "type": "Polygon", - "coordinates": [ - [ - [ - 2.224122, - 48.854199 - ] - ] - ] - } - } - ] - """ - response = mock.Mock(text=json) - results = openstreetmap.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'This is the title') - self.assertEqual(results[0]['url'], 'https://openstreetmap.org/relation/7444') - self.assertIn('coordinates', results[0]['geojson']) - self.assertEqual(results[0]['geojson']['coordinates'][0][0][0], 2.224122) - self.assertEqual(results[0]['geojson']['coordinates'][0][0][1], 48.854199) - self.assertEqual(results[0]['address'], None) - self.assertIn('48.8155755', results[0]['boundingbox']) - self.assertIn('48.902156', results[0]['boundingbox']) - self.assertIn('2.224122', results[0]['boundingbox']) - self.assertIn('2.4697602', results[0]['boundingbox']) - - json = """ - [ - { - "place_id": "127732055", - "licence": "Data © OpenStreetMap contributors, ODbL 1.0. 
http://www.openstreetmap.org/copyright", - "osm_type": "relation", - "osm_id": "7444", - "boundingbox": [ - "48.8155755", - "48.902156", - "2.224122", - "2.4697602" - ], - "lat": "48.8565056", - "lon": "2.3521334", - "display_name": "This is the title", - "class": "tourism", - "type": "city", - "importance": 0.96893459932191, - "icon": "https://nominatim.openstreetmap.org/images/mapicons/poi_place_city.p.20.png", - "address": { - "city": "Paris", - "county": "Paris", - "state": "Île-de-France", - "country": "France", - "country_code": "fr", - "address29": "Address" - }, - "geojson": { - "type": "Polygon", - "coordinates": [ - [ - [ - 2.224122, - 48.854199 - ] - ] - ] - } - }, - { - "place_id": "127732055", - "licence": "Data © OpenStreetMap contributors, ODbL 1.0. http://www.openstreetmap.org/copyright", - "osm_type": "relation", - "osm_id": "7444", - "boundingbox": [ - "48.8155755", - "48.902156", - "2.224122", - "2.4697602" - ], - "lat": "48.8565056", - "lon": "2.3521334", - "display_name": "This is the title", - "class": "tourism", - "type": "city", - "importance": 0.96893459932191, - "icon": "https://nominatim.openstreetmap.org/images/mapicons/poi_place_city.p.20.png", - "address": { - "city": "Paris", - "county": "Paris", - "state": "Île-de-France", - "country": "France", - "postcode": 75000, - "country_code": "fr" - }, - "geojson": { - "type": "Polygon", - "coordinates": [ - [ - [ - 2.224122, - 48.854199 - ] - ] - ] - } - }, - { - "place_id": "127732055", - "licence": "Data © OpenStreetMap contributors, ODbL 1.0. http://www.openstreetmap.org/copyright", - "osm_type": "node", - "osm_id": "7444", - "boundingbox": [ - "48.8155755", - "48.902156", - "2.224122", - "2.4697602" - ], - "lat": "48.8565056", - "lon": "2.3521334", - "display_name": "This is the title", - "class": "tourism", - "type": "city", - "importance": 0.96893459932191, - "icon": "https://nominatim.openstreetmap.org/images/mapicons/poi_place_city.p.20.png", - "address": { - "city": "Paris", - "county": "Paris", - "state": "Île-de-France", - "country": "France", - "country_code": "fr", - "address29": "Address" - } - } - ] - """ - response = mock.Mock(text=json) - results = openstreetmap.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 3) - self.assertIn('48.8565056', results[2]['geojson']['coordinates']) - self.assertIn('2.3521334', results[2]['geojson']['coordinates']) diff --git a/tests/unit/engines/test_pdbe.py b/tests/unit/engines/test_pdbe.py deleted file mode 100644 index ea5adf9dc..000000000 --- a/tests/unit/engines/test_pdbe.py +++ /dev/null @@ -1,109 +0,0 @@ -import mock -from collections import defaultdict -from searx.engines import pdbe -from searx.testing import SearxTestCase - - -class TestPdbeEngine(SearxTestCase): - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - params = pdbe.request(query, dicto) - self.assertTrue('url' in params) - self.assertTrue('ebi.ac.uk' in params['url']) - self.assertTrue('data' in params) - self.assertTrue('q' in params['data']) - self.assertTrue(query in params['data']['q']) - self.assertTrue('wt' in params['data']) - self.assertTrue('json' in params['data']['wt']) - self.assertTrue('method' in params) - self.assertTrue(params['method'] == 'POST') - - def test_response(self): - self.assertRaises(AttributeError, pdbe.response, None) - self.assertRaises(AttributeError, pdbe.response, []) - self.assertRaises(AttributeError, pdbe.response, '') - self.assertRaises(AttributeError, pdbe.response, '[]') - - json = """ 
-{ - "response": { - "docs": [ - { - "citation_title": "X-ray crystal structure of ferric Aplysia limacina myoglobin in different liganded states.", - "citation_year": 1993, - "entry_author_list": [ - "Conti E, Moser C, Rizzi M, Mattevi A, Lionetti C, Coda A, Ascenzi P, Brunori M, Bolognesi M" - ], - "journal": "J. Mol. Biol.", - "journal_page": "498-508", - "journal_volume": "233", - "pdb_id": "2fal", - "status": "REL", - "title": "X-RAY CRYSTAL STRUCTURE OF FERRIC APLYSIA LIMACINA MYOGLOBIN IN DIFFERENT LIGANDED STATES" - } - ], - "numFound": 1, - "start": 0 - }, - "responseHeader": { - "QTime": 0, - "params": { - "q": "2fal", - "wt": "json" - }, - "status": 0 - } -} -""" - - response = mock.Mock(text=json) - results = pdbe.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], - 'X-RAY CRYSTAL STRUCTURE OF FERRIC APLYSIA LIMACINA MYOGLOBIN IN DIFFERENT LIGANDED STATES') - self.assertEqual(results[0]['url'], pdbe.pdbe_entry_url.format(pdb_id='2fal')) - self.assertEqual(results[0]['img_src'], pdbe.pdbe_preview_url.format(pdb_id='2fal')) - self.assertTrue('Conti E' in results[0]['content']) - self.assertTrue('X-ray crystal structure of ferric Aplysia limacina myoglobin in different liganded states.' in - results[0]['content']) - self.assertTrue('1993' in results[0]['content']) - - # Testing proper handling of PDB entries marked as obsolete - json = """ -{ - "response": { - "docs": [ - { - "citation_title": "Obsolete entry test", - "citation_year": 2016, - "entry_author_list": ["Doe J"], - "journal": "J. Obs.", - "journal_page": "1-2", - "journal_volume": "1", - "pdb_id": "xxxx", - "status": "OBS", - "title": "OBSOLETE ENTRY TEST", - "superseded_by": "yyyy" - } - ], - "numFound": 1, - "start": 0 - }, - "responseHeader": { - "QTime": 0, - "params": { - "q": "xxxx", - "wt": "json" - }, - "status": 0 - } -} -""" - response = mock.Mock(text=json) - results = pdbe.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'OBSOLETE ENTRY TEST (OBSOLETE)') - self.assertTrue(results[0]['content'].startswith('This entry has been superseded by')) diff --git a/tests/unit/engines/test_photon.py b/tests/unit/engines/test_photon.py deleted file mode 100644 index 734497884..000000000 --- a/tests/unit/engines/test_photon.py +++ /dev/null @@ -1,166 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import photon -from searx.testing import SearxTestCase - - -class TestPhotonEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - dicto['language'] = 'all' - params = photon.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('photon.komoot.de', params['url']) - - dicto['language'] = 'all' - params = photon.request(query, dicto) - self.assertNotIn('lang', params['url']) - - dicto['language'] = 'al' - params = photon.request(query, dicto) - self.assertNotIn('lang', params['url']) - - dicto['language'] = 'fr' - params = photon.request(query, dicto) - self.assertIn('fr', params['url']) - - def test_response(self): - self.assertRaises(AttributeError, photon.response, None) - self.assertRaises(AttributeError, photon.response, []) - self.assertRaises(AttributeError, photon.response, '') - self.assertRaises(AttributeError, photon.response, '[]') - - response = mock.Mock(text='{}') - 
self.assertEqual(photon.response(response), []) - - response = mock.Mock(text='{"data": []}') - self.assertEqual(photon.response(response), []) - - json = """ - { - "features": [ - { - "properties": { - "osm_key": "waterway", - "extent": [ - -1.4508446, - 51.1614997, - -1.4408036, - 51.1525635 - ], - "name": "This is the title", - "state": "England", - "osm_id": 114823817, - "osm_type": "W", - "osm_value": "river", - "city": "Test Valley", - "country": "United Kingdom" - }, - "type": "Feature", - "geometry": { - "type": "Point", - "coordinates": [ - -1.4458571, - 51.1576661 - ] - } - }, - { - "properties": { - "osm_key": "place", - "street": "Rue", - "state": "Ile-de-France", - "osm_id": 129211377, - "osm_type": "R", - "housenumber": "10", - "postcode": "75011", - "osm_value": "house", - "city": "Paris", - "country": "France" - }, - "type": "Feature", - "geometry": { - "type": "Point", - "coordinates": [ - 2.3725025, - 48.8654481 - ] - } - }, - { - "properties": { - "osm_key": "amenity", - "street": "Allée", - "name": "Bibliothèque", - "state": "Ile-de-France", - "osm_id": 1028573132, - "osm_type": "N", - "postcode": "75001", - "osm_value": "library", - "city": "Paris", - "country": "France" - }, - "type": "Feature", - "geometry": { - "type": "Point", - "coordinates": [ - 2.3445634, - 48.862494 - ] - } - }, - { - "properties": { - "osm_key": "amenity", - "osm_id": 1028573132, - "osm_type": "Y", - "postcode": "75001", - "osm_value": "library", - "city": "Paris", - "country": "France" - }, - "type": "Feature", - "geometry": { - "type": "Point", - "coordinates": [ - 2.3445634, - 48.862494 - ] - } - }, - { - } - ], - "type": "FeatureCollection" - } - """ - response = mock.Mock(text=json) - results = photon.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 3) - self.assertEqual(results[0]['title'], 'This is the title') - self.assertEqual(results[0]['content'], '') - self.assertEqual(results[0]['longitude'], -1.4458571) - self.assertEqual(results[0]['latitude'], 51.1576661) - self.assertIn(-1.4508446, results[0]['boundingbox']) - self.assertIn(51.1614997, results[0]['boundingbox']) - self.assertIn(-1.4408036, results[0]['boundingbox']) - self.assertIn(51.1525635, results[0]['boundingbox']) - self.assertIn('type', results[0]['geojson']) - self.assertEqual(results[0]['geojson']['type'], 'Point') - self.assertEqual(results[0]['address'], None) - self.assertEqual(results[0]['osm']['type'], 'way') - self.assertEqual(results[0]['osm']['id'], 114823817) - self.assertEqual(results[0]['url'], 'https://openstreetmap.org/way/114823817') - self.assertEqual(results[1]['osm']['type'], 'relation') - self.assertEqual(results[2]['address']['name'], u'Bibliothèque') - self.assertEqual(results[2]['address']['house_number'], None) - self.assertEqual(results[2]['address']['locality'], 'Paris') - self.assertEqual(results[2]['address']['postcode'], '75001') - self.assertEqual(results[2]['address']['country'], 'France') - self.assertEqual(results[2]['osm']['type'], 'node') diff --git a/tests/unit/engines/test_piratebay.py b/tests/unit/engines/test_piratebay.py deleted file mode 100644 index 89a78e796..000000000 --- a/tests/unit/engines/test_piratebay.py +++ /dev/null @@ -1,166 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import piratebay -from searx.testing import SearxTestCase - - -class TestPiratebayEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 
1 - dicto['category'] = 'Toto' - params = piratebay.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('piratebay.org', params['url']) - self.assertIn('0', params['url']) - - dicto['category'] = 'music' - params = piratebay.request(query, dicto) - self.assertIn('100', params['url']) - - def test_response(self): - self.assertRaises(AttributeError, piratebay.response, None) - self.assertRaises(AttributeError, piratebay.response, []) - self.assertRaises(AttributeError, piratebay.response, '') - self.assertRaises(AttributeError, piratebay.response, '[]') - - response = mock.Mock(text='') - self.assertEqual(piratebay.response(response), []) - - html = """ - - - - - - - - - - - - - - - -
- Anime (Anime) | Magnet link | Download | VIP | This is the content and should be OK | 13 seed | 334 leech
- Anime (Anime) | Magnet link | VIP | This is the content and should be OK | 13 seed | 334 leech
    - """ - response = mock.Mock(text=html) - results = piratebay.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 2) - self.assertEqual(results[0]['title'], 'This is the title') - self.assertEqual(results[0]['url'], 'https://thepiratebay.org/this.is.the.link') - self.assertEqual(results[0]['content'], 'This is the content and should be OK') - self.assertEqual(results[0]['seed'], 13) - self.assertEqual(results[0]['leech'], 334) - self.assertEqual(results[0]['magnetlink'], 'magnet:?xt=urn:btih:MAGNETLINK') - self.assertEqual(results[0]['torrentfile'], 'http://torcache.net/torrent/TORRENTFILE.torrent') - - self.assertEqual(results[1]['torrentfile'], None) - - html = """ - - - - - - - - - -
- Anime (Anime) | Magnet link | Download | VIP | This is the content and should be OK | sd
    - """ - response = mock.Mock(text=html) - results = piratebay.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'This is the title') - self.assertEqual(results[0]['url'], 'https://thepiratebay.org/this.is.the.link') - self.assertEqual(results[0]['content'], 'This is the content and should be OK') - self.assertEqual(results[0]['seed'], 0) - self.assertEqual(results[0]['leech'], 0) - self.assertEqual(results[0]['magnetlink'], 'magnet:?xt=urn:btih:MAGNETLINK') - self.assertEqual(results[0]['torrentfile'], 'http://torcache.net/torrent/TORRENTFILE.torrent') - - html = """ - -
    - """ - response = mock.Mock(text=html) - results = piratebay.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) diff --git a/tests/unit/engines/test_qwant.py b/tests/unit/engines/test_qwant.py deleted file mode 100644 index 6611264f8..000000000 --- a/tests/unit/engines/test_qwant.py +++ /dev/null @@ -1,339 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import qwant -from searx.testing import SearxTestCase - - -class TestQwantEngine(SearxTestCase): - - def test_request(self): - qwant.supported_languages = ['en-US', 'fr-CA', 'fr-FR'] - qwant.language_aliases = {} - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 0 - dicto['language'] = 'fr-FR' - qwant.categories = [''] - params = qwant.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('web', params['url']) - self.assertIn('qwant.com', params['url']) - self.assertIn('fr_fr', params['url']) - - dicto['language'] = 'all' - qwant.categories = ['news'] - params = qwant.request(query, dicto) - self.assertFalse('fr' in params['url']) - self.assertIn('news', params['url']) - - dicto['language'] = 'fr' - params = qwant.request(query, dicto) - self.assertIn('fr_fr', params['url']) - - def test_response(self): - self.assertRaises(AttributeError, qwant.response, None) - self.assertRaises(AttributeError, qwant.response, []) - self.assertRaises(AttributeError, qwant.response, '') - self.assertRaises(AttributeError, qwant.response, '[]') - - response = mock.Mock(text='{}') - self.assertEqual(qwant.response(response), []) - - response = mock.Mock(text='{"data": {}}') - self.assertEqual(qwant.response(response), []) - - json = """ - { - "status": "success", - "data": { - "query": { - "locale": "en_us", - "query": "Test", - "offset": 10 - }, - "result": { - "items": [ - { - "title": "Title", - "score": 9999, - "url": "http://www.url.xyz", - "source": "...", - "desc": "Description", - "date": "", - "_id": "db0aadd62c2a8565567ffc382f5c61fa", - "favicon": "https://s.qwant.com/fav.ico" - } - ], - "filters": [] - }, - "cache": { - "key": "e66aa864c00147a0e3a16ff7a5efafde", - "created": 1433092754, - "expiration": 259200, - "status": "miss", - "age": 0 - } - } - } - """ - response = mock.Mock(text=json) - qwant.categories = ['general'] - results = qwant.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'Title') - self.assertEqual(results[0]['url'], 'http://www.url.xyz') - self.assertEqual(results[0]['content'], 'Description') - - json = """ - { - "status": "success", - "data": { - "query": { - "locale": "en_us", - "query": "Test", - "offset": 10 - }, - "result": { - "items": [ - { - "title": "Title", - "score": 9999, - "url": "http://www.url.xyz", - "source": "...", - "media": "http://image.jpg", - "desc": "", - "thumbnail": "http://thumbnail.jpg", - "date": "", - "_id": "db0aadd62c2a8565567ffc382f5c61fa", - "favicon": "https://s.qwant.com/fav.ico" - } - ], - "filters": [] - }, - "cache": { - "key": "e66aa864c00147a0e3a16ff7a5efafde", - "created": 1433092754, - "expiration": 259200, - "status": "miss", - "age": 0 - } - } - } - """ - response = mock.Mock(text=json) - qwant.categories = ['images'] - results = qwant.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'Title') - self.assertEqual(results[0]['url'], 'http://www.url.xyz') - 
self.assertEqual(results[0]['content'], '') - self.assertEqual(results[0]['thumbnail_src'], 'http://thumbnail.jpg') - self.assertEqual(results[0]['img_src'], 'http://image.jpg') - - json = """ - { - "status": "success", - "data": { - "query": { - "locale": "en_us", - "query": "Test", - "offset": 10 - }, - "result": { - "items": [ - { - "title": "Title", - "score": 9999, - "url": "http://www.url.xyz", - "source": "...", - "desc": "Description", - "date": 1433260920, - "_id": "db0aadd62c2a8565567ffc382f5c61fa", - "favicon": "https://s.qwant.com/fav.ico" - } - ], - "filters": [] - }, - "cache": { - "key": "e66aa864c00147a0e3a16ff7a5efafde", - "created": 1433092754, - "expiration": 259200, - "status": "miss", - "age": 0 - } - } - } - """ - response = mock.Mock(text=json) - qwant.categories = ['news'] - results = qwant.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'Title') - self.assertEqual(results[0]['url'], 'http://www.url.xyz') - self.assertEqual(results[0]['content'], 'Description') - self.assertIn('publishedDate', results[0]) - - json = """ - { - "status": "success", - "data": { - "query": { - "locale": "en_us", - "query": "Test", - "offset": 10 - }, - "result": { - "items": [ - { - "title": "Title", - "score": 9999, - "url": "http://www.url.xyz", - "source": "...", - "desc": "Description", - "date": 1433260920, - "_id": "db0aadd62c2a8565567ffc382f5c61fa", - "favicon": "https://s.qwant.com/fav.ico" - } - ], - "filters": [] - }, - "cache": { - "key": "e66aa864c00147a0e3a16ff7a5efafde", - "created": 1433092754, - "expiration": 259200, - "status": "miss", - "age": 0 - } - } - } - """ - response = mock.Mock(text=json) - qwant.categories = ['social media'] - results = qwant.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'Title') - self.assertEqual(results[0]['url'], 'http://www.url.xyz') - self.assertEqual(results[0]['content'], 'Description') - self.assertIn('publishedDate', results[0]) - - json = """ - { - "status": "success", - "data": { - "query": { - "locale": "en_us", - "query": "Test", - "offset": 10 - }, - "result": { - "items": [ - { - "title": "Title", - "score": 9999, - "url": "http://www.url.xyz", - "source": "...", - "desc": "Description", - "date": 1433260920, - "_id": "db0aadd62c2a8565567ffc382f5c61fa", - "favicon": "https://s.qwant.com/fav.ico" - } - ], - "filters": [] - }, - "cache": { - "key": "e66aa864c00147a0e3a16ff7a5efafde", - "created": 1433092754, - "expiration": 259200, - "status": "miss", - "age": 0 - } - } - } - """ - response = mock.Mock(text=json) - qwant.categories = [''] - results = qwant.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) - - json = """ - { - "status": "success", - "data": { - "query": { - "locale": "en_us", - "query": "Test", - "offset": 10 - }, - "result": { - "filters": [] - }, - "cache": { - "key": "e66aa864c00147a0e3a16ff7a5efafde", - "created": 1433092754, - "expiration": 259200, - "status": "miss", - "age": 0 - } - } - } - """ - response = mock.Mock(text=json) - results = qwant.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) - - json = """ - { - "status": "success", - "data": { - "query": { - "locale": "en_us", - "query": "Test", - "offset": 10 - }, - "cache": { - "key": "e66aa864c00147a0e3a16ff7a5efafde", - "created": 1433092754, - "expiration": 259200, - "status": "miss", - 
"age": 0 - } - } - } - """ - response = mock.Mock(text=json) - results = qwant.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) - - json = """ - { - "status": "success" - } - """ - response = mock.Mock(text=json) - results = qwant.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) - - def test_fetch_supported_languages(self): - page = """some code... - config_set('project.regionalisation', {"continents":{},"languages": - {"de":{"code":"de","name":"Deutsch","countries":["DE","CH","AT"]}, - "it":{"code":"it","name":"Italiano","countries":["IT","CH"]}}}); - some more code...""" - response = mock.Mock(text=page) - languages = qwant._fetch_supported_languages(response) - self.assertEqual(type(languages), list) - self.assertEqual(len(languages), 5) - self.assertIn('de-DE', languages) - self.assertIn('de-CH', languages) - self.assertIn('de-AT', languages) - self.assertIn('it-IT', languages) - self.assertIn('it-CH', languages) diff --git a/tests/unit/engines/test_reddit.py b/tests/unit/engines/test_reddit.py deleted file mode 100644 index 9c94f4e2b..000000000 --- a/tests/unit/engines/test_reddit.py +++ /dev/null @@ -1,71 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import reddit -from searx.testing import SearxTestCase -from datetime import datetime - - -class TestRedditEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dic = defaultdict(dict) - params = reddit.request(query, dic) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertTrue('reddit.com' in params['url']) - - def test_response(self): - resp = mock.Mock(text='{}') - self.assertEqual(reddit.response(resp), []) - - json = """ - { - "kind": "Listing", - "data": { - "children": [{ - "data": { - "url": "http://google2.com/", - "permalink": "http://google.com/", - "title": "Title number one", - "selftext": "Sample", - "created_utc": 1401219957.0, - "thumbnail": "http://image.com/picture.jpg" - } - }, { - "data": { - "url": "https://reddit2.com/", - "permalink": "https://reddit.com/", - "title": "Title number two", - "selftext": "Dominus vobiscum", - "created_utc": 1438792533.0, - "thumbnail": "self" - } - }] - } - } - """ - - resp = mock.Mock(text=json) - results = reddit.response(resp) - - self.assertEqual(len(results), 2) - self.assertEqual(type(results), list) - - # testing first result (picture) - r = results[0] - self.assertEqual(r['url'], 'http://google.com/') - self.assertEqual(r['title'], 'Title number one') - self.assertEqual(r['template'], 'images.html') - self.assertEqual(r['img_src'], 'http://google2.com/') - self.assertEqual(r['thumbnail_src'], 'http://image.com/picture.jpg') - - # testing second result (self-post) - r = results[1] - self.assertEqual(r['url'], 'https://reddit.com/') - self.assertEqual(r['title'], 'Title number two') - self.assertEqual(r['content'], 'Dominus vobiscum') - created = datetime.fromtimestamp(1438792533.0) - self.assertEqual(r['publishedDate'], created) - self.assertTrue('thumbnail_src' not in r) - self.assertTrue('img_src' not in r) diff --git a/tests/unit/engines/test_scanr_structures.py b/tests/unit/engines/test_scanr_structures.py deleted file mode 100644 index a7b9e9185..000000000 --- a/tests/unit/engines/test_scanr_structures.py +++ /dev/null @@ -1,175 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import scanr_structures -from searx.testing import SearxTestCase - - -class 
TestScanrStructuresEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - params = scanr_structures.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['data']) - self.assertIn('scanr.enseignementsup-recherche.gouv.fr', params['url']) - - def test_response(self): - self.assertRaises(AttributeError, scanr_structures.response, None) - self.assertRaises(AttributeError, scanr_structures.response, []) - self.assertRaises(AttributeError, scanr_structures.response, '') - self.assertRaises(AttributeError, scanr_structures.response, '[]') - - response = mock.Mock(text='{}') - self.assertEqual(scanr_structures.response(response), []) - - response = mock.Mock(text='{"data": []}') - self.assertEqual(scanr_structures.response(response), []) - - json = u""" - { - "request": - { - "query":"test_query", - "page":1, - "pageSize":20, - "sortOrder":"RELEVANCY", - "sortDirection":"ASC", - "searchField":"ALL", - "from":0 - }, - "total":2471, - "results":[ - { - "id":"200711886U", - "label":"Laboratoire d'Informatique de Grenoble", - "kind":"RNSR", - "publicEntity":true, - "address":{"city":"Grenoble","departement":"38"}, - "logo":"/static/logos/200711886U.png", - "acronym":"LIG", - "type":{"code":"UR","label":"Unit\xe9 de recherche"}, - "level":2, - "institutions":[ - { - "id":"193819125", - "label":"Grenoble INP", - "acronym":"IPG", - "code":"UMR 5217" - }, - { - "id":"130021397", - "label":"Universit\xe9 de Grenoble Alpes", - "acronym":"UGA", - "code":"UMR 5217" - }, - { - "id":"180089013", - "label":"Centre national de la recherche scientifique", - "acronym":"CNRS", - "code":"UMR 5217" - }, - { - "id":"180089047", - "label":"Institut national de recherche en informatique et en automatique", - "acronym":"Inria", - "code":"UMR 5217" - } - ], - "highlights":[ - { - "type":"projects", - "value":"linguicielles d\xe9velopp\xe9s jusqu'ici par le GETALP\ - du LIG en tant que prototypes op\xe9rationnels.\ -\\r\\nDans le contexte" - }, - { - "type":"acronym", - "value":"LIG" - }, - { - "type":"websiteContents", - "value":"S\xe9lection\\nListe structures\\nD\xe9tail\\n\ - Accueil\\n200711886U : LIG\ - Laboratoire d'Informatique de Grenoble Unit\xe9 de recherche"}, - { - "type":"publications", - "value":"de noms. 
Nous avons d'abord d\xe9velopp\xe9 LOOV \ - (pour Lig Overlaid OCR in Vid\xe9o), \ - un outil d'extraction des" - } - ] - }, - { - "id":"199511665F", - "label":"Laboratoire Bordelais de Recherche en Informatique", - "kind":"RNSR", - "publicEntity":true, - "address":{"city":"Talence","departement":"33"}, - "logo":"/static/logos/199511665F.png", - "acronym":"LaBRI", - "type":{"code":"UR","label":"Unit\xe9 de recherche"}, - "level":2, - "institutions":[ - { - "id":"130006356", - "label":"Institut polytechnique de Bordeaux", - "acronym":"IPB", - "code":"UMR 5800" - }, - { - "id":"130018351", - "label":"Universit\xe9 de Bordeaux", - "acronym":null, - "code":"UMR 5800" - }, - { - "id":"180089013", - "label":"Centre national de la recherche scientifique", - "acronym":"CNRS", - "code":"UMR 5800" - }, - { - "id":"180089047", - "label":"Institut national de recherche en informatique et en automatique", - "acronym":"Inria", - "code":"UMR 5800" - } - ], - "highlights":[ - { - "type":"websiteContents", - "value":"Samia Kerdjoudj\\n2016-07-05\\nDouble-exponential\ - and triple-exponential bounds for\ - choosability problems parameterized" - }, - { - "type":"publications", - "value":"de cam\xe9ras install\xe9es dans les lieux publiques \ - a tripl\xe9 en 2009, passant de 20 000 \ - \xe0 60 000. Malgr\xe9 le" - } - ] - } - ] - } - """ - response = mock.Mock(text=json) - results = scanr_structures.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 2) - self.assertEqual(results[0]['title'], u"Laboratoire d'Informatique de Grenoble") - self.assertEqual(results[0]['url'], 'https://scanr.enseignementsup-recherche.gouv.fr/structure/200711886U') - self.assertEqual(results[0]['content'], - u"linguicielles d\xe9velopp\xe9s jusqu'ici par le GETALP " - u"du LIG en tant que prototypes " - u"op\xe9rationnels. 
Dans le contexte") - self.assertEqual(results[1]['img_src'], - 'https://scanr.enseignementsup-recherche.gouv.fr//static/logos/199511665F.png') - self.assertEqual(results[1]['content'], - "Samia Kerdjoudj 2016-07-05 Double-exponential and" - " triple-exponential bounds for " - "choosability problems parameterized") - self.assertEqual(results[1]['url'], 'https://scanr.enseignementsup-recherche.gouv.fr/structure/199511665F') - self.assertEqual(results[1]['title'], u"Laboratoire Bordelais de Recherche en Informatique") diff --git a/tests/unit/engines/test_searchcode_code.py b/tests/unit/engines/test_searchcode_code.py deleted file mode 100644 index 955aea111..000000000 --- a/tests/unit/engines/test_searchcode_code.py +++ /dev/null @@ -1,75 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import searchcode_code -from searx.testing import SearxTestCase - - -class TestSearchcodeCodeEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 0 - params = searchcode_code.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('searchcode.com', params['url']) - - def test_response(self): - self.assertRaises(AttributeError, searchcode_code.response, None) - self.assertRaises(AttributeError, searchcode_code.response, []) - self.assertRaises(AttributeError, searchcode_code.response, '') - self.assertRaises(AttributeError, searchcode_code.response, '[]') - - response = mock.Mock(text='{}') - self.assertEqual(searchcode_code.response(response), []) - - response = mock.Mock(text='{"data": []}') - self.assertEqual(searchcode_code.response(response), []) - - json = """ - { - "matchterm": "test", - "previouspage": null, - "searchterm": "test", - "query": "test", - "total": 1000, - "page": 0, - "nextpage": 1, - "results": [ - { - "repo": "https://repo", - "linescount": 1044, - "location": "/tests", - "name": "Name", - "url": "https://url", - "md5hash": "ecac6e479edd2b9406c9e08603cec655", - "lines": { - "1": "// Test 011", - "2": "// Source: " - }, - "id": 51223527, - "filename": "File.CPP" - } - ] - } - """ - response = mock.Mock(text=json) - results = searchcode_code.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'Name - File.CPP') - self.assertEqual(results[0]['url'], 'https://url') - self.assertEqual(results[0]['repository'], 'https://repo') - self.assertEqual(results[0]['code_language'], 'cpp') - - json = r""" - {"toto":[ - {"id":200,"name":"Artist Name", - "link":"http:\/\/www.searchcode_code.com\/artist\/1217","type":"artist"} - ]} - """ - response = mock.Mock(text=json) - results = searchcode_code.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) diff --git a/tests/unit/engines/test_searchcode_doc.py b/tests/unit/engines/test_searchcode_doc.py deleted file mode 100644 index d02bb7a44..000000000 --- a/tests/unit/engines/test_searchcode_doc.py +++ /dev/null @@ -1,70 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import searchcode_doc -from searx.testing import SearxTestCase - - -class TestSearchcodeDocEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 0 - params = searchcode_doc.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('searchcode.com', params['url']) - - def 
test_response(self): - self.assertRaises(AttributeError, searchcode_doc.response, None) - self.assertRaises(AttributeError, searchcode_doc.response, []) - self.assertRaises(AttributeError, searchcode_doc.response, '') - self.assertRaises(AttributeError, searchcode_doc.response, '[]') - - response = mock.Mock(text='{}') - self.assertEqual(searchcode_doc.response(response), []) - - response = mock.Mock(text='{"data": []}') - self.assertEqual(searchcode_doc.response(response), []) - - json = """ - { - "matchterm": "test", - "previouspage": null, - "searchterm": "test", - "query": "test", - "total": 60, - "page": 0, - "nextpage": 1, - "results": [ - { - "synopsis": "Synopsis", - "displayname": null, - "name": "test", - "url": "http://url", - "type": "Type", - "icon": null, - "namespace": "Namespace", - "description": "Description" - } - ] - } - """ - response = mock.Mock(text=json) - results = searchcode_doc.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], '[Type] Namespace test') - self.assertEqual(results[0]['url'], 'http://url') - self.assertIn('Description', results[0]['content']) - - json = r""" - {"toto":[ - {"id":200,"name":"Artist Name", - "link":"http:\/\/www.searchcode_doc.com\/artist\/1217","type":"artist"} - ]} - """ - response = mock.Mock(text=json) - results = searchcode_doc.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) diff --git a/tests/unit/engines/test_seedpeer.py b/tests/unit/engines/test_seedpeer.py deleted file mode 100644 index 2057c1cb1..000000000 --- a/tests/unit/engines/test_seedpeer.py +++ /dev/null @@ -1,66 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import seedpeer -from searx.testing import SearxTestCase - - -class TestBtdiggEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - params = seedpeer.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('seedpeer', params['url']) - - def test_response(self): - self.assertRaises(AttributeError, seedpeer.response, None) - self.assertRaises(AttributeError, seedpeer.response, []) - self.assertRaises(AttributeError, seedpeer.response, '') - self.assertRaises(AttributeError, seedpeer.response, '[]') - - response = mock.Mock(text='') - self.assertEqual(seedpeer.response(response), []) - - html = u""" - - - - - - - -
- Title | 1 year | 1 KB | 10 seed | 20 leech
    - - - """ - response = mock.Mock(text=html) - results = seedpeer.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'Title') - self.assertEqual(results[0]['url'], 'https://seedpeer.me/link') - self.assertEqual(results[0]['seed'], 10) - self.assertEqual(results[0]['leech'], 20) - self.assertEqual(results[0]['filesize'], 1024) - self.assertEqual(results[0]['torrentfile'], 'https://seedpeer.me/torrent/abc123') - self.assertEqual(results[0]['magnetlink'], 'magnet:?xt=urn:btih:abc123') diff --git a/tests/unit/engines/test_soundcloud.py b/tests/unit/engines/test_soundcloud.py deleted file mode 100644 index 3077d3b4b..000000000 --- a/tests/unit/engines/test_soundcloud.py +++ /dev/null @@ -1,192 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import soundcloud -from searx.testing import SearxTestCase -from searx.url_utils import quote_plus - - -class TestSoundcloudEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - params = soundcloud.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('soundcloud.com', params['url']) - - def test_response(self): - self.assertRaises(AttributeError, soundcloud.response, None) - self.assertRaises(AttributeError, soundcloud.response, []) - self.assertRaises(AttributeError, soundcloud.response, '') - self.assertRaises(AttributeError, soundcloud.response, '[]') - - response = mock.Mock(text='{}') - self.assertEqual(soundcloud.response(response), []) - - response = mock.Mock(text='{"data": []}') - self.assertEqual(soundcloud.response(response), []) - - json = """ - { - "collection": [ - { - "kind": "track", - "id": 159723640, - "created_at": "2014/07/22 00:51:21 +0000", - "user_id": 2976616, - "duration": 303780, - "commentable": true, - "state": "finished", - "original_content_size": 13236349, - "last_modified": "2015/01/31 15:14:50 +0000", - "sharing": "public", - "tag_list": "seekae flume", - "permalink": "seekae-test-recognise-flume-re-work", - "streamable": true, - "embeddable_by": "all", - "downloadable": true, - "purchase_url": "http://www.facebook.com/seekaemusic", - "label_id": null, - "purchase_title": "Seekae", - "genre": "freedownload", - "title": "This is the title", - "description": "This is the content", - "label_name": "Future Classic", - "release": "", - "track_type": "remix", - "key_signature": "", - "isrc": "", - "video_url": null, - "bpm": null, - "release_year": 2014, - "release_month": 7, - "release_day": 22, - "original_format": "mp3", - "license": "all-rights-reserved", - "uri": "https://api.soundcloud.com/tracks/159723640", - "user": { - "id": 2976616, - "kind": "user", - "permalink": "flume", - "username": "Flume", - "last_modified": "2014/11/24 19:21:29 +0000", - "uri": "https://api.soundcloud.com/users/2976616", - "permalink_url": "http://soundcloud.com/flume", - "avatar_url": "https://i1.sndcdn.com/avatars-000044475439-4zi7ii-large.jpg" - }, - "permalink_url": "http://soundcloud.com/this.is.the.url", - "artwork_url": "https://i1.sndcdn.com/artworks-000085857162-xdxy5c-large.jpg", - "waveform_url": "https://w1.sndcdn.com/DWrL1lAN8BkP_m.png", - "stream_url": "https://api.soundcloud.com/tracks/159723640/stream", - "download_url": "https://api.soundcloud.com/tracks/159723640/download", - "playback_count": 2190687, - "download_count": 54856, - "favoritings_count": 49061, - "comment_count": 826, - 
"likes_count": 49061, - "reposts_count": 15910, - "attachments_uri": "https://api.soundcloud.com/tracks/159723640/attachments", - "policy": "ALLOW" - } - ], - "total_results": 375750, - "next_href": "https://api.soundcloud.com/search?&q=test", - "tx_id": "" - } - """ - response = mock.Mock(text=json) - results = soundcloud.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'This is the title') - self.assertEqual(results[0]['url'], 'http://soundcloud.com/this.is.the.url') - self.assertEqual(results[0]['content'], 'This is the content') - self.assertIn(quote_plus('https://api.soundcloud.com/tracks/159723640'), results[0]['embedded']) - - json = """ - { - "collection": [ - { - "kind": "user", - "id": 159723640, - "created_at": "2014/07/22 00:51:21 +0000", - "user_id": 2976616, - "duration": 303780, - "commentable": true, - "state": "finished", - "original_content_size": 13236349, - "last_modified": "2015/01/31 15:14:50 +0000", - "sharing": "public", - "tag_list": "seekae flume", - "permalink": "seekae-test-recognise-flume-re-work", - "streamable": true, - "embeddable_by": "all", - "downloadable": true, - "purchase_url": "http://www.facebook.com/seekaemusic", - "label_id": null, - "purchase_title": "Seekae", - "genre": "freedownload", - "title": "This is the title", - "description": "This is the content", - "label_name": "Future Classic", - "release": "", - "track_type": "remix", - "key_signature": "", - "isrc": "", - "video_url": null, - "bpm": null, - "release_year": 2014, - "release_month": 7, - "release_day": 22, - "original_format": "mp3", - "license": "all-rights-reserved", - "uri": "https://api.soundcloud.com/tracks/159723640", - "user": { - "id": 2976616, - "kind": "user", - "permalink": "flume", - "username": "Flume", - "last_modified": "2014/11/24 19:21:29 +0000", - "uri": "https://api.soundcloud.com/users/2976616", - "permalink_url": "http://soundcloud.com/flume", - "avatar_url": "https://i1.sndcdn.com/avatars-000044475439-4zi7ii-large.jpg" - }, - "permalink_url": "http://soundcloud.com/this.is.the.url", - "artwork_url": "https://i1.sndcdn.com/artworks-000085857162-xdxy5c-large.jpg", - "waveform_url": "https://w1.sndcdn.com/DWrL1lAN8BkP_m.png", - "stream_url": "https://api.soundcloud.com/tracks/159723640/stream", - "download_url": "https://api.soundcloud.com/tracks/159723640/download", - "playback_count": 2190687, - "download_count": 54856, - "favoritings_count": 49061, - "comment_count": 826, - "likes_count": 49061, - "reposts_count": 15910, - "attachments_uri": "https://api.soundcloud.com/tracks/159723640/attachments", - "policy": "ALLOW" - } - ], - "total_results": 375750, - "next_href": "https://api.soundcloud.com/search?&q=test", - "tx_id": "" - } - """ - response = mock.Mock(text=json) - results = soundcloud.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) - - json = """ - { - "collection": [], - "total_results": 375750, - "next_href": "https://api.soundcloud.com/search?&q=test", - "tx_id": "" - } - """ - response = mock.Mock(text=json) - results = soundcloud.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) diff --git a/tests/unit/engines/test_spotify.py b/tests/unit/engines/test_spotify.py deleted file mode 100644 index e37c344d2..000000000 --- a/tests/unit/engines/test_spotify.py +++ /dev/null @@ -1,124 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import spotify -from 
searx.testing import SearxTestCase - - -class TestSpotifyEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 0 - params = spotify.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('spotify.com', params['url']) - - def test_response(self): - self.assertRaises(AttributeError, spotify.response, None) - self.assertRaises(AttributeError, spotify.response, []) - self.assertRaises(AttributeError, spotify.response, '') - self.assertRaises(AttributeError, spotify.response, '[]') - - response = mock.Mock(text='{}') - self.assertEqual(spotify.response(response), []) - - response = mock.Mock(text='{"data": []}') - self.assertEqual(spotify.response(response), []) - - json = """ - { - "tracks": { - "href": "https://api.spotify.com/v1/search?query=nosfell&offset=0&limit=20&type=track", - "items": [ - { - "album": { - "album_type": "album", - "external_urls": { - "spotify": "https://open.spotify.com/album/5c9ap1PBkSGLxT3J73toxA" - }, - "href": "https://api.spotify.com/v1/albums/5c9ap1PBkSGLxT3J73toxA", - "id": "5c9ap1PBkSGLxT3J73toxA", - "name": "Album Title", - "type": "album", - "uri": "spotify:album:5c9ap1PBkSGLxT3J73toxA" - }, - "artists": [ - { - "external_urls": { - "spotify": "https://open.spotify.com/artist/0bMc6b75FfZEpQHG1jifKu" - }, - "href": "https://api.spotify.com/v1/artists/0bMc6b75FfZEpQHG1jifKu", - "id": "0bMc6b75FfZEpQHG1jifKu", - "name": "Artist Name", - "type": "artist", - "uri": "spotify:artist:0bMc6b75FfZEpQHG1jifKu" - } - ], - "disc_number": 1, - "duration_ms": 202386, - "explicit": false, - "external_ids": { - "isrc": "FRV640600067" - }, - "external_urls": { - "spotify": "https://open.spotify.com/track/2GzvFiedqW8hgqUpWcASZa" - }, - "href": "https://api.spotify.com/v1/tracks/2GzvFiedqW8hgqUpWcASZa", - "id": "1000", - "is_playable": true, - "name": "Title of track", - "popularity": 6, - "preview_url": "https://p.scdn.co/mp3-preview/7b8ecda580965a066b768c2647f877e43f7b1a0a", - "track_number": 3, - "type": "track", - "uri": "spotify:track:2GzvFiedqW8hgqUpWcASZa" - } - ], - "limit": 20, - "next": "https://api.spotify.com/v1/search?query=nosfell&offset=20&limit=20&type=track", - "offset": 0, - "previous": null, - "total": 107 - } - } - """ - response = mock.Mock(text=json) - results = spotify.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'Title of track') - self.assertEqual(results[0]['url'], 'https://open.spotify.com/track/2GzvFiedqW8hgqUpWcASZa') - self.assertEqual(results[0]['content'], 'Artist Name - Album Title - Title of track') - self.assertIn('1000', results[0]['embedded']) - - json = """ - { - "tracks": { - "href": "https://api.spotify.com/v1/search?query=nosfell&offset=0&limit=20&type=track", - "items": [ - { - "href": "https://api.spotify.com/v1/tracks/2GzvFiedqW8hgqUpWcASZa", - "id": "1000", - "is_playable": true, - "name": "Title of track", - "popularity": 6, - "preview_url": "https://p.scdn.co/mp3-preview/7b8ecda580965a066b768c2647f877e43f7b1a0a", - "track_number": 3, - "type": "album", - "uri": "spotify:track:2GzvFiedqW8hgqUpWcASZa" - } - ], - "limit": 20, - "next": "https://api.spotify.com/v1/search?query=nosfell&offset=20&limit=20&type=track", - "offset": 0, - "previous": null, - "total": 107 - } - } - """ - response = mock.Mock(text=json) - results = spotify.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) diff 
--git a/tests/unit/engines/test_stackoverflow.py b/tests/unit/engines/test_stackoverflow.py deleted file mode 100644 index 18a1ff4bd..000000000 --- a/tests/unit/engines/test_stackoverflow.py +++ /dev/null @@ -1,106 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import stackoverflow -from searx.testing import SearxTestCase - - -class TestStackoverflowEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 0 - params = stackoverflow.request(query, dicto) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertTrue('stackoverflow.com' in params['url']) - - def test_response(self): - self.assertRaises(AttributeError, stackoverflow.response, None) - self.assertRaises(AttributeError, stackoverflow.response, []) - self.assertRaises(AttributeError, stackoverflow.response, '') - self.assertRaises(AttributeError, stackoverflow.response, '[]') - - response = mock.Mock(text='') - self.assertEqual(stackoverflow.response(response), []) - - html = """ -

-        [HTML fixture: StackOverflow question summary, "2583 votes", title link "This is the title" to /questions/this.is.the.url, content "This is the content", footer "answered nov 23 '09 by hallski"; markup omitted]
    - """ - response = mock.Mock(text=html) - results = stackoverflow.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'This is the title') - self.assertEqual(results[0]['url'], 'https://stackoverflow.com/questions/this.is.the.url') - self.assertEqual(results[0]['content'], 'This is the content') - - html = """ -
-        [HTML fixture: StackOverflow result block with no question link, "2583 votes", content "This is the content", footer "answered nov 23 '09 by hallski"; markup omitted]
    - """ - response = mock.Mock(text=html) - results = stackoverflow.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) diff --git a/tests/unit/engines/test_startpage.py b/tests/unit/engines/test_startpage.py deleted file mode 100644 index ac4454738..000000000 --- a/tests/unit/engines/test_startpage.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import startpage -from searx.testing import SearxTestCase - - -class TestStartpageEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - dicto['language'] = 'fr_FR' - params = startpage.request(query, dicto) - self.assertIn('url', params) - self.assertIn('startpage.com', params['url']) - self.assertIn('data', params) - self.assertIn('query', params['data']) - self.assertIn(query, params['data']['query']) - - dicto['language'] = 'all' - params = startpage.request(query, dicto) - - def test_response(self): - self.assertRaises(AttributeError, startpage.response, None) - self.assertRaises(AttributeError, startpage.response, []) - self.assertRaises(AttributeError, startpage.response, '') - self.assertRaises(AttributeError, startpage.response, '[]') - - response = mock.Mock(text='') - self.assertEqual(startpage.response(response), []) - - html = """ - - """ # noqa - response = mock.Mock(text=html.encode('utf-8')) - results = startpage.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'This should be the title') - self.assertEqual(results[0]['url'], 'http://this.should.be.the.link/') - self.assertEqual(results[0]['content'], 'This should be the content.') diff --git a/tests/unit/engines/test_tokyotoshokan.py b/tests/unit/engines/test_tokyotoshokan.py deleted file mode 100644 index b5c6fad17..000000000 --- a/tests/unit/engines/test_tokyotoshokan.py +++ /dev/null @@ -1,110 +0,0 @@ -import mock -from collections import defaultdict -from searx.engines import tokyotoshokan -from searx.testing import SearxTestCase -from datetime import datetime - - -class TestTokyotoshokanEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dic = defaultdict(dict) - dic['pageno'] = 1 - params = tokyotoshokan.request(query, dic) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertTrue('tokyotosho.info' in params['url']) - - def test_response(self): - resp = mock.Mock(text='') - self.assertEqual(tokyotoshokan.response(resp), []) - - html = """ - - - - - - - - - - - - - - - - - - - - - - -
-        [HTML fixture: Tokyo Toshokan result rows: "Koyomimonogatari" (Authorized: Yes, Submitter: Ohys, Size: 10.5MB, Date: 2016-03-26 16:41 UTC, Comment: sample comment, S: 53, L: 18, C: 0, ID: 975700) and "Owarimonogatari" (Submitter: Ohys, Size: 932.84EB, Date: QWERTY-03-26 16:41 UTC, S: 0); table markup omitted]
    - """ - - resp = mock.Mock(text=html) - results = tokyotoshokan.response(resp) - - self.assertEqual(type(results), list) - self.assertEqual(len(results), 2) - - # testing the first result, which has correct format - # and should have all information fields filled - r = results[0] - self.assertEqual(r['url'], 'http://www.nyaa.se/f') - self.assertEqual(r['title'], 'Koyomimonogatari') - self.assertEqual(r['magnetlink'], 'magnet:?xt=urn:btih:4c19eb46b5113685fbd2288ed2531b0b') - self.assertEqual(r['filesize'], int(1024 * 1024 * 10.5)) - self.assertEqual(r['publishedDate'], datetime(2016, 3, 26, 16, 41)) - self.assertEqual(r['content'], 'Comment: sample comment') - self.assertEqual(r['seed'], 53) - self.assertEqual(r['leech'], 18) - - # testing the second result, which does not include magnet link, - # seed & leech info, and has incorrect size & creation date - r = results[1] - self.assertEqual(r['url'], 'http://google.com/q') - self.assertEqual(r['title'], 'Owarimonogatari') - - self.assertFalse('magnetlink' in r) - self.assertFalse('filesize' in r) - self.assertFalse('content' in r) - self.assertFalse('publishedDate' in r) - self.assertFalse('seed' in r) - self.assertFalse('leech' in r) diff --git a/tests/unit/engines/test_torrentz.py b/tests/unit/engines/test_torrentz.py deleted file mode 100644 index f483bf68c..000000000 --- a/tests/unit/engines/test_torrentz.py +++ /dev/null @@ -1,87 +0,0 @@ -import mock -from collections import defaultdict -from searx.engines import torrentz -from searx.testing import SearxTestCase -from datetime import datetime - - -class TestTorrentzEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dic = defaultdict(dict) - dic['pageno'] = 1 - params = torrentz.request(query, dic) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertTrue('torrentz2.eu' in params['url']) - - def test_response(self): - resp = mock.Mock(text='') - self.assertEqual(torrentz.response(resp), []) - - html = """ -
-        [HTML fixture: Torrentz2 result entries: "Completely valid info books ebooks" (1, 5 hours, 30 MB, 14, 1) and "Invalid hash and date and filesize books ebooks" (1, 5 hours, 30MB, 5,555, 1,234,567); markup omitted]
    - """ - - resp = mock.Mock(text=html) - results = torrentz.response(resp) - - self.assertEqual(type(results), list) - self.assertEqual(len(results), 2) - - # testing against the first result - r = results[0] - self.assertEqual(r['url'], 'https://torrentz2.eu/4362e08b1d80e1820fb2550b752f9f3126fe76d6') - self.assertEqual(r['title'], 'Completely valid info books ebooks') - # 22 Nov 2015 03:01:42 - self.assertEqual(r['publishedDate'], datetime.fromtimestamp(1503595924)) - self.assertEqual(r['seed'], 14) - self.assertEqual(r['leech'], 1) - self.assertEqual(r['filesize'], 30 * 1024 * 1024) - self.assertEqual(r['magnetlink'], 'magnet:?xt=urn:btih:4362e08b1d80e1820fb2550b752f9f3126fe76d6') - - # testing against the second result - r = results[1] - self.assertEqual(r['url'], 'https://torrentz2.eu/poaskdpokaspod') - self.assertEqual(r['title'], 'Invalid hash and date and filesize books ebooks') - self.assertEqual(r['seed'], 5555) - self.assertEqual(r['leech'], 1234567) - - # in the second result we have invalid hash, creation date & torrent size, - # so these tests should fail - self.assertFalse('magnetlink' in r) - self.assertFalse('filesize' in r) - self.assertFalse('publishedDate' in r) diff --git a/tests/unit/engines/test_twitter.py b/tests/unit/engines/test_twitter.py deleted file mode 100644 index b444b48ee..000000000 --- a/tests/unit/engines/test_twitter.py +++ /dev/null @@ -1,502 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import twitter -from searx.testing import SearxTestCase - - -class TestTwitterEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 0 - dicto['language'] = 'fr_FR' - params = twitter.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('twitter.com', params['url']) - self.assertIn('cookies', params) - self.assertIn('lang', params['cookies']) - self.assertIn('fr', params['cookies']['lang']) - - dicto['language'] = 'all' - params = twitter.request(query, dicto) - self.assertIn('cookies', params) - self.assertIn('lang', params['cookies']) - self.assertIn('en', params['cookies']['lang']) - - def test_response(self): - self.assertRaises(AttributeError, twitter.response, None) - self.assertRaises(AttributeError, twitter.response, []) - self.assertRaises(AttributeError, twitter.response, '') - self.assertRaises(AttributeError, twitter.response, '[]') - - response = mock.Mock(text='') - self.assertEqual(twitter.response(response), []) - - html = """ -
-        [HTML fixture: tweet markup for user "@TitleName" linking to /this.is.the.url with content "This is the content étude à€"; markup omitted]
  • - """ - response = mock.Mock(text=html) - results = twitter.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], '@TitleName') - self.assertEqual(results[0]['url'], 'https://twitter.com/this.is.the.url') - self.assertIn(u'This is the content', results[0]['content']) - # self.assertIn(u'This is the content étude à€', results[0]['content']) - - html = """ -
-        [HTML fixture: second tweet variant with the same user name, permalink and content "This is the content étude à€"; markup omitted]
  • - """ - response = mock.Mock(text=html) - results = twitter.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], '@TitleName') - self.assertEqual(results[0]['url'], 'https://twitter.com/this.is.the.url') - self.assertIn(u'This is the content', results[0]['content']) - - html = """ -
-        [HTML fixture: non-tweet markup with "this.meta.com" and "This should be the content.", expected to produce no results; markup omitted]
  • - """ - response = mock.Mock(text=html) - results = twitter.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) diff --git a/tests/unit/engines/test_unsplash.py b/tests/unit/engines/test_unsplash.py deleted file mode 100644 index 4501de906..000000000 --- a/tests/unit/engines/test_unsplash.py +++ /dev/null @@ -1,38 +0,0 @@ -from collections import defaultdict -import mock -from searx.testing import SearxTestCase -from searx.engines import unsplash - - -class TestUnsplashEngine(SearxTestCase): - def test_request(self): - query = 'penguin' - _dict = defaultdict(dict) - _dict['pageno'] = 1 - params = unsplash.request(query, _dict) - - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - - def test_response(self): - resp = mock.Mock(text='{}') - result = unsplash.response(resp) - self.assertEqual([], result) - - resp.text = '{"results": []}' - result = unsplash.response(resp) - self.assertEqual([], result) - - # Sourced from https://unsplash.com/napi/search/photos?query=penguin&xp=&per_page=20&page=2 - with open('./tests/unit/engines/unsplash_fixture.json') as fixture: - resp.text = fixture.read() - - result = unsplash.response(resp) - self.assertEqual(len(result), 2) - self.assertEqual(result[0]['title'], 'low angle photography of swimming penguin') - self.assertEqual(result[0]['url'], 'https://unsplash.com/photos/FY8d721UO_4') - self.assertEqual(result[0]['thumbnail_src'], 'https://images.unsplash.com/photo-1523557148507-1b77641c7e7c?ixlib=rb-0.3.5&q=80\ -&fm=jpg&crop=entropy&cs=tinysrgb&w=200&fit=max') - self.assertEqual(result[0]['img_src'], 'https://images.unsplash.com/photo-1523557148507-1b77641c7e7c\ -?ixlib=rb-0.3.5') - self.assertEqual(result[0]['content'], '') diff --git a/tests/unit/engines/test_vimeo.py b/tests/unit/engines/test_vimeo.py deleted file mode 100644 index c86b50a14..000000000 --- a/tests/unit/engines/test_vimeo.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import vimeo -from searx.testing import SearxTestCase - - -class TestVimeoEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 0 - params = vimeo.request(query, dicto) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertTrue('vimeo.com' in params['url']) - - def test_response(self): - self.assertRaises(AttributeError, vimeo.response, None) - self.assertRaises(AttributeError, vimeo.response, []) - self.assertRaises(AttributeError, vimeo.response, '') - self.assertRaises(AttributeError, vimeo.response, '[]') - - json = u""" -{"filtered":{"total":274641,"page":1,"per_page":18,"paging":{"next":"?sizes=590x332&page=2","previous":null,"first":"?sizes=590x332&page=1","last":"?sizes=590x332&page=15258"},"data":[{"is_staffpick":false,"is_featured":true,"type":"clip","clip":{"uri":"\\/videos\\/106557563","name":"Hot Rod Revue: The 
South","link":"https:\\/\\/vimeo.com\\/106557563","duration":4069,"created_time":"2014-09-19T03:38:04+00:00","privacy":{"view":"ptv"},"pictures":{"sizes":[{"width":"590","height":"332","link":"https:\\/\\/i.vimeocdn.com\\/video\\/489717884_590x332.jpg?r=pad","link_with_play_button":"https:\\/\\/i.vimeocdn.com\\/filter\\/overlay?src0=https%3A%2F%2Fi.vimeocdn.com%2Fvideo%2F489717884_590x332.jpg&src1=http%3A%2F%2Ff.vimeocdn.com%2Fp%2Fimages%2Fcrawler_play.png"}]},"stats":{"plays":null},"metadata":{"connections":{"comments":{"total":0},"likes":{"total":5}},"interactions":[]},"user":{"name":"Cal Thorley","link":"https:\\/\\/vimeo.com\\/calthorley","pictures":{"sizes":[{"width":30,"height":30,"link":"https:\\/\\/i.vimeocdn.com\\/portrait\\/2545308_30x30?r=pad"},{"width":75,"height":75,"link":"https:\\/\\/i.vimeocdn.com\\/portrait\\/2545308_75x75?r=pad"},{"width":100,"height":100,"link":"https:\\/\\/i.vimeocdn.com\\/portrait\\/2545308_100x100?r=pad"},{"width":300,"height":300,"link":"https:\\/\\/i.vimeocdn.com\\/portrait\\/2545308_300x300?r=pad"}]}}}}]}}; - -""" # noqa - response = mock.Mock(text=json) - results = vimeo.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], u'Hot Rod Revue: The South') - self.assertEqual(results[0]['url'], 'https://vimeo.com/106557563') - self.assertEqual(results[0]['content'], '') - self.assertEqual(results[0]['thumbnail'], 'https://i.vimeocdn.com/video/489717884_590x332.jpg?r=pad') diff --git a/tests/unit/engines/test_wikidata.py b/tests/unit/engines/test_wikidata.py deleted file mode 100644 index 48be17bb4..000000000 --- a/tests/unit/engines/test_wikidata.py +++ /dev/null @@ -1,514 +0,0 @@ -# -*- coding: utf-8 -*- -from lxml.html import fromstring -from lxml import etree -from collections import defaultdict -import mock -from searx.engines import wikidata -from searx.testing import SearxTestCase - - -class TestWikidataEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['language'] = 'all' - params = wikidata.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('wikidata.org', params['url']) - - dicto['language'] = 'es_ES' - params = wikidata.request(query, dicto) - self.assertIn(query, params['url']) - - # successful cases are not tested here to avoid sending additional requests - def test_response(self): - self.assertRaises(AttributeError, wikidata.response, None) - self.assertRaises(AttributeError, wikidata.response, []) - self.assertRaises(AttributeError, wikidata.response, '') - self.assertRaises(AttributeError, wikidata.response, '[]') - - wikidata.supported_languages = ['en', 'es'] - wikidata.language_aliases = {} - response = mock.Mock(content=''.encode("utf-8"), search_params={"language": "en"}) - self.assertEqual(wikidata.response(response), []) - - def test_getDetail(self): - response = {} - results = wikidata.getDetail(response, "Q123", "en", "en-US", etree.HTMLParser()) - self.assertEqual(results, []) - - title_html = '
    Test
    ' - html = """ -
-        [HTML fixture: entity page body containing an en.wikipedia.org/wiki/Test sitelink; markup omitted]
    - """ - response = {"parse": {"displaytitle": title_html, "text": html}} - - results = wikidata.getDetail(response, "Q123", "en", "en-US", etree.HTMLParser()) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['url'], 'https://en.wikipedia.org/wiki/Test') - - title_html = """ -
-        [HTML fixture: entity title "Test" with language label "English"; markup omitted]
    - """ - html = """ -
-        [HTML fixture: entity description "Description" (English) with an official website link and an en.wikipedia.org/wiki/Test sitelink; markup omitted]
    - """ - response = {"parse": {"displaytitle": title_html, "text": html}} - - results = wikidata.getDetail(response, "Q123", "yua", "yua_MX", etree.HTMLParser()) - self.assertEqual(len(results), 2) - self.assertEqual(results[0]['title'], 'Official website') - self.assertEqual(results[0]['url'], 'https://officialsite.com') - - self.assertEqual(results[1]['infobox'], 'Test') - self.assertEqual(results[1]['id'], None) - self.assertEqual(results[1]['content'], 'Description') - self.assertEqual(results[1]['attributes'], []) - self.assertEqual(results[1]['urls'][0]['title'], 'Official website') - self.assertEqual(results[1]['urls'][0]['url'], 'https://officialsite.com') - self.assertEqual(results[1]['urls'][1]['title'], 'Wikipedia (en)') - self.assertEqual(results[1]['urls'][1]['url'], 'https://en.wikipedia.org/wiki/Test') - - def test_add_image(self): - image_src = wikidata.add_image(fromstring("
    ")) - self.assertEqual(image_src, None) - - html = u""" -
-        [HTML fixture: image claim "image.png", 2,687 × 3,356; 1.22 MB; markup omitted]
    - """ - html_etree = fromstring(html) - id_cache = wikidata.get_id_cache(html_etree) - image_src = wikidata.add_image(id_cache) - self.assertEqual(image_src, - "https://commons.wikimedia.org/wiki/Special:FilePath/image.png?width=500&height=400") - - html = u""" -
-        [HTML fixture: image claims "icon.png" (671 × 671; 18 KB) and "logo.png" (170 × 170; 1 KB); markup omitted]
    - """ - html_etree = fromstring(html) - id_cache = wikidata.get_id_cache(html_etree) - - image_src = wikidata.add_image(id_cache) - self.assertEqual(image_src, - "https://commons.wikimedia.org/wiki/Special:FilePath/logo.png?width=500&height=400") - - def test_add_attribute(self): - html = u""" -
-        [HTML fixture: P27 claim, Country of citizenship: United Kingdom; markup omitted]
    - """ - attributes = [] - html_etree = fromstring(html) - id_cache = wikidata.get_id_cache(html_etree) - - wikidata.add_attribute(attributes, id_cache, "Fail") - self.assertEqual(attributes, []) - - wikidata.add_attribute(attributes, id_cache, "P27") - self.assertEqual(len(attributes), 1) - self.assertEqual(attributes[0]["label"], "Country of citizenship") - self.assertEqual(attributes[0]["value"], "United Kingdom") - - html = u""" -
-        [HTML fixture: P569 claim, date of birth "27 January 1832" (Gregorian); markup omitted]
    - """ - attributes = [] - html_etree = fromstring(html) - id_cache = wikidata.get_id_cache(html_etree) - wikidata.add_attribute(attributes, id_cache, "P569", date=True) - self.assertEqual(len(attributes), 1) - self.assertEqual(attributes[0]["label"], "Date of birth") - self.assertEqual(attributes[0]["value"], "27 January 1832") - - html = u""" -
-        [HTML fixture: P6 claim, Head of government: Old Prime Minister and Actual Prime Minister; markup omitted]
    - """ - attributes = [] - html_etree = fromstring(html) - id_cache = wikidata.get_id_cache(html_etree) - wikidata.add_attribute(attributes, id_cache, "P6") - self.assertEqual(len(attributes), 1) - self.assertEqual(attributes[0]["label"], "Head of government") - self.assertEqual(attributes[0]["value"], "Old Prime Minister, Actual Prime Minister") - - attributes = [] - html_etree = fromstring(html) - id_cache = wikidata.get_id_cache(html_etree) - wikidata.add_attribute(attributes, id_cache, "P6", trim=True) - self.assertEqual(len(attributes), 1) - self.assertEqual(attributes[0]["value"], "Actual Prime Minister") - - def test_add_url(self): - html = u""" -
    - -
    - """ - urls = [] - html_etree = fromstring(html) - id_cache = wikidata.get_id_cache(html_etree) - wikidata.add_url(urls, html_etree, id_cache, 'P856') - self.assertEquals(len(urls), 1) - self.assertIn({'title': 'Official website', 'url': 'https://searx.me/'}, urls) - urls = [] - results = [] - wikidata.add_url(urls, html_etree, id_cache, 'P856', 'custom label', results=results) - self.assertEquals(len(urls), 1) - self.assertEquals(len(results), 1) - self.assertIn({'title': 'custom label', 'url': 'https://searx.me/'}, urls) - self.assertIn({'title': 'custom label', 'url': 'https://searx.me/'}, results) - - html = u""" - - """ - urls = [] - html_etree = fromstring(html) - id_cache = wikidata.get_id_cache(html_etree) - wikidata.add_url(urls, html_etree, id_cache, 'P856') - self.assertEquals(len(urls), 2) - self.assertIn({'title': 'Official website', 'url': 'http://www.worldofwarcraft.com'}, urls) - self.assertIn({'title': 'Official website', 'url': 'http://eu.battle.net/wow/en/'}, urls) - - def test_get_imdblink(self): - html = u""" -
    - """ - html_etree = fromstring(html) - imdblink = wikidata.get_imdblink(html_etree, 'https://www.imdb.com/') - - html = u""" -
-        [HTML fixture: IMDb identifier claim nm4915994; markup omitted]
    - """ - html_etree = fromstring(html) - imdblink = wikidata.get_imdblink(html_etree, 'https://www.imdb.com/') - self.assertIn('https://www.imdb.com/name/nm4915994', imdblink) - - def test_get_geolink(self): - html = u""" -
-        [HTML fixture: coordinate location claim 60°N, 40°E; markup omitted]
    - """ - html_etree = fromstring(html) - geolink = wikidata.get_geolink(html_etree) - self.assertIn('https://www.openstreetmap.org/', geolink) - self.assertIn('lat=60&lon=40', geolink) - - html = u""" -
-        [HTML fixture: coordinate location claim 34°35'59"S, 58°22'55"W; markup omitted]
    - """ - html_etree = fromstring(html) - geolink = wikidata.get_geolink(html_etree) - self.assertIn('https://www.openstreetmap.org/', geolink) - self.assertIn('lat=-34.59', geolink) - self.assertIn('lon=-58.38', geolink) - - def test_get_wikilink(self): - html = """ -
-        [HTML fixture: sitelinks for enwiki, arwiki and enwikiquote pointing to "Test"; markup omitted]
    - """ - html_etree = fromstring(html) - wikilink = wikidata.get_wikilink(html_etree, 'nowiki') - self.assertEqual(wikilink, None) - wikilink = wikidata.get_wikilink(html_etree, 'enwiki') - self.assertEqual(wikilink, 'https://en.wikipedia.org/wiki/Test') - wikilink = wikidata.get_wikilink(html_etree, 'arwiki') - self.assertEqual(wikilink, 'https://ar.wikipedia.org/wiki/Test') - wikilink = wikidata.get_wikilink(html_etree, 'enwikiquote') - self.assertEqual(wikilink, 'https://en.wikiquote.org/wiki/Test') diff --git a/tests/unit/engines/test_wikipedia.py b/tests/unit/engines/test_wikipedia.py deleted file mode 100644 index 316b12bc5..000000000 --- a/tests/unit/engines/test_wikipedia.py +++ /dev/null @@ -1,263 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import wikipedia -from searx.testing import SearxTestCase - - -class TestWikipediaEngine(SearxTestCase): - - def test_request(self): - wikipedia.supported_languages = ['fr', 'en', 'no'] - wikipedia.language_aliases = {'nb': 'no'} - - query = 'test_query' - dicto = defaultdict(dict) - dicto['language'] = 'fr-FR' - params = wikipedia.request(query.encode('utf-8'), dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('test_query', params['url']) - self.assertIn('Test_Query', params['url']) - self.assertIn('fr.wikipedia.org', params['url']) - - query = u'Test_Query' - params = wikipedia.request(query.encode('utf-8'), dicto) - self.assertIn('Test_Query', params['url']) - self.assertNotIn('test_query', params['url']) - - dicto['language'] = 'nb' - params = wikipedia.request(query, dicto) - self.assertIn('no.wikipedia.org', params['url']) - dicto['language'] = 'all' - params = wikipedia.request(query, dicto) - self.assertIn('en', params['url']) - - dicto['language'] = 'xx' - params = wikipedia.request(query, dicto) - self.assertIn('en.wikipedia.org', params['url']) - - def test_response(self): - dicto = defaultdict(dict) - dicto['language'] = 'fr' - - self.assertRaises(AttributeError, wikipedia.response, None) - self.assertRaises(AttributeError, wikipedia.response, []) - self.assertRaises(AttributeError, wikipedia.response, '') - self.assertRaises(AttributeError, wikipedia.response, '[]') - - # page not found - json = """ - { - "batchcomplete": "", - "query": { - "normalized": [], - "pages": { - "-1": { - "ns": 0, - "title": "", - "missing": "" - } - } - } - }""" - response = mock.Mock(text=json, search_params=dicto) - self.assertEqual(wikipedia.response(response), []) - - # normal case - json = """ - { - "batchcomplete": "", - "query": { - "normalized": [], - "pages": { - "12345": { - "pageid": 12345, - "ns": 0, - "title": "The Title", - "extract": "The Title is...", - "thumbnail": { - "source": "img_src.jpg" - }, - "pageimage": "img_name.jpg" - } - } - } - }""" - response = mock.Mock(text=json, search_params=dicto) - results = wikipedia.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 2) - self.assertEqual(results[0]['title'], u'The Title') - self.assertIn('fr.wikipedia.org/wiki/The_Title', results[0]['url']) - self.assertEqual(results[1]['infobox'], u'The Title') - self.assertIn('fr.wikipedia.org/wiki/The_Title', results[1]['id']) - self.assertIn('The Title is...', results[1]['content']) - self.assertEqual(results[1]['img_src'], 'img_src.jpg') - - # disambiguation page - json = """ - { - "batchcomplete": "", - "query": { - "normalized": [], - "pages": { - "12345": { - "pageid": 12345, - "ns": 0, - "title": 
"The Title", - "extract": "The Title can be:\\nThe Title 1\\nThe Title 2\\nThe Title 3\\nThe Title 4......................................................................................................................................." """ # noqa - json += """ - } - } - } - }""" - response = mock.Mock(text=json, search_params=dicto) - results = wikipedia.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 2) - - # no image - json = """ - { - "batchcomplete": "", - "query": { - "normalized": [], - "pages": { - "12345": { - "pageid": 12345, - "ns": 0, - "title": "The Title", - "extract": "The Title is......................................................................................................................................................................................." """ # noqa - json += """ - } - } - } - }""" - response = mock.Mock(text=json, search_params=dicto) - results = wikipedia.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 2) - self.assertIn('The Title is...', results[1]['content']) - self.assertEqual(results[1]['img_src'], None) - - # title not in first paragraph - json = u""" - { - "batchcomplete": "", - "query": { - "normalized": [], - "pages": { - "12345": { - "pageid": 12345, - "ns": 0, - "title": "披頭四樂隊", - "extract": "披头士乐队....................................................................................................................................................................................................\\n披頭四樂隊...", """ # noqa - json += """ - "thumbnail": { - "source": "img_src.jpg" - }, - "pageimage": "img_name.jpg" - } - } - } - }""" - response = mock.Mock(text=json, search_params=dicto) - results = wikipedia.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 2) - self.assertEqual(results[1]['infobox'], u'披頭四樂隊') - self.assertIn(u'披头士乐队...', results[1]['content']) - - def test_fetch_supported_languages(self): - html = u"""""" - response = mock.Mock(text=html) - languages = wikipedia._fetch_supported_languages(response) - self.assertEqual(type(languages), dict) - self.assertEqual(len(languages), 0) - - html = u""" - - -
-        [HTML fixture: two Wikipedia language tables with columns N, Language, Language (local), Wiki, Articles; rows: 2, Swedish, Svenska, sv, 3000000; 3, Cebuano, Sinugboanong Binisaya, ceb, 3000000; 2, Norwegian (Bokmål), Norsk (Bokmål), no, 100000; table markup omitted]
    - - - """ - response = mock.Mock(text=html) - languages = wikipedia._fetch_supported_languages(response) - self.assertEqual(type(languages), dict) - self.assertEqual(len(languages), 3) - - self.assertIn('sv', languages) - self.assertIn('ceb', languages) - self.assertIn('no', languages) - - self.assertEqual(type(languages['sv']), dict) - self.assertEqual(type(languages['ceb']), dict) - self.assertEqual(type(languages['no']), dict) - - self.assertIn('name', languages['sv']) - self.assertIn('english_name', languages['sv']) - self.assertIn('articles', languages['sv']) - - self.assertEqual(languages['sv']['name'], 'Svenska') - self.assertEqual(languages['sv']['english_name'], 'Swedish') - self.assertEqual(languages['sv']['articles'], 3000000) - self.assertEqual(languages['ceb']['name'], 'Sinugboanong Binisaya') - self.assertEqual(languages['ceb']['english_name'], 'Cebuano') - self.assertEqual(languages['ceb']['articles'], 3000000) - self.assertEqual(languages['no']['name'], u'Norsk (Bokmål)') - self.assertEqual(languages['no']['english_name'], u'Norwegian (Bokmål)') - self.assertEqual(languages['no']['articles'], 100000) diff --git a/tests/unit/engines/test_wolframalpha_api.py b/tests/unit/engines/test_wolframalpha_api.py deleted file mode 100644 index 0433b34aa..000000000 --- a/tests/unit/engines/test_wolframalpha_api.py +++ /dev/null @@ -1,166 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from requests import Request -from searx.engines import wolframalpha_api -from searx.testing import SearxTestCase - - -class TestWolframAlphaAPIEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - params = wolframalpha_api.request(query, dicto) - - # TODO: test api_key - self.assertIn('url', params) - self.assertIn('https://api.wolframalpha.com/v2/query?', params['url']) - self.assertIn(query, params['url']) - self.assertEqual('https://www.wolframalpha.com/input/?i=test_query', params['headers']['Referer']) - - def test_replace_pua_chars(self): - self.assertEqual('i', wolframalpha_api.replace_pua_chars(u'\uf74e')) - - def test_response(self): - self.assertRaises(AttributeError, wolframalpha_api.response, None) - self.assertRaises(AttributeError, wolframalpha_api.response, []) - self.assertRaises(AttributeError, wolframalpha_api.response, '') - self.assertRaises(AttributeError, wolframalpha_api.response, '[]') - - referer_url = 'referer_url' - request = Request(headers={'Referer': referer_url}) - - # test failure - xml = ''' - - ''' - response = mock.Mock(content=xml.encode('utf-8')) - self.assertEqual(wolframalpha_api.response(response), []) - - # test basic case - xml = b""" - - - - input_img_alt - input_plaintext</plaintext> - </subpod> - </pod> - <pod title='Result' - scanner='Simplification' - id='Result' - numsubpods='1' - primary='true'> - <subpod title=''> - <img src='result_img_src.gif' - alt='result_img_alt' - title='result_img_title' /> - <plaintext>result_plaintext</plaintext> - </subpod> - </pod> - <pod title='Manipulatives illustration' - scanner='Arithmetic' - id='Illustration' - numsubpods='1'> - <subpod title=''> - <img src='illustration_img_src.gif' - alt='illustration_img_alt' /> - <plaintext>illustration_plaintext</plaintext> - </subpod> - </pod> - </queryresult> - """ - response = mock.Mock(content=xml, request=request) - results = wolframalpha_api.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 2) - self.assertEqual('input_plaintext', 
results[0]['infobox']) - - self.assertEqual(len(results[0]['attributes']), 3) - self.assertEqual('Input', results[0]['attributes'][0]['label']) - self.assertEqual('input_plaintext', results[0]['attributes'][0]['value']) - self.assertEqual('Result', results[0]['attributes'][1]['label']) - self.assertEqual('result_plaintext', results[0]['attributes'][1]['value']) - self.assertEqual('Manipulatives illustration', results[0]['attributes'][2]['label']) - self.assertEqual('illustration_img_src.gif', results[0]['attributes'][2]['image']['src']) - self.assertEqual('illustration_img_alt', results[0]['attributes'][2]['image']['alt']) - - self.assertEqual(len(results[0]['urls']), 1) - - self.assertEqual(referer_url, results[0]['urls'][0]['url']) - self.assertEqual('Wolfram|Alpha', results[0]['urls'][0]['title']) - self.assertEqual(referer_url, results[1]['url']) - self.assertEqual('Wolfram|Alpha (input_plaintext)', results[1]['title']) - self.assertIn('result_plaintext', results[1]['content']) - - # test calc - xml = b"""<?xml version='1.0' encoding='UTF-8'?> - <queryresult success='true' - error='false' - numpods='2' - datatypes='' - parsetimedout='false' - id='queryresult_id' - host='http://www5b.wolframalpha.com' - related='related_url' - version='2.6' > - <pod title='Indefinite integral' - scanner='Integral' - id='IndefiniteIntegral' - error='false' - numsubpods='1' - primary='true'> - <subpod title=''> - <img src='integral_image.gif' - alt='integral_img_alt' - title='integral_img_title' /> - <plaintext>integral_plaintext</plaintext> - </subpod> - </pod> - <pod title='Plot of the integral' - scanner='Integral' - id='Plot' - error='false' - numsubpods='1'> - <subpod title=''> - <img src='plot.gif' - alt='plot_alt' - title='' /> - <plaintext></plaintext> - </subpod> - </pod> - </queryresult> - """ - response = mock.Mock(content=xml, request=request) - results = wolframalpha_api.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 2) - self.assertEqual('integral_plaintext', results[0]['infobox']) - - self.assertEqual(len(results[0]['attributes']), 2) - self.assertEqual('Indefinite integral', results[0]['attributes'][0]['label']) - self.assertEqual('integral_plaintext', results[0]['attributes'][0]['value']) - self.assertEqual('Plot of the integral', results[0]['attributes'][1]['label']) - self.assertEqual('plot.gif', results[0]['attributes'][1]['image']['src']) - self.assertEqual('plot_alt', results[0]['attributes'][1]['image']['alt']) - - self.assertEqual(len(results[0]['urls']), 1) - - self.assertEqual(referer_url, results[0]['urls'][0]['url']) - self.assertEqual('Wolfram|Alpha', results[0]['urls'][0]['title']) - self.assertEqual(referer_url, results[1]['url']) - self.assertEqual('Wolfram|Alpha (integral_plaintext)', results[1]['title']) - self.assertIn('integral_plaintext', results[1]['content']) diff --git a/tests/unit/engines/test_wolframalpha_noapi.py b/tests/unit/engines/test_wolframalpha_noapi.py deleted file mode 100644 index 982edd9f2..000000000 --- a/tests/unit/engines/test_wolframalpha_noapi.py +++ /dev/null @@ -1,224 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from requests import Request -from searx.engines import wolframalpha_noapi -from searx.testing import SearxTestCase - - -class TestWolframAlphaNoAPIEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - params = wolframalpha_noapi.request(query, dicto) - - self.assertIn('url', params) - 
self.assertIn('https://www.wolframalpha.com/input/json.jsp', params['url']) - self.assertIn(query, params['url']) - self.assertEqual('https://www.wolframalpha.com/input/?i=test_query', params['headers']['Referer']) - - def test_response(self): - self.assertRaises(AttributeError, wolframalpha_noapi.response, None) - self.assertRaises(AttributeError, wolframalpha_noapi.response, []) - self.assertRaises(AttributeError, wolframalpha_noapi.response, '') - self.assertRaises(AttributeError, wolframalpha_noapi.response, '[]') - - referer_url = 'referer_url' - request = Request(headers={'Referer': referer_url}) - - # test failure - json = r''' - {"queryresult" : { - "success" : false, - "error" : false, - "numpods" : 0, - "id" : "", - "host" : "https:\/\/www5a.wolframalpha.com", - "didyoumeans" : {} - }} - ''' - response = mock.Mock(text=json, request=request) - self.assertEqual(wolframalpha_noapi.response(response), []) - - # test basic case - json = r''' - {"queryresult" : { - "success" : true, - "error" : false, - "numpods" : 6, - "datatypes" : "Math", - "id" : "queryresult_id", - "host" : "https:\/\/www5b.wolframalpha.com", - "related" : "related_url", - "version" : "2.6", - "pods" : [ - { - "title" : "Input", - "scanners" : [ - "Identity" - ], - "id" : "Input", - "error" : false, - "numsubpods" : 1, - "subpods" : [ - { - "title" : "", - "img" : { - "src" : "input_img_src.gif", - "alt" : "input_img_alt", - "title" : "input_img_title" - }, - "plaintext" : "input_plaintext", - "minput" : "input_minput" - } - ] - }, - { - "title" : "Result", - "scanners" : [ - "Simplification" - ], - "id" : "Result", - "error" : false, - "numsubpods" : 1, - "primary" : true, - "subpods" : [ - { - "title" : "", - "img" : { - "src" : "result_img_src.gif", - "alt" : "result_img_alt", - "title" : "result_img_title" - }, - "plaintext" : "result_plaintext", - "moutput" : "result_moutput" - } - ] - }, - { - "title" : "Manipulatives illustration", - "scanners" : [ - "Arithmetic" - ], - "id" : "Illustration", - "error" : false, - "numsubpods" : 1, - "subpods" : [ - { - "title" : "", - "CDFcontent" : "Resizeable", - "img" : { - "src" : "illustration_img_src.gif", - "alt" : "illustration_img_alt", - "title" : "illustration_img_title" - }, - "plaintext" : "illustration_img_plaintext" - } - ] - } - ] - }} - ''' - response = mock.Mock(text=json, request=request) - results = wolframalpha_noapi.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 2) - self.assertEqual('input_plaintext', results[0]['infobox']) - - self.assertEqual(len(results[0]['attributes']), 3) - self.assertEqual('Input', results[0]['attributes'][0]['label']) - self.assertEqual('input_plaintext', results[0]['attributes'][0]['value']) - self.assertEqual('Result', results[0]['attributes'][1]['label']) - self.assertEqual('result_plaintext', results[0]['attributes'][1]['value']) - self.assertEqual('Manipulatives illustration', results[0]['attributes'][2]['label']) - self.assertEqual('illustration_img_src.gif', results[0]['attributes'][2]['image']['src']) - self.assertEqual('illustration_img_alt', results[0]['attributes'][2]['image']['alt']) - - self.assertEqual(len(results[0]['urls']), 1) - - self.assertEqual(referer_url, results[0]['urls'][0]['url']) - self.assertEqual('Wolfram|Alpha', results[0]['urls'][0]['title']) - self.assertEqual(referer_url, results[1]['url']) - self.assertEqual('Wolfram|Alpha (input_plaintext)', results[1]['title']) - self.assertIn('result_plaintext', results[1]['content']) - - # test calc - json = 
r""" - {"queryresult" : { - "success" : true, - "error" : false, - "numpods" : 2, - "datatypes" : "", - "id" : "queryresult_id", - "host" : "https:\/\/www4b.wolframalpha.com", - "related" : "related_url", - "version" : "2.6", - "pods" : [ - { - "title" : "Indefinite integral", - "scanners" : [ - "Integral" - ], - "id" : "IndefiniteIntegral", - "error" : false, - "numsubpods" : 1, - "primary" : true, - "subpods" : [ - { - "title" : "", - "img" : { - "src" : "integral_img_src.gif", - "alt" : "integral_img_alt", - "title" : "integral_img_title" - }, - "plaintext" : "integral_plaintext", - "minput" : "integral_minput", - "moutput" : "integral_moutput" - } - ] - }, - { - "title" : "Plot of the integral", - "scanners" : [ - "Integral" - ], - "id" : "Plot", - "error" : false, - "numsubpods" : 1, - "subpods" : [ - { - "title" : "", - "img" : { - "src" : "plot.gif", - "alt" : "plot_alt", - "title" : "plot_title" - }, - "plaintext" : "", - "minput" : "plot_minput" - } - ] - } - ] - }} - """ - response = mock.Mock(text=json, request=request) - results = wolframalpha_noapi.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 2) - self.assertEqual('integral_plaintext', results[0]['infobox']) - - self.assertEqual(len(results[0]['attributes']), 2) - self.assertEqual('Indefinite integral', results[0]['attributes'][0]['label']) - self.assertEqual('integral_plaintext', results[0]['attributes'][0]['value']) - self.assertEqual('Plot of the integral', results[0]['attributes'][1]['label']) - self.assertEqual('plot.gif', results[0]['attributes'][1]['image']['src']) - self.assertEqual('plot_alt', results[0]['attributes'][1]['image']['alt']) - - self.assertEqual(len(results[0]['urls']), 1) - - self.assertEqual(referer_url, results[0]['urls'][0]['url']) - self.assertEqual('Wolfram|Alpha', results[0]['urls'][0]['title']) - self.assertEqual(referer_url, results[1]['url']) - self.assertEqual('Wolfram|Alpha (integral_plaintext)', results[1]['title']) - self.assertIn('integral_plaintext', results[1]['content']) diff --git a/tests/unit/engines/test_www1x.py b/tests/unit/engines/test_www1x.py deleted file mode 100644 index 40f5200fd..000000000 --- a/tests/unit/engines/test_www1x.py +++ /dev/null @@ -1,14 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import www1x -from searx.testing import SearxTestCase - - -class TestWww1xEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - params = www1x.request(query, defaultdict(dict)) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertTrue('1x.com' in params['url']) diff --git a/tests/unit/engines/test_yacy.py b/tests/unit/engines/test_yacy.py deleted file mode 100644 index f49532cf4..000000000 --- a/tests/unit/engines/test_yacy.py +++ /dev/null @@ -1,96 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import yacy -from searx.testing import SearxTestCase - - -class TestYacyEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - dicto['language'] = 'fr_FR' - params = yacy.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('localhost', params['url']) - self.assertIn('fr', params['url']) - - dicto['language'] = 'all' - params = yacy.request(query, dicto) - self.assertIn('url', params) - self.assertNotIn('lr=lang_', params['url']) - - def test_response(self): - self.assertRaises(AttributeError, 
yacy.response, None) - self.assertRaises(AttributeError, yacy.response, []) - self.assertRaises(AttributeError, yacy.response, '') - self.assertRaises(AttributeError, yacy.response, '[]') - - response = mock.Mock(text='{}') - self.assertEqual(yacy.response(response), []) - - response = mock.Mock(text='{"data": []}') - self.assertEqual(yacy.response(response), []) - - json = """ - { - "channels": [ - { - "title": "YaCy P2P-Search for test", - "description": "Search for test", - "link": "http://search.yacy.de:7001/yacysearch.html?query=test&amp;resource=global&amp;contentdom=0", - "image": { - "url": "http://search.yacy.de:7001/env/grafics/yacy.png", - "title": "Search for test", - "link": "http://search.yacy.de:7001/yacysearch.html?query=test&amp;resource=global&amp;contentdom=0" - }, - "totalResults": "249", - "startIndex": "0", - "itemsPerPage": "5", - "searchTerms": "test", - "items": [ - { - "title": "This is the title", - "link": "http://this.is.the.url", - "code": "", - "description": "This should be the content", - "pubDate": "Sat, 08 Jun 2013 02:00:00 +0200", - "size": "44213", - "sizename": "43 kbyte", - "guid": "lzh_1T_5FP-A", - "faviconCode": "XTS4uQ_5FP-A", - "host": "www.gamestar.de", - "path": "/spiele/city-of-heroes-freedom/47019.html", - "file": "47019.html", - "urlhash": "lzh_1T_5FP-A", - "ranking": "0.20106804" - }, - { - "title": "This is the title2", - "icon": "/ViewImage.png?maxwidth=96&amp;maxheight=96&amp;code=7EbAbW6BpPOA", - "image": "http://image.url/image.png", - "cache": "/ViewImage.png?quadratic=&amp;url=http://golem.ivwbox.de/cgi-bin/ivw/CP/G_INET?d=14071378", - "url": "http://this.is.the.url", - "urlhash": "7EbAbW6BpPOA", - "host": "www.golem.de", - "width": "-1", - "height": "-1" - } - ] - } - ] - } - """ - response = mock.Mock(text=json) - results = yacy.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 2) - self.assertEqual(results[0]['title'], 'This is the title') - self.assertEqual(results[0]['url'], 'http://this.is.the.url') - self.assertEqual(results[0]['content'], 'This should be the content') - self.assertEqual(results[1]['img_src'], 'http://image.url/image.png') - self.assertEqual(results[1]['content'], '') - self.assertEqual(results[1]['url'], 'http://this.is.the.url') - self.assertEqual(results[1]['title'], 'This is the title2') diff --git a/tests/unit/engines/test_yahoo.py b/tests/unit/engines/test_yahoo.py deleted file mode 100644 index e52c1109e..000000000 --- a/tests/unit/engines/test_yahoo.py +++ /dev/null @@ -1,190 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import yahoo -from searx.testing import SearxTestCase - - -class TestYahooEngine(SearxTestCase): - - def test_parse_url(self): - test_url = 'http://r.search.yahoo.com/_ylt=A0LEb9JUSKcAEGRXNyoA;_ylu=X3oDMTEzZm1qazYwBHNlYwNzcgRwb3MDMQRjb' +\ - '2xvA2Jm2dGlkA1NNRTcwM18x/RV=2/RE=1423106085/RO=10/RU=https%3a%2f%2fthis.is.the.url%2f/RK=0/RS=' +\ - 'dtcJsfP4mEeBOjnVfUQ-' - url = yahoo.parse_url(test_url) - self.assertEqual('https://this.is.the.url/', url) - - test_url = 'http://r.search.yahoo.com/_ylt=A0LElb9JUSKcAEGRXNyoA;_ylu=X3oDMTEzZm1qazYwBHNlYwNzcgRwb3MDMQRjb' +\ - '2xvA2Jm2dGlkA1NNRTcwM18x/RV=2/RE=1423106085/RO=10/RU=https%3a%2f%2fthis.is.the.url%2f/RS=' +\ - 'dtcJsfP4mEeBOjnVfUQ-' - url = yahoo.parse_url(test_url) - self.assertEqual('https://this.is.the.url/', url) - - test_url = 'https://this.is.the.url/' - url = yahoo.parse_url(test_url) - self.assertEqual('https://this.is.the.url/', 
url) - - def test_request(self): - yahoo.supported_languages = ['en', 'fr', 'zh-CHT', 'zh-CHS'] - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - dicto['time_range'] = '' - dicto['language'] = 'fr-FR' - params = yahoo.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('search.yahoo.com', params['url']) - self.assertIn('fr', params['url']) - self.assertIn('cookies', params) - self.assertIn('sB', params['cookies']) - self.assertIn('fr', params['cookies']['sB']) - - dicto['language'] = 'zh' - params = yahoo.request(query, dicto) - self.assertIn('zh_chs', params['url']) - self.assertIn('zh_chs', params['cookies']['sB']) - - dicto['language'] = 'zh-TW' - params = yahoo.request(query, dicto) - self.assertIn('zh_cht', params['url']) - self.assertIn('zh_cht', params['cookies']['sB']) - - dicto['language'] = 'all' - params = yahoo.request(query, dicto) - self.assertIn('cookies', params) - self.assertIn('sB', params['cookies']) - self.assertIn('en', params['cookies']['sB']) - self.assertIn('en', params['url']) - - def test_no_url_in_request_year_time_range(self): - dicto = defaultdict(dict) - query = 'test_query' - dicto['time_range'] = 'year' - params = yahoo.request(query, dicto) - self.assertEqual({}, params['url']) - - def test_response(self): - self.assertRaises(AttributeError, yahoo.response, None) - self.assertRaises(AttributeError, yahoo.response, []) - self.assertRaises(AttributeError, yahoo.response, '') - self.assertRaises(AttributeError, yahoo.response, '[]') - - response = mock.Mock(text='<html></html>') - self.assertEqual(yahoo.response(response), []) - - html = """ -<ol class="reg mb-15 searchCenterMiddle"> - <li class="first"> - <div class="dd algo fst Sr"> - <div class="compTitle"> - <h3 class="title"><a class=" td-u" href="http://r.search.yahoo.com/_ylt=A0LEb9JUSKcAEGRXNyoA; - _ylu=X3oDMTEzZm1qazYwBHNlYwNzcgRwb3MDMQRjb2xvA2Jm2dGlkA1NNRTcwM18x/RV=2/RE=1423106085/RO=10 - /RU=https%3a%2f%2fthis.is.the.url%2f/RK=0/RS=dtcJsfP4mEeBOjnVfUQ-" - target="_blank" data-bid="54e712e13671c"> - <b><b>This is the title</b></b></a> - </h3> - </div> - <div class="compText aAbs"> - <p class="lh-18"><b><b>This is the </b>content</b> - </p> - </div> - </div> - </li> - <li> - <div class="dd algo lst Sr"> - <div class="compTitle"> - </div> - <div class="compText aAbs"> - <p class="lh-18">This is the second content</p> - </div> - </div> - </li> -</ol> -<div class="dd assist fst lst AlsoTry" data-bid="54e712e138d04"> - <div class="compTitle mb-4 h-17"> - <h3 class="title">Also Try</h3> </div> - <table class="compTable m-0 ac-1st td-u fz-ms"> - <tbody> - <tr> - <td class="w-50p pr-28"><a href="https://search.yahoo.com/"><B>This is the </B>suggestion<B></B></a> - </td> - </tr> - </table> -</div> - """ - response = mock.Mock(text=html) - results = yahoo.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 2) - self.assertEqual(results[0]['title'], 'This is the title') - self.assertEqual(results[0]['url'], 'https://this.is.the.url/') - self.assertEqual(results[0]['content'], 'This is the content') - self.assertEqual(results[1]['suggestion'], 'This is the suggestion') - - html = """ -<ol class="reg mb-15 searchCenterMiddle"> - <li class="first"> - <div class="dd algo fst Sr"> - <div class="compTitle"> - <h3 class="title"><a class=" td-u" href="http://r.search.yahoo.com/_ylt=A0LEb9JUSKcAEGRXNyoA; - _ylu=X3oDMTEzZm1qazYwBHNlYwNzcgRwb3MDMQRjb2xvA2Jm2dGlkA1NNRTcwM18x/RV=2/RE=1423106085/RO=10 - 
/RU=https%3a%2f%2fthis.is.the.url%2f/RK=0/RS=dtcJsfP4mEeBOjnVfUQ-" - target="_blank" data-bid="54e712e13671c"> - <b><b>This is the title</b></b></a> - </h3> - </div> - <div class="compText aAbs"> - <p class="lh-18"><b><b>This is the </b>content</b> - </p> - </div> - </div> - </li> -</ol> - """ - response = mock.Mock(text=html) - results = yahoo.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'This is the title') - self.assertEqual(results[0]['url'], 'https://this.is.the.url/') - self.assertEqual(results[0]['content'], 'This is the content') - - html = """ - <li class="b_algo" u="0|5109|4755453613245655|UAGjXgIrPH5yh-o5oNHRx_3Zta87f_QO"> - </li> - """ - response = mock.Mock(text=html) - results = yahoo.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) - - def test_fetch_supported_languages(self): - html = """<html></html>""" - response = mock.Mock(text=html) - results = yahoo._fetch_supported_languages(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) - - html = """ - <html> - <div> - <div id="yschlang"> - <span> - <label><input value="lang_ar"></input></label> - </span> - <span> - <label><input value="lang_zh_chs"></input></label> - <label><input value="lang_zh_cht"></input></label> - </span> - </div> - </div> - </html> - """ - response = mock.Mock(text=html) - languages = yahoo._fetch_supported_languages(response) - self.assertEqual(type(languages), list) - self.assertEqual(len(languages), 3) - self.assertIn('ar', languages) - self.assertIn('zh-CHS', languages) - self.assertIn('zh-CHT', languages) diff --git a/tests/unit/engines/test_yahoo_news.py b/tests/unit/engines/test_yahoo_news.py deleted file mode 100644 index ae27df2a5..000000000 --- a/tests/unit/engines/test_yahoo_news.py +++ /dev/null @@ -1,150 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -from datetime import datetime -import mock -from searx.engines import yahoo_news -from searx.testing import SearxTestCase - - -class TestYahooNewsEngine(SearxTestCase): - - def test_request(self): - yahoo_news.supported_languages = ['en', 'fr'] - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 1 - dicto['language'] = 'fr-FR' - params = yahoo_news.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('news.search.yahoo.com', params['url']) - self.assertIn('fr', params['url']) - self.assertIn('cookies', params) - self.assertIn('sB', params['cookies']) - self.assertIn('fr', params['cookies']['sB']) - - dicto['language'] = 'all' - params = yahoo_news.request(query, dicto) - self.assertIn('cookies', params) - self.assertIn('sB', params['cookies']) - self.assertIn('en', params['cookies']['sB']) - self.assertIn('en', params['url']) - - def test_sanitize_url(self): - url = "test.url" - self.assertEqual(url, yahoo_news.sanitize_url(url)) - - url = "www.yahoo.com/;_ylt=test" - self.assertEqual("www.yahoo.com/", yahoo_news.sanitize_url(url)) - - def test_response(self): - self.assertRaises(AttributeError, yahoo_news.response, None) - self.assertRaises(AttributeError, yahoo_news.response, []) - self.assertRaises(AttributeError, yahoo_news.response, '') - self.assertRaises(AttributeError, yahoo_news.response, '[]') - - response = mock.Mock(text='<html></html>') - self.assertEqual(yahoo_news.response(response), []) - - html = """ - <ol class=" reg searchCenterMiddle"> - <li class="first"> - <div 
class="compTitle"> - <h3> - <a class="yschttl spt" href="http://this.is.the.url" target="_blank"> - This is - the <b>title</b>... - </a> - </h3> - </div> - <div> - <span class="cite">Business via Yahoo!</span> - <span class="tri fc-2nd ml-10">May 01 10:00 AM</span> - </div> - <div class="compText"> - This is the content - </div> - </li> - <li class="first"> - <div class="compTitle"> - <h3> - <a class="yschttl spt" target="_blank"> - </a> - </h3> - </div> - <div class="compText"> - </div> - </li> - </ol> - """ - response = mock.Mock(text=html) - results = yahoo_news.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'This is the title...') - self.assertEqual(results[0]['url'], 'http://this.is.the.url/') - self.assertEqual(results[0]['content'], 'This is the content') - - html = """ - <ol class=" reg searchCenterMiddle"> - <li class="first"> - <div class="compTitle"> - <h3> - <a class="yschttl spt" href="http://this.is.the.url" target="_blank"> - This is - the <b>title</b>... - </a> - </h3> - </div> - <div> - <span class="cite">Business via Yahoo!</span> - <span class="tri fc-2nd ml-10">2 hours, 22 minutes ago</span> - </div> - <div class="compText"> - This is the content - </div> - </li> - <li> - <div class="compTitle"> - <h3> - <a class="yschttl spt" href="http://this.is.the.url" target="_blank"> - This is - the <b>title</b>... - </a> - </h3> - </div> - <div> - <span class="cite">Business via Yahoo!</span> - <span class="tri fc-2nd ml-10">22 minutes ago</span> - </div> - <div class="compText"> - This is the content - </div> - </li> - <li> - <div class="compTitle"> - <h3> - <a class="yschttl spt" href="http://this.is.the.url" target="_blank"> - This is - the <b>title</b>... 
- </a> - </h3> - </div> - <div> - <span class="cite">Business via Yahoo!</span> - <span class="tri fc-2nd ml-10">Feb 03 09:45AM 1900</span> - </div> - <div class="compText"> - This is the content - </div> - </li> - </ol> - """ - response = mock.Mock(text=html) - results = yahoo_news.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 3) - self.assertEqual(results[0]['title'], 'This is the title...') - self.assertEqual(results[0]['url'], 'http://this.is.the.url/') - self.assertEqual(results[0]['content'], 'This is the content') - self.assertEqual(results[2]['publishedDate'].year, datetime.now().year) diff --git a/tests/unit/engines/test_youtube_api.py b/tests/unit/engines/test_youtube_api.py deleted file mode 100644 index 0d4d478c3..000000000 --- a/tests/unit/engines/test_youtube_api.py +++ /dev/null @@ -1,111 +0,0 @@ -from collections import defaultdict -import mock -from searx.engines import youtube_api -from searx.testing import SearxTestCase - - -class TestYoutubeAPIEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 0 - dicto['language'] = 'fr_FR' - params = youtube_api.request(query, dicto) - self.assertTrue('url' in params) - self.assertTrue(query in params['url']) - self.assertIn('googleapis.com', params['url']) - self.assertIn('youtube', params['url']) - self.assertIn('fr', params['url']) - - dicto['language'] = 'all' - params = youtube_api.request(query, dicto) - self.assertFalse('fr' in params['url']) - - def test_response(self): - self.assertRaises(AttributeError, youtube_api.response, None) - self.assertRaises(AttributeError, youtube_api.response, []) - self.assertRaises(AttributeError, youtube_api.response, '') - self.assertRaises(AttributeError, youtube_api.response, '[]') - - response = mock.Mock(text='{}') - self.assertEqual(youtube_api.response(response), []) - - response = mock.Mock(text='{"data": []}') - self.assertEqual(youtube_api.response(response), []) - - json = """ - { - "kind": "youtube#searchListResponse", - "etag": "xmg9xJZuZD438sF4hb-VcBBREXc/YJQDcTBCDcaBvl-sRZJoXdvy1ME", - "nextPageToken": "CAUQAA", - "pageInfo": { - "totalResults": 1000000, - "resultsPerPage": 20 - }, - "items": [ - { - "kind": "youtube#searchResult", - "etag": "xmg9xJZuZD438sF4hb-VcBBREXc/IbLO64BMhbHIgWLwLw7MDYe7Hs4", - "id": { - "kind": "youtube#video", - "videoId": "DIVZCPfAOeM" - }, - "snippet": { - "publishedAt": "2015-05-29T22:41:04.000Z", - "channelId": "UCNodmx1ERIjKqvcJLtdzH5Q", - "title": "Title", - "description": "Description", - "thumbnails": { - "default": { - "url": "https://i.ytimg.com/vi/DIVZCPfAOeM/default.jpg" - }, - "medium": { - "url": "https://i.ytimg.com/vi/DIVZCPfAOeM/mqdefault.jpg" - }, - "high": { - "url": "https://i.ytimg.com/vi/DIVZCPfAOeM/hqdefault.jpg" - } - }, - "channelTitle": "MinecraftUniverse", - "liveBroadcastContent": "none" - } - } - ] - } - """ - response = mock.Mock(text=json) - results = youtube_api.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 1) - self.assertEqual(results[0]['title'], 'Title') - self.assertEqual(results[0]['url'], 'https://www.youtube.com/watch?v=DIVZCPfAOeM') - self.assertEqual(results[0]['content'], 'Description') - self.assertEqual(results[0]['thumbnail'], 'https://i.ytimg.com/vi/DIVZCPfAOeM/hqdefault.jpg') - self.assertTrue('DIVZCPfAOeM' in results[0]['embedded']) - - json = """ - { - "kind": "youtube#searchListResponse", - "etag": 
"xmg9xJZuZD438sF4hb-VcBBREXc/YJQDcTBCDcaBvl-sRZJoXdvy1ME", - "nextPageToken": "CAUQAA", - "pageInfo": { - "totalResults": 1000000, - "resultsPerPage": 20 - } - } - """ - response = mock.Mock(text=json) - results = youtube_api.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) - - json = """ - {"toto":{"entry":[] - } - } - """ - response = mock.Mock(text=json) - results = youtube_api.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) diff --git a/tests/unit/engines/test_youtube_noapi.py b/tests/unit/engines/test_youtube_noapi.py deleted file mode 100644 index cbf7b9bcd..000000000 --- a/tests/unit/engines/test_youtube_noapi.py +++ /dev/null @@ -1,124 +0,0 @@ -# -*- coding: utf-8 -*- -from collections import defaultdict -import mock -from searx.engines import youtube_noapi -from searx.testing import SearxTestCase - - -class TestYoutubeNoAPIEngine(SearxTestCase): - - def test_request(self): - query = 'test_query' - dicto = defaultdict(dict) - dicto['pageno'] = 0 - dicto['time_range'] = '' - params = youtube_noapi.request(query, dicto) - self.assertIn('url', params) - self.assertIn(query, params['url']) - self.assertIn('youtube.com', params['url']) - - def test_time_range_search(self): - dicto = defaultdict(dict) - query = 'test_query' - dicto['time_range'] = 'year' - params = youtube_noapi.request(query, dicto) - self.assertIn('&sp=EgIIBQ%253D%253D', params['url']) - - dicto['time_range'] = 'month' - params = youtube_noapi.request(query, dicto) - self.assertIn('&sp=EgIIBA%253D%253D', params['url']) - - dicto['time_range'] = 'week' - params = youtube_noapi.request(query, dicto) - self.assertIn('&sp=EgIIAw%253D%253D', params['url']) - - dicto['time_range'] = 'day' - params = youtube_noapi.request(query, dicto) - self.assertIn('&sp=EgIIAg%253D%253D', params['url']) - - def test_response(self): - self.assertRaises(AttributeError, youtube_noapi.response, None) - self.assertRaises(AttributeError, youtube_noapi.response, []) - self.assertRaises(AttributeError, youtube_noapi.response, '') - self.assertRaises(AttributeError, youtube_noapi.response, '[]') - - response = mock.Mock(text='<html></html>') - self.assertEqual(youtube_noapi.response(response), []) - - html = """ - <div></div> - <script> - window["ytInitialData"] = { - "contents": { - "twoColumnSearchResultsRenderer": { - "primaryContents": { - "sectionListRenderer": { - "contents": [ - { - "itemSectionRenderer": { - "contents": [ - { - "videoRenderer": { - "videoId": "DIVZCPfAOeM", - "title": { - "simpleText": "Title" - }, - "descriptionSnippet": { - "runs": [ - { - "text": "Des" - }, - { - "text": "cription" - } - ] - } - } - }, - { - "videoRenderer": { - "videoId": "9C_HReR_McQ", - "title": { - "simpleText": "Title" - }, - "descriptionSnippet": { - "simpleText": "Description" - } - } - } - ] - } - } - ] - } - } - } - } - }; - </script> - """ - response = mock.Mock(text=html) - results = youtube_noapi.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 2) - self.assertEqual(results[0]['title'], 'Title') - self.assertEqual(results[0]['url'], 'https://www.youtube.com/watch?v=DIVZCPfAOeM') - self.assertEqual(results[0]['content'], 'Description') - self.assertEqual(results[0]['thumbnail'], 'https://i.ytimg.com/vi/DIVZCPfAOeM/hqdefault.jpg') - self.assertTrue('DIVZCPfAOeM' in results[0]['embedded']) - self.assertEqual(results[1]['title'], 'Title') - self.assertEqual(results[1]['url'], 
'https://www.youtube.com/watch?v=9C_HReR_McQ') - self.assertEqual(results[1]['content'], 'Description') - self.assertEqual(results[1]['thumbnail'], 'https://i.ytimg.com/vi/9C_HReR_McQ/hqdefault.jpg') - self.assertTrue('9C_HReR_McQ' in results[1]['embedded']) - - html = """ - <ol id="item-section-063864" class="item-section"> - <li> - </li> - </ol> - """ - response = mock.Mock(text=html) - results = youtube_noapi.response(response) - self.assertEqual(type(results), list) - self.assertEqual(len(results), 0) diff --git a/tests/unit/engines/unsplash_fixture.json b/tests/unit/engines/unsplash_fixture.json deleted file mode 100644 index 4c8db2a2c..000000000 --- a/tests/unit/engines/unsplash_fixture.json +++ /dev/null @@ -1,241 +0,0 @@ -{ - "total": 2, - "total_pages": 1, - "results": [ - { - "id": "FY8d721UO_4", - "created_at": "2018-04-12T14:20:35-04:00", - "updated_at": "2018-08-28T20:58:33-04:00", - "width": 3891, - "height": 5829, - "color": "#152C33", - "description": "low angle photography of swimming penguin", - "urls": { - "raw": "https://images.unsplash.com/photo-1523557148507-1b77641c7e7c?ixlib=rb-0.3.5&ixid=eyJhcHBfaWQiOjEyMDd9&s=095c5fc319c5a77c705f49ad63e0f195", - "full": "https://images.unsplash.com/photo-1523557148507-1b77641c7e7c?ixlib=rb-0.3.5&q=85&fm=jpg&crop=entropy&cs=srgb&ixid=eyJhcHBfaWQiOjEyMDd9&s=74be977849c173d6929636d491a760c3", - "regular": "https://images.unsplash.com/photo-1523557148507-1b77641c7e7c?ixlib=rb-0.3.5&q=80&fm=jpg&crop=entropy&cs=tinysrgb&w=1080&fit=max&ixid=eyJhcHBfaWQiOjEyMDd9&s=ad65df26970bd010085f0ca25434de33", - "small": "https://images.unsplash.com/photo-1523557148507-1b77641c7e7c?ixlib=rb-0.3.5&q=80&fm=jpg&crop=entropy&cs=tinysrgb&w=400&fit=max&ixid=eyJhcHBfaWQiOjEyMDd9&s=5d2edfd073c31eb8ee7b305222bdc5a2", - "thumb": "https://images.unsplash.com/photo-1523557148507-1b77641c7e7c?ixlib=rb-0.3.5&q=80&fm=jpg&crop=entropy&cs=tinysrgb&w=200&fit=max&ixid=eyJhcHBfaWQiOjEyMDd9&s=a9b9e56e63efc6f4611a87ce7e9a48f8" - }, - "links": { - "self": "https://api.unsplash.com/photos/FY8d721UO_4", - "html": "https://unsplash.com/photos/FY8d721UO_4", - "download": "https://unsplash.com/photos/FY8d721UO_4/download", - "download_location": "https://api.unsplash.com/photos/FY8d721UO_4/download" - }, - "categories": [], - "sponsored": false, - "likes": 31, - "liked_by_user": false, - "current_user_collections": [], - "slug": null, - "user": { - "id": "N4gE4mrG8lE", - "updated_at": "2018-10-03T02:51:19-04:00", - "username": "gaspanik", - "name": "Masaaki Komori", - "first_name": "Masaaki", - "last_name": "Komori", - "twitter_username": "cipher", - "portfolio_url": "https://www.instagram.com/cipher/", - "bio": null, - "location": "Tokyo, JAPAN", - "links": { - "self": "https://api.unsplash.com/users/gaspanik", - "html": "https://unsplash.com/@gaspanik", - "photos": "https://api.unsplash.com/users/gaspanik/photos", - "likes": "https://api.unsplash.com/users/gaspanik/likes", - "portfolio": "https://api.unsplash.com/users/gaspanik/portfolio", - "following": "https://api.unsplash.com/users/gaspanik/following", - "followers": "https://api.unsplash.com/users/gaspanik/followers" - }, - "profile_image": { - "small": "https://images.unsplash.com/profile-fb-1502270358-e7c86c1011ce.jpg?ixlib=rb-0.3.5&q=80&fm=jpg&crop=faces&cs=tinysrgb&fit=crop&h=32&w=32&s=9fe12f6d177bd6fdbd56d233a80c01a3", - "medium": "https://images.unsplash.com/profile-fb-1502270358-e7c86c1011ce.jpg?ixlib=rb-0.3.5&q=80&fm=jpg&crop=faces&cs=tinysrgb&fit=crop&h=64&w=64&s=6ad7d156b62e438ae9dc794cba712fff", - "large": 
"https://images.unsplash.com/profile-fb-1502270358-e7c86c1011ce.jpg?ixlib=rb-0.3.5&q=80&fm=jpg&crop=faces&cs=tinysrgb&fit=crop&h=128&w=128&s=13a08a2e72e7d11632410e92bd3a9406" - }, - "instagram_username": "cipher", - "total_collections": 0, - "total_likes": 406, - "total_photos": 196 - }, - "tags": [ - { - "title": "animal" - }, - { - "title": "water" - }, - { - "title": "swim" - }, - { - "title": "aquarium" - }, - { - "title": "wallpaper" - }, - { - "title": "blue" - }, - { - "title": "sealife" - }, - { - "title": "wildlife" - }, - { - "title": "bird" - }, - { - "title": "deep sea" - }, - { - "title": "fish" - }, - { - "title": "water life" - } - ], - "photo_tags": [ - { - "title": "animal" - }, - { - "title": "water" - }, - { - "title": "swim" - }, - { - "title": "aquarium" - }, - { - "title": "wallpaper" - } - ] - }, - { - "id": "ayKyc01xLWA", - "created_at": "2018-02-16T23:14:31-05:00", - "updated_at": "2018-08-28T20:48:27-04:00", - "width": 4928, - "height": 3264, - "color": "#161618", - "description": "black and white penguins on ice field", - "urls": { - "raw": "https://images.unsplash.com/photo-1518840801558-9770b4a34eeb?ixlib=rb-0.3.5&ixid=eyJhcHBfaWQiOjEyMDd9&s=4e107a2bc49ab561ba6272eea2ec725d", - "full": "https://images.unsplash.com/photo-1518840801558-9770b4a34eeb?ixlib=rb-0.3.5&q=85&fm=jpg&crop=entropy&cs=srgb&ixid=eyJhcHBfaWQiOjEyMDd9&s=f9b1e4d4572ab44efb2cf3d601d2b4d9", - "regular": "https://images.unsplash.com/photo-1518840801558-9770b4a34eeb?ixlib=rb-0.3.5&q=80&fm=jpg&crop=entropy&cs=tinysrgb&w=1080&fit=max&ixid=eyJhcHBfaWQiOjEyMDd9&s=4430cedb63841f1fe055d5005316cc96", - "small": "https://images.unsplash.com/photo-1518840801558-9770b4a34eeb?ixlib=rb-0.3.5&q=80&fm=jpg&crop=entropy&cs=tinysrgb&w=400&fit=max&ixid=eyJhcHBfaWQiOjEyMDd9&s=ee73c7af22ce445d408e240821ce07af", - "thumb": "https://images.unsplash.com/photo-1518840801558-9770b4a34eeb?ixlib=rb-0.3.5&q=80&fm=jpg&crop=entropy&cs=tinysrgb&w=200&fit=max&ixid=eyJhcHBfaWQiOjEyMDd9&s=934302390d383cad8c571905e3a80bac" - }, - "links": { - "self": "https://api.unsplash.com/photos/ayKyc01xLWA", - "html": "https://unsplash.com/photos/ayKyc01xLWA", - "download": "https://unsplash.com/photos/ayKyc01xLWA/download", - "download_location": "https://api.unsplash.com/photos/ayKyc01xLWA/download" - }, - "categories": [], - "sponsored": false, - "likes": 37, - "liked_by_user": false, - "current_user_collections": [], - "slug": null, - "user": { - "id": "tRb_KGw60Xk", - "updated_at": "2018-09-20T11:51:54-04:00", - "username": "ghost_cat", - "name": "Danielle Barnes", - "first_name": "Danielle", - "last_name": "Barnes", - "twitter_username": null, - "portfolio_url": null, - "bio": null, - "location": null, - "links": { - "self": "https://api.unsplash.com/users/ghost_cat", - "html": "https://unsplash.com/@ghost_cat", - "photos": "https://api.unsplash.com/users/ghost_cat/photos", - "likes": "https://api.unsplash.com/users/ghost_cat/likes", - "portfolio": "https://api.unsplash.com/users/ghost_cat/portfolio", - "following": "https://api.unsplash.com/users/ghost_cat/following", - "followers": "https://api.unsplash.com/users/ghost_cat/followers" - }, - "profile_image": { - "small": "https://images.unsplash.com/profile-fb-1508491082-ae77f53e9ac3.jpg?ixlib=rb-0.3.5&q=80&fm=jpg&crop=faces&cs=tinysrgb&fit=crop&h=32&w=32&s=751bf6a557763648d52ffd7e60e79436", - "medium": "https://images.unsplash.com/profile-fb-1508491082-ae77f53e9ac3.jpg?ixlib=rb-0.3.5&q=80&fm=jpg&crop=faces&cs=tinysrgb&fit=crop&h=64&w=64&s=e46cd1c8713035f045130e1b093b981e", - "large": 
"https://images.unsplash.com/profile-fb-1508491082-ae77f53e9ac3.jpg?ixlib=rb-0.3.5&q=80&fm=jpg&crop=faces&cs=tinysrgb&fit=crop&h=128&w=128&s=352eabcf107c3ce95fe51a18485f116b" - }, - "instagram_username": null, - "total_collections": 0, - "total_likes": 0, - "total_photos": 21 - }, - "tags": [ - { - "title": "ice" - }, - { - "title": "bird" - }, - { - "title": "ice field" - }, - { - "title": "iceberg" - }, - { - "title": "snow" - }, - { - "title": "frozen" - }, - { - "title": "animal" - }, - { - "title": "wildlife" - }, - { - "title": "wild" - }, - { - "title": "antarctica" - }, - { - "title": "sunshine" - }, - { - "title": "daylight" - }, - { - "title": "wilderness" - }, - { - "title": "south pole" - }, - { - "title": "flock" - } - ], - "photo_tags": [ - { - "title": "ice" - }, - { - "title": "bird" - }, - { - "title": "ice field" - }, - { - "title": "iceberg" - }, - { - "title": "snow" - } - ] - } - ] -} \ No newline at end of file From 34ad3d6b34017523a9502f86b92c17fe389918eb Mon Sep 17 00:00:00 2001 From: Adam Tauber <asciimoo@gmail.com> Date: Sat, 21 Dec 2019 21:25:50 +0100 Subject: [PATCH 08/14] [enh] display error message if gigablast extra param expired --- searx/engines/gigablast.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/searx/engines/gigablast.py b/searx/engines/gigablast.py index 5af593e36..2bb29a9fe 100644 --- a/searx/engines/gigablast.py +++ b/searx/engines/gigablast.py @@ -99,7 +99,7 @@ def response(resp): response_json = loads(resp.text) except: parse_extra_param(resp.text) - return results + raise Exception('extra param expired, please reload') for result in response_json['results']: # append result From c18048e0454f4e3dc75c778940903091fbeae06a Mon Sep 17 00:00:00 2001 From: Marc Abonce Seguin <marc-abonce@mailbox.org> Date: Sun, 25 Aug 2019 22:23:37 -0700 Subject: [PATCH 09/14] exclude disambiguation pages from wikipedia infobox --- searx/engines/wikipedia.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/searx/engines/wikipedia.py b/searx/engines/wikipedia.py index 690da72fe..44dea56fa 100644 --- a/searx/engines/wikipedia.py +++ b/searx/engines/wikipedia.py @@ -21,7 +21,8 @@ search_url = base_url + u'w/api.php?'\ 'action=query'\ '&format=json'\ '&{query}'\ - '&prop=extracts|pageimages'\ + '&prop=extracts|pageimages|pageprops'\ + '&ppprop=disambiguation'\ '&exintro'\ '&explaintext'\ '&pithumbsize=300'\ @@ -87,7 +88,7 @@ def response(resp): if int(article_id) > 0: break - if int(article_id) < 0: + if int(article_id) < 0 or 'disambiguation' in page.get('pageprops', {}): return [] title = page.get('title') From 5706c12fba98e169c7c76a4d3c29aabf48242d63 Mon Sep 17 00:00:00 2001 From: Marc Abonce Seguin <marc-abonce@mailbox.org> Date: Sun, 25 Aug 2019 22:47:23 -0700 Subject: [PATCH 10/14] remove empty parenthesis in wikipedia's summary They're usually IPA pronunciations which are removed by the API. 
--- searx/engines/wikipedia.py | 1 + 1 file changed, 1 insertion(+) diff --git a/searx/engines/wikipedia.py b/searx/engines/wikipedia.py index 44dea56fa..a216ba886 100644 --- a/searx/engines/wikipedia.py +++ b/searx/engines/wikipedia.py @@ -100,6 +100,7 @@ def response(resp): extract = page.get('extract') summary = extract_first_paragraph(extract, title, image) + summary = summary.replace('() ', '') # link to wikipedia article wikipedia_link = base_url.format(language=url_lang(resp.search_params['language'])) \ From 495ae59b31b6aafae484ecdfb6aece3a84f1ede7 Mon Sep 17 00:00:00 2001 From: Marc Abonce Seguin <marc-abonce@mailbox.org> Date: Sun, 25 Aug 2019 23:01:30 -0700 Subject: [PATCH 11/14] hide suggestions box if empty This bug happens only in python3 because map returns an iterator. --- searx/webapp.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/searx/webapp.py b/searx/webapp.py index 7cf4106d3..212c874c9 100644 --- a/searx/webapp.py +++ b/searx/webapp.py @@ -606,11 +606,11 @@ def index(): # HTML output format # suggestions: use RawTextQuery to get the suggestion URLs with the same bang - suggestion_urls = map(lambda suggestion: { - 'url': raw_text_query.changeSearchQuery(suggestion).getFullQuery(), - 'title': suggestion - }, - result_container.suggestions) + suggestion_urls = list(map(lambda suggestion: { + 'url': raw_text_query.changeSearchQuery(suggestion).getFullQuery(), + 'title': suggestion + }, + result_container.suggestions)) correction_urls = list(map(lambda correction: { 'url': raw_text_query.changeSearchQuery(correction).getFullQuery(), From ee6781d777f3a95f6e1c23499ecbc7257d5e35ec Mon Sep 17 00:00:00 2001 From: Vipul <finn02@disroot.org> Date: Sat, 14 Sep 2019 12:37:39 +0000 Subject: [PATCH 12/14] [Fix] Libgen engine Libgen has switched to new domain (i.e https://libgen.is) with TLS support and older domain (i.e. http://libgen.io) is no longer accessible. See, https://en.wikipedia.org/wiki/Library_Genesis, for more information. 
Resolves: #1693 --- searx/settings.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/searx/settings.yml b/searx/settings.yml index cf2b13e08..539049ea0 100644 --- a/searx/settings.yml +++ b/searx/settings.yml @@ -407,7 +407,7 @@ engines: - name : library genesis engine : xpath - search_url : http://libgen.io/search.php?req={query} + search_url : https://libgen.is/search.php?req={query} url_xpath : //a[contains(@href,"bookfi.net")]/@href title_xpath : //a[contains(@href,"book/")]/text()[1] content_xpath : //td/a[1][contains(@href,"=author")]/text() From f407dd8ef4e3f6c82bef31f678139d6db2a4d810 Mon Sep 17 00:00:00 2001 From: Vipul <finn02@disroot.org> Date: Sat, 14 Sep 2019 12:45:02 +0000 Subject: [PATCH 13/14] Switch to https for some domains --- searx/settings.yml | 8 ++++---- searx/settings_robot.yml | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/searx/settings.yml b/searx/settings.yml index 539049ea0..d9a1f45f0 100644 --- a/searx/settings.yml +++ b/searx/settings.yml @@ -463,7 +463,7 @@ engines: - name : openairedatasets engine : json_engine paging : True - search_url : http://api.openaire.eu/search/datasets?format=json&page={pageno}&size=10&title={query} + search_url : https://api.openaire.eu/search/datasets?format=json&page={pageno}&size=10&title={query} results_query : response/results/result url_query : metadata/oaf:entity/oaf:result/children/instance/webresource/url/$ title_query : metadata/oaf:entity/oaf:result/title/$ @@ -475,7 +475,7 @@ engines: - name : openairepublications engine : json_engine paging : True - search_url : http://api.openaire.eu/search/publications?format=json&page={pageno}&size=10&title={query} + search_url : https://api.openaire.eu/search/publications?format=json&page={pageno}&size=10&title={query} results_query : response/results/result url_query : metadata/oaf:entity/oaf:result/children/instance/webresource/url/$ title_query : metadata/oaf:entity/oaf:result/title/$ @@ -806,7 +806,7 @@ locales: doi_resolvers : oadoi.org : 'https://oadoi.org/' doi.org : 'https://doi.org/' - doai.io : 'http://doai.io/' - sci-hub.tw : 'http://sci-hub.tw/' + doai.io : 'https://doai.io/' + sci-hub.tw : 'https://sci-hub.tw/' default_doi_resolver : 'oadoi.org' diff --git a/searx/settings_robot.yml b/searx/settings_robot.yml index 635809041..25f229e56 100644 --- a/searx/settings_robot.yml +++ b/searx/settings_robot.yml @@ -43,7 +43,7 @@ locales: doi_resolvers : oadoi.org : 'https://oadoi.org/' doi.org : 'https://doi.org/' - doai.io : 'http://doai.io/' - sci-hub.tw : 'http://sci-hub.tw/' + doai.io : 'https://doai.io/' + sci-hub.tw : 'https://sci-hub.tw/' default_doi_resolver : 'oadoi.org' From 8bea927bb02e02754834d6f9692942f621bd21c5 Mon Sep 17 00:00:00 2001 From: Vipul <finn02@disroot.org> Date: Sun, 22 Dec 2019 01:21:22 +0000 Subject: [PATCH 14/14] [Fix] oscar: no HTML escaping prior to output When results are fetched from any programming related documentation site (like git-scm.com, docs.python.org etc), content in Info box is shown as raw HTML code. This change addresses the issue by using "safe" filter feature provided by Django. See, - https://docs.djangoproject.com/en/3.0/ref/templates/builtins/#safe - Searx issue tracker (issue #1649), for more information. 
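A minimal sketch of the difference, using Jinja2 directly (searx renders its
templates with Jinja2 via Flask, where the safe filter behaves as described
above; the sample infobox content is made up):

    from jinja2 import Environment

    env = Environment(autoescape=True)
    content = 'Creates an empty <code>.git</code> repository'

    # Without the filter the markup is escaped and shows up literally:
    print(env.from_string('{{ content }}').render(content=content))
    # -> Creates an empty &lt;code&gt;.git&lt;/code&gt; repository

    # With the safe filter the markup is rendered as HTML instead:
    print(env.from_string('{{ content | safe }}').render(content=content))
    # -> Creates an empty <code>.git</code> repository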
Resolves: #1649 --- searx/templates/oscar/infobox.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/searx/templates/oscar/infobox.html b/searx/templates/oscar/infobox.html index 9f5e58d2b..9802f11e2 100644 --- a/searx/templates/oscar/infobox.html +++ b/searx/templates/oscar/infobox.html @@ -6,7 +6,7 @@ <div class="panel-body"> {% if infobox.img_src %}<img class="img-responsive center-block infobox_part" src="{{ image_proxify(infobox.img_src) }}" alt="{{ infobox.infobox }}" />{% endif %} - {% if infobox.content %}<bdi><p class="infobox_part">{{ infobox.content }}</p></bdi>{% endif %} + {% if infobox.content %}<bdi><p class="infobox_part">{{ infobox.content | safe }}</p></bdi>{% endif %} {% if infobox.attributes -%} <table class="table table-striped infobox_part">