
[enh] suspend engines after error

The duration is based on the number of continuous errors, but is capped at one minute
Adam Tauber 2016-02-20 00:21:56 +01:00
parent 17b0c9f74a
commit 10947536aa
2 changed files with 17 additions and 4 deletions
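
In other words, each consecutive failure pushes the engine's suspension end a little further into the future, one second per error, but never more than sixty seconds ahead. A minimal sketch of that rule (the helper name below is hypothetical, not part of this commit):

    from time import time

    def suspend_until(continuous_errors):
        # one second of suspension per consecutive error, capped at one minute,
        # counted from the current time
        return time() + min(60, continuous_errors)

So after three straight failures an engine sits out roughly three seconds, while a chronically failing engine is still retried at least once a minute.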


@@ -40,7 +40,9 @@ engine_default_args = {'paging': False,
                        'safesearch': False,
                        'timeout': settings['outgoing']['request_timeout'],
                        'shortcut': '-',
-                       'disabled': False}
+                       'disabled': False,
+                       'suspend_end_time': 0,
+                       'continuous_errors': 0}

 def load_module(filename):

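The two new default arguments mean every engine module starts out unsuspended (suspend_end_time = 0) with a clean error counter (continuous_errors = 0). Roughly, the loader copies any default the engine does not define onto the module itself, along these lines (a simplified sketch assuming engine_default_args from the hunk above is in scope; not the commit's own code):

    def apply_default_args(engine_module):
        # sketch: give the engine module any default it does not already set,
        # so suspend_end_time and continuous_errors exist on every engine
        for arg_name, arg_value in engine_default_args.items():
            if not hasattr(engine_module, arg_name):
                setattr(engine_module, arg_name, arg_value)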

@@ -34,16 +34,23 @@ number_of_searches = 0

 def search_request_wrapper(fn, url, engine_name, **kwargs):
+    ret = None
+    engine = engines[engine_name]
     try:
-        return fn(url, **kwargs)
+        ret = fn(url, **kwargs)
+        with threading.RLock():
+            engine.continuous_errors = 0
+            engine.suspend_end_time = 0
     except:
         # increase errors stats
         with threading.RLock():
-            engines[engine_name].stats['errors'] += 1
+            engine.stats['errors'] += 1
+            engine.continuous_errors += 1
+            engine.suspend_end_time = time() + min(60, engine.continuous_errors)

         # print engine name and specific error message
         logger.exception('engine crash: {0}'.format(engine_name))
-        return
+    return ret


 def threaded_requests(requests):
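The wrapper above gives each engine a simple failure lifecycle: every exception bumps continuous_errors and moves suspend_end_time further out (capped at sixty seconds), while a single successful response resets both fields and lifts the suspension. A toy simulation of that lifecycle with a stand-in object (illustrative only; real searx engines are modules):

    from time import time

    class FakeEngine(object):
        # stand-in exposing just the two new attributes
        continuous_errors = 0
        suspend_end_time = 0

    engine = FakeEngine()
    for _ in range(3):      # three requests fail in a row
        engine.continuous_errors += 1
        engine.suspend_end_time = time() + min(60, engine.continuous_errors)
    # suspended for roughly three seconds at this point; one successful
    # request resets both fields and the engine is usable again
    engine.continuous_errors = 0
    engine.suspend_end_time = 0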
@@ -241,6 +248,10 @@ class Search(object):
                                 for engine in categories[categ]
                                 if (engine.name, categ) not in self.blocked_engines)

+        # remove suspended engines
+        self.engines = [e for e in self.engines
+                        if engines[e['name']].suspend_end_time <= time()]
+
     # do search-request
     def search(self, request):
         global number_of_searches
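The list comprehension above is what actually takes a suspended engine out of rotation: when a search is assembled, any selected engine whose suspend_end_time still lies in the future is dropped for that request and picked up again automatically once the timestamp has passed. A toy illustration with stand-in objects (names chosen for the example, not searx's API):

    from time import time

    class StubEngine(object):
        def __init__(self, suspend_end_time):
            self.suspend_end_time = suspend_end_time

    engines = {'google': StubEngine(0),            # healthy, usable
               'bing': StubEngine(time() + 30)}    # suspended ~30 more seconds

    selected = [{'name': 'google'}, {'name': 'bing'}]
    selected = [e for e in selected
                if engines[e['name']].suspend_end_time <= time()]
    # only {'name': 'google'} remains until bing's suspension expires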