mirror of https://github.com/searxng/searxng.git synced 2024-11-17 18:00:12 +01:00

[mod] change settings file structure according to #314

Adam Tauber 2015-08-02 19:38:27 +02:00
parent d0830d4edf
commit 1fcf066a81
7 changed files with 46 additions and 34 deletions
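
For orientation, the YAML settings hunk further down splits the previously flat server block (plus the top-level outgoing_proxies / source_ips entries) into four sections. Summarised roughly from that hunk, the new layout is:

    general:
        debug : False              # moved from server
    server:
        port : 8888
        bind_address : "127.0.0.1"
        secret_key : "ultrasecretkey"
        base_url : False
        image_proxy : False
    ui:
        themes_path : ""           # moved from server
        default_theme : oscar      # moved from server
        default_locale : ""        # moved from server
    outgoing:                      # communication with search engines
        request_timeout : 2.0      # moved from server
        useragent_suffix : ""      # moved from server
        # proxies :                # was the top-level outgoing_proxies
        # source_ips :             # was the top-level source_ips

Code that read settings['server']['request_timeout'], settings['server']['debug'] and so on is updated to the new sections in the hunks below.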

View File

@@ -29,7 +29,7 @@ from searx.poolrequests import get as http_get
 def get(*args, **kwargs):
     if 'timeout' not in kwargs:
-        kwargs['timeout'] = settings['server']['request_timeout']
+        kwargs['timeout'] = settings['outgoing']['request_timeout']
     return http_get(*args, **kwargs)

View File

@@ -75,7 +75,7 @@ def load_engine(engine_data):
         engine.safesearch = False
     if not hasattr(engine, 'timeout'):
-        engine.timeout = settings['server']['request_timeout']
+        engine.timeout = settings['outgoing']['request_timeout']
     if not hasattr(engine, 'shortcut'):
         engine.shortcut = ''

View File

@@ -39,11 +39,11 @@ class HTTPAdapterWithConnParams(requests.adapters.HTTPAdapter):
                                        block=self._pool_block, **self._conn_params)
-if settings.get('source_ips'):
+if settings['outgoing'].get('source_ips'):
     http_adapters = cycle(HTTPAdapterWithConnParams(pool_connections=100, source_address=(source_ip, 0))
-                          for source_ip in settings['source_ips'])
+                          for source_ip in settings['outgoing']['source_ips'])
     https_adapters = cycle(HTTPAdapterWithConnParams(pool_connections=100, source_address=(source_ip, 0))
-                          for source_ip in settings['source_ips'])
+                          for source_ip in settings['outgoing']['source_ips'])
 else:
     http_adapters = cycle((HTTPAdapterWithConnParams(pool_connections=100), ))
     https_adapters = cycle((HTTPAdapterWithConnParams(pool_connections=100), ))

@@ -69,7 +69,7 @@ def request(method, url, **kwargs):
     """same as requests/requests/api.py request(...) except it use SessionSinglePool and force proxies"""
     global settings
     session = SessionSinglePool()
-    kwargs['proxies'] = settings.get('outgoing_proxies', None)
+    kwargs['proxies'] = settings['outgoing'].get('proxies', None)
     response = session.request(method=method, url=url, **kwargs)
     session.close()
     return response

View File

@@ -1,23 +1,27 @@
+general:
+    debug : False # Debug mode, only for development
 server:
     port : 8888
     bind_address : "127.0.0.1" # address to listen on
     secret_key : "ultrasecretkey" # change this!
-    debug : False # Debug mode, only for development
-    request_timeout : 2.0 # seconds
     base_url : False # Set custom base_url. Possible values: False or "https://your.custom.host/location/"
+    image_proxy : False # Proxying image results through searx
+ui:
     themes_path : "" # Custom ui themes path - leave it blank if you didn't change
     default_theme : oscar # ui theme
-    useragent_suffix : "" # suffix of searx_useragent, could contain informations like an email address to the administrator
-    image_proxy : False # Proxying image results through searx
     default_locale : "" # Default interface locale - leave blank to detect from browser information or use codes from the 'locales' config section
+outgoing: # communication with search engines
+    request_timeout : 2.0 # seconds
+    useragent_suffix : "" # suffix of searx_useragent, could contain informations like an email address to the administrator
 # uncomment below section if you want to use a proxy
 # see http://docs.python-requests.org/en/latest/user/advanced/#proxies
 # SOCKS proxies are not supported : see https://github.com/kennethreitz/requests/pull/478
-#outgoing_proxies :
+# proxies :
 # http : http://127.0.0.1:8080
 # https: http://127.0.0.1:8080
 # uncomment below section only if you have more than one network interface
 # which can be the source of outgoing search requests
 # source_ips:
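
The proxy and source-IP examples stay commented out. Judging by the new key names above and the HTTPAdapterWithConnParams / request() hunks earlier, an instance that actually needs them would presumably nest both under outgoing, roughly like this (the addresses are placeholders):

    outgoing:
        request_timeout : 2.0 # seconds
        useragent_suffix : ""
        proxies :
            http : http://127.0.0.1:8080
            https: http://127.0.0.1:8080
        source_ips:
            - 192.0.2.1
            - 192.0.2.2

source_ips is iterated as a list (one HTTP adapter per address), and proxies is passed straight through to requests, so the http/https mapping is the format requests expects.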

View File

@@ -1,14 +1,21 @@
+general:
+    debug : False
 server:
     port : 11111
     bind_address : 127.0.0.1
     secret_key : "ultrasecretkey" # change this!
-    debug : False
-    request_timeout : 3.0 # seconds
     base_url : False
+    image_proxy : False
+ui:
     themes_path : ""
     default_theme : default
-    https_rewrite : True
-    image_proxy : False
+    default_locale : ""
+outgoing:
+    request_timeout : 1.0 # seconds
+    useragent_suffix : ""
 engines:
   - name : general_dummy

View File

@@ -26,6 +26,7 @@ ua_versions = ('33.0',
 ua_os = ('Windows NT 6.3; WOW64',
          'X11; Linux x86_64',
          'X11; Linux x86')
 ua = "Mozilla/5.0 ({os}; rv:{version}) Gecko/20100101 Firefox/{version}"
 blocked_tags = ('script',

@@ -40,7 +41,7 @@ def gen_useragent():
 def searx_useragent():
     return 'searx/{searx_version} {suffix}'.format(
         searx_version=VERSION_STRING,
-        suffix=settings['server'].get('useragent_suffix', ''))
+        suffix=settings['outgoing'].get('useragent_suffix', ''))
 def highlight_content(content, query):

View File

@@ -77,11 +77,11 @@ except ImportError:
 static_path, templates_path, themes =\
-    get_themes(settings['themes_path']
-               if settings.get('themes_path')
+    get_themes(settings['ui']['themes_path']
+               if settings['ui']['themes_path']
                else searx_dir)
-default_theme = settings['server'].get('default_theme', 'default')
+default_theme = settings['ui']['default_theme']
 static_files = get_static_files(searx_dir)

@@ -121,15 +121,15 @@ _category_names = (gettext('files'),
                    gettext('news'),
                    gettext('map'))
-outgoing_proxies = settings.get('outgoing_proxies', None)
+outgoing_proxies = settings['outgoing'].get('proxies', None)
 @babel.localeselector
 def get_locale():
     locale = request.accept_languages.best_match(settings['locales'].keys())
-    if settings['server'].get('default_locale'):
-        locale = settings['server']['default_locale']
+    if settings['ui'].get('default_locale'):
+        locale = settings['ui']['default_locale']
     if request.cookies.get('locale', '') in settings['locales']:
         locale = request.cookies.get('locale', '')

@@ -640,12 +640,12 @@ def preferences():
         stats[e.name] = {'time': None,
                          'warn_timeout': False,
                          'warn_time': False}
-        if e.timeout > settings['server']['request_timeout']:
+        if e.timeout > settings['outgoing']['request_timeout']:
             stats[e.name]['warn_timeout'] = True
     for engine_stat in get_engines_stats()[0][1]:
         stats[engine_stat.get('name')]['time'] = round(engine_stat.get('avg'), 3)
-        if engine_stat.get('avg') > settings['server']['request_timeout']:
+        if engine_stat.get('avg') > settings['outgoing']['request_timeout']:
             stats[engine_stat.get('name')]['warn_time'] = True
     # end of stats

@@ -683,7 +683,7 @@ def image_proxy():
     resp = requests.get(url,
                         stream=True,
-                        timeout=settings['server'].get('request_timeout', 2),
+                        timeout=settings['outgoing']['request_timeout'],
                         headers=headers,
                         proxies=outgoing_proxies)

@@ -775,8 +775,8 @@ def clear_cookies():
 def run():
     app.run(
-        debug=settings['server']['debug'],
-        use_debugger=settings['server']['debug'],
+        debug=settings['general']['debug'],
+        use_debugger=settings['general']['debug'],
         port=settings['server']['port'],
         host=settings['server']['bind_address']
     )