1
0
mirror of https://github.com/searxng/searxng.git synced 2024-11-04 20:30:11 +01:00
searxng/searx/webapp.py

684 lines
22 KiB
Python
Raw Normal View History

2013-10-14 23:09:13 +02:00
#!/usr/bin/env python
2013-10-15 00:33:18 +02:00
'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.
(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''
2014-02-14 16:16:20 +01:00
# When executed directly (not imported), make the parent directory
# importable so `import searx` resolves from a source checkout.
if __name__ == '__main__':
    from sys import path
    from os.path import realpath, dirname
    path.append(realpath(dirname(realpath(__file__)) + '/../'))
2014-02-14 16:16:20 +01:00
2014-01-21 21:28:54 +01:00
import json
import cStringIO
import os
import hashlib
2014-01-21 21:28:54 +01:00
2014-03-14 09:55:04 +01:00
from datetime import datetime, timedelta
2015-01-16 16:26:15 +01:00
from urllib import urlencode
from werkzeug.contrib.fixers import ProxyFix
2014-02-05 20:24:31 +01:00
from flask import (
Flask, request, render_template, url_for, Response, make_response,
redirect, send_from_directory
)
2014-03-15 20:20:41 +01:00
from flask.ext.babel import Babel, gettext, format_date
2014-01-30 19:02:23 +01:00
from searx import settings, searx_dir
from searx.poolrequests import get as http_get
2014-02-05 20:24:31 +01:00
from searx.engines import (
2014-07-07 13:59:27 +02:00
categories, engines, get_engines_stats, engine_shortcuts
2014-02-05 20:24:31 +01:00
)
2014-04-25 01:46:40 +02:00
from searx.utils import (
2015-01-01 17:48:12 +01:00
UnicodeWriter, highlight_content, html_to_text, get_themes,
2015-01-29 19:44:52 +01:00
get_static_files, get_result_templates, gen_useragent, dict_subset,
prettify_url, get_blocked_engines
2014-04-25 01:46:40 +02:00
)
2014-11-18 11:37:42 +01:00
from searx.version import VERSION_STRING
2014-01-31 04:35:23 +01:00
from searx.languages import language_codes
from searx.https_rewrite import https_url_rewrite
from searx.search import Search
from searx.query import Query
from searx.autocomplete import searx_bang, backends as autocomplete_backends
2015-01-09 04:13:05 +01:00
from searx import logger
2015-01-15 18:55:25 +01:00
# pygments is a hard dependency: bail out at import time with a clear
# log message instead of an opaque ImportError traceback.
try:
    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import HtmlFormatter
except ImportError:  # was a bare `except:` — only import failure is expected here
    logger.critical("cannot import dependency: pygments")
    from sys import exit
    exit(1)
2013-12-01 23:52:49 +01:00
2013-10-14 23:09:13 +02:00
2015-01-09 04:13:05 +01:00
logger = logger.getChild('webapp')

# Resolve theme directories: an explicit `themes_path` setting wins,
# otherwise themes ship inside the searx package directory.
static_path, templates_path, themes =\
    get_themes(settings['themes_path']
               if settings.get('themes_path')
               else searx_dir)

default_theme = settings['server'].get('default_theme', 'default')

static_files = get_static_files(searx_dir)

result_templates = get_result_templates(searx_dir)

app = Flask(
    __name__,
    static_folder=static_path,
    template_folder=templates_path
)

app.secret_key = settings['server']['secret_key']

babel = Babel(app)

# Locales rendered right-to-left; render() sets kwargs['rtl'] for these.
# NOTE: fixed a garbled literal — the list previously read `'pnb'', ''ps'`.
rtl_locales = ['ar', 'arc', 'bcc', 'bqi', 'ckb', 'dv', 'fa', 'glk', 'he',
               'ku', 'mzn', 'pnb', 'ps', 'sd', 'ug', 'ur', 'yi']

# Per-theme list of favicon filenames, indexed in parallel with `themes`.
global_favicons = []
for indice, theme in enumerate(themes):
    global_favicons.append([])
    theme_img_path = searx_dir + "/static/themes/" + theme + "/img/icons/"
    for (dirpath, dirnames, filenames) in os.walk(theme_img_path):
        global_favicons[indice].extend(filenames)

cookie_max_age = 60 * 60 * 24 * 365 * 5  # 5 years

# Referenced only so the category names are picked up for translation.
_category_names = (gettext('files'),
                   gettext('general'),
                   gettext('music'),
                   gettext('social media'),
                   gettext('images'),
                   gettext('videos'),
                   gettext('it'),
                   gettext('news'),
                   gettext('map'))
2014-01-19 23:04:09 +01:00
@babel.localeselector
def get_locale():
    """Pick the UI locale for the current request.

    Precedence (lowest to highest): Accept-Language header, configured
    default_locale, 'locale' cookie, URL argument, form field.
    """
    locale = request.accept_languages.best_match(settings['locales'].keys())

    if settings['server'].get('default_locale'):
        locale = settings['server']['default_locale']

    cookie_locale = request.cookies.get('locale', '')
    if cookie_locale in settings['locales']:
        locale = cookie_locale

    # request parameters win over everything else; form beats args
    for source in (request.args, request.form):
        if 'locale' in source and source['locale'] in settings['locales']:
            locale = source['locale']

    return locale
2014-12-20 23:33:03 +01:00
# code-highlighter
@app.template_filter('code_highlighter')
def code_highlighter(codelines, language=None):
    """Template filter: pygments-highlight `codelines`.

    `codelines` is an iterable of (line_number, code) pairs; consecutive
    line numbers form one code block, a gap starts a new block so the
    inline line numbering restarts at the right value.
    """
    if not language:
        language = 'text'

    try:
        # find lexer by programing language
        lexer = get_lexer_by_name(language, stripall=True)
    except Exception:  # was a bare `except:`
        # if lexer is not found, fall back to plain text
        logger.debug('highlighter cannot find lexer for {0}'.format(language))
        lexer = get_lexer_by_name('text', stripall=True)

    html_code = ''
    tmp_code = ''
    last_line = None
    # fix: previously unbound when `codelines` was empty -> NameError below
    line_code_start = 1

    # parse lines
    for line, code in codelines:
        if not last_line:
            line_code_start = line

        # new codeblock is detected on a gap in line numbers
        if last_line is not None and\
           last_line + 1 != line:

            # highlight last codepart
            formatter = HtmlFormatter(linenos='inline',
                                      linenostart=line_code_start)
            html_code = html_code + highlight(tmp_code, lexer, formatter)

            # reset conditions for next codepart
            tmp_code = ''
            line_code_start = line

        # add codepart
        tmp_code += code + '\n'

        # update line
        last_line = line

    # highlight last codepart
    formatter = HtmlFormatter(linenos='inline', linenostart=line_code_start)
    html_code = html_code + highlight(tmp_code, lexer, formatter)

    return html_code
2014-01-14 18:17:19 +01:00
def get_base_url():
    """Return the externally visible base URL of this instance.

    A configured `base_url` wins; otherwise the URL is derived from the
    current request, honouring HTTPS.
    """
    configured = settings['server']['base_url']
    if configured:
        return configured
    scheme = 'https' if request.is_secure else 'http'
    return url_for('index', _external=True, _scheme=scheme)
2014-04-25 01:46:40 +02:00
def get_current_theme_name(override=None):
    """Returns theme name.

    Checks in this order:
    1. override
    2. cookies
    3. settings"""
    if override and override in themes:
        return override

    requested = request.args.get('theme',
                                 request.cookies.get('theme',
                                                     default_theme))
    # unknown theme names silently fall back to the default
    return requested if requested in themes else default_theme
2015-01-01 18:59:53 +01:00
def get_result_template(theme, template_name):
    """Return the theme-specific result template path when one exists,
    otherwise the shared default template path."""
    candidate = '{0}/result_templates/{1}'.format(theme, template_name)
    return candidate if candidate in result_templates \
        else 'result_templates/' + template_name
2014-04-25 01:46:40 +02:00
def url_for_theme(endpoint, override_theme=None, **values):
    """url_for() wrapper used inside templates: rewrites static file
    names to their themed variant when the theme ships one."""
    if endpoint == 'static' and values.get('filename'):
        active_theme = get_current_theme_name(override=override_theme)
        themed_filename = "themes/{}/{}".format(active_theme,
                                                values['filename'])
        if themed_filename in static_files:
            values['filename'] = themed_filename
    return url_for(endpoint, **values)
2015-01-16 16:26:15 +01:00
def image_proxify(url):
    """Return a /image_proxy URL for *url* when the image proxy is
    enabled (server setting or cookie); otherwise return *url* as-is.

    The `h` parameter is a SHA-256 of url+secret so the proxy endpoint
    only fetches URLs this instance generated.
    """
    if url.startswith('//'):
        url = 'https:' + url

    enabled = settings['server'].get('image_proxy') \
        or request.cookies.get('image_proxy')
    if not enabled:
        return url

    digest = hashlib.sha256(
        (url + settings['server']['secret_key']).encode('utf-8')).hexdigest()
    return '{0}?{1}'.format(url_for('image_proxy'),
                            urlencode(dict(url=url.encode('utf-8'),
                                           h=digest)))
2015-01-16 16:26:15 +01:00
2014-04-25 01:46:40 +02:00
def render(template_name, override_theme=None, **kwargs):
    """Render a themed template with the common context injected:
    categories, selected categories, autocomplete backend, RTL flag,
    version, theme helpers and the request cookies."""
    blocked_engines = get_blocked_engines(engines, request.cookies)

    autocomplete = request.cookies.get('autocomplete')
    if autocomplete not in autocomplete_backends:
        autocomplete = None

    # categories that still have at least one non-blocked engine
    usable_categories = set(category for engine_name in engines
                            for category in engines[engine_name].categories
                            if (engine_name, category) not in blocked_engines)

    if 'categories' not in kwargs:
        kwargs['categories'] = ['general']
        kwargs['categories'].extend(x for x in
                                    sorted(categories.keys())
                                    if x != 'general'
                                    and x in usable_categories)

    if 'selected_categories' not in kwargs:
        kwargs['selected_categories'] = []
        # explicit category_<name> request arguments come first
        for arg in request.args:
            if arg.startswith('category_'):
                c = arg.split('_', 1)[1]
                if c in categories:
                    kwargs['selected_categories'].append(c)

    # fall back to the categories cookie, then to 'general'
    if not kwargs['selected_categories']:
        cookie_categories = request.cookies.get('categories', '').split(',')
        for ccateg in cookie_categories:
            if ccateg in categories:
                kwargs['selected_categories'].append(ccateg)

    if not kwargs['selected_categories']:
        kwargs['selected_categories'] = ['general']

    if 'autocomplete' not in kwargs:
        kwargs['autocomplete'] = autocomplete

    if get_locale() in rtl_locales and 'rtl' not in kwargs:
        kwargs['rtl'] = True

    kwargs['searx_version'] = VERSION_STRING

    kwargs['method'] = request.cookies.get('method', 'POST')

    kwargs['safesearch'] = request.cookies.get('safesearch', '1')

    # override url_for function in templates
    kwargs['url_for'] = url_for_theme

    kwargs['image_proxify'] = image_proxify

    kwargs['get_result_template'] = get_result_template

    kwargs['theme'] = get_current_theme_name(override=override_theme)

    kwargs['template_name'] = template_name

    kwargs['cookies'] = request.cookies

    return render_template(
        '{}/{}'.format(kwargs['theme'], template_name), **kwargs)
2013-10-15 20:50:12 +02:00
@app.route('/search', methods=['GET', 'POST'])
@app.route('/', methods=['GET', 'POST'])
def index():
    """Render index page.

    Supported outputs: html, json, csv, rss.
    """

    # no parameters at all -> plain start page
    if not request.args and not request.form:
        return render(
            'index.html',
        )

    try:
        search = Search(request)
    except Exception:  # was a bare `except:` — invalid/missing query parameters
        return render(
            'index.html',
        )

    search.results, search.suggestions,\
        search.answers, search.infoboxes = search.search(request)

    for result in search.results:

        # enable paging as soon as one participating engine supports it
        if not search.paging and engines[result['engine']].paging:
            search.paging = True

        # check if HTTPS rewrite is required
        if settings['server']['https_rewrite']\
           and result['parsed_url'].scheme == 'http':

            result = https_url_rewrite(result)

        if search.request_data.get('format', 'html') == 'html':
            if 'content' in result:
                result['content'] = highlight_content(result['content'],
                                                      search.query.encode('utf-8'))  # noqa
            result['title'] = highlight_content(result['title'],
                                                search.query.encode('utf-8'))
        else:
            if 'content' in result:
                result['content'] = html_to_text(result['content']).strip()
            # removing html content and whitespace duplications
            result['title'] = ' '.join(html_to_text(result['title'])
                                       .strip().split())

        result['pretty_url'] = prettify_url(result['url'])

        # TODO, check if timezone is calculated right
        if 'publishedDate' in result:
            result['pubdate'] = result['publishedDate'].strftime('%Y-%m-%d %H:%M:%S%z')
            # published within the last 24h -> relative "x ago" wording
            if result['publishedDate'].replace(tzinfo=None) >= datetime.now() - timedelta(days=1):
                timedifference = datetime.now() - result['publishedDate'].replace(tzinfo=None)
                minutes = int((timedifference.seconds / 60) % 60)
                hours = int(timedifference.seconds / 60 / 60)
                if hours == 0:
                    result['publishedDate'] = gettext(u'{minutes} minute(s) ago').format(minutes=minutes)  # noqa
                else:
                    result['publishedDate'] = gettext(u'{hours} hour(s), {minutes} minute(s) ago').format(hours=hours, minutes=minutes)  # noqa
            else:
                result['publishedDate'] = format_date(result['publishedDate'])

    if search.request_data.get('format') == 'json':
        return Response(json.dumps({'query': search.query,
                                    'results': search.results}),
                        mimetype='application/json')
    elif search.request_data.get('format') == 'csv':
        csv = UnicodeWriter(cStringIO.StringIO())
        keys = ('title', 'url', 'content', 'host', 'engine', 'score')
        if search.results:
            csv.writerow(keys)
            for row in search.results:
                row['host'] = row['parsed_url'].netloc
                csv.writerow([row.get(key, '') for key in keys])
        csv.stream.seek(0)
        response = Response(csv.stream.read(), mimetype='application/csv')
        cont_disp = 'attachment;Filename=searx_-_{0}.csv'.format(search.query)
        response.headers.add('Content-Disposition', cont_disp)
        return response
    elif search.request_data.get('format') == 'rss':
        response_rss = render(
            'opensearch_response_rss.xml',
            results=search.results,
            q=search.request_data['q'],
            number_of_results=len(search.results),
            base_url=get_base_url()
        )
        return Response(response_rss, mimetype='text/xml')

    return render(
        'results.html',
        results=search.results,
        q=search.request_data['q'],
        selected_categories=search.categories,
        paging=search.paging,
        pageno=search.pageno,
        base_url=get_base_url(),
        suggestions=search.suggestions,
        answers=search.answers,
        infoboxes=search.infoboxes,
        theme=get_current_theme_name(),
        favicons=global_favicons[themes.index(get_current_theme_name())]
    )
2014-01-01 22:16:53 +01:00
2013-10-14 23:09:13 +02:00
2013-10-21 00:28:48 +02:00
@app.route('/about', methods=['GET'])
def about():
    """Render about page"""
    return render('about.html')
2014-01-17 16:23:23 +01:00
@app.route('/autocompleter', methods=['GET', 'POST'])
def autocompleter():
    """Return autocompleter results"""
    # select request method
    request_data = request.form if request.method == 'POST' else request.args

    # set blocked engines
    blocked_engines = get_blocked_engines(engines, request.cookies)

    # parse query
    query = Query(request_data.get('q', '').encode('utf-8'), blocked_engines)
    query.parse_query()

    # check if search query is set
    if not query.getSearchQuery():
        return '', 400

    # run autocompleter
    completer = autocomplete_backends.get(request.cookies.get('autocomplete'))

    # parse searx specific autocompleter results like !bang
    raw_results = searx_bang(query)

    # normal autocompletion results only appear if max 3 inner results returned
    if len(raw_results) <= 3 and completer:
        # run autocompletion
        raw_results.extend(completer(query.getSearchQuery()))

    # parse results (write :language and !engine back to result string)
    results = []
    for suggestion in raw_results:
        query.changeSearchQuery(suggestion)
        # add parsed result
        results.append(query.getFullQuery())

    # return autocompleter results
    if request_data.get('format') == 'x-suggestions':
        return Response(json.dumps([query.query, results]),
                        mimetype='application/json')

    return Response(json.dumps(results),
                    mimetype='application/json')
2014-01-01 22:16:53 +01:00
@app.route('/preferences', methods=['GET', 'POST'])
def preferences():
    """Render preferences page.

    Settings that are going to be saved as cookies."""
    lang = None
    image_proxy = request.cookies.get('image_proxy',
                                      settings['server'].get('image_proxy'))

    if request.cookies.get('language')\
       and request.cookies['language'] in (x[0] for x in language_codes):
        lang = request.cookies['language']

    blocked_engines = []

    resp = make_response(redirect(url_for('index')))

    if request.method == 'GET':
        blocked_engines = get_blocked_engines(engines, request.cookies)
    else:  # on save
        selected_categories = []
        locale = None
        autocomplete = ''
        method = 'POST'
        safesearch = '1'
        # fix: `theme` was only assigned inside the form loop, raising
        # NameError below when the posted form had no 'theme' field
        theme = default_theme

        for pd_name, pd in request.form.items():
            if pd_name.startswith('category_'):
                category = pd_name[9:]
                if category not in categories:
                    continue
                selected_categories.append(category)
            elif pd_name == 'locale' and pd in settings['locales']:
                locale = pd
            elif pd_name == 'image_proxy':
                image_proxy = pd
            elif pd_name == 'autocomplete':
                autocomplete = pd
            elif pd_name == 'language' and (pd == 'all' or
                                            pd in (x[0] for
                                                   x in language_codes)):
                lang = pd
            elif pd_name == 'method':
                method = pd
            elif pd_name == 'safesearch':
                safesearch = pd
            elif pd_name.startswith('engine_'):
                # engine_<name>__<category> toggles block one engine/category
                if pd_name.find('__') > -1:
                    engine_name, category = pd_name.replace('engine_', '', 1).split('__', 1)
                    if engine_name in engines and category in engines[engine_name].categories:
                        blocked_engines.append((engine_name, category))
            elif pd_name == 'theme':
                theme = pd if pd in themes else default_theme
            else:
                # unknown form fields are stored verbatim as cookies
                resp.set_cookie(pd_name, pd, max_age=cookie_max_age)

        resp.set_cookie(
            'blocked_engines', ','.join('__'.join(e) for e in blocked_engines),
            max_age=cookie_max_age
        )

        if locale:
            resp.set_cookie(
                'locale', locale,
                max_age=cookie_max_age
            )

        if lang:
            resp.set_cookie(
                'language', lang,
                max_age=cookie_max_age
            )

        if selected_categories:
            resp.set_cookie(
                'categories', ','.join(selected_categories),
                max_age=cookie_max_age
            )

        resp.set_cookie(
            'autocomplete', autocomplete,
            max_age=cookie_max_age
        )

        resp.set_cookie('method', method, max_age=cookie_max_age)

        resp.set_cookie('safesearch', safesearch, max_age=cookie_max_age)

        resp.set_cookie('image_proxy', image_proxy, max_age=cookie_max_age)

        resp.set_cookie('theme', theme, max_age=cookie_max_age)

        return resp

    return render('preferences.html',
                  locales=settings['locales'],
                  current_locale=get_locale(),
                  current_language=lang or 'all',
                  image_proxy=image_proxy,
                  language_codes=language_codes,
                  categs=categories.items(),
                  blocked_engines=blocked_engines,
                  autocomplete_backends=autocomplete_backends,
                  shortcuts={y: x for x, y in engine_shortcuts.items()},
                  themes=themes,
                  theme=get_current_theme_name())
2014-01-01 22:16:53 +01:00
2015-01-16 16:02:21 +01:00
@app.route('/image_proxy', methods=['GET'])
def image_proxy():
    """Fetch and relay a remote image whose URL was signed by
    image_proxify() (SHA-256 of url+secret), limited to ~5 MiB."""
    # fix: default '' so a missing `url` arg returns 400 instead of
    # raising AttributeError on None.encode()
    url = request.args.get('url', '').encode('utf-8')

    if not url:
        return '', 400

    # reject URLs this instance did not sign itself
    h = hashlib.sha256(url + settings['server']['secret_key'].encode('utf-8')).hexdigest()
    if h != request.args.get('h'):
        return '', 400

    # forward client cache validators so upstream 304s pass through
    headers = dict_subset(request.headers, {'If-Modified-Since', 'If-None-Match'})
    headers['User-Agent'] = gen_useragent()

    resp = http_get(url,
                    stream=True,
                    timeout=settings['server'].get('request_timeout', 2),
                    headers=headers)

    if resp.status_code == 304:
        return '', resp.status_code

    if resp.status_code != 200:
        logger.debug('image-proxy: wrong response code: {0}'.format(resp.status_code))
        if resp.status_code >= 400:
            return '', resp.status_code
        return '', 400

    if not resp.headers.get('content-type', '').startswith('image/'):
        # fix: was `resp.get('content-type')` — a requests Response has
        # no .get(); the header lives on resp.headers
        logger.debug('image-proxy: wrong content-type: {0}'.format(
            resp.headers.get('content-type')))
        return '', 400

    img = ''
    chunk_counter = 0

    for chunk in resp.iter_content(1024 * 1024):
        chunk_counter += 1
        if chunk_counter > 5:
            return '', 502  # Bad gateway - file is too big (>5M)
        img += chunk

    headers = dict_subset(resp.headers, {'Content-Length', 'Length', 'Date', 'Last-Modified', 'Expires', 'Etag'})

    return Response(img, mimetype=resp.headers['content-type'], headers=headers)
2015-01-16 16:02:21 +01:00
2013-10-27 01:03:05 +02:00
@app.route('/stats', methods=['GET'])
def stats():
    """Render engine statistics page."""
    return render(
        'stats.html',
        stats=get_engines_stats(),
    )
2013-10-27 01:03:05 +02:00
2014-01-01 22:16:53 +01:00
2013-12-01 16:10:38 +01:00
@app.route('/robots.txt', methods=['GET'])
def robots():
    """Serve a static robots.txt keeping crawlers off stats/preferences."""
    body = """User-agent: *
Allow: /
Allow: /about
Disallow: /stats
Disallow: /preferences
"""
    return Response(body, mimetype='text/plain')
2014-01-01 22:16:53 +01:00
2013-10-16 00:01:08 +02:00
@app.route('/opensearch.xml', methods=['GET'])
def opensearch():
    """Serve the OpenSearch description document."""
    # chrome/chromium only supports HTTP GET....
    user_agent = request.headers.get('User-Agent', '').lower()
    method = 'get' if user_agent.find('webkit') >= 0 else 'post'

    xml = render('opensearch.xml',
                 opensearch_method=method,
                 host=get_base_url())

    return Response(response=xml,
                    status=200,
                    mimetype="text/xml")
2013-12-01 23:52:49 +01:00
@app.route('/favicon.ico')
def favicon():
    """Serve the active theme's favicon."""
    icon_dir = os.path.join(app.root_path,
                            'static/themes',
                            get_current_theme_name(),
                            'img')
    return send_from_directory(icon_dir,
                               'favicon.png',
                               mimetype='image/vnd.microsoft.icon')
2013-12-01 23:52:49 +01:00
def run():
    """Run the built-in development server on the configured port."""
    debug = settings['server']['debug']
    app.run(
        debug=debug,
        use_debugger=debug,
        port=settings['server']['port']
    )
2014-07-03 22:02:53 +02:00
# WSGI entry point; ProxyFix trusts X-Forwarded-* headers from a
# fronting reverse proxy so request.is_secure etc. are correct.
application = app
app.wsgi_app = ProxyFix(application.wsgi_app)

if __name__ == "__main__":
    run()