#!/usr/bin/env python
# This script saves Ahmia's blacklist for onion sites.
# More info at https://ahmia.fi/blacklist/

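# The result is written to data/ahmia_blacklist.txt inside the searx package
# directory, so the searx package must be importable when this script runs.
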
from os.path import join

import requests

from searx import searx_dir

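# Endpoint serving Ahmia's list of banned onion sites as plain text.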
URL = 'https://ahmia.fi/blacklist/banned/'


def fetch_ahmia_blacklist():
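    """Fetch Ahmia's list of banned onion sites and return it as a list of entries."""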
    resp = requests.get(URL, timeout=3.0)
    if resp.status_code != 200:
        raise Exception("Error fetching Ahmia blacklist, HTTP code " + str(resp.status_code))
    else:
        blacklist = resp.text.split()
        return blacklist


def get_ahmia_blacklist_filename():
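    """Return the path of the blacklist file in searx's data directory."""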
    return join(join(searx_dir, "data"), "ahmia_blacklist.txt")


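# Fetch the current blacklist and write it to the data directory, one entry per line.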
blacklist = fetch_ahmia_blacklist()
with open(get_ahmia_blacklist_filename(), "w") as f:
    f.write('\n'.join(blacklist))