1
0
mirror of https://gitlab.com/fdroid/fdroidserver.git synced 2024-10-05 18:50:09 +02:00

mirror: verify index signature if fingerprint included in URL

This keeps the old handling, without signature checks, so that it still
works without requiring Java/apksigner or the full config.py parsing machinery.
This commit is contained in:
Hans-Christoph Steiner 2017-11-29 16:54:27 +01:00
parent d5481927f2
commit 94e67a7623

View File

@ -1,20 +1,18 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import io
import ipaddress import ipaddress
import json
import logging import logging
import os import os
import posixpath
import socket import socket
import subprocess import subprocess
import sys import sys
import zipfile
from argparse import ArgumentParser from argparse import ArgumentParser
from urllib.parse import urlparse import urllib.parse
from . import _ from . import _
from . import common from . import common
from . import net from . import index
from . import update from . import update
options = None options = None
@ -25,10 +23,12 @@ def main():
parser = ArgumentParser(usage=_("%(prog)s [options] url")) parser = ArgumentParser(usage=_("%(prog)s [options] url"))
common.setup_global_opts(parser) common.setup_global_opts(parser)
parser.add_argument("url", nargs='?', help=_("Base URL to mirror")) parser.add_argument("url", nargs='?',
help=_('Base URL to mirror, can include the index signing key '
+ 'using the query string: ?fingerprint='))
parser.add_argument("--archive", action='store_true', default=False, parser.add_argument("--archive", action='store_true', default=False,
help=_("Also mirror the full archive section")) help=_("Also mirror the full archive section"))
parser.add_argument("--output-dir", default=os.getcwd(), parser.add_argument("--output-dir", default=None,
help=_("The directory to write the mirror to")) help=_("The directory to write the mirror to"))
options = parser.parse_args() options = parser.parse_args()
@ -37,11 +37,36 @@ def main():
parser.print_help() parser.print_help()
sys.exit(1) sys.exit(1)
baseurl = options.url scheme, hostname, path, params, query, fragment = urllib.parse.urlparse(options.url)
basedir = options.output_dir fingerprint = urllib.parse.parse_qs(query).get('fingerprint')
def _append_to_url_path(*args):
'''Append the list of path components to URL, keeping the rest the same'''
newpath = posixpath.join(path, *args)
return urllib.parse.urlunparse((scheme, hostname, newpath, params, query, fragment))
if fingerprint:
config = common.read_config(options)
if not ('jarsigner' in config or 'apksigner' in config):
logging.error(_('Java JDK not found! Install in standard location or set java_paths!'))
sys.exit(1)
def _get_index(section, etag=None):
url = _append_to_url_path(section)
return index.download_repo_index(url, etag=etag)
else:
def _get_index(section, etag=None):
import io
import json
import zipfile
from . import net
url = _append_to_url_path(section, 'index-v1.jar')
content, etag = net.http_get(url)
with zipfile.ZipFile(io.BytesIO(content)) as zip:
jsoncontents = zip.open('index-v1.json').read()
data = json.loads(jsoncontents.decode('utf-8'))
return data, etag
url = urlparse(baseurl)
hostname = url.netloc
ip = None ip = None
try: try:
ip = ipaddress.ip_address(hostname) ip = ipaddress.ip_address(hostname)
@ -53,39 +78,40 @@ def main():
'A full mirror of f-droid.org requires more than 200GB.')) 'A full mirror of f-droid.org requires more than 200GB.'))
sys.exit(1) sys.exit(1)
path = url.path.rstrip('/') path = path.rstrip('/')
if path.endswith('repo') or path.endswith('archive'): if path.endswith('repo') or path.endswith('archive'):
logging.error(_('Do not include "{path}" in URL!').format(path=path.split('/')[-1])) logging.warning(_('Do not include "{path}" in URL!')
sys.exit(1) .format(path=path.split('/')[-1]))
elif not path.endswith('fdroid'): elif not path.endswith('fdroid'):
logging.warning(_('{url} does not end with "fdroid", check the URL path!') logging.warning(_('{url} does not end with "fdroid", check the URL path!')
.format(url=baseurl)) .format(url=options.url))
icondirs = ['icons', ] icondirs = ['icons', ]
for density in update.screen_densities: for density in update.screen_densities:
icondirs.append('icons-' + density) icondirs.append('icons-' + density)
if options.output_dir:
basedir = options.output_dir
else:
basedir = os.path.join(os.getcwd(), hostname, path.strip('/'))
os.makedirs(basedir, exist_ok=True)
if options.archive: if options.archive:
sections = ('repo', 'archive') sections = ('repo', 'archive')
else: else:
sections = ('repo', ) sections = ('repo', )
for section in sections: for section in sections:
sectionurl = baseurl + '/' + section
sectiondir = os.path.join(basedir, section) sectiondir = os.path.join(basedir, section)
repourl = sectionurl + '/index-v1.jar'
content, etag = net.http_get(repourl) data, etag = _get_index(section)
with zipfile.ZipFile(io.BytesIO(content)) as zip:
jsoncontents = zip.open('index-v1.json').read()
os.makedirs(sectiondir, exist_ok=True) os.makedirs(sectiondir, exist_ok=True)
os.chdir(sectiondir) os.chdir(sectiondir)
for icondir in icondirs: for icondir in icondirs:
os.makedirs(os.path.join(sectiondir, icondir), exist_ok=True) os.makedirs(os.path.join(sectiondir, icondir), exist_ok=True)
data = json.loads(jsoncontents.decode('utf-8')) urls = []
urls = ''
for packageName, packageList in data['packages'].items(): for packageName, packageList in data['packages'].items():
for package in packageList: for package in packageList:
to_fetch = [] to_fetch = []
@ -98,9 +124,8 @@ def main():
for f in to_fetch: for f in to_fetch:
if not os.path.exists(f) \ if not os.path.exists(f) \
or (f.endswith('.apk') and os.path.getsize(f) != package['size']): or (f.endswith('.apk') and os.path.getsize(f) != package['size']):
url = sectionurl + '/' + f urls.append(_append_to_url_path(section, f))
urls += url + '\n' urls.append(_append_to_url_path(section, f + '.asc'))
urls += url + '.asc\n'
for app in data['apps']: for app in data['apps']:
localized = app.get('localized') localized = app.get('localized')
@ -109,18 +134,19 @@ def main():
for k in update.GRAPHIC_NAMES: for k in update.GRAPHIC_NAMES:
f = d.get(k) f = d.get(k)
if f: if f:
urls += '/'.join((sectionurl, locale, f)) + '\n' urls.append(_append_to_url_path(section, app['packageName'], locale, f))
for k in update.SCREENSHOT_DIRS: for k in update.SCREENSHOT_DIRS:
filelist = d.get(k) filelist = d.get(k)
if filelist: if filelist:
for f in filelist: for f in filelist:
urls += '/'.join((sectionurl, locale, k, f)) + '\n' urls.append(_append_to_url_path(section, app['packageName'], locale, k, f))
with open('.rsync-input-file', 'w') as fp: with open(urls_file, 'w') as fp:
fp.write(urls) for url in urls:
fp.write(url.split('?')[0] + '\n') # wget puts query string in the filename
subprocess.call(['wget', '--continue', '--user-agent="fdroid mirror"', subprocess.call(['wget', '--continue', '--user-agent="fdroid mirror"',
'--input-file=.rsync-input-file']) '--input-file=' + urls_file])
os.remove('.rsync-input-file') os.remove(urls_file)
urls = dict() urls = dict()
for app in data['apps']: for app in data['apps']:
@ -129,17 +155,18 @@ def main():
continue continue
icon = app['icon'] icon = app['icon']
for icondir in icondirs: for icondir in icondirs:
url = sectionurl + '/' + icondir + '/' + icon url = _append_to_url_path(section, icondir, icon)
if icondir not in urls: if icondir not in urls:
urls[icondir] = '' urls[icondir] = []
urls[icondir] += url + '\n' urls[icondir].append(url)
for icondir in icondirs: for icondir in icondirs:
os.chdir(os.path.join(basedir, section, icondir)) os.chdir(os.path.join(basedir, section, icondir))
with open('.rsync-input-file', 'w') as fp: with open(urls_file, 'w') as fp:
fp.write(urls[icondir]) for url in urls[icondir]:
subprocess.call(['wget', '--continue', '--input-file=.rsync-input-file']) fp.write(url.split('?')[0] + '\n') # wget puts query string in the filename
os.remove('.rsync-input-file') subprocess.call(['wget', '--continue', '--input-file=' + urls_file])
os.remove(urls_file)
if __name__ == "__main__": if __name__ == "__main__":