Mirror of https://gitlab.com/fdroid/fdroidserver.git (synced 2024-11-14 02:50:12 +01:00).
Merge branch 'verification-fixes' into 'master': verification fixes.
See merge request fdroid/fdroidserver!602
This commit is contained in:
commit
076f885950
@ -53,6 +53,7 @@ packages="
|
||||
ca-certificates-java
|
||||
cmake
|
||||
curl
|
||||
disorderfs
|
||||
expect
|
||||
faketime
|
||||
flex
|
||||
|
@ -19,19 +19,108 @@
|
||||
import sys
|
||||
import os
|
||||
import glob
|
||||
import json
|
||||
import logging
|
||||
import requests
|
||||
from argparse import ArgumentParser
|
||||
import logging
|
||||
from collections import OrderedDict
|
||||
|
||||
from . import _
|
||||
from . import common
|
||||
from . import net
|
||||
from . import update
|
||||
from .exception import FDroidException
|
||||
|
||||
options = None
|
||||
config = None
|
||||
|
||||
|
||||
class hashabledict(OrderedDict):
    """An OrderedDict that is hashable and orderable.

    Instances can therefore be stored in sets or used as dict keys.
    Hashing, equality, and ordering are all based on the sorted
    (key, value) pairs, so insertion order is deliberately ignored.
    All keys and values must themselves be hashable and sortable.
    """

    def __key(self):
        # Canonical, order-independent representation of the dict.
        return tuple((k, self[k]) for k in sorted(self))

    def __hash__(self):
        return hash(self.__key())

    def __eq__(self, other):
        return self.__key() == other.__key()

    def __lt__(self, other):
        return self.__key() < other.__key()

    def __gt__(self, other):
        # Fixed: this was previously misspelled ``__qt__``, a name Python
        # never calls, so the method was dead code and ``a > b`` silently
        # fell back on the reflected ``__lt__`` instead.
        return self.__key() > other.__key()
|
||||
|
||||
|
||||
class Decoder(json.JSONDecoder):
    """JSON decoder that deserializes every JSON array as a Python set."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.parse_array = self.JSONArray
        # parse_array is captured when the scanner is constructed, so a
        # new scanner must be built here; only the pure-Python scanner
        # honors the override (the C scanner ignores it).
        self.scan_once = json.scanner.py_make_scanner(self)

    def JSONArray(self, s_and_end, scan_once, **kwargs):
        # Delegate the actual parsing, then convert the list to a set.
        entries, end = json.decoder.JSONArray(s_and_end, scan_once, **kwargs)
        return set(entries), end
|
||||
|
||||
|
||||
class Encoder(json.JSONEncoder):
    """JSON encoder that serializes Python sets as sorted JSON arrays."""

    def default(self, obj):
        # Sets are not JSON-serializable; emit them as sorted lists so
        # the output is deterministic.  Everything else keeps the stock
        # behavior (raising TypeError for unsupported types).
        return sorted(obj) if isinstance(obj, set) else super().default(obj)
|
||||
|
||||
|
||||
def write_json_report(url, remote_apk, unsigned_apk, compare_result):
    """write out the results of the verify run to JSON

    This builds up reports on the repeated runs of `fdroid verify` on
    a set of apps. It uses the timestamps on the compared files to
    ensure that there is only one report per file, even when run
    repeatedly.

    :param url: URL the remote APK was downloaded from
    :param remote_apk: path to the downloaded, signed APK
    :param unsigned_apk: path to the locally built, unsigned APK
    :param compare_result: error string from the comparison, or
        falsy if the two APKs matched

    """

    # Per-APK report, stored next to the unsigned APK as <apk>.json,
    # keyed by the local file's timestamp so reruns merge rather than
    # duplicate.
    jsonfile = unsigned_apk + '.json'
    if os.path.exists(jsonfile):
        with open(jsonfile) as fp:
            data = json.load(fp, object_pairs_hook=OrderedDict)
    else:
        data = OrderedDict()
    output = hashabledict()
    output['url'] = url
    for key, filename in (('local', unsigned_apk), ('remote', remote_apk)):
        d = hashabledict()
        output[key] = d
        d['file'] = filename
        d['sha256'] = update.sha256sum(filename)
        # NOTE(review): st_ctime is metadata-change time on Unix, not
        # creation time — confirm this is the intended "timestamp".
        d['timestamp'] = os.stat(filename).st_ctime
        d['packageName'], d['versionCode'], d['versionName'] = common.get_apk_id(filename)
    if compare_result:
        output['verified'] = False
        output['result'] = compare_result
    else:
        output['verified'] = True
    data[str(output['local']['timestamp'])] = output  # str makes better dict keys than float
    with open(jsonfile, 'w') as fp:
        json.dump(data, fp, sort_keys=True)

    # Successfully verified APKs are additionally accumulated in a
    # site-wide report, grouped by packageName.  Decoder/Encoder store
    # each group as a set of hashabledicts, so identical reruns
    # deduplicate automatically.
    if output['verified']:
        jsonfile = 'unsigned/verified.json'
        if os.path.exists(jsonfile):
            with open(jsonfile) as fp:
                data = json.load(fp, cls=Decoder, object_pairs_hook=hashabledict)
        else:
            data = OrderedDict()
            data['packages'] = OrderedDict()
        packageName = output['local']['packageName']
        if packageName not in data['packages']:
            data['packages'][packageName] = set()
        data['packages'][packageName].add(output)
        with open(jsonfile, 'w') as fp:
            json.dump(data, fp, cls=Encoder, sort_keys=True)
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
global options, config
|
||||
@ -42,6 +131,8 @@ def main():
|
||||
parser.add_argument("appid", nargs='*', help=_("applicationId with optional versionCode in the form APPID[:VERCODE]"))
|
||||
parser.add_argument("--reuse-remote-apk", action="store_true", default=False,
|
||||
help=_("Verify against locally cached copy rather than redownloading."))
|
||||
parser.add_argument("--output-json", action="store_true", default=False,
|
||||
help=_("Output JSON report to file named after APK."))
|
||||
options = parser.parse_args()
|
||||
|
||||
config = common.read_config(options)
|
||||
@ -64,6 +155,7 @@ def main():
|
||||
for apkfile in sorted(glob.glob(os.path.join(unsigned_dir, '*.apk'))):
|
||||
|
||||
apkfilename = os.path.basename(apkfile)
|
||||
url = 'https://f-droid.org/repo/' + apkfilename
|
||||
appid, vercode = common.publishednameinfo(apkfile)
|
||||
|
||||
if vercodes and appid not in vercodes:
|
||||
@ -75,11 +167,10 @@ def main():
|
||||
|
||||
logging.info("Processing {apkfilename}".format(apkfilename=apkfilename))
|
||||
|
||||
remoteapk = os.path.join(tmp_dir, apkfilename)
|
||||
if not options.reuse_remote_apk or not os.path.exists(remoteapk):
|
||||
if os.path.exists(remoteapk):
|
||||
os.remove(remoteapk)
|
||||
url = 'https://f-droid.org/repo/' + apkfilename
|
||||
remote_apk = os.path.join(tmp_dir, apkfilename)
|
||||
if not options.reuse_remote_apk or not os.path.exists(remote_apk):
|
||||
if os.path.exists(remote_apk):
|
||||
os.remove(remote_apk)
|
||||
logging.info("...retrieving " + url)
|
||||
try:
|
||||
net.download_file(url, dldir=tmp_dir)
|
||||
@ -90,10 +181,10 @@ def main():
|
||||
raise FDroidException(_('Downloading {url} failed. {error}')
|
||||
.format(url=url, error=e))
|
||||
|
||||
compare_result = common.verify_apks(
|
||||
remoteapk,
|
||||
os.path.join(unsigned_dir, apkfilename),
|
||||
tmp_dir)
|
||||
unsigned_apk = os.path.join(unsigned_dir, apkfilename)
|
||||
compare_result = common.verify_apks(remote_apk, unsigned_apk, tmp_dir)
|
||||
if options.output_json:
|
||||
write_json_report(url, remote_apk, unsigned_apk, compare_result)
|
||||
if compare_result:
|
||||
raise FDroidException(compare_result)
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user