Merge branch fdroidserver:master into fix-icon-check
commit 65934f35a8
@@ -15,11 +15,12 @@ variables:
# * python3-babel for compiling localization files
# * gnupg-agent for the full signing setup
# * python3-clint for fancy progress bars for users
# * python3-pycountry for linting config/mirrors.yml
buildserver run-tests:
  image: registry.gitlab.com/fdroid/fdroidserver:buildserver
  script:
    - apt-get update
    - apt-get install gnupg-agent python3-babel python3-clint
    - apt-get install gnupg-agent python3-babel python3-biplist python3-clint python3-pycountry
    - ./tests/run-tests
    # make sure that translations do not cause stacktraces
    - cd $CI_PROJECT_DIR/locale

@@ -41,11 +42,11 @@ metadata_v0:
  image: registry.gitlab.com/fdroid/fdroidserver:buildserver
  variables:
    GIT_DEPTH: 1000
    RELEASE_COMMIT_ID: a1c4f803de8d4dc92ebd6b571a493183d14a00bf # after ArchivePolicy: 0
    RELEASE_COMMIT_ID: 50aa35772b058e76b950c01e16019c072c191b73 # after switching to `git rev-parse`
  script:
    - git fetch https://gitlab.com/fdroid/fdroidserver.git $RELEASE_COMMIT_ID
    - cd tests
    - export GITCOMMIT=`git describe`
    - export GITCOMMIT=$(git rev-parse HEAD)
    - git checkout $RELEASE_COMMIT_ID
    - cd ..
    - git clone --depth 1 https://gitlab.com/fdroid/fdroiddata.git

@@ -98,7 +99,9 @@ debian_testing:
        git
        gnupg
        ipfs-cid
        python3-biplist
        python3-defusedxml
        python3-pycountry
        python3-setuptools
        sdkmanager
    - python3 -c 'import fdroidserver'

@@ -122,7 +125,14 @@ ubuntu_lts_ppa:
    - echo "deb http://ppa.launchpad.net/fdroid/fdroidserver/ubuntu $RELEASE main" >> /etc/apt/sources.list
    - apt-get update
    - apt-get dist-upgrade
    - apt-get install --install-recommends dexdump fdroidserver git python3-setuptools sdkmanager
    - apt-get install --install-recommends
        dexdump
        fdroidserver
        git
        python3-biplist
        python3-pycountry
        python3-setuptools
        sdkmanager

    # Test things work with a default branch other than 'master'
    - git config --global init.defaultBranch thisisnotmasterormain

@@ -152,6 +162,9 @@ ubuntu_jammy_pip:
    - $pip install sdkmanager
    - sdkmanager 'build-tools;33.0.0'

    # Install extras_require.optional from setup.py
    - $pip install biplist pycountry

    - $pip install dist/fdroidserver-*.tar.gz
    - tar xzf dist/fdroidserver-*.tar.gz
    - cd fdroidserver-*

@@ -287,6 +300,7 @@ fedora_latest:
        python3-babel
        python3-matplotlib
        python3-pip
        python3-pycountry
        rsync
        which
    - $pip install sdkmanager

@@ -343,6 +357,9 @@ macOS:
    - /bin/bash --version
    - /bin/bash -n gradlew-fdroid tests/run-tests

    # TODO remove the packages below once they are included in the Homebrew package
    - $(brew --prefix fdroidserver)/libexec/bin/python3 -m pip install biplist pycountry

    # test fdroidserver from git with current package's dependencies
    - fdroid="$(brew --prefix fdroidserver)/libexec/bin/python3 $PWD/fdroid" ./tests/run-tests
@@ -543,6 +543,7 @@ include tests/build-tools/28.0.3/aapt-output-souch.smsbypass_9.txt
include tests/build-tools/generate.sh
include tests/check-fdroid-apk
include tests/checkupdates.TestCase
include tests/com.fake.IpaApp_1000000000001.ipa
include tests/common.TestCase
include tests/config.py
include tests/config/antiFeatures.yml
@@ -178,6 +178,12 @@
# serverwebroot:
#   - foo.com:/usr/share/nginx/www/fdroid
#   - bar.info:/var/www/fdroid
#
# There is a special mode to only deploy the index file:
#
# serverwebroot:
#   - url: 'me@b.az:/srv/fdroid'
#     indexOnly: true


# When running fdroid processes on a remote server, it is possible to
@@ -55,7 +55,9 @@ scan_apk # NOQA: B101
scan_repo_files # NOQA: B101
from fdroidserver.deploy import (update_awsbucket,
                                 update_servergitmirrors,
                                 update_serverwebroots,
                                 update_serverwebroot) # NOQA: E402
update_awsbucket # NOQA: B101
update_servergitmirrors # NOQA: B101
update_serverwebroots # NOQA: B101
update_serverwebroot # NOQA: B101
@@ -361,6 +361,26 @@ def regsub_file(pattern, repl, path):
        f.write(text)


def config_type_check(path, data):
    if Path(path).name == 'mirrors.yml':
        expected_type = list
    else:
        expected_type = dict
    if expected_type == dict:
        if not isinstance(data, dict):
            msg = _('{path} is not "key: value" dict, but a {datatype}!')
            raise TypeError(msg.format(path=path, datatype=type(data).__name__))
    elif not isinstance(data, expected_type):
        msg = _('{path} is not {expected_type}, but a {datatype}!')
        raise TypeError(
            msg.format(
                path=path,
                expected_type=expected_type.__name__,
                datatype=type(data).__name__,
            )
        )

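The behavior of config_type_check() in a minimal sketch (editor's illustration, not part of this commit), mirroring the tests added to tests/common.TestCase further down: a config.yml-style file must parse to a dict, while mirrors.yml must parse to a list.

    from fdroidserver import common

    common.config_type_check('config.yml', dict())          # passes: config.yml must be a dict
    common.config_type_check('config/mirrors.yml', list())  # passes: mirrors.yml must be a list
    common.config_type_check('config/mirrors.yml', dict())  # raises TypeError
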
def read_config(opts=None):
    """Read the repository config.


@@ -401,11 +421,7 @@ def read_config(opts=None):
            config = yaml.safe_load(fp)
        if not config:
            config = {}
        if not isinstance(config, dict):
            msg = _('{path} is not "key: value" dict, but a {datatype}!')
            raise TypeError(
                msg.format(path=config_file, datatype=type(config).__name__)
            )
        config_type_check(config_file, config)
    elif os.path.exists(old_config_file):
        logging.warning(_("""{oldfile} is deprecated, use {newfile}""")
                        .format(oldfile=old_config_file, newfile=config_file))
@@ -446,18 +462,22 @@ def read_config(opts=None):

    if 'serverwebroot' in config:
        if isinstance(config['serverwebroot'], str):
            roots = [config['serverwebroot']]
            roots = [{'url': config['serverwebroot']}]
        elif all(isinstance(item, str) for item in config['serverwebroot']):
            roots = [{'url': i} for i in config['serverwebroot']]
        elif all(isinstance(item, dict) for item in config['serverwebroot']):
            roots = config['serverwebroot']
        else:
            raise TypeError(_('only accepts strings, lists, and tuples'))
        rootlist = []
        for rootstr in roots:
        for d in roots:
            # since this is used with rsync, where trailing slashes have
            # meaning, ensure there is always a trailing slash
            rootstr = d['url']
            if rootstr[-1] != '/':
                rootstr += '/'
            rootlist.append(rootstr.replace('//', '/'))
            d['url'] = rootstr.replace('//', '/')
            rootlist.append(d)
        config['serverwebroot'] = rootlist

    if 'servergitmirrors' in config:
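An editor's sketch (not part of this commit) of the effect of the normalization above, matching the read_config() tests added below: every accepted serverwebroot form ends up as a list of dicts whose url carries a trailing slash.

    from pathlib import Path
    from fdroidserver import common

    # run in an empty working directory
    Path('config.yml').write_text("serverwebroot: 'foo@example.com:/var/www'")
    print(common.read_config()['serverwebroot'])
    # prints: [{'url': 'foo@example.com:/var/www/'}]
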
@@ -4036,7 +4056,8 @@ def rsync_status_file_to_repo(path, repo_subdir=None):
        logging.debug(_('skip deploying full build logs: not enabled in config'))
        return

    for webroot in config.get('serverwebroot', []):
    for d in config.get('serverwebroot', []):
        webroot = d['url']
        cmd = ['rsync',
               '--archive',
               '--delete-after',
@@ -284,34 +284,63 @@ def update_serverwebroot(serverwebroot, repo_section):
            _('rsync is missing or broken: {error}').format(error=e)
        ) from e
    rsyncargs = ['rsync', '--archive', '--delete-after', '--safe-links']
    if not options.no_checksum:
    if not options or not options.no_checksum:
        rsyncargs.append('--checksum')
    if options.verbose:
    if options and options.verbose:
        rsyncargs += ['--verbose']
    if options.quiet:
    if options and options.quiet:
        rsyncargs += ['--quiet']
    if options.identity_file is not None:
    if options and options.identity_file:
        rsyncargs += ['-e', 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + options.identity_file]
    elif 'identity_file' in config:
    elif config and config.get('identity_file'):
        rsyncargs += ['-e', 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + config['identity_file']]
    logging.info('rsyncing ' + repo_section + ' to ' + serverwebroot)
    url = serverwebroot['url']
    logging.info('rsyncing ' + repo_section + ' to ' + url)
    excludes = _get_index_excludes(repo_section)
    if subprocess.call(rsyncargs + excludes + [repo_section, serverwebroot]) != 0:
    if subprocess.call(rsyncargs + excludes + [repo_section, url]) != 0:
        raise FDroidException()
    if subprocess.call(rsyncargs + [repo_section, serverwebroot]) != 0:
    if subprocess.call(rsyncargs + [repo_section, url]) != 0:
        raise FDroidException()
    # upload "current version" symlinks if requested
    if config['make_current_version_link'] and repo_section == 'repo':
    if config and config.get('make_current_version_link') and repo_section == 'repo':
        links_to_upload = []
        for f in glob.glob('*.apk') \
                + glob.glob('*.apk.asc') + glob.glob('*.apk.sig'):
            if os.path.islink(f):
                links_to_upload.append(f)
        if len(links_to_upload) > 0:
            if subprocess.call(rsyncargs + links_to_upload + [serverwebroot]) != 0:
            if subprocess.call(rsyncargs + links_to_upload + [url]) != 0:
                raise FDroidException()


def update_serverwebroots(serverwebroots, repo_section, standardwebroot=True):
    for d in serverwebroots:
        # this supports both an ssh host:path and just a path
        serverwebroot = d['url']
        s = serverwebroot.rstrip('/').split(':')
        if len(s) == 1:
            fdroiddir = s[0]
        elif len(s) == 2:
            host, fdroiddir = s
        else:
            logging.error(_('Malformed serverwebroot line:') + ' ' + serverwebroot)
            sys.exit(1)
        repobase = os.path.basename(fdroiddir)
        if standardwebroot and repobase != 'fdroid':
            logging.error(
                _(
                    'serverwebroot: path does not end with "fdroid", perhaps you meant one of these:'
                )
                + '\n\t'
                + serverwebroot.rstrip('/')
                + '/fdroid\n\t'
                + serverwebroot.rstrip('/').rstrip(repobase)
                + 'fdroid'
            )
            sys.exit(1)
        update_serverwebroot(d, repo_section)


def sync_from_localcopy(repo_section, local_copy_dir):
    """Sync the repo from "local copy dir" filesystem to this box.

@@ -747,24 +776,6 @@ def main():
    else:
        standardwebroot = True

    for serverwebroot in config.get('serverwebroot', []):
        # this supports both an ssh host:path and just a path
        s = serverwebroot.rstrip('/').split(':')
        if len(s) == 1:
            fdroiddir = s[0]
        elif len(s) == 2:
            host, fdroiddir = s
        else:
            logging.error(_('Malformed serverwebroot line:') + ' ' + serverwebroot)
            sys.exit(1)
        repobase = os.path.basename(fdroiddir)
        if standardwebroot and repobase != 'fdroid':
            logging.error('serverwebroot path does not end with "fdroid", '
                          + 'perhaps you meant one of these:\n\t'
                          + serverwebroot.rstrip('/') + '/fdroid\n\t'
                          + serverwebroot.rstrip('/').rstrip(repobase) + 'fdroid')
            sys.exit(1)

    if options.local_copy_dir is not None:
        local_copy_dir = options.local_copy_dir
    elif config.get('local_copy_dir'):
@@ -825,8 +836,10 @@ def main():
                sync_from_localcopy(repo_section, local_copy_dir)
            else:
                update_localcopy(repo_section, local_copy_dir)
        for serverwebroot in config.get('serverwebroot', []):
            update_serverwebroot(serverwebroot, repo_section)
        if config.get('serverwebroot'):
            update_serverwebroots(
                config['serverwebroot'], repo_section, standardwebroot
            )
        if config.get('servergitmirrors', []):
            # update_servergitmirrors will take care of multiple mirrors so don't need a foreach
            servergitmirrors = config.get('servergitmirrors', [])
@@ -26,10 +26,10 @@ import json
import logging
import os
import re
import ruamel.yaml
import shutil
import tempfile
import urllib.parse
import yaml
import zipfile
import calendar
import qrcode

@@ -1416,7 +1416,7 @@ def add_mirrors_to_repodict(repo_section, repodict):
                )
            )
        with mirrors_yml.open() as fp:
            mirrors_config = yaml.safe_load(fp)
            mirrors_config = ruamel.yaml.YAML(typ='safe').load(fp)
        if not isinstance(mirrors_config, list):
            msg = _('{path} is not list, but a {datatype}!')
            raise TypeError(
@@ -17,9 +17,11 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from argparse import ArgumentParser
import difflib
import re
import sys
import platform
import ruamel.yaml
import urllib.parse
from pathlib import Path

@@ -739,6 +741,43 @@ def check_certificate_pinned_binaries(app):
    return


def lint_config(arg):
    path = Path(arg)
    passed = True
    yamllintresult = common.run_yamllint(path)
    if yamllintresult:
        print(yamllintresult)
        passed = False

    with path.open() as fp:
        data = ruamel.yaml.YAML(typ='safe').load(fp)
    common.config_type_check(arg, data)

    if path.name == 'mirrors.yml':
        import pycountry

        valid_country_codes = [c.alpha_2 for c in pycountry.countries]
        for mirror in data:
            code = mirror.get('countryCode')
            if code and code not in valid_country_codes:
                passed = False
                msg = _(
                    '{path}: "{code}" is not a valid ISO_3166-1 alpha-2 country code!'
                ).format(path=str(path), code=code)
                if code.upper() in valid_country_codes:
                    m = [code.upper()]
                else:
                    m = difflib.get_close_matches(
                        code.upper(), valid_country_codes, 2, 0.5
                    )
                if m:
                    msg += ' '
                    msg += _('Did you mean {code}?').format(code=', '.join(sorted(m)))
                print(msg)

    return passed


def main():
    global config, options

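The countryCode validation above reduces to a pycountry lookup plus a difflib suggestion; a standalone sketch (editor's illustration, assuming pycountry is installed, with a made-up code):

    import difflib
    import pycountry

    valid_codes = [c.alpha_2 for c in pycountry.countries]
    code = 'wv'  # hypothetical countryCode from a mirrors.yml entry
    if code not in valid_codes:
        suggestions = (
            [code.upper()]
            if code.upper() in valid_codes
            else difflib.get_close_matches(code.upper(), valid_codes, 2, 0.5)
        )
        print(f'"{code}" is not a valid ISO 3166-1 alpha-2 country code', suggestions)
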
@@ -772,6 +811,38 @@ def main():
    load_antiFeatures_config()
    load_categories_config()

    if options.force_yamllint:
        import yamllint # throw error if it is not installed

        yamllint # make pyflakes ignore this

    paths = list()
    for arg in options.appid:
        if (
            arg == 'config.yml'
            or Path(arg).parent.name == 'config'
            or Path(arg).parent.parent.name == 'config' # localized
        ):
            paths.append(arg)

    failed = 0
    if paths:
        for path in paths:
            options.appid.remove(path)
            if not lint_config(path):
                failed += 1
        # an empty list of appids means check all apps, avoid that if files were given
        if not options.appid:
            sys.exit(failed)

    if not lint_metadata(options):
        failed += 1

    if failed:
        sys.exit(failed)


def lint_metadata(options):
    # Get all apps...
    allapps = metadata.read_metadata(options.appid)
    apps = common.read_app_args(options.appid, allapps, False)
@@ -791,11 +862,6 @@ def main():
        if app.Disabled:
            continue

        if options.force_yamllint:
            import yamllint # throw error if it is not installed

            yamllint # make pyflakes ignore this

        # only run yamllint when linting individual apps.
        if options.appid or options.force_yamllint:
            # run yamllint on app metadata

@@ -856,8 +922,7 @@ def main():
            anywarns = True
            print("%s: %s" % (appid, warn))

    if anywarns:
        sys.exit(1)
    return not anywarns


# A compiled, public domain list of official SPDX license tags. generated
@@ -49,10 +49,10 @@ from binascii import hexlify

from . import _
from . import common
from . import index
from . import metadata
from .common import DEFAULT_LOCALE
from .exception import BuildException, FDroidException, VerificationException
import fdroidserver.index

from PIL import Image, PngImagePlugin

@@ -524,6 +524,94 @@ def insert_obbs(repodir, apps, apks):
            break


VERSION_STRING_RE = re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)$')


def version_string_to_int(version):
    """
    Convert semver version designation to version code.

    Approximately convert a [Major].[Minor].[Patch] version string
    consisting of numeric characters (0-9) and periods to a number. The
    exponents are chosen such that it still fits in the 64bit JSON/Android range.
    """
    m = VERSION_STRING_RE.match(version)
    if not m:
        raise ValueError(f"invalid version string '{version}'")
    major = int(m.group(1))
    minor = int(m.group(2))
    patch = int(m.group(3))
    return major * 10**12 + minor * 10**6 + patch

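A worked example of the encoding above (editor's note): each of major, minor, and patch gets six decimal digits, so "1.2.3" maps to 1*10**12 + 2*10**6 + 3, which is the value the new test below asserts.

    major, minor, patch = (int(part) for part in '1.2.3'.split('.'))
    assert major * 10**12 + minor * 10**6 + patch == 1000002000003
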
def parse_ipa(ipa_path, file_size, sha256):
    from biplist import readPlist

    ipa = {
        "apkName": os.path.basename(ipa_path),
        "hash": sha256,
        "hashType": "sha256",
        "size": file_size,
    }

    with zipfile.ZipFile(ipa_path) as ipa_zip:
        for info in ipa_zip.infolist():
            if re.match("Payload/[^/]*.app/Info.plist", info.filename):
                with ipa_zip.open(info) as plist_file:
                    plist = readPlist(plist_file)
                ipa["packageName"] = plist["CFBundleIdentifier"]
                # https://developer.apple.com/documentation/bundleresources/information_property_list/cfbundleshortversionstring
                ipa["versionCode"] = version_string_to_int(plist["CFBundleShortVersionString"])
                ipa["versionName"] = plist["CFBundleShortVersionString"]
    return ipa

def scan_repo_for_ipas(apkcache, repodir, knownapks):
    """Scan for IPA files in a given repo directory.

    Parameters
    ----------
    apkcache
        cache dictionary containing cached file infos from previous runs
    repodir
        repo directory to scan
    knownapks
        list of all known files, as per metadata.read_metadata

    Returns
    -------
    ipas
        list of file infos for ipa files in ./repo folder
    cachechanged
        true if new ipa files were found and added to `apkcache`
    """
    cachechanged = False
    ipas = []
    for ipa_path in glob.glob(os.path.join(repodir, '*.ipa')):
        ipa_name = os.path.basename(ipa_path)

        file_size = os.stat(ipa_path).st_size
        if file_size == 0:
            raise FDroidException(_('{path} is zero size!')
                                  .format(path=ipa_path))

        sha256 = common.sha256sum(ipa_path)
        ipa = apkcache.get(ipa_name, {})

        if ipa.get('hash') != sha256:
            ipa = fdroidserver.update.parse_ipa(ipa_path, file_size, sha256)
            apkcache[ipa_name] = ipa
            cachechanged = True

        added = knownapks.recordapk(ipa_name, ipa['packageName'])
        if added:
            ipa['added'] = added

        ipas.append(ipa)

    return ipas, cachechanged


def translate_per_build_anti_features(apps, apks):
    """Grab the anti-features list from the build metadata.

@@ -1121,7 +1209,10 @@ def insert_localized_app_metadata(apps):


def scan_repo_files(apkcache, repodir, knownapks, use_date_from_file=False):
    """Scan a repo for all files with an extension except APK/OBB.
    """Scan a repo for all files with an extension except APK/OBB/IPA.

    This allows putting all kinds of files into repositories. E.g. Media Files,
    Zip archives, ...

    Parameters
    ----------
@@ -1138,22 +1229,29 @@ def scan_repo_files(apkcache, repodir, knownapks, use_date_from_file=False):
    repo_files = []
    repodir = repodir.encode()
    for name in os.listdir(repodir):
        # skip files based on file extensions, that are handled elsewhere
        file_extension = common.get_file_extension(name)
        if file_extension in ('apk', 'obb'):
        if file_extension in ('apk', 'obb', 'ipa'):
            continue

        # skip source tarballs generated by fdroidserver
        filename = os.path.join(repodir, name)
        name_utf8 = name.decode()
        if filename.endswith(b'_src.tar.gz'):
            logging.debug(_('skipping source tarball: {path}')
                          .format(path=filename.decode()))
            continue

        # skip all other files generated by fdroidserver
        if not common.is_repo_file(filename):
            continue

        stat = os.stat(filename)
        if stat.st_size == 0:
            raise FDroidException(_('{path} is zero size!')
                                  .format(path=filename))

        # load file infos from cache if not stale
        shasum = common.sha256sum(filename)
        usecache = False
        if name_utf8 in apkcache:

@@ -1166,6 +1264,7 @@ def scan_repo_files(apkcache, repodir, knownapks, use_date_from_file=False):
                logging.debug(_("Ignoring stale cache data for {apkfilename}")
                              .format(apkfilename=name_utf8))

        # scan file if info wasn't in cache
        if not usecache:
            logging.debug(_("Processing {apkfilename}").format(apkfilename=name_utf8))
            repo_file = collections.OrderedDict()
@@ -2253,6 +2352,11 @@ def main():
                                                  options.use_date_from_apk)
        cachechanged = cachechanged or fcachechanged
        apks += files

        ipas, icachechanged = scan_repo_for_ipas(apkcache, repodirs[0], knownapks)
        cachechanged = cachechanged or icachechanged
        apks += ipas

        appid_has_apks = set()
        appid_has_repo_files = set()
        remove_apks = []
@@ -2328,7 +2432,7 @@ def main():
    if len(repodirs) > 1:
        archive_old_apks(apps, apks, archapks, repodirs[0], repodirs[1], config['archive_older'])
        archived_apps = prepare_apps(apps, archapks, repodirs[1])
        index.make(archived_apps, archapks, repodirs[1], True)
        fdroidserver.index.make(archived_apps, archapks, repodirs[1], True)

    repoapps = prepare_apps(apps, apks, repodirs[0])

@@ -2341,13 +2445,13 @@ def main():
            app_dict = dict()
            app_dict[appid] = app
            if os.path.isdir(repodir):
                index.make(app_dict, apks, repodir, False)
                fdroidserver.index.make(app_dict, apks, repodir, False)
            else:
                logging.info(_('Skipping index generation for {appid}').format(appid=appid))
        return

    # Make the index for the main repo...
    index.make(repoapps, apks, repodirs[0], False)
    fdroidserver.index.make(repoapps, apks, repodirs[0], False)

    git_remote = config.get('binary_transparency_remote')
    if git_remote or os.path.isdir(os.path.join('binary_transparency', '.git')):
setup.py
@@ -108,7 +108,11 @@ setup(
        'sdkmanager >= 0.6.4',
        'yamllint',
    ],
    # Some requires are only needed for very limited cases:
    # * biplist is only used for parsing Apple .ipa files
    # * pycountry is only for linting config/mirrors.yml
    extras_require={
        'optional': ['biplist', 'pycountry'],
        'test': ['pyjks', 'html5print'],
        'docs': [
            'sphinx',
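Because biplist and pycountry live in the new 'optional' extra (installable with `pip install fdroidserver[optional]`), the code and tests in this commit import them lazily; a minimal sketch of that guarded-import pattern (editor's illustration):

    try:
        import biplist  # only needed for parsing Apple .ipa files
    except ImportError:
        biplist = None  # .ipa support is simply unavailable without the extra
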
tests/com.fake.IpaApp_1000000000001.ipa (new binary file, not shown)
@@ -1655,8 +1655,8 @@ class CommonTest(unittest.TestCase):
        fdroidserver.common.options.quiet = False
        fdroidserver.common.config = {}
        fdroidserver.common.config['serverwebroot'] = [
            'example.com:/var/www/fdroid/',
            'example.com:/var/www/fbot/',
            {'url': 'example.com:/var/www/fdroid/'},
            {'url': 'example.com:/var/www/fbot/'},
        ]
        fdroidserver.common.config['deploy_process_logs'] = True
        fdroidserver.common.config['identity_file'] = 'ssh/id_rsa'

@@ -1718,7 +1718,7 @@ class CommonTest(unittest.TestCase):

        fdroidserver.common.options = mock.Mock()
        fdroidserver.common.config = {}
        fdroidserver.common.config['serverwebroot'] = [fakeserver]
        fdroidserver.common.config['serverwebroot'] = [{'url': fakeserver}]
        fdroidserver.common.config['identity_file'] = 'ssh/id_rsa'

        def assert_subprocess_call(cmd):

@@ -2838,6 +2838,60 @@ class CommonTest(unittest.TestCase):
        with self.assertRaises(TypeError):
            fdroidserver.common.load_localized_config(CATEGORIES_CONFIG_NAME, 'repo')

    def test_config_type_check_config_yml_dict(self):
        fdroidserver.common.config_type_check('config.yml', dict())

    def test_config_type_check_config_yml_list(self):
        with self.assertRaises(TypeError):
            fdroidserver.common.config_type_check('config.yml', list())

    def test_config_type_check_config_yml_set(self):
        with self.assertRaises(TypeError):
            fdroidserver.common.config_type_check('config.yml', set())

    def test_config_type_check_config_yml_str(self):
        with self.assertRaises(TypeError):
            fdroidserver.common.config_type_check('config.yml', str())

    def test_config_type_check_mirrors_list(self):
        fdroidserver.common.config_type_check('config/mirrors.yml', list())

    def test_config_type_check_mirrors_dict(self):
        with self.assertRaises(TypeError):
            fdroidserver.common.config_type_check('config/mirrors.yml', dict())

    def test_config_type_check_mirrors_set(self):
        with self.assertRaises(TypeError):
            fdroidserver.common.config_type_check('config/mirrors.yml', set())

    def test_config_type_check_mirrors_str(self):
        with self.assertRaises(TypeError):
            fdroidserver.common.config_type_check('config/mirrors.yml', str())

    def test_config_serverwebroot_str(self):
        os.chdir(self.testdir)
        Path('config.yml').write_text("""serverwebroot: 'foo@example.com:/var/www'""")
        self.assertEqual(
            [{'url': 'foo@example.com:/var/www/'}],
            fdroidserver.common.read_config()['serverwebroot'],
        )

    def test_config_serverwebroot_list(self):
        os.chdir(self.testdir)
        Path('config.yml').write_text("""serverwebroot:\n - foo@example.com:/var/www""")
        self.assertEqual(
            [{'url': 'foo@example.com:/var/www/'}],
            fdroidserver.common.read_config()['serverwebroot'],
        )

    def test_config_serverwebroot_dict(self):
        os.chdir(self.testdir)
        Path('config.yml').write_text("""serverwebroot:\n - url: 'foo@example.com:/var/www'""")
        self.assertEqual(
            [{'url': 'foo@example.com:/var/www/'}],
            fdroidserver.common.read_config()['serverwebroot'],
        )


if __name__ == "__main__":
    os.chdir(os.path.dirname(__file__))
@@ -32,29 +32,76 @@ class DeployTest(unittest.TestCase):
        self._td = mkdtemp()
        self.testdir = self._td.name

        fdroidserver.deploy.options = mock.Mock()
        fdroidserver.deploy.config = {}

    def tearDown(self):
        self._td.cleanup()

    def test_update_serverwebroots_bad_None(self):
        with self.assertRaises(TypeError):
            fdroidserver.deploy.update_serverwebroots(None, 'repo')

    def test_update_serverwebroots_bad_int(self):
        with self.assertRaises(TypeError):
            fdroidserver.deploy.update_serverwebroots(9, 'repo')

    def test_update_serverwebroots_bad_float(self):
        with self.assertRaises(TypeError):
            fdroidserver.deploy.update_serverwebroots(1.0, 'repo')

    def test_update_serverwebroots(self):
        """rsync works with file paths, so this test uses paths for the URLs"""
        os.chdir(self.testdir)
        repo = Path('repo')
        repo.mkdir()
        fake_apk = repo / 'fake.apk'
        with fake_apk.open('w') as fp:
            fp.write('not an APK, but has the right filename')
        url0 = Path('url0/fdroid')
        url0.mkdir(parents=True)
        url1 = Path('url1/fdroid')
        url1.mkdir(parents=True)

        dest_apk0 = url0 / fake_apk
        dest_apk1 = url1 / fake_apk
        self.assertFalse(dest_apk0.is_file())
        self.assertFalse(dest_apk1.is_file())
        fdroidserver.deploy.update_serverwebroots(
            [
                {'url': str(url0)},
                {'url': str(url1)},
            ],
            str(repo),
        )
        self.assertTrue(dest_apk0.is_file())
        self.assertTrue(dest_apk1.is_file())

    def test_update_serverwebroots_url_does_not_end_with_fdroid(self):
        with self.assertRaises(SystemExit):
            fdroidserver.deploy.update_serverwebroots([{'url': 'url'}], 'repo')

    def test_update_serverwebroots_bad_ssh_url(self):
        with self.assertRaises(SystemExit):
            fdroidserver.deploy.update_serverwebroots(
                [{'url': 'f@b.ar::/path/to/fdroid'}], 'repo'
            )

    def test_update_serverwebroots_unsupported_ssh_url(self):
        with self.assertRaises(SystemExit):
            fdroidserver.deploy.update_serverwebroots([{'url': 'ssh://nope'}], 'repo')

    def test_update_serverwebroot(self):
        """rsync works with file paths, so this test uses paths for the URLs"""
        os.chdir(self.testdir)
        repo = Path('repo')
        repo.mkdir(parents=True)
        fake_apk = repo / 'fake.apk'
        with fake_apk.open('w') as fp:
            fp.write('not an APK, but has the right filename')
        serverwebroot = Path('serverwebroot')
        serverwebroot.mkdir()
        url = Path('url')
        url.mkdir()

        # setup parameters for this test run
        fdroidserver.deploy.options.identity_file = None
        fdroidserver.deploy.config['make_current_version_link'] = False

        dest_apk = Path(serverwebroot) / fake_apk
        dest_apk = url / fake_apk
        self.assertFalse(dest_apk.is_file())
        fdroidserver.deploy.update_serverwebroot(str(serverwebroot), 'repo')
        fdroidserver.deploy.update_serverwebroot({'url': str(url)}, 'repo')
        self.assertTrue(dest_apk.is_file())

    @mock.patch.dict(os.environ, clear=True)

@@ -66,13 +113,13 @@ class DeployTest(unittest.TestCase):

    def test_update_serverwebroot_make_cur_version_link(self):
        # setup parameters for this test run
        fdroidserver.deploy.options = mock.Mock()
        fdroidserver.deploy.options.no_checksum = True
        fdroidserver.deploy.options.identity_file = None
        fdroidserver.deploy.options.verbose = False
        fdroidserver.deploy.options.quiet = True
        fdroidserver.deploy.options.identity_file = None
        fdroidserver.deploy.config['make_current_version_link'] = True
        serverwebroot = "example.com:/var/www/fdroid"
        fdroidserver.deploy.config = {'make_current_version_link': True}
        url = "example.com:/var/www/fdroid"
        repo_section = 'repo'

        # setup function for asserting subprocess.call invocations

@@ -123,7 +170,7 @@ class DeployTest(unittest.TestCase):
                        '--safe-links',
                        '--quiet',
                        'repo',
                        serverwebroot,
                        url,
                    ],
                )
            elif call_iteration == 2:

@@ -152,18 +199,19 @@ class DeployTest(unittest.TestCase):
        os.symlink('repo/com.example.sym.apk.asc', 'Sym.apk.asc')
        os.symlink('repo/com.example.sym.apk.sig', 'Sym.apk.sig')
        with mock.patch('subprocess.call', side_effect=update_server_webroot_call):
            fdroidserver.deploy.update_serverwebroot(serverwebroot, repo_section)
            fdroidserver.deploy.update_serverwebroot({'url': url}, repo_section)
        self.assertEqual(call_iteration, 3, 'expected 3 invocations of subprocess.call')

    def test_update_serverwebroot_with_id_file(self):
        # setup parameters for this test run
        fdroidserver.deploy.options.no_chcksum = False
        fdroidserver.deploy.options = mock.Mock()
        fdroidserver.deploy.options.identity_file = None
        fdroidserver.deploy.options.no_checksum = True
        fdroidserver.deploy.options.verbose = True
        fdroidserver.deploy.options.quiet = False
        fdroidserver.deploy.options.identity_file = None
        fdroidserver.deploy.config['identity_file'] = './id_rsa'
        fdroidserver.deploy.config['make_current_version_link'] = False
        serverwebroot = "example.com:/var/www/fdroid"
        fdroidserver.deploy.config = {'identity_file': './id_rsa'}
        url = "example.com:/var/www/fdroid"
        repo_section = 'archive'

        # setup function for asserting subprocess.call invocations

@@ -204,7 +252,7 @@ class DeployTest(unittest.TestCase):
                        '--exclude',
                        'archive/index.xml',
                        'archive',
                        serverwebroot,
                        url,
                    ],
                )
            elif call_iteration == 1:

@@ -220,7 +268,7 @@ class DeployTest(unittest.TestCase):
                        'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i '
                        + fdroidserver.deploy.config['identity_file'],
                        'archive',
                        serverwebroot,
                        url,
                    ],
                )
            else:

@@ -229,7 +277,7 @@ class DeployTest(unittest.TestCase):
            return 0

        with mock.patch('subprocess.call', side_effect=update_server_webroot_call):
            fdroidserver.deploy.update_serverwebroot(serverwebroot, repo_section)
            fdroidserver.deploy.update_serverwebroot({'url': url}, repo_section)
        self.assertEqual(call_iteration, 2, 'expected 2 invocations of subprocess.call')

    @unittest.skipIf(
@@ -65,7 +65,7 @@ if not os.path.isdir('metadata'):
    sys.exit(1)

repo = git.Repo(localmodule)
savedir = os.path.join('metadata', 'dump_' + repo.git.describe())
savedir = os.path.join('metadata', 'dump_' + repo.git.rev_parse('HEAD'))
if not os.path.isdir(savedir):
    os.mkdir(savedir)

tests/get-country-region-data.py (new executable file)
@@ -0,0 +1,47 @@
#!/usr/bin/env python3
#
# This generates a list of ISO_3166-1 alpha 2 country codes for use in lint.

import collections
import os
import re
import requests
import requests_cache
import sys
import tempfile


def main():
    # we want all the data
    url = 'https://api.worldbank.org/v2/country?format=json&per_page=500'
    r = requests.get(url, timeout=30)
    data = r.json()
    if data[0]['pages'] != 1:
        print(
            'ERROR: %d pages in data, this script only reads one page!'
            % data[0]['pages']
        )
        sys.exit(1)

    iso2Codes = set()
    ISO3166_1_alpha_2_codes = set()
    names = dict()
    regions = collections.defaultdict(set)
    for country in data[1]:
        iso2Code = country['iso2Code']
        iso2Codes.add(iso2Code)
        if country['region']['value'] == 'Aggregates':
            continue
        if re.match(r'[A-Z][A-Z]', iso2Code):
            ISO3166_1_alpha_2_codes.add(iso2Code)
            names[iso2Code] = country['name']
            regions[country['region']['value']].add(country['name'])
    for code in sorted(ISO3166_1_alpha_2_codes):
        print(f" '{code}', # " + names[code])


if __name__ == "__main__":
    requests_cache.install_cache(
        os.path.join(tempfile.gettempdir(), os.path.basename(__file__) + '.cache')
    )
    main()
@@ -5,6 +5,7 @@

import logging
import optparse
import os
import ruamel.yaml
import shutil
import sys
import tempfile

@@ -368,6 +369,75 @@ class LintTest(unittest.TestCase):
        app = fdroidserver.metadata.App({'Categories': ['bar']})
        self.assertEqual(0, len(list(fdroidserver.lint.check_categories(app))))

    def test_lint_config_basic_mirrors_yml(self):
        os.chdir(self.testdir)
        yaml = ruamel.yaml.YAML(typ='safe')
        with Path('mirrors.yml').open('w') as fp:
            yaml.dump([{'url': 'https://example.com/fdroid/repo'}], fp)
        self.assertTrue(fdroidserver.lint.lint_config('mirrors.yml'))

    def test_lint_config_mirrors_yml_kenya_countryCode(self):
        os.chdir(self.testdir)
        yaml = ruamel.yaml.YAML(typ='safe')
        with Path('mirrors.yml').open('w') as fp:
            yaml.dump([{'url': 'https://foo.com/fdroid/repo', 'countryCode': 'KE'}], fp)
        self.assertTrue(fdroidserver.lint.lint_config('mirrors.yml'))

    def test_lint_config_mirrors_yml_invalid_countryCode(self):
        """WV is "indeterminately reserved" so it should never be used."""
        os.chdir(self.testdir)
        yaml = ruamel.yaml.YAML(typ='safe')
        with Path('mirrors.yml').open('w') as fp:
            yaml.dump([{'url': 'https://foo.com/fdroid/repo', 'countryCode': 'WV'}], fp)
        self.assertFalse(fdroidserver.lint.lint_config('mirrors.yml'))

    def test_lint_config_mirrors_yml_alpha3_countryCode(self):
        """Only ISO 3166-1 alpha 2 are supported"""
        os.chdir(self.testdir)
        yaml = ruamel.yaml.YAML(typ='safe')
        with Path('mirrors.yml').open('w') as fp:
            yaml.dump([{'url': 'https://de.com/fdroid/repo', 'countryCode': 'DEU'}], fp)
        self.assertFalse(fdroidserver.lint.lint_config('mirrors.yml'))

    def test_lint_config_mirrors_yml_one_invalid_countryCode(self):
        """WV is "indeterminately reserved" so it should never be used."""
        os.chdir(self.testdir)
        yaml = ruamel.yaml.YAML(typ='safe')
        with Path('mirrors.yml').open('w') as fp:
            yaml.dump(
                [
                    {'url': 'https://bar.com/fdroid/repo', 'countryCode': 'BA'},
                    {'url': 'https://foo.com/fdroid/repo', 'countryCode': 'FO'},
                    {'url': 'https://wv.com/fdroid/repo', 'countryCode': 'WV'},
                ],
                fp,
            )
        self.assertFalse(fdroidserver.lint.lint_config('mirrors.yml'))

    def test_lint_config_bad_mirrors_yml_dict(self):
        os.chdir(self.testdir)
        Path('mirrors.yml').write_text('baz: [foo, bar]\n')
        with self.assertRaises(TypeError):
            fdroidserver.lint.lint_config('mirrors.yml')

    def test_lint_config_bad_mirrors_yml_float(self):
        os.chdir(self.testdir)
        Path('mirrors.yml').write_text('1.0\n')
        with self.assertRaises(TypeError):
            fdroidserver.lint.lint_config('mirrors.yml')

    def test_lint_config_bad_mirrors_yml_int(self):
        os.chdir(self.testdir)
        Path('mirrors.yml').write_text('1\n')
        with self.assertRaises(TypeError):
            fdroidserver.lint.lint_config('mirrors.yml')

    def test_lint_config_bad_mirrors_yml_str(self):
        os.chdir(self.testdir)
        Path('mirrors.yml').write_text('foo\n')
        with self.assertRaises(TypeError):
            fdroidserver.lint.lint_config('mirrors.yml')


class LintAntiFeaturesTest(unittest.TestCase):
    def setUp(self):
@@ -167,12 +167,21 @@ class UpdateTest(unittest.TestCase):
        fdroidserver.update.insert_localized_app_metadata(apps)

        appdir = os.path.join('repo', 'info.guardianproject.urzip', 'en-US')
        self.assertTrue(os.path.isfile(os.path.join(
        self.assertTrue(
            os.path.isfile(
                os.path.join(
                    appdir, 'icon_NJXNzMcyf-v9i5a1ElJi0j9X1LvllibCa48xXYPlOqQ=.png'
                )
            )
        )
        self.assertTrue(
            os.path.isfile(
                os.path.join(
                    appdir,
            'icon_NJXNzMcyf-v9i5a1ElJi0j9X1LvllibCa48xXYPlOqQ=.png')))
        self.assertTrue(os.path.isfile(os.path.join(
            appdir,
            'featureGraphic_GFRT5BovZsENGpJq1HqPODGWBRPWQsx25B95Ol5w_wU=.png')))
                    'featureGraphic_GFRT5BovZsENGpJq1HqPODGWBRPWQsx25B95Ol5w_wU=.png',
                )
            )
        )

        self.assertEqual(6, len(apps))
        for packageName, app in apps.items():

@@ -1894,7 +1903,10 @@ class UpdateTest(unittest.TestCase):
        with open('repo/index-v2.json') as fp:
            index = json.load(fp)
        self.assertEqual(
            {'System': {'name': {'en-US': 'System Apps'}}, 'Time': {'name': {'en-US': 'Time'}}},
            {
                'System': {'name': {'en-US': 'System Apps'}},
                'Time': {'name': {'en-US': 'Time'}},
            },
            index['repo'][CATEGORIES_CONFIG_NAME],
        )

@@ -1922,6 +1934,111 @@ class UpdateTest(unittest.TestCase):
            index['repo'][CATEGORIES_CONFIG_NAME],
        )

    def test_parse_ipa(self):
        try:
            import biplist # Fedora does not have a biplist package

            biplist # silence the linters
        except ImportError as e:
            self.skipTest(str(e))
        ipa_path = os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            'com.fake.IpaApp_1000000000001.ipa',
        )
        result = fdroidserver.update.parse_ipa(ipa_path, 'fake_size', 'fake_sha')
        self.maxDiff = None
        self.assertDictEqual(
            result,
            {
                'apkName': 'com.fake.IpaApp_1000000000001.ipa',
                'hash': 'fake_sha',
                'hashType': 'sha256',
                'packageName': 'org.onionshare.OnionShare',
                'size': 'fake_size',
                'versionCode': 1000000000001,
                'versionName': '1.0.1',
            },
        )


class TestUpdateVersionStringToInt(unittest.TestCase):
    def test_version_string_to_int(self):
        self.assertEqual(
            fdroidserver.update.version_string_to_int("1.2.3"), 1000002000003
        )
        self.assertEqual(fdroidserver.update.version_string_to_int("0.0.0003"), 3)
        self.assertEqual(fdroidserver.update.version_string_to_int("0.0.0"), 0)
        self.assertEqual(
            fdroidserver.update.version_string_to_int("4321.321.21"), 4321000321000021
        )
        self.assertEqual(
            fdroidserver.update.version_string_to_int("18446744.073709.551615"),
            18446744073709551615,
        )

    def test_version_string_to_int_value_errors(self):
        with self.assertRaises(ValueError):
            fdroidserver.update.version_string_to_int("1.2.3a")
        with self.assertRaises(ValueError):
            fdroidserver.update.version_string_to_int("asdfasdf")
        with self.assertRaises(ValueError):
            fdroidserver.update.version_string_to_int("1.2.-3")
        with self.assertRaises(ValueError):
            fdroidserver.update.version_string_to_int("-1.2.-3")
        with self.assertRaises(ValueError):
            fdroidserver.update.version_string_to_int("0.0.0x3")


class TestScanRepoForIpas(unittest.TestCase):
    def setUp(self):
        self.maxDiff = None

    def test_scan_repo_for_ipas_no_cache(self):
        self.maxDiff = None
        with tempfile.TemporaryDirectory() as tmpdir, TmpCwd(tmpdir):
            os.mkdir("repo")
            with open('repo/abc.Def_123.ipa', 'w') as f:
                f.write('abc')
            with open('repo/xyz.XXX_123.ipa', 'w') as f:
                f.write('xyz')

            apkcache = mock.MagicMock()
            # apkcache['a'] = 1
            repodir = "repo"
            knownapks = mock.MagicMock()

            def mocked_parse(p, s, c):
                # pylint: disable=unused-argument
                return {'packageName': 'abc' if 'abc' in p else 'xyz'}

            with mock.patch('fdroidserver.update.parse_ipa', mocked_parse):
                ipas, checkchanged = fdroidserver.update.scan_repo_for_ipas(
                    apkcache, repodir, knownapks
                )

            self.assertEqual(checkchanged, True)
            self.assertEqual(len(ipas), 2)
            package_names_in_ipas = [x['packageName'] for x in ipas]
            self.assertTrue('abc' in package_names_in_ipas)
            self.assertTrue('xyz' in package_names_in_ipas)

            apkcache_setter_package_name = [
                x.args[1]['packageName'] for x in apkcache.__setitem__.mock_calls
            ]
            self.assertTrue('abc' in apkcache_setter_package_name)
            self.assertTrue('xyz' in apkcache_setter_package_name)
            self.assertEqual(apkcache.__setitem__.call_count, 2)

            knownapks.recordapk.call_count = 2
            self.assertTrue(
                unittest.mock.call('abc.Def_123.ipa', 'abc')
                in knownapks.recordapk.mock_calls
            )
            self.assertTrue(
                unittest.mock.call('xyz.XXX_123.ipa', 'xyz')
                in knownapks.recordapk.mock_calls
            )


if __name__ == "__main__":
    os.chdir(os.path.dirname(__file__))
|
||||
|
||||
newSuite = unittest.TestSuite()
|
||||
newSuite.addTest(unittest.makeSuite(UpdateTest))
|
||||
newSuite.addTest(unittest.makeSuite(TestUpdateVersionStringToInt))
|
||||
newSuite.addTest(unittest.makeSuite(TestScanRepoForIpas))
|
||||
unittest.main(failfast=False)
|
||||
|