1
0
mirror of https://gitlab.com/fdroid/fdroidserver.git synced 2024-10-02 09:10:11 +02:00

rewrite docstrings to match numpy style guide

This commit is contained in:
Benedikt Brückmann 2021-06-07 12:26:57 +02:00
parent d168b9c05b
commit 1e943a22df
22 changed files with 559 additions and 396 deletions

View File

@ -425,6 +425,7 @@ Build documentation:
image: python:3.9-buster
script:
- pip install -e .[docs]
- pydocstyle fdroidserver
- cd docs
- sphinx-apidoc -o ./source ../fdroidserver -M -e
- sphinx-autogen -o generated source/*.rst

View File

@ -69,9 +69,13 @@ def print_help(available_plugins=None):
def preparse_plugin(module_name, module_dir):
"""simple regex based parsing for plugin scripts,
so we don't have to import them when we just need the summary,
but not plan on executing this particular plugin."""
"""No summary.
Simple regex based parsing for plugin scripts.
So we don't have to import them when we just need the summary,
but not plan on executing this particular plugin.
"""
if '.' in module_name:
raise ValueError("No '.' allowed in fdroid plugin modules: '{}'"
.format(module_name))

View File

@ -13,8 +13,7 @@
#
# -- ; }}}1
"""
copy/extract/patch apk signatures
"""Copy/extract/patch apk signatures.
apksigcopier is a tool for copying APK signatures from a signed APK to an
unsigned one (in order to verify reproducible builds).
@ -129,8 +128,7 @@ class APKZipInfo(ReproducibleZipInfo):
def noautoyes(value):
"""
Turns False into NO, None into AUTO, and True into YES.
"""Turn False into NO, None into AUTO, and True into YES.
>>> from apksigcopier import noautoyes, NO, AUTO, YES
>>> noautoyes(False) == NO == noautoyes(NO)
@ -152,7 +150,8 @@ def noautoyes(value):
def is_meta(filename):
"""
"""No summary.
Returns whether filename is a v1 (JAR) signature file (.SF), signature block
file (.RSA, .DSA, or .EC), or manifest (MANIFEST.MF).
@ -162,7 +161,7 @@ def is_meta(filename):
def exclude_from_copying(filename):
"""fdroidserver always wants JAR Signature files to be excluded"""
"""Fdroidserver always wants JAR Signature files to be excluded."""
return is_meta(filename)
@ -198,17 +197,17 @@ def exclude_from_copying(filename):
# FIXME: makes certain assumptions and doesn't handle all valid ZIP files!
def copy_apk(unsigned_apk, output_apk):
"""
Copy APK like apksigner would, excluding files matched by
exclude_from_copying().
Returns max date_time.
"""Copy APK like apksigner would, excluding files matched by exclude_from_copying().
The following global variables (which default to False), can be set to
override the default behaviour:
* set exclude_all_meta=True to exclude all metadata files
* set copy_extra_bytes=True to copy extra bytes after data (e.g. a v2 sig)
Returns
-------
max date_time.
"""
with zipfile.ZipFile(unsigned_apk, "r") as zf:
infos = zf.infolist()
@ -410,9 +409,10 @@ def patch_v2_sig(extracted_v2_sig, output_apk):
def patch_apk(extracted_meta, extracted_v2_sig, unsigned_apk, output_apk):
"""
Patch extracted_meta + extracted_v2_sig (if not None) onto unsigned_apk and
save as output_apk.
"""Patch extracted_meta + extracted_v2_sig.
Patches extracted_meta + extracted_v2_sig (if not None)
onto unsigned_apk and save as output_apk.
"""
date_time = copy_apk(unsigned_apk, output_apk)
patch_meta(extracted_meta, output_apk, date_time=date_time)
@ -421,8 +421,7 @@ def patch_apk(extracted_meta, extracted_v2_sig, unsigned_apk, output_apk):
def do_extract(signed_apk, output_dir, v1_only=NO):
"""
Extract signatures from signed_apk and save in output_dir.
"""Extract signatures from signed_apk and save in output_dir.
The v1_only parameter controls whether the absence of a v1 signature is
considered an error or not:
@ -457,8 +456,7 @@ def do_extract(signed_apk, output_dir, v1_only=NO):
def do_patch(metadata_dir, unsigned_apk, output_apk, v1_only=NO):
"""
Patch signatures from metadata_dir onto unsigned_apk and save as output_apk.
"""Patch signatures from metadata_dir onto unsigned_apk and save as output_apk.
The v1_only parameter controls whether the absence of a v1 signature is
considered an error or not:
@ -498,8 +496,7 @@ def do_patch(metadata_dir, unsigned_apk, output_apk, v1_only=NO):
def do_copy(signed_apk, unsigned_apk, output_apk, v1_only=NO):
"""
Copy signatures from signed_apk onto unsigned_apk and save as output_apk.
"""Copy signatures from signed_apk onto unsigned_apk and save as output_apk.
The v1_only parameter controls whether the absence of a v1 signature is
considered an error or not:

View File

@ -1,9 +1,8 @@
"""
"""Simple thread based asynchronous file reader for Python.
AsynchronousFileReader
======================
Simple thread based asynchronous file reader for Python.
see https://github.com/soxofaan/asynchronousfilereader
MIT License
@ -22,10 +21,9 @@ except ImportError:
class AsynchronousFileReader(threading.Thread):
"""
Helper class to implement asynchronous reading of a file
in a separate thread. Pushes read lines on a queue to
be consumed in another thread.
"""Helper class to implement asynchronous reading of a file in a separate thread.
Pushes read lines on a queue to be consumed in another thread.
"""
def __init__(self, fd, queue=None, autostart=True):
@ -40,9 +38,7 @@ class AsynchronousFileReader(threading.Thread):
self.start()
def run(self):
"""
The body of the tread: read lines and put them on the queue.
"""
"""Read lines and put them on the queue (the body of the thread)."""
while True:
line = self._fd.readline()
if not line:
@ -50,15 +46,11 @@ class AsynchronousFileReader(threading.Thread):
self.queue.put(line)
def eof(self):
"""
Check whether there is no more content to expect.
"""
"""Check whether there is no more content to expect."""
return not self.is_alive() and self.queue.empty()
def readlines(self):
"""
Get currently available lines.
"""
"""Get currently available lines."""
while not self.queue.empty():
yield self.queue.get()

View File

@ -67,7 +67,6 @@ def build_server(app, build, vcs, build_dir, output_dir, log_dir, force):
target folder for the build result
force
"""
global buildserverid
try:
@ -325,7 +324,7 @@ def force_gradle_build_tools(build_dir, build_tools):
def transform_first_char(string, method):
"""Uses method() on the first character of string."""
"""Use method() on the first character of string."""
if len(string) == 0:
return string
if len(string) == 1:
@ -338,11 +337,10 @@ def add_failed_builds_entry(failed_builds, appid, build, entry):
def get_metadata_from_apk(app, build, apkfile):
"""get the required metadata from the built APK
"""Get the required metadata from the built APK.
versionName is allowed to be a blank string, i.e. ''
VersionName is allowed to be a blank string, i.e. ''
"""
appid, versionCode, versionName = common.get_apk_id(apkfile)
native_code = common.get_native_code(apkfile)
@ -833,8 +831,7 @@ def build_local(app, build, vcs, build_dir, output_dir, log_dir, srclib_dir, ext
def trybuild(app, build, build_dir, output_dir, log_dir, also_check_dir,
srclib_dir, extlib_dir, tmp_dir, repo_dir, vcs, test,
server, force, onserver, refresh):
"""
Build a particular version of an application, if it needs building.
"""Build a particular version of an application, if it needs building.
Parameters
----------
@ -857,7 +854,6 @@ def trybuild(app, build, build_dir, output_dir, log_dir, also_check_dir,
Boolean
True if the build was done, False if it wasn't necessary.
"""
dest_file = common.get_release_filename(app, build)
dest = os.path.join(output_dir, dest_file)
@ -890,7 +886,7 @@ def trybuild(app, build, build_dir, output_dir, log_dir, also_check_dir,
def force_halt_build(timeout):
"""Halt the currently running Vagrant VM, to be called from a Timer"""
"""Halt the currently running Vagrant VM, to be called from a Timer."""
logging.error(_('Force halting build after {0} sec timeout!').format(timeout))
timeout_event.set()
vm = vmtools.get_build_vm('builder')
@ -898,8 +894,13 @@ def force_halt_build(timeout):
def parse_commandline():
"""Parse the command line. Returns options, parser."""
"""Parse the command line.
Returns
-------
options
parser
"""
parser = argparse.ArgumentParser(usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]")
common.setup_global_opts(parser)
parser.add_argument("appid", nargs='*', help=_("application ID with optional versionCode in the form APPID[:VERCODE]"))

View File

@ -363,6 +363,7 @@ def check_gplay(app):
def try_init_submodules(app, last_build, vcs):
"""Try to init submodules if the last build entry used them.
They might have been removed from the app's repo in the meantime,
so if we can't find any submodules we continue with the updates check.
If there is any other error in initializing them then we stop the check.
@ -589,8 +590,7 @@ def checkupdates_app(app):
def status_update_json(processed, failed):
"""Output a JSON file with metadata about this run"""
"""Output a JSON file with metadata about this run."""
logging.debug(_('Outputting JSON'))
output = common.setup_status_output(start_timestamp)
if processed:

View File

@ -215,7 +215,7 @@ def _add_java_paths_to_config(pathlist, thisconfig):
def fill_config_defaults(thisconfig):
"""Fill in the global config dict with relevant defaults
"""Fill in the global config dict with relevant defaults.
For config values that have a path that can be expanded, e.g. an
env var or a ~/, this will store the original value using "_orig"
@ -480,7 +480,6 @@ def parse_human_readable_size(size):
def assert_config_keystore(config):
"""Check whether keystore is configured correctly and raise exception if not."""
nosigningkey = False
if 'repo_keyalias' not in config:
nosigningkey = True
@ -507,7 +506,7 @@ def assert_config_keystore(config):
def find_apksigner(config):
"""Searches for the best version apksigner and adds it to the config.
"""Search for the best version apksigner and adds it to the config.
Returns the best version of apksigner following this algorithm:
@ -643,7 +642,8 @@ def get_local_metadata_files():
def read_pkg_args(appid_versionCode_pairs, allow_vercodes=False):
"""
"""No summary.
Parameters
----------
appids
@ -780,8 +780,7 @@ apk_release_filename_with_sigfp = re.compile(r'(?P<appid>[a-zA-Z0-9_\.]+)_(?P<ve
def apk_parse_release_filename(apkname):
"""Parses the name of an APK file according the F-Droids APK naming
scheme and returns the tokens.
"""Parse the name of an APK file according to the F-Droid APK naming scheme.
WARNING: Returned values don't necessarily represent the APKs actual
properties, they are just parsed from the file name.
@ -823,7 +822,6 @@ def getsrcname(app, build):
def get_build_dir(app):
"""Get the dir that this app will be built in."""
if app.RepoType == 'srclib':
return Path('build/srclib') / app.Repo
@ -973,7 +971,9 @@ class vcs:
return None
def gotorevision(self, rev, refresh=True):
"""Take the local repository to a clean version of the given
"""Take the local repository to a clean version of the given revision.
Take the local repository to a clean version of the given
revision, which is specified in the VCS's native
format. Beforehand, the repository can be dirty, or even
non-existent. If the repository does already exist locally, it
@ -1030,7 +1030,9 @@ class vcs:
raise exc
def gotorevisionx(self, rev): # pylint: disable=unused-argument
"""Derived classes need to implement this.
"""No summary.
Derived classes need to implement this.
It's called once basic checking has been performed.
"""
@ -1059,7 +1061,7 @@ class vcs:
raise VCSException('getref not supported for this vcs type')
def getsrclib(self):
"""Returns the srclib (name, path) used in setting up the current revision, or None."""
"""Return the srclib (name, path) used in setting up the current revision, or None."""
return self.srclib
@ -1106,7 +1108,9 @@ class vcs_git(vcs):
envs=envs, cwd=cwd, output=output)
def checkrepo(self):
"""If the local directory exists, but is somehow not a git repository,
"""No summary.
If the local directory exists, but is somehow not a git repository,
git will traverse up the directory tree until it finds one
that is (i.e. fdroidserver) and then we'll proceed to destroy
it! This is called as a safety check.
@ -1251,7 +1255,9 @@ class vcs_gitsvn(vcs):
return ['git', 'svn', '--version']
def checkrepo(self):
"""If the local directory exists, but is somehow not a git repository,
"""No summary.
If the local directory exists, but is somehow not a git repository,
git will traverse up the directory tree until it finds one that
is (i.e. fdroidserver) and then we'll proceed to destroy it!
This is called as a safety check.
@ -1470,7 +1476,7 @@ class vcs_bzr(vcs):
return ['bzr', '--version']
def bzr(self, args, envs=dict(), cwd=None, output=True):
'''Prevent bzr from ever using SSH to avoid security vulns'''
"""Prevent bzr from ever using SSH to avoid security vulns."""
envs.update({
'BZR_SSH': 'false',
})
@ -1555,7 +1561,6 @@ def retrieve_string_singleline(app_dir, string, xmlfiles=None):
def manifest_paths(app_dir, flavours):
"""Return list of existing files that will be used to find the highest vercode."""
# TODO: Remove this in Python3.6
app_dir = str(app_dir)
possible_manifests = \
@ -1653,8 +1658,8 @@ def app_matches_packagename(app, package):
def parse_androidmanifests(paths, app):
"""
Extract some information from the AndroidManifest.xml at the given path.
"""Extract some information from the AndroidManifest.xml at the given path.
Returns (version, vercode, package), any or all of which might be None.
All values returned are strings.
@ -1662,7 +1667,6 @@ def parse_androidmanifests(paths, app):
this code assumes the files use UTF-8.
https://sites.google.com/a/android.com/tools/knownissues/encoding
"""
ignoreversions = app.UpdateCheckIgnore
ignoresearch = re.compile(ignoreversions).search if ignoreversions else None
@ -1886,7 +1890,7 @@ def get_all_gradle_and_manifests(build_dir):
def get_gradle_subdir(build_dir, paths):
"""get the subdir where the gradle build is based"""
"""Get the subdir where the gradle build is based."""
first_gradle_dir = None
for path in paths:
if not first_gradle_dir:
@ -2123,7 +2127,7 @@ gradle_version_regex = re.compile(r"[^/]*'com\.android\.tools\.build:gradle:([^\
def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=False, refresh=True):
""" Prepare the source code for a particular build.
"""Prepare the source code for a particular build.
Parameters
----------
@ -2411,7 +2415,7 @@ def natural_key(s):
def check_system_clock(dt_obj, path):
"""Check if system clock is updated based on provided date
"""Check if system clock is updated based on provided date.
If an APK has files newer than the system time, suggest updating
the system clock. This is useful for offline systems, used for
@ -2478,8 +2482,12 @@ class KnownApks:
def recordapk(self, apkName, app, default_date=None):
"""
Record an APK (if it's new, otherwise does nothing)
Returns the date it was added as a datetime instance
Record an APK (if it's new, otherwise does nothing).
Returns
-------
datetime
the date it was added as a datetime instance.
"""
if apkName not in self.apks:
if default_date is None:
@ -2490,8 +2498,9 @@ class KnownApks:
return added
def getapp(self, apkname):
"""Look up information - given the 'apkname', returns (app id, date added/None).
"""Look up information - given the 'apkname'.
Returns (app id, date added/None).
Or returns None for an unknown apk.
"""
if apkname in self.apks:
@ -2499,7 +2508,7 @@ class KnownApks:
return None
def getlatest(self, num):
"""Get the most recent 'num' apps added to the repo, as a list of package ids with the most recent first"""
"""Get the most recent 'num' apps added to the repo, as a list of package ids with the most recent first."""
apps = {}
for apk, app in self.apks.items():
appid, added = app
@ -2523,8 +2532,7 @@ def get_file_extension(filename):
def use_androguard():
"""Report if androguard is available, and config its debug logging"""
"""Report if androguard is available, and config its debug logging."""
try:
import androguard
if use_androguard.show_path:
@ -2556,7 +2564,6 @@ def ensure_final_value(packageName, arsc, value):
Resource ID instead of the actual value. This checks whether
the value is actually a resId, then performs the Android
Resource lookup as needed.
"""
if value:
returnValue = value
@ -2572,7 +2579,7 @@ def ensure_final_value(packageName, arsc, value):
def is_apk_and_debuggable(apkfile):
"""Returns True if the given file is an APK and is debuggable.
"""Return True if the given file is an APK and is debuggable.
Parse only <application android:debuggable=""> from the APK.
@ -2700,8 +2707,10 @@ def get_apk_id_aapt(apkfile):
def get_native_code(apkfile):
"""aapt checks if there are architecture folders under the lib/ folder
so we are simulating the same behaviour"""
"""Aapt checks if there are architecture folders under the lib/ folder.
We are simulating the same behaviour.
"""
arch_re = re.compile("^lib/(.*)/.*$")
archset = set()
with ZipFile(apkfile) as apk:
@ -3110,7 +3119,7 @@ def metadata_get_sigdir(appid, vercode=None):
def metadata_find_developer_signature(appid, vercode=None):
"""Tries to find the developer signature for given appid.
"""Try to find the developer signature for given appid.
This picks the first signature file found in metadata and returns its
signature.
@ -3148,7 +3157,7 @@ def metadata_find_developer_signature(appid, vercode=None):
def metadata_find_signing_files(appid, vercode):
"""Gets a list of signed manifests and signatures.
"""Get a list of signed manifests and signatures.
Parameters
----------
@ -3166,7 +3175,6 @@ def metadata_find_signing_files(appid, vercode):
References
----------
* https://docs.oracle.com/javase/tutorial/deployment/jar/intro.html
* https://source.android.com/security/apksigning/v2
* https://source.android.com/security/apksigning/v3
@ -3219,6 +3227,7 @@ class ClonedZipInfo(zipfile.ZipInfo):
cloning ZipInfo entries. https://bugs.python.org/issue43547
"""
def __init__(self, zinfo):
self.original = zinfo
for k in self.__slots__:
@ -3243,7 +3252,7 @@ def apk_has_v1_signatures(apkfile):
def apk_strip_v1_signatures(signed_apk, strip_manifest=False):
"""Removes signatures from APK.
"""Remove signatures from APK.
Parameters
----------
@ -3283,7 +3292,7 @@ def _zipalign(unsigned_apk, aligned_apk):
def apk_implant_signatures(apkpath, outpath, manifest):
"""Implants a signature from metadata into an APK.
"""Implant a signature from metadata into an APK.
Note: this changes the supplied APK in place. So copy it if you
need the original to be preserved.
@ -3297,7 +3306,6 @@ def apk_implant_signatures(apkpath, outpath, manifest):
References
----------
* https://docs.oracle.com/javase/tutorial/deployment/jar/intro.html
* https://source.android.com/security/apksigning/v2
* https://source.android.com/security/apksigning/v3
@ -3308,7 +3316,7 @@ def apk_implant_signatures(apkpath, outpath, manifest):
def apk_extract_signatures(apkpath, outdir):
"""Extracts a signature files from APK and puts them into target directory.
"""Extract signature files from an APK and put them into the target directory.
Parameters
----------
@ -3319,7 +3327,6 @@ def apk_extract_signatures(apkpath, outdir):
References
----------
* https://docs.oracle.com/javase/tutorial/deployment/jar/intro.html
* https://source.android.com/security/apksigning/v2
* https://source.android.com/security/apksigning/v3
@ -3329,9 +3336,9 @@ def apk_extract_signatures(apkpath, outdir):
def get_min_sdk_version(apk):
"""
This wraps the androguard function to always return and int and fall back to 1
if we can't get a valid minsdk version
"""Wrap the androguard function to always return an int.
Fall back to 1 if we can't get a valid minsdk version.
Parameters
----------
@ -3450,7 +3457,7 @@ def verify_apks(signed_apk, unsigned_apk, tmp_dir, v1_only=None):
def verify_jar_signature(jar):
"""Verifies the signature of a given JAR file.
"""Verify the signature of a given JAR file.
jarsigner is very shitty: unsigned JARs pass as "verified"! So
this has to turn on -strict then check for result 4, since this
@ -3627,8 +3634,9 @@ def compare_apks(apk1, apk2, tmp_dir, log_dir=None):
def set_command_in_config(command):
"""Try to find specified command in the path, if it hasn't been
manually set in config.yml. If found, it is added to the config
"""Try to find specified command in the path, if it hasn't been manually set in config.yml.
If found, it is added to the config
dict. The return value says whether the command is available.
"""
@ -3734,15 +3742,14 @@ def genkeystore(localconfig):
def get_cert_fingerprint(pubkey):
"""Generate a certificate fingerprint the same way keytool does it (but with slightly different formatting).
"""
"""Generate a certificate fingerprint the same way keytool does it (but with slightly different formatting)."""
digest = hashlib.sha256(pubkey).digest()
ret = [' '.join("%02X" % b for b in bytearray(digest))]
return " ".join(ret)
def get_certificate(signature_block_file):
"""Extracts a DER certificate from JAR Signature's "Signature Block File".
"""Extract a DER certificate from JAR Signature's "Signature Block File".
Parameters
----------
@ -4194,7 +4201,7 @@ def run_yamllint(path, indent=0):
def sha256sum(filename):
'''Calculate the sha256 of the given file'''
"""Calculate the sha256 of the given file."""
sha = hashlib.sha256()
with open(filename, 'rb') as f:
while True:
@ -4206,7 +4213,7 @@ def sha256sum(filename):
def sha256base64(filename):
'''Calculate the sha256 of the given file as URL-safe base64'''
"""Calculate the sha256 of the given file as URL-safe base64."""
hasher = hashlib.sha256()
with open(filename, 'rb') as f:
while True:
@ -4218,7 +4225,7 @@ def sha256base64(filename):
def get_ndk_version(ndk_path):
"""Get the version info from the metadata in the NDK package
"""Get the version info from the metadata in the NDK package.
Since r11, the info is nice and easy to find in
sources.properties. Before, there was a kludgey format in
@ -4238,7 +4245,7 @@ def get_ndk_version(ndk_path):
def auto_install_ndk(build):
"""auto-install the NDK in the build, this assumes its in a buildserver guest VM
"""Auto-install the NDK in the build; this assumes it's in a buildserver guest VM.
Download, verify, and install the NDK version as specified via the
"ndk:" field in the build entry. As it uncompresses the zipball,
@ -4276,11 +4283,10 @@ def auto_install_ndk(build):
def _install_ndk(ndk):
"""Install specified NDK if it is not already installed
"""Install specified NDK if it is not already installed.
Parameters
----------
ndk
The NDK version to install, either in "release" form (r21e) or
"revision" form (21.4.7075529).

View File

@ -47,14 +47,13 @@ REMOTE_HOSTNAME_REGEX = re.compile(r'\W*\w+\W+(\w+).*')
def update_awsbucket(repo_section):
'''
Upload the contents of the directory `repo_section` (including
subdirectories) to the AWS S3 "bucket". The contents of that subdir of the
"""Upload the contents of the directory `repo_section` (including subdirectories) to the AWS S3 "bucket".
The contents of that subdir of the
bucket will first be deleted.
Requires AWS credentials set in config.yml: awsaccesskeyid, awssecretkey
'''
"""
logging.debug('Syncing "' + repo_section + '" to Amazon S3 bucket "'
+ config['awsbucket'] + '"')
@ -65,7 +64,7 @@ def update_awsbucket(repo_section):
def update_awsbucket_s3cmd(repo_section):
'''upload using the CLI tool s3cmd, which provides rsync-like sync
"""Upload using the CLI tool s3cmd, which provides rsync-like sync.
The upload is done in multiple passes to reduce the chance of
interfering with an existing client-server interaction. In the
@ -74,8 +73,7 @@ def update_awsbucket_s3cmd(repo_section):
the third/last pass, the indexes are uploaded, and any removed
files are deleted from the server. The last pass is the only pass
to use a full MD5 checksum of all files to detect changes.
'''
"""
logging.debug(_('Using s3cmd to sync with: {url}')
.format(url=config['awsbucket']))
@ -142,14 +140,16 @@ def update_awsbucket_s3cmd(repo_section):
def update_awsbucket_libcloud(repo_section):
'''
"""No summary.
Upload the contents of the directory `repo_section` (including
subdirectories) to the AWS S3 "bucket". The contents of that subdir of the
subdirectories) to the AWS S3 "bucket".
The contents of that subdir of the
bucket will first be deleted.
Requires AWS credentials set in config.yml: awsaccesskeyid, awssecretkey
'''
"""
logging.debug(_('using Apache libcloud to sync with {url}')
.format(url=config['awsbucket']))
@ -280,14 +280,14 @@ def update_serverwebroot(serverwebroot, repo_section):
def sync_from_localcopy(repo_section, local_copy_dir):
'''Syncs the repo from "local copy dir" filesystem to this box
"""Sync the repo from "local copy dir" filesystem to this box.
In setups that use offline signing, this is the last step that
syncs the repo from the "local copy dir" e.g. a thumb drive to the
repo on the local filesystem. That local repo is then used to
push to all the servers that are configured.
'''
"""
logging.info('Syncing from local_copy_dir to this repo.')
# trailing slashes have a meaning in rsync which is not needed here, so
# make sure both paths have exactly one trailing slash
@ -302,13 +302,13 @@ def sync_from_localcopy(repo_section, local_copy_dir):
def update_localcopy(repo_section, local_copy_dir):
'''copy data from offline to the "local copy dir" filesystem
"""Copy data from offline to the "local copy dir" filesystem.
This updates the copy of this repo used to shuttle data from an
offline signing machine to the online machine, e.g. on a thumb
drive.
'''
"""
# local_copy_dir is guaranteed to have a trailing slash in main() below
common.local_rsync(options, repo_section, local_copy_dir)
@ -319,7 +319,7 @@ def update_localcopy(repo_section, local_copy_dir):
def _get_size(start_path='.'):
'''get size of all files in a dir https://stackoverflow.com/a/1392549'''
"""Get size of all files in a dir https://stackoverflow.com/a/1392549."""
total_size = 0
for root, dirs, files in os.walk(start_path):
for f in files:
@ -329,7 +329,7 @@ def _get_size(start_path='.'):
def update_servergitmirrors(servergitmirrors, repo_section):
'''update repo mirrors stored in git repos
"""Update repo mirrors stored in git repos.
This is a hack to use public git repos as F-Droid repos. It
recreates the git repo from scratch each time, so that there is no
@ -339,7 +339,7 @@ def update_servergitmirrors(servergitmirrors, repo_section):
For history, there is the archive section, and there is the binary
transparency log.
'''
"""
import git
from clint.textui import progress
if config.get('local_copy_dir') \
@ -623,7 +623,7 @@ def upload_apk_to_virustotal(virustotal_apikey, packageName, apkName, hash,
def push_binary_transparency(git_repo_path, git_remote):
'''push the binary transparency git repo to the specifed remote.
"""Push the binary transparency git repo to the specified remote.
If the remote is a local directory, make sure it exists, and is a
git repo. This is used to move this git repo from an offline
@ -636,7 +636,7 @@ def push_binary_transparency(git_repo_path, git_remote):
case, git_remote is a dir on the local file system, e.g. a thumb
drive.
'''
"""
import git
logging.info(_('Pushing binary transparency log to {url}')

View File

@ -33,8 +33,7 @@ start_timestamp = time.gmtime()
def status_update_json(signed):
"""Output a JSON file with metadata about this run"""
"""Output a JSON file with metadata about this run."""
logging.debug(_('Outputting JSON'))
output = common.setup_status_output(start_timestamp)
if signed:

View File

@ -49,12 +49,18 @@ def make(apps, apks, repodir, archive):
This requires properly initialized options and config objects.
:param apps: OrderedDict of apps to go into the index, each app should have
at least one associated apk
:param apks: list of apks to go into the index
:param repodir: the repo directory
:param archive: True if this is the archive repo, False if it's the
main one.
Parameters
----------
apps
OrderedDict of apps to go into the index, each app should have
at least one associated apk
apks
list of apks to go into the index
repodir
the repo directory
archive
True if this is the archive repo, False if it's the
main one.
"""
from fdroidserver.update import METADATA_VERSION
@ -583,12 +589,16 @@ def _copy_to_local_copy_dir(repodir, f):
def v1_sort_packages(packages, fdroid_signing_key_fingerprints):
"""Sorts the supplied list to ensure a deterministic sort order for
package entries in the index file. This sort-order also expresses
"""Sort the supplied list to ensure a deterministic sort order for package entries in the index file.
This sort-order also expresses
installation preference to the clients.
(First in this list = first to install)
:param packages: list of packages which need to be sorted before but into index file.
Parameters
----------
packages
list of packages which need to be sorted before being put into the index file.
"""
GROUP_DEV_SIGNED = 1
GROUP_FDROID_SIGNED = 2
@ -618,10 +628,7 @@ def v1_sort_packages(packages, fdroid_signing_key_fingerprints):
def make_v0(apps, apks, repodir, repodict, requestsdict, fdroid_signing_key_fingerprints):
"""
aka index.jar aka index.xml
"""
"""Aka index.jar aka index.xml."""
doc = Document()
def addElement(name, value, doc, parent):
@ -641,7 +648,7 @@ def make_v0(apps, apks, repodir, repodict, requestsdict, fdroid_signing_key_fing
addElement(name, value, doc, parent)
def addElementCheckLocalized(name, app, key, doc, parent, default=''):
"""Fill in field from metadata or localized block
"""Fill in field from metadata or localized block.
For name/summary/description, they can come only from the app source,
or from a dir in fdroiddata. They can be entirely missing from the
@ -652,7 +659,6 @@ def make_v0(apps, apks, repodir, repodict, requestsdict, fdroid_signing_key_fing
alpha- sort order.
"""
el = doc.createElement(name)
value = app.get(key)
lkey = key[:1].lower() + key[1:]
@ -965,9 +971,12 @@ def make_v0(apps, apks, repodir, repodict, requestsdict, fdroid_signing_key_fing
def extract_pubkey():
"""
Extracts and returns the repository's public key from the keystore.
:return: public key in hex, repository fingerprint
"""Extract and return the repository's public key from the keystore.
Returns
-------
public key in hex
repository fingerprint
"""
if 'repo_pubkey' in common.config:
pubkey = unhexlify(common.config['repo_pubkey'])
@ -991,7 +1000,7 @@ def extract_pubkey():
def get_mirror_service_urls(url):
'''Get direct URLs from git service for use by fdroidclient
"""Get direct URLs from git service for use by fdroidclient.
Via 'servergitmirrors', fdroidserver can create and push a mirror
to certain well known git services like gitlab or github. This
@ -999,8 +1008,7 @@ def get_mirror_service_urls(url):
branch in git. The files are then accessible via alternate URLs,
where they are served in their raw format via a CDN rather than
from git.
'''
"""
if url.startswith('git@'):
url = re.sub(r'^git@([^:]+):(.+)', r'https://\1/\2', url)
@ -1038,15 +1046,19 @@ def get_mirror_service_urls(url):
def download_repo_index(url_str, etag=None, verify_fingerprint=True, timeout=600):
"""Downloads and verifies index file, then returns its data.
"""Download and verify the index file, then return its data.
Downloads the repository index from the given :param url_str and
verifies the repository's fingerprint if :param verify_fingerprint
is not False.
:raises: VerificationException() if the repository could not be verified
Raises
------
VerificationException() if the repository could not be verified
:return: A tuple consisting of:
Returns
-------
A tuple consisting of:
- The index in JSON format or None if the index did not change
- The new eTag as returned by the HTTP request
@ -1077,15 +1089,18 @@ def download_repo_index(url_str, etag=None, verify_fingerprint=True, timeout=600
def get_index_from_jar(jarfile, fingerprint=None):
"""Returns the data, public key, and fingerprint from index-v1.jar
"""Return the data, public key, and fingerprint from index-v1.jar.
:param fingerprint is the SHA-256 fingerprint of signing key. Only
hex digits count, all other chars will can be discarded.
Parameters
----------
fingerprint is the SHA-256 fingerprint of signing key. Only
hex digits count, all other chars can be discarded.
:raises: VerificationException() if the repository could not be verified
Raises
------
VerificationException() if the repository could not be verified
"""
logging.debug(_('Verifying index signature:'))
common.verify_jar_signature(jarfile)
with zipfile.ZipFile(jarfile) as jar:
@ -1099,13 +1114,20 @@ def get_index_from_jar(jarfile, fingerprint=None):
def get_public_key_from_jar(jar):
"""
Get the public key and its fingerprint from a JAR file.
"""Get the public key and its fingerprint from a JAR file.
:raises: VerificationException() if the JAR was not signed exactly once
Raises
------
VerificationException() if the JAR was not signed exactly once
:param jar: a zipfile.ZipFile object
:return: the public key from the jar and its fingerprint
Parameters
----------
jar
a zipfile.ZipFile object
Returns
-------
the public key from the jar and its fingerprint
"""
# extract certificate from jar
certs = [n for n in jar.namelist() if common.SIGNATURE_BLOCK_FILE_REGEX.match(n)]

View File

@ -36,7 +36,7 @@ options = None
def disable_in_config(key, value):
'''write a key/value to the local config.yml, then comment it out'''
"""Write a key/value to the local config.yml, then comment it out."""
import yaml
with open('config.yml') as f:
data = f.read()

View File

@ -249,7 +249,11 @@ def get_lastbuild(builds):
def check_update_check_data_url(app):
"""UpdateCheckData must have a valid HTTPS URL to protect checkupdates runs."""
if app.UpdateCheckData and app.UpdateCheckMode == 'HTTP':
urlcode, codeex, urlver, verex = app.UpdateCheckData.split('|')
for url in (urlcode, urlver):
@ -503,7 +507,7 @@ def check_format(app):
def check_license_tag(app):
'''Ensure all license tags contain only valid/approved values'''
"""Ensure all license tags contain only valid/approved values."""
if config['lint_licenses'] is None:
return
if app.License not in config['lint_licenses']:
@ -555,8 +559,7 @@ def check_extlib_dir(apps):
def check_app_field_types(app):
"""Check the fields have valid data types"""
"""Check the fields have valid data types."""
for field in app.keys():
v = app.get(field)
t = metadata.fieldtype(field)
@ -599,7 +602,7 @@ def check_app_field_types(app):
def check_for_unsupported_metadata_files(basedir=""):
"""Checks whether any non-metadata files are in metadata/"""
"""Check whether any non-metadata files are in metadata/."""
basedir = Path(basedir)
global config
@ -633,8 +636,7 @@ def check_for_unsupported_metadata_files(basedir=""):
def check_current_version_code(app):
"""Check that the CurrentVersionCode is currently available"""
"""Check that the CurrentVersionCode is currently available."""
archive_policy = app.get('ArchivePolicy')
if archive_policy and archive_policy.split()[0] == "0":
return

View File

@ -44,7 +44,7 @@ VALID_USERNAME_REGEX = re.compile(r'^[a-z\d](?:[a-z\d/._-]){0,38}$', re.IGNORECA
def _warn_or_exception(value, cause=None):
'''output warning or Exception depending on -W'''
"""Output warning or Exception depending on -W."""
if warnings_action == 'ignore':
pass
elif warnings_action == 'error':
@ -326,7 +326,7 @@ class Build(dict):
return 'ant'
def ndk_path(self):
"""Returns the path to the first configured NDK or an empty string"""
"""Return the path to the first configured NDK or an empty string."""
ndk = self.ndk
if isinstance(ndk, list):
ndk = self.ndk[0]
@ -368,8 +368,7 @@ def flagtype(name):
class FieldValidator():
"""
Designates App metadata field types and checks that it matches
"""Designate App metadata field types and check that it matches.
'name' - The long name of the field type
'matching' - List of possible values or regex expression
@ -545,7 +544,7 @@ def read_srclibs():
def read_metadata(appids={}, sort_by_time=False):
"""Return a list of App instances sorted newest first
"""Return a list of App instances sorted newest first.
This reads all of the metadata files in a 'data' repository, then
builds a list of App instances from those files. The list is
@ -555,7 +554,6 @@ def read_metadata(appids={}, sort_by_time=False):
appids is a dict with appids a keys and versionCodes as values.
"""
# Always read the srclibs before the apps, since they can use a srlib as
# their source repository.
read_srclibs()
@ -723,7 +721,7 @@ def _decode_bool(s):
def parse_metadata(metadatapath):
"""parse metadata file, also checking the source repo for .fdroid.yml
"""Parse metadata file, also checking the source repo for .fdroid.yml.
If this is a metadata file from fdroiddata, it will first load the
source repo type and URL from fdroiddata, then read .fdroid.yml if
@ -777,7 +775,7 @@ def parse_metadata(metadatapath):
def parse_yaml_metadata(mf, app):
"""Parse the .yml file and post-process it
"""Parse the .yml file and post-process it.
Clean metadata .yml files can be used directly, but in order to
make a better user experience for people editing .yml files, there
@ -787,7 +785,6 @@ def parse_yaml_metadata(mf, app):
overall process.
"""
try:
yamldata = yaml.load(mf, Loader=SafeLoader)
except yaml.YAMLError as e:
@ -836,7 +833,7 @@ def parse_yaml_metadata(mf, app):
def post_parse_yaml_metadata(yamldata):
"""transform yaml metadata to our internal data format"""
"""Transform yaml metadata to our internal data format."""
for build in yamldata.get('Builds', []):
for flag in build.keys():
_flagtype = flagtype(flag)
@ -859,10 +856,13 @@ def post_parse_yaml_metadata(yamldata):
def write_yaml(mf, app):
"""Write metadata in yaml format.
:param mf: active file discriptor for writing
:param app: app metadata to written to the yaml file
Parameters
----------
mf
active file descriptor for writing
app
app metadata to be written to the yaml file
"""
# import rumael.yaml and check version
try:
import ruamel.yaml
@ -992,6 +992,6 @@ def write_metadata(metadatapath, app):
def add_metadata_arguments(parser):
'''add common command line flags related to metadata processing'''
"""Add common command line flags related to metadata processing."""
parser.add_argument("-W", choices=['error', 'warn', 'ignore'], default='error',
help=_("force metadata errors (default) to be warnings, or to be ignored."))

View File

@ -75,7 +75,7 @@ def main():
fingerprint = urllib.parse.parse_qs(query).get('fingerprint')
def _append_to_url_path(*args):
'''Append the list of path components to URL, keeping the rest the same'''
"""Append the list of path components to URL, keeping the rest the same."""
newpath = posixpath.join(path, *args)
return urllib.parse.urlunparse((scheme, hostname, newpath, params, query, fragment))

View File

@ -38,16 +38,22 @@ def download_file(url, local_filename=None, dldir='tmp'):
def http_get(url, etag=None, timeout=600):
"""
Downloads the content from the given URL by making a GET request.
"""Download the content from the given URL by making a GET request.
If an ETag is given, it will do a HEAD request first, to see if the content changed.
:param url: The URL to download from.
:param etag: The last ETag to be used for the request (optional).
:return: A tuple consisting of:
- The raw content that was downloaded or None if it did not change
- The new eTag as returned by the HTTP request
Parameters
----------
url
The URL to download from.
etag
The last ETag to be used for the request (optional).
Returns
-------
A tuple consisting of:
- The raw content that was downloaded or None if it did not change
- The new eTag as returned by the HTTP request
"""
# TODO disable TLS Session IDs and TLS Session Tickets
# (plain text cookie visible to anyone who can see the network traffic)

View File

@ -45,7 +45,6 @@ start_timestamp = time.gmtime()
def publish_source_tarball(apkfilename, unsigned_dir, output_dir):
"""Move the source tarball into the output directory..."""
tarfilename = apkfilename[:-4] + '_src.tar.gz'
tarfile = os.path.join(unsigned_dir, tarfilename)
if os.path.exists(tarfile):
@ -56,7 +55,9 @@ def publish_source_tarball(apkfilename, unsigned_dir, output_dir):
def key_alias(appid):
"""Get the alias which F-Droid uses to indentify the singing key
"""No summary.
Get the alias which F-Droid uses to identify the signing key
for this App in F-Droids keystore.
"""
if config and 'keyaliases' in config and appid in config['keyaliases']:
@ -74,9 +75,7 @@ def key_alias(appid):
def read_fingerprints_from_keystore():
"""Obtain a dictionary containing all singning-key fingerprints which
are managed by F-Droid, grouped by appid.
"""
"""Obtain a dictionary containing all signing-key fingerprints which are managed by F-Droid, grouped by appid."""
env_vars = {'LC_ALL': 'C.UTF-8',
'FDROID_KEY_STORE_PASS': config['keystorepass']}
cmd = [config['keytool'], '-list',
@ -101,8 +100,9 @@ def read_fingerprints_from_keystore():
def sign_sig_key_fingerprint_list(jar_file):
"""sign the list of app-signing key fingerprints which is
used primaryily by fdroid update to determine which APKs
"""Sign the list of app-signing key fingerprints.
This is used primarily by fdroid update to determine which APKs
were built and signed by F-Droid and which ones were
manually added by users.
"""
@ -125,6 +125,7 @@ def sign_sig_key_fingerprint_list(jar_file):
def store_stats_fdroid_signing_key_fingerprints(appids, indent=None):
"""Store list of all signing-key fingerprints for given appids to HD.
This list will later on be needed by fdroid update.
"""
if not os.path.exists('stats'):
@ -143,8 +144,7 @@ def store_stats_fdroid_signing_key_fingerprints(appids, indent=None):
def status_update_json(generatedKeys, signedApks):
"""Output a JSON file with metadata about this run"""
"""Output a JSON file with metadata about this run."""
logging.debug(_('Outputting JSON'))
output = common.setup_status_output(start_timestamp)
output['apksigner'] = shutil.which(config.get('apksigner', ''))
@ -158,8 +158,8 @@ def status_update_json(generatedKeys, signedApks):
def check_for_key_collisions(allapps):
"""
Make sure there's no collision in keyaliases from apps.
"""Make sure there's no collision in keyaliases from apps.
It was suggested at
https://dev.guardianproject.info/projects/bazaar/wiki/FDroid_Audit
that a package could be crafted, such that it would use the same signing
@ -168,9 +168,16 @@ def check_for_key_collisions(allapps):
the colliding ID would be something that would be a) a valid package ID,
and b) a sane-looking ID that would make its way into the repo.
Nonetheless, to be sure, before publishing we check that there are no
collisions, and refuse to do any publishing if that's the case...
:param allapps a dict of all apps to process
:return: a list of all aliases corresponding to allapps
collisions, and refuse to do any publishing if that's the case.
Parameters
----------
allapps
a dict of all apps to process
Returns
-------
a list of all aliases corresponding to allapps
"""
allaliases = []
for appid in allapps:
@ -185,9 +192,12 @@ def check_for_key_collisions(allapps):
def create_key_if_not_existing(keyalias):
"""
Ensures a signing key with the given keyalias exists
:return: boolean, True if a new key was created, false otherwise
"""Ensure a signing key with the given keyalias exists.
Returns
-------
boolean
True if a new key was created, False otherwise
"""
# See if we already have a key for this application, and
# if not generate one...

View File

@ -104,7 +104,7 @@ def get_gradle_compile_commands(build):
def scan_binary(apkfile):
"""Scan output of apkanalyzer for known non-free classes
"""Scan output of apkanalyzer for known non-free classes.
apkanalyzer produces useful output when it can run, but it does
not support all recent JDK versions, and also some DEX versions,
@ -112,7 +112,6 @@ def scan_binary(apkfile):
to run without exiting with an error.
"""
logging.info(_('Scanning APK with apkanalyzer for known non-free classes.'))
result = common.SdkToolsPopen(["apkanalyzer", "dex", "packages", "--defined-only", apkfile], output=False)
if result.returncode != 0:
@ -130,10 +129,12 @@ def scan_binary(apkfile):
def scan_source(build_dir, build=metadata.Build()):
"""Scan the source code in the given directory (and all subdirectories)
and return the number of fatal problems encountered
"""Scan the source code in the given directory (and all subdirectories).
Returns
-------
the number of fatal problems encountered.
"""
count = 0
allowlisted = [
@ -193,10 +194,18 @@ def scan_source(build_dir, build=metadata.Build()):
return False
def ignoreproblem(what, path_in_build_dir):
"""
:param what: string describing the problem, will be printed in log messages
:param path_in_build_dir: path to the file relative to `build`-dir
"returns: 0 as we explicitly ignore the file, so don't count an error
"""No summary.
Parameters
----------
what: string
describing the problem, will be printed in log messages
path_in_build_dir
path to the file relative to `build`-dir
Returns
-------
0 as we explicitly ignore the file, so don't count an error
"""
msg = ('Ignoring %s at %s' % (what, path_in_build_dir))
logging.info(msg)
@ -205,11 +214,20 @@ def scan_source(build_dir, build=metadata.Build()):
return 0
def removeproblem(what, path_in_build_dir, filepath):
"""
:param what: string describing the problem, will be printed in log messages
:param path_in_build_dir: path to the file relative to `build`-dir
:param filepath: Path (relative to our current path) to the file
"returns: 0 as we deleted the offending file
"""No summary.
Parameters
----------
what: string
describing the problem, will be printed in log messages
path_in_build_dir
path to the file relative to `build`-dir
filepath
Path (relative to our current path) to the file
Returns
-------
0 as we deleted the offending file
"""
msg = ('Removing %s at %s' % (what, path_in_build_dir))
logging.info(msg)
@ -225,10 +243,18 @@ def scan_source(build_dir, build=metadata.Build()):
return 0
def warnproblem(what, path_in_build_dir):
"""
:param what: string describing the problem, will be printed in log messages
:param path_in_build_dir: path to the file relative to `build`-dir
:returns: 0, as warnings don't count as errors
"""No summary.
Parameters
----------
what: string
describing the problem, will be printed in log messages
path_in_build_dir
path to the file relative to `build`-dir
Returns
-------
0, as warnings don't count as errors
"""
if toignore(path_in_build_dir):
return 0
@ -238,13 +264,22 @@ def scan_source(build_dir, build=metadata.Build()):
return 0
def handleproblem(what, path_in_build_dir, filepath):
"""Dispatches to problem handlers (ignore, delete, warn) or returns 1
for increasing the error count
"""Dispatch to problem handlers (ignore, delete, warn).
Or returns 1 for increasing the error count.
:param what: string describing the problem, will be printed in log messages
:param path_in_build_dir: path to the file relative to `build`-dir
:param filepath: Path (relative to our current path) to the file
:returns: 0 if the problem was ignored/deleted/is only a warning, 1 otherwise
Parameters
----------
what: string
describing the problem, will be printed in log messages
path_in_build_dir
path to the file relative to `build`-dir
filepath
Path (relative to our current path) to the file
Returns
-------
0 if the problem was ignored/deleted/is only a warning, 1 otherwise
"""
if toignore(path_in_build_dir):
return ignoreproblem(what, path_in_build_dir)

View File

@ -32,8 +32,7 @@ start_timestamp = time.gmtime()
def sign_jar(jar):
"""
Sign a JAR file with Java's jarsigner.
"""Sign a JAR file with Java's jarsigner.
This method requires a properly initialized config object.
@ -60,8 +59,7 @@ def sign_jar(jar):
def sign_index_v1(repodir, json_name):
"""
Sign index-v1.json to make index-v1.jar
"""Sign index-v1.json to make index-v1.jar.
This is a bit different than index.jar: instead of their being index.xml
and index_unsigned.jar, the presence of index-v1.json means that there is
@ -78,8 +76,7 @@ def sign_index_v1(repodir, json_name):
def status_update_json(signed):
"""Output a JSON file with metadata about this run"""
"""Output a JSON file with metadata about this run."""
logging.debug(_('Outputting JSON'))
output = common.setup_status_output(start_timestamp)
if signed:

View File

@ -1,23 +1,24 @@
#!/usr/bin/env python
'''
Python-Tail - Unix tail follow implementation in Python.
"""Python-Tail - Unix tail follow implementation in Python.
python-tail can be used to monitor changes to a file.
Example:
import tail
# Create a tail instance
t = tail.Tail('file-to-be-followed')
# Register a callback function to be called when a new line is found in the followed file.
# If no callback function is registerd, new lines would be printed to standard out.
t.register_callback(callback_function)
# Follow the file with 5 seconds as sleep time between iterations.
# If sleep time is not provided 1 second is used as the default time.
t.follow(s=5) '''
Example
-------
>>> import tail
>>>
>>> # Create a tail instance
>>> t = tail.Tail('file-to-be-followed')
>>>
>>> # Register a callback function to be called when a new line is found in the followed file.
>>> # If no callback function is registerd, new lines would be printed to standard out.
>>> t.register_callback(callback_function)
>>>
>>> # Follow the file with 5 seconds as sleep time between iterations.
>>> # If sleep time is not provided 1 second is used as the default time.
>>> t.follow(s=5)
"""
# Author - Kasun Herath <kasunh01 at gmail.com>
# Source - https://github.com/kasun/python-tail
@ -32,42 +33,49 @@ import threading
class Tail(object):
''' Represents a tail command. '''
"""Represents a tail command."""
def __init__(self, tailed_file):
''' Initiate a Tail instance.
Check for file validity, assigns callback function to standard out.
"""Initiate a Tail instance.
Arguments:
tailed_file - File to be followed. '''
Check for file validity, assigns callback function to standard out.
Parameters
----------
tailed_file
File to be followed.
"""
self.check_file_validity(tailed_file)
self.tailed_file = tailed_file
self.callback = sys.stdout.write
self.t_stop = threading.Event()
def start(self, s=1):
'''Start tailing a file in a background thread.
Arguments:
s - Number of seconds to wait between each iteration; Defaults to 3.
'''
"""Start tailing a file in a background thread.
Parameters
----------
s
Number of seconds to wait between each iteration; Defaults to 3.
"""
t = threading.Thread(target=self.follow, args=(s,))
t.start()
def stop(self):
'''Stop a background tail.
'''
"""Stop a background tail."""
self.t_stop.set()
def follow(self, s=1):
''' Do a tail follow. If a callback function is registered it is called with every new line.
"""Do a tail follow.
If a callback function is registered it is called with every new line.
Else printed to standard out.
Arguments:
s - Number of seconds to wait between each iteration; Defaults to 1. '''
Parameters
----------
s
Number of seconds to wait between each iteration; Defaults to 1.
"""
with open(self.tailed_file) as file_:
# Go to the end of file
file_.seek(0, 2)
@ -82,11 +90,11 @@ class Tail(object):
time.sleep(s)
def register_callback(self, func):
''' Overrides default callback function to provided function. '''
"""Override default callback function to provided function."""
self.callback = func
def check_file_validity(self, file_):
''' Check whether the a given file exists, readable and is a file '''
"""Check whether the a given file exists, readable and is a file."""
if not os.access(file_, os.F_OK):
raise TailError("File '%s' does not exist" % (file_))
if not os.access(file_, os.R_OK):

View File

@ -128,13 +128,16 @@ def disabled_algorithms_allowed():
def status_update_json(apps, apks):
"""Output a JSON file with metadata about this `fdroid update` run
"""Output a JSON file with metadata about this `fdroid update` run.
:param apps: fully populated list of all applications
:param apks: all to be published apks
Parameters
----------
apps
fully populated list of all applications
apks
all to be published apks
"""
logging.debug(_('Outputting JSON'))
output = common.setup_status_output(start_timestamp)
output['antiFeatures'] = dict()
@ -194,10 +197,14 @@ def status_update_json(apps, apks):
def update_wiki(apps, apks):
"""Update the wiki
"""Update the wiki.
:param apps: fully populated list of all applications
:param apks: all apks, except...
Parameters
----------
apps
fully populated list of all applications
apks
all apks, except...
"""
logging.info("Updating wiki")
wikicat = 'Apps'
@ -422,9 +429,14 @@ def update_wiki(apps, apks):
def delete_disabled_builds(apps, apkcache, repodirs):
"""Delete disabled build outputs.
:param apps: list of all applications, as per metadata.read_metadata
:param apkcache: current apk cache information
:param repodirs: the repo directories to process
Parameters
----------
apps
list of all applications, as per metadata.read_metadata
apkcache
current apk cache information
repodirs
the repo directories to process
"""
for appid, app in apps.items():
for build in app.get('Builds', []):
@ -480,9 +492,12 @@ def resize_icon(iconpath, density):
def resize_all_icons(repodirs):
"""Resize all icons that exceed the max size
"""Resize all icons that exceed the max size.
:param repodirs: the repo directories to process
Parameters
----------
repodirs
the repo directories to process
"""
for repodir in repodirs:
for density in screen_densities:
@ -504,12 +519,17 @@ def getsig(apkpath):
md5 digest algorithm. This is not the same as the standard X.509
certificate fingerprint.
:param apkpath: path to the apk
:returns: A string containing the md5 of the signature of the apk or None
if an error occurred.
Parameters
----------
apkpath
path to the apk
Returns
-------
A string containing the md5 of the signature of the apk or None
if an error occurred.
"""
cert_encoded = common.get_first_signer_certificate(apkpath)
if not cert_encoded:
return None
@ -521,7 +541,7 @@ def get_cache_file():
def get_cache():
"""Get the cached dict of the APK index
"""Get the cached dict of the APK index.
Gather information about all the apk files in the repo directory,
using cached data if possible. Some of the index operations take a
@ -533,7 +553,9 @@ def get_cache():
those cases, there is no easy way to know what has changed from
the cache, so just rerun the whole thing.
:return: apkcache
Returns
-------
apkcache
"""
apkcachefile = get_cache_file()
@ -582,7 +604,7 @@ def write_cache(apkcache):
def get_icon_bytes(apkzip, iconsrc):
'''ZIP has no official encoding, UTF-* and CP437 are defacto'''
"""ZIP has no official encoding, UTF-* and CP437 are defacto."""
try:
return apkzip.read(iconsrc)
except KeyError:
@ -590,7 +612,7 @@ def get_icon_bytes(apkzip, iconsrc):
def has_known_vulnerability(filename):
"""checks for known vulnerabilities in the APK
"""Check for known vulnerabilities in the APK.
Checks OpenSSL .so files in the APK to see if they are a known vulnerable
version. Google also enforces this:
@ -603,7 +625,6 @@ def has_known_vulnerability(filename):
Janus is similar to Master Key but is perhaps easier to scan for.
https://www.guardsquare.com/en/blog/new-android-vulnerability-allows-attackers-modify-apps-without-affecting-their-signatures
"""
found_vuln = False
# statically load this pattern
@ -649,8 +670,9 @@ def has_known_vulnerability(filename):
def insert_obbs(repodir, apps, apks):
"""Scans the .obb files in a given repo directory and adds them to the
relevant APK instances. OBB files have versionCodes like APK
"""Scan the .obb files in a given repo directory and add them to the relevant APK instances.
OBB files have versionCodes like APK
files, and they are loosely associated. If there is an OBB file
present, then any APK with the same or higher versionCode will use
that OBB file. There are two OBB types: main and patch, each APK
@ -658,12 +680,16 @@ def insert_obbs(repodir, apps, apks):
https://developer.android.com/google/play/expansion-files.html
:param repodir: repo directory to scan
:param apps: list of current, valid apps
:param apks: current information on all APKs
Parameters
----------
repodir
repo directory to scan
apps
list of current, valid apps
apks
current information on all APKs
"""
def obbWarnDelete(f, msg):
logging.warning(msg + ' ' + f)
if options.delete_unknown:
@ -715,7 +741,7 @@ def insert_obbs(repodir, apps, apks):
def translate_per_build_anti_features(apps, apks):
"""Grab the anti-features list from the build metadata
"""Grab the anti-features list from the build metadata.
For most Anti-Features, they are really most applicable per-APK,
not for an app. An app can fix a vulnerability, add/remove
@ -729,7 +755,6 @@ def translate_per_build_anti_features(apps, apks):
from the build 'antifeatures' field, not directly included.
"""
antiFeatures = dict()
for packageName, app in apps.items():
d = dict()
@ -749,7 +774,7 @@ def translate_per_build_anti_features(apps, apks):
def _get_localized_dict(app, locale):
'''get the dict to add localized store metadata to'''
"""Get the dict to add localized store metadata to."""
if 'localized' not in app:
app['localized'] = collections.OrderedDict()
if locale not in app['localized']:
@ -758,7 +783,7 @@ def _get_localized_dict(app, locale):
def _set_localized_text_entry(app, locale, key, f):
"""Read a fastlane/triple-t metadata file and add an entry to the app
"""Read a fastlane/triple-t metadata file and add an entry to the app.
This reads more than the limit, in case there is leading or
trailing whitespace to be stripped
@ -779,7 +804,7 @@ def _set_localized_text_entry(app, locale, key, f):
def _set_author_entry(app, key, f):
"""read a fastlane/triple-t author file and add the entry to the app
"""Read a fastlane/triple-t author file and add the entry to the app.
This reads more than the limit, in case there is leading or
trailing whitespace to be stripped
@ -796,7 +821,7 @@ def _set_author_entry(app, key, f):
def _strip_and_copy_image(in_file, outpath):
"""Remove any metadata from image and copy it to new path
"""Remove any metadata from image and copy it to new path.
Sadly, image metadata like EXIF can be used to exploit devices.
It is not used at all in the F-Droid ecosystem, so its much safer
@ -861,8 +886,7 @@ def _strip_and_copy_image(in_file, outpath):
def _get_base_hash_extension(f):
'''split a graphic/screenshot filename into base, sha256, and extension
'''
"""Split a graphic/screenshot filename into base, sha256, and extension."""
base, extension = common.get_extension(f)
sha256_index = base.find('_')
if sha256_index > 0:
@ -871,7 +895,7 @@ def _get_base_hash_extension(f):
def sanitize_funding_yml_entry(entry):
"""FUNDING.yml comes from upstream repos, entries must be sanitized"""
"""FUNDING.yml comes from upstream repos, entries must be sanitized."""
if type(entry) not in (bytes, int, float, list, str):
return
if isinstance(entry, bytes):
@ -894,7 +918,7 @@ def sanitize_funding_yml_entry(entry):
def sanitize_funding_yml_name(name):
"""Sanitize usernames that come from FUNDING.yml"""
"""Sanitize usernames that come from FUNDING.yml."""
entry = sanitize_funding_yml_entry(name)
if entry:
m = metadata.VALID_USERNAME_REGEX.match(entry)
@ -904,7 +928,7 @@ def sanitize_funding_yml_name(name):
def insert_funding_yml_donation_links(apps):
"""include donation links from FUNDING.yml in app's source repo
"""Include donation links from FUNDING.yml in app's source repo.
GitHub made a standard file format for declaring donation
links. This parses that format from upstream repos to include in
@ -917,7 +941,6 @@ def insert_funding_yml_donation_links(apps):
https://help.github.com/en/articles/displaying-a-sponsor-button-in-your-repository#about-funding-files
"""
if not os.path.isdir('build'):
return # nothing to do
for packageName, app in apps.items():
@ -989,7 +1012,7 @@ def insert_funding_yml_donation_links(apps):
def copy_triple_t_store_metadata(apps):
"""Include store metadata from the app's source repo
"""Include store metadata from the app's source repo.
The Triple-T Gradle Play Publisher is a plugin that has a standard
file layout for all of the metadata and graphics that the Google
@ -1007,7 +1030,6 @@ def copy_triple_t_store_metadata(apps):
https://github.com/Triple-T/gradle-play-publisher/blob/2.1.0/README.md#publishing-listings
"""
if not os.path.isdir('build'):
return # nothing to do
@ -1112,7 +1134,7 @@ def copy_triple_t_store_metadata(apps):
def insert_localized_app_metadata(apps):
"""scans standard locations for graphics and localized text
"""Scan standard locations for graphics and localized text.
Scans for localized description files, changelogs, store graphics, and
screenshots and adds them to the app metadata. Each app's source repo root
@ -1139,7 +1161,6 @@ def insert_localized_app_metadata(apps):
See also our documentation page:
https://f-droid.org/en/docs/All_About_Descriptions_Graphics_and_Screenshots/#in-the-apps-build-metadata-in-an-fdroiddata-collection
"""
sourcedirs = glob.glob(os.path.join('build', '[A-Za-z]*', 'src', '[A-Za-z]*', 'fastlane', 'metadata', 'android', '[a-z][a-z]*'))
sourcedirs += glob.glob(os.path.join('build', '[A-Za-z]*', 'fastlane', 'metadata', 'android', '[a-z][a-z]*'))
sourcedirs += glob.glob(os.path.join('build', '[A-Za-z]*', 'metadata', '[a-z][a-z]*'))
@ -1259,15 +1280,19 @@ def insert_localized_app_metadata(apps):
def scan_repo_files(apkcache, repodir, knownapks, use_date_from_file=False):
"""Scan a repo for all files with an extension except APK/OBB
"""Scan a repo for all files with an extension except APK/OBB.
:param apkcache: current cached info about all repo files
:param repodir: repo directory to scan
:param knownapks: list of all known files, as per metadata.read_metadata
:param use_date_from_file: use date from file (instead of current date)
for newly added files
Parameters
----------
apkcache
current cached info about all repo files
repodir
repo directory to scan
knownapks
list of all known files, as per metadata.read_metadata
use_date_from_file
use date from file (instead of current date) for newly added files
"""
cachechanged = False
repo_files = []
repodir = repodir.encode()
@ -1343,14 +1368,22 @@ def scan_repo_files(apkcache, repodir, knownapks, use_date_from_file=False):
def scan_apk(apk_file):
"""
Scans an APK file and returns dictionary with metadata of the APK.
"""Scan an APK file and return a dictionary with metadata of the APK.
Attention: This does *not* verify that the APK signature is correct.
:param apk_file: The (ideally absolute) path to the APK file
:raises BuildException
:return A dict containing APK metadata
Parameters
----------
apk_file
The (ideally absolute) path to the APK file
Raises
------
BuildException
Returns
-------
A dict containing APK metadata
"""
apk = {
'hash': common.sha256sum(apk_file),
@ -1397,7 +1430,7 @@ def scan_apk(apk_file):
def _get_apk_icons_src(apkfile, icon_name):
"""Extract the paths to the app icon in all available densities
"""Extract the paths to the app icon in all available densities.
The folder name is normally generated by the Android Tools, but
there is nothing that prevents people from using whatever DPI
@ -1423,7 +1456,7 @@ def _get_apk_icons_src(apkfile, icon_name):
def _sanitize_sdk_version(value):
"""Sanitize the raw values from androguard to handle bad values
"""Sanitize the raw values from androguard to handle bad values.
minSdkVersion/targetSdkVersion/maxSdkVersion must be integers, but
that doesn't stop devs from doing strange things like setting them
@ -1564,23 +1597,33 @@ def scan_apk_androguard(apk, apkfile):
def process_apk(apkcache, apkfilename, repodir, knownapks, use_date_from_apk=False,
allow_disabled_algorithms=False, archive_bad_sig=False):
"""Processes the apk with the given filename in the given repo directory.
"""Process the apk with the given filename in the given repo directory.
This also extracts the icons.
:param apkcache: current apk cache information
:param apkfilename: the filename of the apk to scan
:param repodir: repo directory to scan
:param knownapks: known apks info
:param use_date_from_apk: use date from APK (instead of current date)
for newly added APKs
:param allow_disabled_algorithms: allow APKs with valid signatures that include
disabled algorithms in the signature (e.g. MD5)
:param archive_bad_sig: move APKs with a bad signature to the archive
:returns: (skip, apk, cachechanged) where skip is a boolean indicating whether to skip this apk,
apk is the scanned apk information, and cachechanged is True if the apkcache got changed.
"""
Parameters
----------
apkcache
current apk cache information
apkfilename
the filename of the apk to scan
repodir
repo directory to scan
knownapks
known apks info
use_date_from_apk
use date from APK (instead of current date) for newly added APKs
allow_disabled_algorithms
allow APKs with valid signatures that include
disabled algorithms in the signature (e.g. MD5)
archive_bad_sig
move APKs with a bad signature to the archive
Returns
-------
(skip, apk, cachechanged) where skip is a boolean indicating whether to skip this apk,
apk is the scanned apk information, and cachechanged is True if the apkcache got changed.
"""
apk = {}
apkfile = os.path.join(repodir, apkfilename)
@ -1699,19 +1742,26 @@ def process_apk(apkcache, apkfilename, repodir, knownapks, use_date_from_apk=Fal
def process_apks(apkcache, repodir, knownapks, use_date_from_apk=False):
"""Processes the apks in the given repo directory.
"""Process the apks in the given repo directory.
This also extracts the icons.
:param apkcache: current apk cache information
:param repodir: repo directory to scan
:param knownapks: known apks info
:param use_date_from_apk: use date from APK (instead of current date)
for newly added APKs
:returns: (apks, cachechanged) where apks is a list of apk information,
and cachechanged is True if the apkcache got changed.
"""
Parameters
----------
apkcache
current apk cache information
repodir
repo directory to scan
knownapks
known apks info
use_date_from_apk
use date from APK (instead of current date) for newly added APKs
Returns
-------
(apks, cachechanged) where apks is a list of apk information,
and cachechanged is True if the apkcache got changed.
"""
cachechanged = False
for icon_dir in get_all_icon_dirs(repodir):
@ -1737,19 +1787,28 @@ def process_apks(apkcache, repodir, knownapks, use_date_from_apk=False):
def extract_apk_icons(icon_filename, apk, apkzip, repo_dir):
"""Extracts PNG icons from an APK with the supported pixel densities
"""Extract PNG icons from an APK with the supported pixel densities.
Extracts icons from the given APK zip in various densities, saves
them into given repo directory and stores their names in the APK
metadata dictionary. If the icon is an XML icon, then this tries
to find PNG icon that can replace it.
:param icon_filename: A string representing the icon's file name
:param apk: A populated dictionary containing APK metadata.
Needs to have 'icons_src' key
:param apkzip: An opened zipfile.ZipFile of the APK file
:param repo_dir: The directory of the APK's repository
:return: A list of icon densities that are missing
Parameters
----------
icon_filename
A string representing the icon's file name
apk
A populated dictionary containing APK metadata.
Needs to have 'icons_src' key
apkzip
An opened zipfile.ZipFile of the APK file
repo_dir
The directory of the APK's repository
Returns
-------
A list of icon densities that are missing
"""
res_name_re = re.compile(r'res/(drawable|mipmap)-(x*[hlm]dpi|anydpi).*/(.*)_[0-9]+dp.(png|xml)')
@ -1820,13 +1879,14 @@ def extract_apk_icons(icon_filename, apk, apkzip, repo_dir):
def fill_missing_icon_densities(empty_densities, icon_filename, apk, repo_dir):
"""
Resize existing PNG icons for densities missing in the APK to ensure all densities are available
"""Resize existing PNG icons for densities missing in the APK to ensure all densities are available.
:param empty_densities: A list of icon densities that are missing
:param icon_filename: A string representing the icon's file name
:param apk: A populated dictionary containing APK metadata. Needs to have 'icons' key
:param repo_dir: The directory of the APK's repository
Parameters
----------
empty_densities
    A list of icon densities that are missing
icon_filename
    A string representing the icon's file name
apk
    A populated dictionary containing APK metadata. Needs to have 'icons' key
repo_dir
    The directory of the APK's repository
"""
# First try resizing down to not lose quality
@ -1889,8 +1949,10 @@ def fill_missing_icon_densities(empty_densities, icon_filename, apk, repo_dir):
def apply_info_from_latest_apk(apps, apks):
"""
"""No summary.
Some information from the apks needs to be applied up to the application level.
When doing this, we use the info from the most recent version's apk.
We deal with figuring out when the app was added and last updated at the same time.
"""
@ -1920,7 +1982,7 @@ def apply_info_from_latest_apk(apps, apks):
def make_categories_txt(repodir, categories):
'''Write a category list in the repo to allow quick access'''
"""Write a category list in the repo to allow quick access."""
catdata = ''
for cat in sorted(categories):
catdata += cat + '\n'
@ -1982,8 +2044,7 @@ def archive_old_apks(apps, apks, archapks, repodir, archivedir, defaultkeepversi
def move_apk_between_sections(from_dir, to_dir, apk):
"""move an APK from repo to archive or vice versa"""
"""Move an APK from repo to archive or vice versa."""
def _move_file(from_dir, to_dir, filename, ignore_missing):
from_path = os.path.join(from_dir, filename)
if ignore_missing and not os.path.exists(from_path):
@ -2033,15 +2094,14 @@ def add_apks_to_per_app_repos(repodir, apks):
def create_metadata_from_template(apk):
'''create a new metadata file using internal or external template
"""Create a new metadata file using internal or external template.
Generate warnings for apk's with no metadata (or create skeleton
metadata files, if requested on the command line). Though the
template file is YAML, this uses neither pyyaml nor ruamel.yaml
since those impose things on the metadata file made from the
template: field sort order, empty field value, formatting, etc.
'''
"""
if os.path.exists('template.yml'):
with open('template.yml') as f:
metatxt = f.read()
@ -2086,7 +2146,8 @@ def create_metadata_from_template(apk):
def read_added_date_from_all_apks(apps, apks):
"""
"""No summary.
Added dates come from the stats/known_apks.txt file but are
read when scanning apks and thus need to be applied form apk
level to app level for _all_ apps and not only from non-archived
@ -2107,7 +2168,7 @@ def read_added_date_from_all_apks(apps, apks):
def insert_missing_app_names_from_apks(apps, apks):
"""Use app name from APK if it is not set in the metadata
"""Use app name from APK if it is not set in the metadata.
Name -> localized -> from APK
@ -2148,7 +2209,7 @@ def insert_missing_app_names_from_apks(apps, apks):
def get_apps_with_packages(apps, apks):
"""Returns a deepcopy of that subset apps that actually has any associated packages. Skips disabled apps."""
"""Return a deepcopy of that subset apps that actually has any associated packages. Skips disabled apps."""
appsWithPackages = collections.OrderedDict()
for packageName in apps:
app = apps[packageName]
@ -2165,12 +2226,20 @@ def get_apps_with_packages(apps, apks):
def prepare_apps(apps, apks, repodir):
"""Encapsulates all necessary preparation steps before we can build an index out of apps and apks.
"""Encapsulate all necessary preparation steps before we can build an index out of apps and apks.
:param apps: All apps as read from metadata
:param apks: list of apks that belong into repo, this gets modified in place
:param repodir: the target repository directory, metadata files will be copied here
:return: the relevant subset of apps (as a deepcopy)
Parameters
----------
apps
All apps as read from metadata
apks
list of apks that belong into repo, this gets modified in place
repodir
the target repository directory, metadata files will be copied here
Returns
-------
the relevant subset of apps (as a deepcopy)
"""
apps_with_packages = get_apps_with_packages(apps, apks)
apply_info_from_latest_apk(apps_with_packages, apks)

View File

@ -69,7 +69,7 @@ class Decoder(json.JSONDecoder):
def _add_diffoscope_info(d):
"""Add diffoscope setup metadata to provided dict under 'diffoscope' key
"""Add diffoscope setup metadata to provided dict under 'diffoscope' key.
The imports are broken out at stages since various versions of
diffoscope support various parts of these.
@ -112,7 +112,7 @@ def _add_diffoscope_info(d):
def write_json_report(url, remote_apk, unsigned_apk, compare_result):
"""write out the results of the verify run to JSON
"""Write out the results of the verify run to JSON.
This builds up reports on the repeated runs of `fdroid verify` on
a set of apps. It uses the timestamps on the compared files to
@ -120,7 +120,6 @@ def write_json_report(url, remote_apk, unsigned_apk, compare_result):
repeatedly.
"""
jsonfile = unsigned_apk + '.json'
if os.path.exists(jsonfile):
with open(jsonfile) as fp:

View File

@ -79,15 +79,24 @@ def _check_output(cmd, cwd=None):
def get_build_vm(srvdir, provider=None):
"""Factory function for getting FDroidBuildVm instances.
"""No summary.
Factory function for getting FDroidBuildVm instances.
This function tries to figure out what hypervisor should be used
and creates an object for controlling a build VM.
:param srvdir: path to a directory which contains a Vagrantfile
:param provider: optionally this parameter allows specifiying an
specific vagrant provider.
:returns: FDroidBuildVm instance.
Parameters
----------
srvdir
path to a directory which contains a Vagrantfile
provider
optionally this parameter allows specifying a
specific vagrant provider.
Returns
-------
FDroidBuildVm instance.
"""
abssrvdir = abspath(srvdir)
@ -171,9 +180,9 @@ class FDroidBuildVm():
This is intended to be a hypervisor independent, fault tolerant
wrapper around the vagrant functions we use.
"""
def __init__(self, srvdir):
"""Create new server class.
"""
"""Create new server class."""
self.srvdir = srvdir
self.srvname = basename(srvdir) + '_default'
self.vgrntfile = os.path.join(srvdir, 'Vagrantfile')
@ -252,7 +261,7 @@ class FDroidBuildVm():
self.vgrnt.package(output=output)
def vagrant_uuid_okay(self):
'''Having an uuid means that vagrant up has run successfully.'''
"""Having an uuid means that vagrant up has run successfully."""
if self.srvuuid is None:
return False
return True
@ -282,9 +291,14 @@ class FDroidBuildVm():
def box_add(self, boxname, boxfile, force=True):
"""Add vagrant box to vagrant.
:param boxname: name assigned to local deployment of box
:param boxfile: path to box file
:param force: overwrite existing box image (default: True)
Parameters
----------
boxname
name assigned to local deployment of box
boxfile
path to box file
force
overwrite existing box image (default: True)
"""
boxfile = abspath(boxfile)
if not isfile(boxfile):
@ -304,10 +318,11 @@ class FDroidBuildVm():
shutil.rmtree(boxpath)
def sshinfo(self):
"""Get ssh connection info for a vagrant VM
"""Get ssh connection info for a vagrant VM.
:returns: A dictionary containing 'hostname', 'port', 'user'
and 'idfile'
Returns
-------
A dictionary containing 'hostname', 'port', 'user' and 'idfile'
"""
import paramiko
try: