deploy: automatically convert to black format

Hans-Christoph Steiner 2024-04-23 18:41:08 +02:00 committed by Michael Pöhn
parent cb04d801d8
commit 79f148167a
2 changed files with 176 additions and 100 deletions

fdroidserver/deploy.py

@@ -119,17 +119,20 @@ def update_awsbucket_s3cmd(repo_section):
     files are deleted from the server. The last pass is the only pass
     to use a full MD5 checksum of all files to detect changes.
     """
-    logging.debug(_('Using s3cmd to sync with: {url}')
-                  .format(url=config['awsbucket']))
+    logging.debug(_('Using s3cmd to sync with: {url}').format(url=config['awsbucket']))
     if os.path.exists(USER_S3CFG):
         logging.info(_('Using "{path}" for configuring s3cmd.').format(path=USER_S3CFG))
         configfilename = USER_S3CFG
     else:
         fd = os.open(AUTO_S3CFG, os.O_CREAT | os.O_TRUNC | os.O_WRONLY, 0o600)
-        logging.debug(_('Creating "{path}" for configuring s3cmd.').format(path=AUTO_S3CFG))
+        logging.debug(
+            _('Creating "{path}" for configuring s3cmd.').format(path=AUTO_S3CFG)
+        )
         os.write(fd, '[default]\n'.encode('utf-8'))
-        os.write(fd, ('access_key = ' + config['awsaccesskeyid'] + '\n').encode('utf-8'))
+        os.write(
+            fd, ('access_key = ' + config['awsaccesskeyid'] + '\n').encode('utf-8')
+        )
         os.write(fd, ('secret_key = ' + config['awssecretkey'] + '\n').encode('utf-8'))
         os.close(fd)
         configfilename = AUTO_S3CFG
@@ -137,11 +140,11 @@ def update_awsbucket_s3cmd(repo_section):
     s3bucketurl = 's3://' + config['awsbucket']
     s3cmd = [config['s3cmd'], '--config=' + configfilename]
     if subprocess.call(s3cmd + ['info', s3bucketurl]) != 0:
-        logging.warning(_('Creating new S3 bucket: {url}')
-                        .format(url=s3bucketurl))
+        logging.warning(_('Creating new S3 bucket: {url}').format(url=s3bucketurl))
         if subprocess.call(s3cmd + ['mb', s3bucketurl]) != 0:
-            logging.error(_('Failed to create S3 bucket: {url}')
-                          .format(url=s3bucketurl))
+            logging.error(
+                _('Failed to create S3 bucket: {url}').format(url=s3bucketurl)
+            )
             raise FDroidException()
     s3cmd_sync = s3cmd + ['sync', '--acl-public']
@@ -168,8 +171,11 @@ def update_awsbucket_s3cmd(repo_section):
     if returncode != 0:
         raise FDroidException()
-    logging.debug(_('s3cmd sync indexes {path} to {url} and delete')
-                  .format(path=repo_section, url=s3url))
+    logging.debug(
+        _('s3cmd sync indexes {path} to {url} and delete').format(
+            path=repo_section, url=s3url
+        )
+    )
     s3cmd_sync.append('--delete-removed')
     s3cmd_sync.append('--delete-after')
     if options.no_checksum:
@@ -191,22 +197,28 @@ def update_awsbucket_libcloud(repo_section):
     Requires AWS credentials set in config.yml: awsaccesskeyid, awssecretkey
     """
-    logging.debug(_('using Apache libcloud to sync with {url}')
-                  .format(url=config['awsbucket']))
+    logging.debug(
+        _('using Apache libcloud to sync with {url}').format(url=config['awsbucket'])
+    )
     import libcloud.security
     libcloud.security.VERIFY_SSL_CERT = True
     from libcloud.storage.types import Provider, ContainerDoesNotExistError
     from libcloud.storage.providers import get_driver
     if not config.get('awsaccesskeyid') or not config.get('awssecretkey'):
         raise FDroidException(
-            _('To use awsbucket, awssecretkey and awsaccesskeyid must also be set in config.yml!'))
+            _(
+                'To use awsbucket, awssecretkey and awsaccesskeyid must also be set in config.yml!'
+            )
+        )
     awsbucket = config['awsbucket']
     if os.path.exists(USER_S3CFG):
-        raise FDroidException(_('"{path}" exists but s3cmd is not installed!')
-                              .format(path=USER_S3CFG))
+        raise FDroidException(
+            _('"{path}" exists but s3cmd is not installed!').format(path=USER_S3CFG)
+        )
     cls = get_driver(Provider.S3)
     driver = cls(config['awsaccesskeyid'], config['awssecretkey'])
@@ -214,8 +226,7 @@ def update_awsbucket_libcloud(repo_section):
         container = driver.get_container(container_name=awsbucket)
     except ContainerDoesNotExistError:
         container = driver.create_container(container_name=awsbucket)
-        logging.info(_('Created new container "{name}"')
-                     .format(name=container.name))
+        logging.info(_('Created new container "{name}"').format(name=container.name))
     upload_dir = 'fdroid/' + repo_section
     objs = dict()
@@ -260,10 +271,12 @@ def update_awsbucket_libcloud(repo_section):
             path = os.path.relpath(file_to_upload)
             logging.info(f' uploading {path} to s3://{awsbucket}/{object_name}')
             with open(file_to_upload, 'rb') as iterator:
-                obj = driver.upload_object_via_stream(iterator=iterator,
-                                                      container=container,
-                                                      object_name=object_name,
-                                                      extra=extra)
+                obj = driver.upload_object_via_stream(
+                    iterator=iterator,
+                    container=container,
+                    object_name=object_name,
+                    extra=extra,
+                )
     # delete the remnants in the bucket, they do not exist locally
     while objs:
         object_name, obj = objs.popitem()
@@ -306,9 +319,15 @@ def update_serverwebroot(serverwebroot, repo_section):
     if options and options.quiet:
         rsyncargs += ['--quiet']
     if options and options.identity_file:
-        rsyncargs += ['-e', 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + options.identity_file]
+        rsyncargs += [
+            '-e',
+            'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + options.identity_file,
+        ]
     elif config and config.get('identity_file'):
-        rsyncargs += ['-e', 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + config['identity_file']]
+        rsyncargs += [
+            '-e',
+            'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + config['identity_file'],
+        ]
     url = serverwebroot['url']
     logging.info('rsyncing ' + repo_section + ' to ' + url)
     excludes = _get_index_excludes(repo_section)
@@ -319,8 +338,7 @@ def update_serverwebroot(serverwebroot, repo_section):
     # upload "current version" symlinks if requested
     if config and config.get('make_current_version_link') and repo_section == 'repo':
         links_to_upload = []
-        for f in glob.glob('*.apk') \
-                + glob.glob('*.apk.asc') + glob.glob('*.apk.sig'):
+        for f in glob.glob('*.apk') + glob.glob('*.apk.asc') + glob.glob('*.apk.sig'):
             if os.path.islink(f):
                 links_to_upload.append(f)
         if len(links_to_upload) > 0:
@@ -368,9 +386,11 @@ def sync_from_localcopy(repo_section, local_copy_dir):
     logging.info('Syncing from local_copy_dir to this repo.')
     # trailing slashes have a meaning in rsync which is not needed here, so
     # make sure both paths have exactly one trailing slash
-    common.local_rsync(options,
-                       os.path.join(local_copy_dir, repo_section).rstrip('/') + '/',
-                       repo_section.rstrip('/') + '/')
+    common.local_rsync(
+        options,
+        os.path.join(local_copy_dir, repo_section).rstrip('/') + '/',
+        repo_section.rstrip('/') + '/',
+    )
     offline_copy = os.path.join(local_copy_dir, BINARY_TRANSPARENCY_DIR)
     if os.path.exists(os.path.join(offline_copy, '.git')):
@@ -419,9 +439,11 @@ def update_servergitmirrors(servergitmirrors, repo_section):
     """
     import git
     from clint.textui import progress
-    if config.get('local_copy_dir') \
-            and not config.get('sync_from_local_copy_dir'):
-        logging.debug(_('Offline machine, skipping git mirror generation until `fdroid deploy`'))
+
+    if config.get('local_copy_dir') and not config.get('sync_from_local_copy_dir'):
+        logging.debug(
+            _('Offline machine, skipping git mirror generation until `fdroid deploy`')
+        )
         return
     # right now we support only 'repo' git-mirroring
@@ -438,19 +460,25 @@ def update_servergitmirrors(servergitmirrors, repo_section):
     dotgit_size = _get_size(dotgit)
     dotgit_over_limit = dotgit_size > config['git_mirror_size_limit']
     if os.path.isdir(dotgit) and dotgit_over_limit:
-        logging.warning(_('Deleting git-mirror history, repo is too big ({size} max {limit})')
-                        .format(size=dotgit_size, limit=config['git_mirror_size_limit']))
+        logging.warning(
+            _(
+                'Deleting git-mirror history, repo is too big ({size} max {limit})'
+            ).format(size=dotgit_size, limit=config['git_mirror_size_limit'])
+        )
         shutil.rmtree(dotgit)
     if options.no_keep_git_mirror_archive and dotgit_over_limit:
-        logging.warning(_('Deleting archive, repo is too big ({size} max {limit})')
-                        .format(size=dotgit_size, limit=config['git_mirror_size_limit']))
+        logging.warning(
+            _('Deleting archive, repo is too big ({size} max {limit})').format(
+                size=dotgit_size, limit=config['git_mirror_size_limit']
+            )
+        )
         archive_path = os.path.join(git_mirror_path, 'fdroid', 'archive')
         shutil.rmtree(archive_path, ignore_errors=True)
     # rsync is very particular about trailing slashes
-    common.local_rsync(options,
-                       repo_section.rstrip('/') + '/',
-                       git_repodir.rstrip('/') + '/')
+    common.local_rsync(
+        options, repo_section.rstrip('/') + '/', git_repodir.rstrip('/') + '/'
+    )
     # use custom SSH command if identity_file specified
     ssh_cmd = 'ssh -oBatchMode=yes'
@@ -488,6 +516,7 @@ def update_servergitmirrors(servergitmirrors, repo_section):
             def update(self, op_code, current, maximum=None, message=None):
                 if isinstance(maximum, float):
                     progressbar.show(current, maximum)
+
         progress = MyProgressPrinter()
     else:
         progress = None
@@ -537,10 +566,12 @@ def update_servergitmirrors(servergitmirrors, repo_section):
                 GIT_BRANCH, force=True, set_upstream=True, progress=progress
             )
             for pushinfo in pushinfos:
-                if pushinfo.flags & (git.remote.PushInfo.ERROR
-                                     | git.remote.PushInfo.REJECTED
-                                     | git.remote.PushInfo.REMOTE_FAILURE
-                                     | git.remote.PushInfo.REMOTE_REJECTED):
+                if pushinfo.flags & (
+                    git.remote.PushInfo.ERROR
+                    | git.remote.PushInfo.REJECTED
+                    | git.remote.PushInfo.REMOTE_FAILURE
+                    | git.remote.PushInfo.REMOTE_REJECTED
+                ):
                     # Show potentially useful messages from git remote
                     for line in progress.other_lines:
                         if line.startswith('remote:'):
@@ -561,6 +592,7 @@ def update_servergitmirrors(servergitmirrors, repo_section):
 def upload_to_android_observatory(repo_section):
     import requests
+
     requests  # stop unused import warning
     if options.verbose:
@@ -582,9 +614,12 @@ def upload_apk_to_android_observatory(path):
     from lxml.html import fromstring
     apkfilename = os.path.basename(path)
-    r = requests.post('https://androidobservatory.org/',
-                      data={'q': common.sha256sum(path), 'searchby': 'hash'},
-                      headers=net.HEADERS, timeout=300)
+    r = requests.post(
+        'https://androidobservatory.org/',
+        data={'q': common.sha256sum(path), 'searchby': 'hash'},
+        headers=net.HEADERS,
+        timeout=300,
+    )
     if r.status_code == 200:
         # from now on XPath will be used to retrieve the message in the HTML
         # androidobservatory doesn't have a nice API to talk with
@@ -601,22 +636,30 @@ def upload_apk_to_android_observatory(path):
         page = 'https://androidobservatory.org'
         if href:
-            message = (_('Found {apkfilename} at {url}')
-                       .format(apkfilename=apkfilename, url=(page + href)))
+            message = _('Found {apkfilename} at {url}').format(
+                apkfilename=apkfilename, url=(page + href)
+            )
             logging.debug(message)
             return
     # upload the file with a post request
-    logging.info(_('Uploading {apkfilename} to androidobservatory.org')
-                 .format(apkfilename=apkfilename))
-    r = requests.post('https://androidobservatory.org/upload',
-                      files={'apk': (apkfilename, open(path, 'rb'))},
-                      headers=net.HEADERS,
-                      allow_redirects=False, timeout=300)
+    logging.info(
+        _('Uploading {apkfilename} to androidobservatory.org').format(
+            apkfilename=apkfilename
+        )
+    )
+    r = requests.post(
+        'https://androidobservatory.org/upload',
+        files={'apk': (apkfilename, open(path, 'rb'))},
+        headers=net.HEADERS,
+        allow_redirects=False,
+        timeout=300,
+    )


 def upload_to_virustotal(repo_section, virustotal_apikey):
     import requests
     requests  # stop unused import warning
     if repo_section == 'repo':
@@ -635,25 +678,24 @@ def upload_to_virustotal(repo_section, virustotal_apikey):
             upload_apk_to_virustotal(virustotal_apikey, **package)


-def upload_apk_to_virustotal(virustotal_apikey, packageName, apkName, hash,
-                             versionCode, **kwargs):
+def upload_apk_to_virustotal(
+    virustotal_apikey, packageName, apkName, hash, versionCode, **kwargs
+):
     import requests
     logging.getLogger("urllib3").setLevel(logging.WARNING)
     logging.getLogger("requests").setLevel(logging.WARNING)
-    outputfilename = os.path.join('virustotal',
-                                  packageName + '_' + str(versionCode)
-                                  + '_' + hash + '.json')
+    outputfilename = os.path.join(
+        'virustotal', packageName + '_' + str(versionCode) + '_' + hash + '.json'
+    )
     if os.path.exists(outputfilename):
         logging.debug(apkName + ' results are in ' + outputfilename)
         return outputfilename
     repofilename = os.path.join('repo', apkName)
     logging.info('Checking if ' + repofilename + ' is on virustotal')
-    headers = {
-        "User-Agent": "F-Droid"
-    }
+    headers = {"User-Agent": "F-Droid"}
     if 'headers' in kwargs:
         for k, v in kwargs['headers'].items():
             headers[k] = v
@@ -700,8 +742,11 @@ def upload_apk_to_virustotal(virustotal_apikey, packageName, apkName, hash,
     size = os.path.getsize(repofilename)
     if size > 200000000:
         # VirusTotal API 200MB hard limit
-        logging.error(_('{path} more than 200MB, manually upload: {url}')
-                      .format(path=repofilename, url=manual_url))
+        logging.error(
+            _('{path} more than 200MB, manually upload: {url}').format(
+                path=repofilename, url=manual_url
+            )
+        )
     elif size > 32000000:
         # VirusTotal API requires fetching a URL to upload bigger files
         query_url = (
@@ -712,17 +757,21 @@ def upload_apk_to_virustotal(virustotal_apikey, packageName, apkName, hash,
         if r.status_code == 200:
             upload_url = r.json().get('upload_url')
         elif r.status_code == 403:
-            logging.error(_('VirusTotal API key cannot upload files larger than 32MB, '
-                            + 'use {url} to upload {path}.')
-                          .format(path=repofilename, url=manual_url))
+            logging.error(
+                _(
+                    'VirusTotal API key cannot upload files larger than 32MB, '
+                    + 'use {url} to upload {path}.'
+                ).format(path=repofilename, url=manual_url)
+            )
         else:
             r.raise_for_status()
     else:
         upload_url = 'https://www.virustotal.com/vtapi/v2/file/scan'
     if upload_url:
-        logging.info(_('Uploading {apkfilename} to virustotal')
-                     .format(apkfilename=repofilename))
+        logging.info(
+            _('Uploading {apkfilename} to virustotal').format(apkfilename=repofilename)
+        )
         r = requests.post(
             upload_url,
             data=apikey,
@@ -730,8 +779,11 @@ def upload_apk_to_virustotal(virustotal_apikey, packageName, apkName, hash,
             files={'file': (apkName, open(repofilename, 'rb'))},
             timeout=300,
         )
-        logging.debug(_('If this upload fails, try manually uploading to {url}')
-                      .format(url=manual_url))
+        logging.debug(
+            _('If this upload fails, try manually uploading to {url}').format(
+                url=manual_url
+            )
+        )
         r.raise_for_status()
         response = r.json()
         logging.info(response['verbose_msg'] + " " + response['permalink'])
@@ -756,8 +808,7 @@ def push_binary_transparency(git_repo_path, git_remote):
     """
     import git
-    logging.info(_('Pushing binary transparency log to {url}')
-                 .format(url=git_remote))
+    logging.info(_('Pushing binary transparency log to {url}').format(url=git_remote))
     if os.path.isdir(os.path.dirname(git_remote)):
         # from offline machine to thumbdrive
@@ -794,14 +845,29 @@ def main():
     parser = ArgumentParser()
     common.setup_global_opts(parser)
-    parser.add_argument("-i", "--identity-file", default=None,
-                        help=_("Specify an identity file to provide to SSH for rsyncing"))
-    parser.add_argument("--local-copy-dir", default=None,
-                        help=_("Specify a local folder to sync the repo to"))
-    parser.add_argument("--no-checksum", action="store_true", default=False,
-                        help=_("Don't use rsync checksums"))
-    parser.add_argument("--no-keep-git-mirror-archive", action="store_true", default=False,
-                        help=_("If a git mirror gets to big, allow the archive to be deleted"))
+    parser.add_argument(
+        "-i",
+        "--identity-file",
+        default=None,
+        help=_("Specify an identity file to provide to SSH for rsyncing"),
+    )
+    parser.add_argument(
+        "--local-copy-dir",
+        default=None,
+        help=_("Specify a local folder to sync the repo to"),
+    )
+    parser.add_argument(
+        "--no-checksum",
+        action="store_true",
+        default=False,
+        help=_("Don't use rsync checksums"),
+    )
+    parser.add_argument(
+        "--no-keep-git-mirror-archive",
+        action="store_true",
+        default=False,
+        help=_("If a git mirror gets to big, allow the archive to be deleted"),
+    )
     options = parser.parse_args()
     config = common.read_config(options)
@@ -822,17 +888,23 @@ def main():
             logging.error(_('local_copy_dir must be directory, not a file!'))
             sys.exit(1)
         if not os.path.exists(os.path.dirname(fdroiddir)):
-            logging.error(_('The root dir for local_copy_dir "{path}" does not exist!')
-                          .format(path=os.path.dirname(fdroiddir)))
+            logging.error(
+                _('The root dir for local_copy_dir "{path}" does not exist!').format(
+                    path=os.path.dirname(fdroiddir)
+                )
+            )
             sys.exit(1)
         if not os.path.isabs(fdroiddir):
             logging.error(_('local_copy_dir must be an absolute path!'))
             sys.exit(1)
         repobase = os.path.basename(fdroiddir)
         if standardwebroot and repobase != 'fdroid':
-            logging.error(_('local_copy_dir does not end with "fdroid", '
-                            + 'perhaps you meant: "{path}"')
-                          .format(path=fdroiddir + '/fdroid'))
+            logging.error(
+                _(
+                    'local_copy_dir does not end with "fdroid", '
+                    + 'perhaps you meant: "{path}"'
+                ).format(path=fdroiddir + '/fdroid')
+            )
             sys.exit(1)
         if local_copy_dir[-1] != '/':
             local_copy_dir += '/'
@@ -840,16 +912,20 @@ def main():
         if not os.path.exists(fdroiddir):
             os.mkdir(fdroiddir)
-    if not config.get('awsbucket') \
-            and not config.get('serverwebroot') \
-            and not config.get('servergitmirrors') \
-            and not config.get('androidobservatory') \
-            and not config.get('binary_transparency_remote') \
-            and not config.get('virustotal_apikey') \
-            and local_copy_dir is None:
-        logging.warning(_('No option set! Edit your config.yml to set at least one of these:')
-                        + '\nserverwebroot, servergitmirrors, local_copy_dir, awsbucket, '
-                        + 'virustotal_apikey, androidobservatory, or binary_transparency_remote')
+    if (
+        not config.get('awsbucket')
+        and not config.get('serverwebroot')
+        and not config.get('servergitmirrors')
+        and not config.get('androidobservatory')
+        and not config.get('binary_transparency_remote')
+        and not config.get('virustotal_apikey')
+        and local_copy_dir is None
+    ):
+        logging.warning(
+            _('No option set! Edit your config.yml to set at least one of these:')
+            + '\nserverwebroot, servergitmirrors, local_copy_dir, awsbucket, '
+            + 'virustotal_apikey, androidobservatory, or binary_transparency_remote'
+        )
         sys.exit(1)
     repo_sections = ['repo']
@@ -860,8 +936,10 @@ def main():
     if config['per_app_repos']:
         repo_sections += common.get_per_app_repos()
-    if os.path.isdir('unsigned') or (local_copy_dir is not None
-                                     and os.path.isdir(os.path.join(local_copy_dir, 'unsigned'))):
+    if os.path.isdir('unsigned') or (
+        local_copy_dir is not None
+        and os.path.isdir(os.path.join(local_copy_dir, 'unsigned'))
+    ):
         repo_sections.append('unsigned')
     for repo_section in repo_sections:
@@ -886,8 +964,7 @@ def main():
     binary_transparency_remote = config.get('binary_transparency_remote')
     if binary_transparency_remote:
-        push_binary_transparency(BINARY_TRANSPARENCY_DIR,
-                                 binary_transparency_remote)
+        push_binary_transparency(BINARY_TRANSPARENCY_DIR, binary_transparency_remote)
     common.write_status_json(common.setup_status_output(start_timestamp))
     sys.exit(0)

pyproject.toml

@@ -38,7 +38,6 @@ force-exclude = '''(
     | fdroidserver/build\.py
     | fdroidserver/checkupdates\.py
     | fdroidserver/common\.py
-    | fdroidserver/deploy\.py
     | fdroidserver/import_subcommand\.py
     | fdroidserver/index\.py
     | fdroidserver/metadata\.py