mirror of https://gitlab.com/fdroid/fdroidserver.git synced 2024-09-21 04:10:37 +02:00

chore(deploy): rename index_only variable to is_index_only for Python idiomaticness

proletarius101 2024-06-04 20:30:35 +08:00
parent b48e297ec2
commit 4c0e096a99


@@ -97,7 +97,7 @@ def _get_index_includes(base_dir):
     return index_includes


-def update_awsbucket(repo_section, index_only=False, verbose=False, quiet=False):
+def update_awsbucket(repo_section, is_index_only=False, verbose=False, quiet=False):
     """Upload the contents of the directory `repo_section` (including subdirectories) to the AWS S3 "bucket".

     The contents of that subdir of the
@@ -117,26 +117,26 @@ def update_awsbucket(repo_section, index_only=False, verbose=False, quiet=False)
             logging.warning(
                 'No syncing tool set in config.yml!. Defaulting to using s3cmd'
             )
-            update_awsbucket_s3cmd(repo_section, index_only)
+            update_awsbucket_s3cmd(repo_section, is_index_only)
         if config['s3cmd'] is True and config['rclone'] is True:
             logging.warning(
                 'Both syncing tools set in config.yml!. Defaulting to using s3cmd'
             )
-            update_awsbucket_s3cmd(repo_section, index_only)
+            update_awsbucket_s3cmd(repo_section, is_index_only)
         if config['s3cmd'] is True and config['rclone'] is not True:
-            update_awsbucket_s3cmd(repo_section, index_only)
+            update_awsbucket_s3cmd(repo_section, is_index_only)
         if config['rclone'] is True and config['s3cmd'] is not True:
             update_remote_storage_with_rclone(repo_section, verbose, quiet)
     elif common.set_command_in_config('s3cmd'):
-        update_awsbucket_s3cmd(repo_section, index_only)
+        update_awsbucket_s3cmd(repo_section, is_index_only)
     elif common.set_command_in_config('rclone'):
         update_remote_storage_with_rclone(repo_section, verbose, quiet)
     else:
-        update_awsbucket_libcloud(repo_section, index_only)
+        update_awsbucket_libcloud(repo_section, is_index_only)


-def update_awsbucket_s3cmd(repo_section, index_only=False):
+def update_awsbucket_s3cmd(repo_section, is_index_only=False):
     """Upload using the CLI tool s3cmd, which provides rsync-like sync.

     The upload is done in multiple passes to reduce the chance of
@@ -190,7 +190,7 @@ def update_awsbucket_s3cmd(repo_section, index_only=False):
             )
         )

-    if index_only:
+    if is_index_only:
         logging.debug(_('s3cmd syncs indexes from {path} to {url} and deletes removed')
                       .format(path=repo_section, url=s3url))
         sync_indexes_flags = []
@@ -357,7 +357,7 @@ def update_remote_storage_with_rclone(repo_section, verbose=False, quiet=False):
            raise FDroidException()


-def update_awsbucket_libcloud(repo_section, index_only=False):
+def update_awsbucket_libcloud(repo_section, is_index_only=False):
     """No summary.

     Upload the contents of the directory `repo_section` (including
@@ -405,7 +405,7 @@ def update_awsbucket_libcloud(repo_section, index_only=False):
         if obj.name.startswith(upload_dir + '/'):
             objs[obj.name] = obj

-    if index_only:
+    if is_index_only:
         index_files = [f"{os.getcwd()}/{name}" for name in _get_index_file_paths(repo_section)]
         files_to_upload = [os.path.join(root, name) for root, dirs, files in os.walk(os.path.join(os.getcwd(), repo_section)) for name in files]
         files_to_upload = list(set(files_to_upload) & set(index_files))
@@ -507,9 +507,9 @@ def update_serverwebroot(serverwebroot, repo_section):
             'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + config['identity_file'],
         ]
     url = serverwebroot['url']
-    index_only = serverwebroot.get('index_only', False)
+    is_index_only = serverwebroot.get('index_only', False)
     logging.info('rsyncing ' + repo_section + ' to ' + url)
-    if index_only:
+    if is_index_only:
         rsyncargs += _get_index_file_paths(repo_section)
         rsyncargs += [f'{url}/{repo_section}/']
         logging.info(rsyncargs)
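The last hunk shows that only the local variable is renamed: the `index_only` key read from each `serverwebroot` entry in config.yml keeps its name, so existing configurations are unaffected. A minimal sketch of that lookup, with a made-up serverwebroot entry (the URL is a placeholder, not taken from this commit):

# Minimal sketch, not part of the commit: the config key stays `index_only`;
# only the local Python variable is now called is_index_only.
serverwebroot = {
    'url': 'me@example.org:/var/www/fdroid',  # placeholder value
    'index_only': True,
}

is_index_only = serverwebroot.get('index_only', False)  # same lookup as in update_serverwebroot()
print(is_index_only)  # True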