2016-01-04 16:33:20 +01:00
|
|
|
#!/usr/bin/env python3
|
2012-03-11 14:17:37 +01:00
|
|
|
#
|
|
|
|
# server.py - part of the FDroid server tools
|
2015-01-11 16:33:41 +01:00
|
|
|
# Copyright (C) 2010-15, Ciaran Gultnieks, ciaran@ciarang.com
|
2012-03-11 14:17:37 +01:00
|
|
|
#
|
|
|
|
# This program is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU Affero General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU Affero General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU Affero General Public License
|
|
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
|
|
|
|
import sys
|
2014-10-11 05:50:27 +02:00
|
|
|
import glob
|
2014-04-18 00:20:36 +02:00
|
|
|
import hashlib
|
2012-03-11 14:17:37 +01:00
|
|
|
import os
|
2014-07-03 02:54:52 +02:00
|
|
|
import paramiko
|
|
|
|
import pwd
|
2012-03-11 14:17:37 +01:00
|
|
|
import subprocess
|
2015-09-04 11:37:05 +02:00
|
|
|
from argparse import ArgumentParser
|
2014-01-27 17:08:54 +01:00
|
|
|
import logging
|
2017-02-10 18:38:25 +01:00
|
|
|
import shutil
|
2016-01-04 17:37:35 +01:00
|
|
|
|
|
|
|
from . import common
|
2013-10-31 16:37:39 +01:00
|
|
|
|
2013-11-01 12:10:57 +01:00
|
|
|
config = None  # populated by common.read_config() inside main()

options = None  # populated by ArgumentParser.parse_args() inside main()
|
2012-03-11 14:17:37 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2014-04-16 04:41:28 +02:00
|
|
|
def update_awsbucket(repo_section):
    '''
    Upload the contents of the directory `repo_section` (including
    subdirectories) to the AWS S3 "bucket". The contents of that subdir of the
    bucket will first be deleted.

    Files already in the bucket are skipped when both size and MD5 hash
    match the local copy; anything left over in the bucket afterwards is
    deleted, so the bucket subdir mirrors the local directory exactly.

    Requires AWS credentials set in config.py: awsaccesskeyid, awssecretkey
    '''

    logging.debug('Syncing "' + repo_section + '" to Amazon S3 bucket "'
                  + config['awsbucket'] + '"')

    # depend on libcloud only when awsbucket is actually configured
    import libcloud.security
    libcloud.security.VERIFY_SSL_CERT = True
    from libcloud.storage.types import Provider, ContainerDoesNotExistError
    from libcloud.storage.providers import get_driver

    if not config.get('awsaccesskeyid') or not config.get('awssecretkey'):
        logging.error('To use awsbucket, you must set awssecretkey and awsaccesskeyid in config.py!')
        sys.exit(1)
    awsbucket = config['awsbucket']

    cls = get_driver(Provider.S3)
    driver = cls(config['awsaccesskeyid'], config['awssecretkey'])
    try:
        container = driver.get_container(container_name=awsbucket)
    except ContainerDoesNotExistError:
        container = driver.create_container(container_name=awsbucket)
        logging.info('Created new container "' + container.name + '"')

    upload_dir = 'fdroid/' + repo_section
    # index the existing remote objects under this subdir by name so each
    # local file can be checked against its remote counterpart
    objs = dict()
    for obj in container.list_objects():
        if obj.name.startswith(upload_dir + '/'):
            objs[obj.name] = obj

    for root, _, files in os.walk(os.path.join(os.getcwd(), repo_section)):
        for name in files:
            upload = False
            file_to_upload = os.path.join(root, name)
            object_name = 'fdroid/' + os.path.relpath(file_to_upload, os.getcwd())
            if object_name not in objs:
                upload = True
            else:
                obj = objs.pop(object_name)
                if obj.size != os.path.getsize(file_to_upload):
                    upload = True
                else:
                    # if the sizes match, then compare by MD5
                    md5 = hashlib.md5()
                    with open(file_to_upload, 'rb') as f:
                        while True:
                            data = f.read(8192)
                            if not data:
                                break
                            md5.update(data)
                    if obj.hash != md5.hexdigest():
                        s3url = 's3://' + awsbucket + '/' + obj.name
                        logging.info(' deleting ' + s3url)
                        if not driver.delete_object(obj):
                            # logging.warn is a deprecated alias of warning
                            logging.warning('Could not delete ' + s3url)
                        upload = True

            if upload:
                logging.debug(' uploading "' + file_to_upload + '"...')
                extra = {'acl': 'public-read'}
                if file_to_upload.endswith('.sig'):
                    extra['content_type'] = 'application/pgp-signature'
                elif file_to_upload.endswith('.asc'):
                    extra['content_type'] = 'application/pgp-signature'
                logging.info(' uploading ' + os.path.relpath(file_to_upload)
                             + ' to s3://' + awsbucket + '/' + object_name)
                with open(file_to_upload, 'rb') as iterator:
                    obj = driver.upload_object_via_stream(iterator=iterator,
                                                          container=container,
                                                          object_name=object_name,
                                                          extra=extra)
    # delete the remnants in the bucket, they do not exist locally
    while objs:
        object_name, obj = objs.popitem()
        s3url = 's3://' + awsbucket + '/' + object_name
        if object_name.startswith(upload_dir):
            logging.warning(' deleting ' + s3url)
            driver.delete_object(obj)
        else:
            logging.info(' skipping ' + s3url)
|
2013-12-02 23:29:51 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2014-07-14 21:03:58 +02:00
|
|
|
def update_serverwebroot(serverwebroot, repo_section):
    """Push `repo_section` to one `serverwebroot` destination via rsync.

    The upload happens in two passes: first everything except the index
    files with deletion delayed, then a full pass that includes them.
    That way clients always see a consistent repo while the transfer runs.
    Exits the whole process on any rsync failure.
    """
    # use a checksum comparison for accurate comparisons on different
    # filesystems, for example, FAT has a low resolution timestamp
    rsyncargs = ['rsync', '--archive', '--delete-after', '--safe-links']
    if not options.no_checksum:
        rsyncargs.append('--checksum')
    if options.verbose:
        rsyncargs.append('--verbose')
    if options.quiet:
        rsyncargs.append('--quiet')
    if options.identity_file is not None:
        rsyncargs.extend(['-e', 'ssh -i ' + options.identity_file])
    if 'identity_file' in config:
        rsyncargs.extend(['-e', 'ssh -i ' + config['identity_file']])

    indexxml = os.path.join(repo_section, 'index.xml')
    indexjar = os.path.join(repo_section, 'index.jar')

    # Upload the first time without the index files and delay the deletion as
    # much as possible, that keeps the repo functional while this update is
    # running. Then once it is complete, rerun the command again to upload
    # the index files. Always using the same target with rsync allows for
    # very strict settings on the receiving server, you can literally specify
    # the one rsync command that is allowed to run in ~/.ssh/authorized_keys.
    # (serverwebroot is guaranteed to have a trailing slash in common.py)
    logging.info('rsyncing ' + repo_section + ' to ' + serverwebroot)
    excludes = ['--exclude', indexxml, '--exclude', indexjar]
    if subprocess.call(rsyncargs + excludes + [repo_section, serverwebroot]) != 0:
        sys.exit(1)
    if subprocess.call(rsyncargs + [repo_section, serverwebroot]) != 0:
        sys.exit(1)

    # upload "current version" symlinks if requested
    if config['make_current_version_link'] and repo_section == 'repo':
        links_to_upload = [f
                           for f in glob.glob('*.apk')
                           + glob.glob('*.apk.asc') + glob.glob('*.apk.sig')
                           if os.path.islink(f)]
        if links_to_upload:
            if subprocess.call(rsyncargs + links_to_upload + [serverwebroot]) != 0:
                sys.exit(1)
|
2012-03-11 14:17:37 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2014-06-26 20:18:29 +02:00
|
|
|
def _local_sync(fromdir, todir):
    """Mirror `fromdir` into `todir` with rsync, exiting on failure.

    Uses strict flags (one filesystem, delete extraneous files, normalize
    permissions) suitable for syncing a repo to/from a local copy dir.
    """
    rsyncargs = ['rsync', '--recursive', '--safe-links', '--times', '--perms',
                 '--one-file-system', '--delete', '--chmod=Da+rx,Fa-x,a+r,u+w']
    # use stricter rsync checking on all files since people using offline mode
    # are already prioritizing security above ease and speed
    if not options.no_checksum:
        rsyncargs.append('--checksum')
    if options.verbose:
        rsyncargs.append('--verbose')
    if options.quiet:
        rsyncargs.append('--quiet')
    command = rsyncargs + [fromdir, todir]
    logging.debug(' '.join(command))
    if subprocess.call(command) != 0:
        sys.exit(1)
|
|
|
|
|
|
|
|
|
2014-06-26 20:18:29 +02:00
|
|
|
def sync_from_localcopy(repo_section, local_copy_dir):
    """Pull `repo_section` from the local copy dir into the current repo."""
    logging.info('Syncing from local_copy_dir to this repo.')
    # trailing slashes have a meaning in rsync which is not needed here, so
    # make sure both paths have exactly one trailing slash
    source = os.path.join(local_copy_dir, repo_section).rstrip('/') + '/'
    target = repo_section.rstrip('/') + '/'
    _local_sync(source, target)
|
2014-06-26 20:18:29 +02:00
|
|
|
|
|
|
|
|
|
|
|
def update_localcopy(repo_section, local_copy_dir):
    """Push `repo_section` into the local copy dir."""
    # local_copy_dir is guaranteed to have a trailing slash in main() below
    _local_sync(repo_section, local_copy_dir)
|
|
|
|
|
|
|
|
|
2017-02-10 18:38:25 +01:00
|
|
|
def update_servergitmirrors(servergitmirrors, repo_section):
    """Push the 'repo' section to every configured git mirror URL.

    Rebuilds a throwaway git repo under ./git-mirror/ each run, syncs the
    repo contents into it, commits, and force-pushes master to every
    mirror, overwriting any remote history. Other sections are ignored.
    """
    # depend on GitPython only if users set a git mirror
    import git

    # right now we support only 'repo' git-mirroring
    if repo_section != 'repo':
        return

    # create a new git-mirror folder, removing any previous one first
    repo_dir = os.path.join('.', 'git-mirror/')
    if os.path.isdir(repo_dir):
        shutil.rmtree(repo_dir)

    mirror_repo = git.Repo.init(repo_dir)

    # register one remote per mirror, named after the URL's hostname
    for remote_url in servergitmirrors:
        remote_name = remote_url.split("/")[2]
        mirror_repo.create_remote(remote_name, remote_url)
        logging.info('Mirroring to: ' + remote_url)

    # copy local 'repo' to 'git-mirror/fdroid/repo directory' with _local_sync
    fdroid_repo_path = os.path.join(repo_dir, "fdroid")
    _local_sync(repo_section, fdroid_repo_path)

    # sadly index.add don't allow the --all parameter
    mirror_repo.git.add(all=True)
    mirror_repo.index.commit("fdroidserver git-mirror")

    # push for every remote. This will overwrite the git history
    for remote in mirror_repo.remotes:
        remote.push('master', force=True, set_upstream=True)
|
|
|
|
|
|
|
|
|
2017-03-01 22:34:55 +01:00
|
|
|
def upload_to_android_observatory(repo_section):
    """Upload every APK in the 'repo' section to androidobservatory.org.

    For each APK the result message and app URL are scraped out of the
    HTML response and logged. Other sections are ignored.
    """
    # depend on requests and lxml only if users enable AO
    import requests
    from lxml.html import fromstring

    if repo_section == 'repo':
        for fpath in glob.glob(os.path.join(repo_section, '*.apk')):
            fname = os.path.basename(fpath)
            logging.info('Uploading ' + fname + ' to androidobservatory.org')

            # upload the file with a post request; use a context manager so
            # the file handle is closed instead of leaked until GC
            with open(fpath, 'rb') as apk_file:
                r = requests.post('https://androidobservatory.org/upload',
                                  files={'apk': (fname, apk_file)})
            response = r.text
            page = r.url

            # from now on XPath will be used to retrieve the message in the HTML
            # androidobservatory doesn't have a nice API to talk with
            # so we must scrape the page content
            tree = fromstring(response)
            alert = tree.xpath("//html/body/div[@class='container content-container']/div[@class='alert alert-info']")[0]

            message = ""
            appurl = page
            for el in alert:
                # if the application was added successfully we retrieve the url
                # if the application was already uploaded we use the redirect page url
                if el.attrib.get("href") is not None:
                    appurl = page + el.attrib["href"][1:]
                    # lxml .text/.tail are None when absent, which would
                    # crash string concatenation; treat None as empty
                    message += (el.text or "").replace(" here", "") + (el.tail or "")
                else:
                    message += el.tail or ""
            message = message.strip() + " " + appurl
            logging.info(message)
|
|
|
|
|
|
|
|
|
2017-03-03 13:44:55 +01:00
|
|
|
def upload_to_virustotal(repo_section, vt_apikey):
    """Submit every APK in the 'repo' section to VirusTotal for scanning.

    `vt_apikey` is the VirusTotal API key from config.py. Logs the scan
    response message and permalink per APK. Other sections are ignored.
    """
    # depend on requests only if users enable virustotal
    import requests

    if repo_section == 'repo':
        for fpath in glob.glob(os.path.join(repo_section, '*.apk')):
            fname = os.path.basename(fpath)
            logging.info('Uploading ' + fname + ' to virustotal.com')

            # upload the file with a post request; the context manager
            # closes the file handle instead of leaking it until GC
            params = {'apikey': vt_apikey}
            with open(fpath, 'rb') as apk_file:
                files = {'file': (fname, apk_file)}
                r = requests.post('https://www.virustotal.com/vtapi/v2/file/scan',
                                  files=files, params=params)
            response = r.json()

            logging.info(response['verbose_msg'] + " " + response['permalink'])
|
|
|
|
|
|
|
|
|
2014-04-16 02:00:31 +02:00
|
|
|
def main():
    """Entry point: parse the command line and run 'init' or 'update'.

    'init' creates the remote directory layout over SFTP for each
    configured serverwebroot. 'update' pushes each repo section to every
    configured destination (local copy dir, serverwebroot, git mirrors,
    AWS bucket, Android Observatory, VirusTotal). Exits non-zero on any
    configuration or transfer error.
    """
    global config, options

    # Parse command line...
    parser = ArgumentParser()
    common.setup_global_opts(parser)
    parser.add_argument("command", help="command to execute, either 'init' or 'update'")
    parser.add_argument("-i", "--identity-file", default=None,
                        help="Specify an identity file to provide to SSH for rsyncing")
    parser.add_argument("--local-copy-dir", default=None,
                        help="Specify a local folder to sync the repo to")
    parser.add_argument("--sync-from-local-copy-dir", action="store_true", default=False,
                        help="Before uploading to servers, sync from local copy dir")
    parser.add_argument("--no-checksum", action="store_true", default=False,
                        help="Don't use rsync checksums")
    options = parser.parse_args()

    config = common.read_config(options)

    if options.command != 'init' and options.command != 'update':
        logging.critical("The only commands currently supported are 'init' and 'update'")
        sys.exit(1)

    if config.get('nonstandardwebroot') is True:
        standardwebroot = False
    else:
        standardwebroot = True

    # validate each serverwebroot entry before doing any work
    for serverwebroot in config.get('serverwebroot', []):
        # this supports both an ssh host:path and just a path
        s = serverwebroot.rstrip('/').split(':')
        if len(s) == 1:
            fdroiddir = s[0]
        elif len(s) == 2:
            host, fdroiddir = s
        else:
            logging.error('Malformed serverwebroot line: ' + serverwebroot)
            sys.exit(1)
        repobase = os.path.basename(fdroiddir)
        if standardwebroot and repobase != 'fdroid':
            # NOTE: str.rstrip(repobase) would strip a *character set*, not
            # the suffix, mangling the suggested path — slice the suffix off
            trimmed = serverwebroot.rstrip('/')
            logging.error('serverwebroot path does not end with "fdroid", '
                          + 'perhaps you meant one of these:\n\t'
                          + trimmed + '/fdroid\n\t'
                          + trimmed[:-len(repobase)] + 'fdroid')
            sys.exit(1)

    # command-line --local-copy-dir overrides the config.py setting
    if options.local_copy_dir is not None:
        local_copy_dir = options.local_copy_dir
    elif config.get('local_copy_dir'):
        local_copy_dir = config['local_copy_dir']
    else:
        local_copy_dir = None
    if local_copy_dir is not None:
        fdroiddir = local_copy_dir.rstrip('/')
        if os.path.exists(fdroiddir) and not os.path.isdir(fdroiddir):
            logging.error('local_copy_dir must be directory, not a file!')
            sys.exit(1)
        if not os.path.exists(os.path.dirname(fdroiddir)):
            logging.error('The root dir for local_copy_dir "'
                          + os.path.dirname(fdroiddir)
                          + '" does not exist!')
            sys.exit(1)
        if not os.path.isabs(fdroiddir):
            logging.error('local_copy_dir must be an absolute path!')
            sys.exit(1)
        repobase = os.path.basename(fdroiddir)
        if standardwebroot and repobase != 'fdroid':
            logging.error('local_copy_dir does not end with "fdroid", '
                          + 'perhaps you meant: ' + fdroiddir + '/fdroid')
            sys.exit(1)
        # normalize to exactly one trailing slash, as rsync expects
        if local_copy_dir[-1] != '/':
            local_copy_dir += '/'
        local_copy_dir = local_copy_dir.replace('//', '/')
        if not os.path.exists(fdroiddir):
            os.mkdir(fdroiddir)

    if not config.get('awsbucket') \
            and not config.get('serverwebroot') \
            and not config.get('servergitmirrors') \
            and not config.get('uploadto_androidobservatory') \
            and not config.get('virustotal_apikey') \
            and local_copy_dir is None:
        # logging.warn is a deprecated alias of warning
        logging.warning('No option set! Edit your config.py to set at least one among:\n'
                        + 'serverwebroot, servergitmirrors, local_copy_dir, awsbucket, virustotal_apikey or uploadto_androidobservatory')
        sys.exit(1)

    repo_sections = ['repo']
    if config['archive_older'] != 0:
        repo_sections.append('archive')
        if not os.path.exists('archive'):
            os.mkdir('archive')
    if config['per_app_repos']:
        repo_sections += common.get_per_app_repos()

    if options.command == 'init':
        ssh = paramiko.SSHClient()
        ssh.load_system_host_keys()
        for serverwebroot in config.get('serverwebroot', []):
            sshstr, remotepath = serverwebroot.rstrip('/').split(':')
            if sshstr.find('@') >= 0:
                username, hostname = sshstr.split('@')
            else:
                username = pwd.getpwuid(os.getuid())[0]  # get effective uid
                hostname = sshstr
            ssh.connect(hostname, username=username)
            sftp = ssh.open_sftp()
            if os.path.basename(remotepath) \
                    not in sftp.listdir(os.path.dirname(remotepath)):
                sftp.mkdir(remotepath, mode=0o755)
            for repo_section in repo_sections:
                repo_path = os.path.join(remotepath, repo_section)
                if os.path.basename(repo_path) \
                        not in sftp.listdir(remotepath):
                    sftp.mkdir(repo_path, mode=0o755)
            sftp.close()
            ssh.close()
    elif options.command == 'update':
        for repo_section in repo_sections:
            if local_copy_dir is not None:
                if config['sync_from_local_copy_dir'] and os.path.exists(repo_section):
                    sync_from_localcopy(repo_section, local_copy_dir)
                else:
                    update_localcopy(repo_section, local_copy_dir)
            for serverwebroot in config.get('serverwebroot', []):
                update_serverwebroot(serverwebroot, repo_section)
            if config.get('servergitmirrors', []):
                # update_servergitmirrors will take care of multiple mirrors so don't need a foreach
                servergitmirrors = config.get('servergitmirrors', [])
                update_servergitmirrors(servergitmirrors, repo_section)
            if config.get('awsbucket'):
                update_awsbucket(repo_section)
            if config.get('uploadto_androidobservatory'):
                upload_to_android_observatory(repo_section)
            if config.get('virustotal_apikey'):
                upload_to_virustotal(repo_section, config.get('virustotal_apikey'))

    sys.exit(0)
|
2012-03-11 14:17:37 +01:00
|
|
|
|
2016-11-15 21:55:06 +01:00
|
|
|
|
2012-03-11 14:17:37 +01:00
|
|
|
# Allow running this module directly as a script.
if __name__ == "__main__":
    main()
|