Mirror of https://gitlab.com/fdroid/fdroidserver.git, synced 2024-11-13 02:30:11 +01:00
All callable scripts now implement main()
This commit is contained in:
parent 4e5b4fa77c
commit 00abc9527d
@@ -27,9 +27,6 @@ from optparse import OptionParser
import HTMLParser
import common

# Check for a new version by looking at the Google market.
# Returns (None, "a message") if this didn't work, or (version, vercode) for

@@ -66,48 +63,55 @@ def check_market(app):

def main():

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-p", "--package", default=None,
                      help="Build only the specified package")
    (options, args) = parser.parse_args()

    #Read configuration...
    execfile('config.py')

    # Get all apps...
    apps = common.read_metadata(options.verbose)

    html_parser = HTMLParser.HTMLParser()

    for app in apps:

        if options.package and options.package != app['id']:
            # Silent skip...
            pass
        else:
            print "Processing " + app['id'] + '...'

            mode = app['Update Check Mode']
            if mode == 'Market':
                (version, vercode) = check_market(app)
            elif mode == 'None':
                version = None
                vercode = 'Checking disabled'
            else:
                version = None
                vercode = 'Invalid update check method'

            if not version:
                print "..." + vercode
            elif vercode == app['Current Version Code'] and version == app['Current Version']:
                print "...up to date"
            else:
                print '...updating to version:' + version + ' vercode:' + vercode
                app['Current Version'] = version
                app['Current Version Code'] = vercode
                metafile = os.path.join('metadata', app['id'] + '.txt')
                common.write_metadata(metafile, app)

    print "Finished."

if __name__ == "__main__":
    main()
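The change above is the same in every script this commit touches: work that previously ran at module level moves into a main() function that only executes when the file is run directly. A minimal sketch of that pattern, as a standalone toy script rather than anything in fdroidserver:

from optparse import OptionParser


def main():
    # Parse command line...
    parser = OptionParser()
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    (options, args) = parser.parse_args()
    if options.verbose:
        print("verbose mode enabled")


if __name__ == "__main__":
    # Runs only when executed as a script, not when imported.
    main()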
import.py (386)
@@ -25,219 +25,225 @@ import re
|
||||
import urllib
|
||||
from optparse import OptionParser
|
||||
|
||||
#Read configuration...
|
||||
repo_name = None
|
||||
repo_description = None
|
||||
repo_icon = None
|
||||
repo_url = None
|
||||
execfile('config.py')
|
||||
def main():
|
||||
|
||||
import common
|
||||
# Read configuration...
|
||||
repo_name = None
|
||||
repo_description = None
|
||||
repo_icon = None
|
||||
repo_url = None
|
||||
execfile('config.py')
|
||||
|
||||
# Parse command line...
|
||||
parser = OptionParser()
|
||||
parser.add_option("-u", "--url", default=None,
|
||||
help="Project URL to import from.")
|
||||
parser.add_option("-s", "--subdir", default=None,
|
||||
help="Path to main android project subdirectory, if not in root.")
|
||||
(options, args) = parser.parse_args()
|
||||
import common
|
||||
|
||||
if not options.url:
|
||||
print "Specify project url."
|
||||
sys.exit(1)
|
||||
url = options.url
|
||||
# Parse command line...
|
||||
parser = OptionParser()
|
||||
parser.add_option("-u", "--url", default=None,
|
||||
help="Project URL to import from.")
|
||||
parser.add_option("-s", "--subdir", default=None,
|
||||
help="Path to main android project subdirectory, if not in root.")
|
||||
(options, args) = parser.parse_args()
|
||||
|
||||
tmp_dir = 'tmp'
|
||||
if not os.path.isdir(tmp_dir):
|
||||
print "Creating temporary directory"
|
||||
os.makedirs(tmp_dir)
|
||||
|
||||
# Get all apps...
|
||||
apps = common.read_metadata()
|
||||
|
||||
# Figure out what kind of project it is...
|
||||
projecttype = None
|
||||
issuetracker = None
|
||||
license = None
|
||||
if url.startswith('https://github.com'):
|
||||
projecttype = 'github'
|
||||
repo = url + '.git'
|
||||
repotype = 'git'
|
||||
sourcecode = url
|
||||
elif url.startswith('http://code.google.com/p/'):
|
||||
if not url.endswith('/'):
|
||||
print "Expected format for googlecode url is http://code.google.com/p/PROJECT/"
|
||||
if not options.url:
|
||||
print "Specify project url."
|
||||
sys.exit(1)
|
||||
projecttype = 'googlecode'
|
||||
sourcecode = url + 'source/checkout'
|
||||
issuetracker = url + 'issues/list'
|
||||
url = options.url
|
||||
|
||||
# Figure out the repo type and adddress...
|
||||
req = urllib.urlopen(sourcecode)
|
||||
if req.getcode() != 200:
|
||||
print 'Unable to find source at ' + sourcecode + ' - return code ' + str(req.getcode())
|
||||
sys.exit(1)
|
||||
page = req.read()
|
||||
repotype = None
|
||||
index = page.find('hg clone')
|
||||
if index != -1:
|
||||
repotype = 'hg'
|
||||
repo = page[index + 9:]
|
||||
index = repo.find('<')
|
||||
if index == -1:
|
||||
print "Error while getting repo address"
|
||||
tmp_dir = 'tmp'
|
||||
if not os.path.isdir(tmp_dir):
|
||||
print "Creating temporary directory"
|
||||
os.makedirs(tmp_dir)
|
||||
|
||||
# Get all apps...
|
||||
apps = common.read_metadata()
|
||||
|
||||
# Figure out what kind of project it is...
|
||||
projecttype = None
|
||||
issuetracker = None
|
||||
license = None
|
||||
if url.startswith('https://github.com'):
|
||||
projecttype = 'github'
|
||||
repo = url + '.git'
|
||||
repotype = 'git'
|
||||
sourcecode = url
|
||||
elif url.startswith('http://code.google.com/p/'):
|
||||
if not url.endswith('/'):
|
||||
print "Expected format for googlecode url is http://code.google.com/p/PROJECT/"
|
||||
sys.exit(1)
|
||||
repo = repo[:index]
|
||||
if not repotype:
|
||||
index=page.find('git clone')
|
||||
projecttype = 'googlecode'
|
||||
sourcecode = url + 'source/checkout'
|
||||
issuetracker = url + 'issues/list'
|
||||
|
||||
# Figure out the repo type and adddress...
|
||||
req = urllib.urlopen(sourcecode)
|
||||
if req.getcode() != 200:
|
||||
print 'Unable to find source at ' + sourcecode + ' - return code ' + str(req.getcode())
|
||||
sys.exit(1)
|
||||
page = req.read()
|
||||
repotype = None
|
||||
index = page.find('hg clone')
|
||||
if index != -1:
|
||||
repotype = 'git'
|
||||
repo = page[index + 10:]
|
||||
repotype = 'hg'
|
||||
repo = page[index + 9:]
|
||||
index = repo.find('<')
|
||||
if index == -1:
|
||||
print "Error while getting repo address"
|
||||
sys.exit(1)
|
||||
repo = repo[:index]
|
||||
if not repotype:
|
||||
index=page.find('svn checkout')
|
||||
if index != -1:
|
||||
repotype = 'git-svn'
|
||||
repo = page[index + 13:]
|
||||
prefix = '<strong><em>http</em></strong>'
|
||||
if not repo.startswith(prefix):
|
||||
print "Unexpected checkout instructions format"
|
||||
sys.exit(1)
|
||||
repo = 'http' + repo[len(prefix):]
|
||||
index = repo.find('<')
|
||||
if index == -1:
|
||||
print "Error while getting repo address - no end tag? '" + repo + "'"
|
||||
sys.exit(1)
|
||||
repo = repo[:index]
|
||||
index = repo.find(' ')
|
||||
if index == -1:
|
||||
print "Error while getting repo address - no space? '" + repo + "'"
|
||||
sys.exit(1)
|
||||
repo = repo[:index]
|
||||
if not repotype:
|
||||
print "Unable to determine vcs type"
|
||||
if not repotype:
|
||||
index=page.find('git clone')
|
||||
if index != -1:
|
||||
repotype = 'git'
|
||||
repo = page[index + 10:]
|
||||
index = repo.find('<')
|
||||
if index == -1:
|
||||
print "Error while getting repo address"
|
||||
sys.exit(1)
|
||||
repo = repo[:index]
|
||||
if not repotype:
|
||||
index=page.find('svn checkout')
|
||||
if index != -1:
|
||||
repotype = 'git-svn'
|
||||
repo = page[index + 13:]
|
||||
prefix = '<strong><em>http</em></strong>'
|
||||
if not repo.startswith(prefix):
|
||||
print "Unexpected checkout instructions format"
|
||||
sys.exit(1)
|
||||
repo = 'http' + repo[len(prefix):]
|
||||
index = repo.find('<')
|
||||
if index == -1:
|
||||
print "Error while getting repo address - no end tag? '" + repo + "'"
|
||||
sys.exit(1)
|
||||
repo = repo[:index]
|
||||
index = repo.find(' ')
|
||||
if index == -1:
|
||||
print "Error while getting repo address - no space? '" + repo + "'"
|
||||
sys.exit(1)
|
||||
repo = repo[:index]
|
||||
if not repotype:
|
||||
print "Unable to determine vcs type"
|
||||
sys.exit(1)
|
||||
|
||||
# Figure out the license...
|
||||
req = urllib.urlopen(url)
|
||||
if req.getcode() != 200:
|
||||
print 'Unable to find project page at ' + sourcecode + ' - return code ' + str(req.getcode())
|
||||
sys.exit(1)
|
||||
page = req.read()
|
||||
index = page.find('Code license')
|
||||
if index == -1:
|
||||
print "Couldn't find license data"
|
||||
sys.exit(1)
|
||||
ltext = page[index:]
|
||||
lprefix = 'rel="nofollow">'
|
||||
index = ltext.find(lprefix)
|
||||
if index == -1:
|
||||
print "Couldn't find license text"
|
||||
sys.exit(1)
|
||||
ltext = ltext[index + len(lprefix):]
|
||||
index = ltext.find('<')
|
||||
if index == -1:
|
||||
print "License text not formatted as expected"
|
||||
sys.exit(1)
|
||||
ltext = ltext[:index]
|
||||
if ltext == 'GNU GPL v3':
|
||||
license = 'GPLv3'
|
||||
elif ltext == 'GNU GPL v2':
|
||||
license = 'GPLv2'
|
||||
elif ltext == 'Apache License 2.0':
|
||||
license = 'Apache2'
|
||||
else:
|
||||
print "License " + ltext + " is not recognised"
|
||||
sys.exit(1)
|
||||
|
||||
if not projecttype:
|
||||
print "Unable to determine the project type."
|
||||
sys.exit(1)
|
||||
|
||||
# Figure out the license...
|
||||
req = urllib.urlopen(url)
|
||||
if req.getcode() != 200:
|
||||
print 'Unable to find project page at ' + sourcecode + ' - return code ' + str(req.getcode())
|
||||
sys.exit(1)
|
||||
page = req.read()
|
||||
index = page.find('Code license')
|
||||
if index == -1:
|
||||
print "Couldn't find license data"
|
||||
sys.exit(1)
|
||||
ltext = page[index:]
|
||||
lprefix = 'rel="nofollow">'
|
||||
index = ltext.find(lprefix)
|
||||
if index == -1:
|
||||
print "Couldn't find license text"
|
||||
sys.exit(1)
|
||||
ltext = ltext[index + len(lprefix):]
|
||||
index = ltext.find('<')
|
||||
if index == -1:
|
||||
print "License text not formatted as expected"
|
||||
sys.exit(1)
|
||||
ltext = ltext[:index]
|
||||
if ltext == 'GNU GPL v3':
|
||||
license = 'GPLv3'
|
||||
elif ltext == 'GNU GPL v2':
|
||||
license = 'GPLv2'
|
||||
elif ltext == 'Apache License 2.0':
|
||||
license = 'Apache2'
|
||||
# Get a copy of the source so we can extract some info...
|
||||
print 'Getting source from ' + repotype + ' repo at ' + repo
|
||||
src_dir = os.path.join(tmp_dir, 'importer')
|
||||
if os.path.exists(tmp_dir):
|
||||
shutil.rmtree(tmp_dir)
|
||||
vcs = common.getvcs(repotype, repo, src_dir)
|
||||
vcs.gotorevision(None)
|
||||
if options.subdir:
|
||||
root_dir = os.path.join(src_dir, options.subdir)
|
||||
else:
|
||||
print "License " + ltext + " is not recognised"
|
||||
root_dir = src_dir
|
||||
|
||||
# Check AndroidManiifest.xml exists...
|
||||
manifest = os.path.join(root_dir, 'AndroidManifest.xml')
|
||||
if not os.path.exists(manifest):
|
||||
print "AndroidManifest.xml did not exist in the expected location. Specify --subdir?"
|
||||
sys.exit(1)
|
||||
|
||||
if not projecttype:
|
||||
print "Unable to determine the project type."
|
||||
sys.exit(1)
|
||||
|
||||
# Get a copy of the source so we can extract some info...
|
||||
print 'Getting source from ' + repotype + ' repo at ' + repo
|
||||
src_dir = os.path.join(tmp_dir, 'importer')
|
||||
if os.path.exists(tmp_dir):
|
||||
shutil.rmtree(tmp_dir)
|
||||
vcs = common.getvcs(repotype, repo, src_dir)
|
||||
vcs.gotorevision(None)
|
||||
if options.subdir:
|
||||
root_dir = os.path.join(src_dir, options.subdir)
|
||||
else:
|
||||
root_dir = src_dir
|
||||
|
||||
# Check AndroidManiifest.xml exists...
|
||||
manifest = os.path.join(root_dir, 'AndroidManifest.xml')
|
||||
if not os.path.exists(manifest):
|
||||
print "AndroidManifest.xml did not exist in the expected location. Specify --subdir?"
|
||||
sys.exit(1)
|
||||
|
||||
# Extract some information...
|
||||
vcsearch = re.compile(r'.*android:versionCode="([^"]+)".*').search
|
||||
vnsearch = re.compile(r'.*android:versionName="([^"]+)".*').search
|
||||
psearch = re.compile(r'.*package="([^"]+)".*').search
|
||||
version = None
|
||||
vercode = None
|
||||
package = None
|
||||
for line in file(manifest):
|
||||
# Extract some information...
|
||||
vcsearch = re.compile(r'.*android:versionCode="([^"]+)".*').search
|
||||
vnsearch = re.compile(r'.*android:versionName="([^"]+)".*').search
|
||||
psearch = re.compile(r'.*package="([^"]+)".*').search
|
||||
version = None
|
||||
vercode = None
|
||||
package = None
|
||||
for line in file(manifest):
|
||||
if not package:
|
||||
matches = psearch(line)
|
||||
if matches:
|
||||
package = matches.group(1)
|
||||
if not version:
|
||||
matches = vnsearch(line)
|
||||
if matches:
|
||||
version = matches.group(1)
|
||||
if not vercode:
|
||||
matches = vcsearch(line)
|
||||
if matches:
|
||||
vercode = matches.group(1)
|
||||
if not package:
|
||||
matches = psearch(line)
|
||||
if matches:
|
||||
package = matches.group(1)
|
||||
print "Couldn't find package ID"
|
||||
sys.exit(1)
|
||||
if not version:
|
||||
matches = vnsearch(line)
|
||||
if matches:
|
||||
version = matches.group(1)
|
||||
print "Couldn't find latest version name"
|
||||
sys.exit(1)
|
||||
if not vercode:
|
||||
matches = vcsearch(line)
|
||||
if matches:
|
||||
vercode = matches.group(1)
|
||||
if not package:
|
||||
print "Couldn't find package ID"
|
||||
sys.exit(1)
|
||||
if not version:
|
||||
print "Couldn't find latest version name"
|
||||
sys.exit(1)
|
||||
if not vercode:
|
||||
print "Couldn't find latest version code"
|
||||
sys.exit(1)
|
||||
|
||||
# Make sure it's actually new...
|
||||
for app in apps:
|
||||
if app['id'] == package:
|
||||
print "Package " + package + " already exists"
|
||||
print "Couldn't find latest version code"
|
||||
sys.exit(1)
|
||||
|
||||
# Construct the metadata...
|
||||
app = common.parse_metadata(None)
|
||||
app['id'] = package
|
||||
app['Web Site'] = url
|
||||
app['Source Code'] = sourcecode
|
||||
if issuetracker:
|
||||
app['Issue Tracker'] = issuetracker
|
||||
if license:
|
||||
app['License'] = license
|
||||
app['Repo Type'] = repotype
|
||||
app['Repo'] = repo
|
||||
# Make sure it's actually new...
|
||||
for app in apps:
|
||||
if app['id'] == package:
|
||||
print "Package " + package + " already exists"
|
||||
sys.exit(1)
|
||||
|
||||
# Create a build line...
|
||||
build = {}
|
||||
build['version'] = version
|
||||
build['vercode'] = vercode
|
||||
build['commit'] = '?'
|
||||
if options.subdir:
|
||||
build['subdir'] = options.subdir
|
||||
if os.path.exists(os.path.join(root_dir, 'jni')):
|
||||
build['buildjni'] = 'yes'
|
||||
app['builds'].append(build)
|
||||
app['comments'].append(('build:' + version,
|
||||
"#Generated by import.py - check this is the right version, and find the right commit!"))
|
||||
# Construct the metadata...
|
||||
app = common.parse_metadata(None)
|
||||
app['id'] = package
|
||||
app['Web Site'] = url
|
||||
app['Source Code'] = sourcecode
|
||||
if issuetracker:
|
||||
app['Issue Tracker'] = issuetracker
|
||||
if license:
|
||||
app['License'] = license
|
||||
app['Repo Type'] = repotype
|
||||
app['Repo'] = repo
|
||||
|
||||
metafile = os.path.join('metadata', package + '.txt')
|
||||
common.write_metadata(metafile, app)
|
||||
print "Wrote " + metafile
|
||||
# Create a build line...
|
||||
build = {}
|
||||
build['version'] = version
|
||||
build['vercode'] = vercode
|
||||
build['commit'] = '?'
|
||||
if options.subdir:
|
||||
build['subdir'] = options.subdir
|
||||
if os.path.exists(os.path.join(root_dir, 'jni')):
|
||||
build['buildjni'] = 'yes'
|
||||
app['builds'].append(build)
|
||||
app['comments'].append(('build:' + version,
|
||||
"#Generated by import.py - check this is the right version, and find the right commit!"))
|
||||
|
||||
metafile = os.path.join('metadata', package + '.txt')
|
||||
common.write_metadata(metafile, app)
|
||||
print "Wrote " + metafile
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
|
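import.py pulls the package ID, versionName and versionCode out of AndroidManifest.xml with line-by-line regex searches, as shown in the diff above. A minimal sketch of that extraction, run against a made-up manifest line instead of a real checkout (a real manifest usually spreads these attributes over several lines, which is why the script scans line by line):

import re

# Illustrative manifest line; the values are invented.
line = '<manifest package="org.example.app" android:versionCode="7" android:versionName="0.9">'

psearch = re.compile(r'.*package="([^"]+)".*').search
vcsearch = re.compile(r'.*android:versionCode="([^"]+)".*').search
vnsearch = re.compile(r'.*android:versionName="([^"]+)".*').search

package = psearch(line).group(1)
vercode = vcsearch(line).group(1)
version = vnsearch(line).group(1)
print(package + " " + version + " (" + vercode + ")")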
publish.py (174)
@@ -31,106 +31,112 @@ from optparse import OptionParser
import common
from common import BuildException

def main():

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-p", "--package", default=None,
                      help="Publish only the specified package")
    (options, args) = parser.parse_args()

    #Read configuration...
    execfile('config.py')

    log_dir = 'logs'
    if not os.path.isdir(log_dir):
        print "Creating log directory"
        os.makedirs(log_dir)

    tmp_dir = 'tmp'
    if not os.path.isdir(tmp_dir):
        print "Creating temporary directory"
        os.makedirs(tmp_dir)

    output_dir = 'repo'
    if not os.path.isdir(output_dir):
        print "Creating output directory"
        os.makedirs(output_dir)

    unsigned_dir = 'unsigned'
    if not os.path.isdir(unsigned_dir):
        print "No unsigned directory - nothing to do"
        sys.exit(0)

    for apkfile in sorted(glob.glob(os.path.join(unsigned_dir, '*.apk'))):

        apkfilename = os.path.basename(apkfile)
        i = apkfilename.rfind('_')
        if i == -1:
            raise BuildException("Invalid apk name")
        appid = apkfilename[:i]
        print "Processing " + appid

        if not options.package or options.package == appid:

            # Figure out the key alias name we'll use. Only the first 8
            # characters are significant, so we'll use the first 8 from
            # the MD5 of the app's ID and hope there are no collisions.
            # If a collision does occur later, we're going to have to
            # come up with a new alogrithm, AND rename all existing keys
            # in the keystore!
            if keyaliases.has_key(appid):
                # For this particular app, the key alias is overridden...
                keyalias = keyaliases[appid]
            else:
                m = md5.new()
                m.update(appid)
                keyalias = m.hexdigest()[:8]
            print "Key alias: " + keyalias

            # See if we already have a key for this application, and
            # if not generate one...
            p = subprocess.Popen(['keytool', '-list',
                '-alias', keyalias, '-keystore', keystore,
                '-storepass', keystorepass], stdout=subprocess.PIPE)
            output = p.communicate()[0]
            if p.returncode !=0:
                print "Key does not exist - generating..."
                p = subprocess.Popen(['keytool', '-genkey',
                    '-keystore', keystore, '-alias', keyalias,
                    '-keyalg', 'RSA', '-keysize', '2048',
                    '-validity', '10000',
                    '-storepass', keystorepass, '-keypass', keypass,
                    '-dname', keydname], stdout=subprocess.PIPE)
                output = p.communicate()[0]
                print output
                if p.returncode != 0:
                    raise BuildException("Failed to generate key")

            # Sign the application...
            p = subprocess.Popen(['jarsigner', '-keystore', keystore,
                '-storepass', keystorepass, '-keypass', keypass,
                apkfile, keyalias], stdout=subprocess.PIPE)
            output = p.communicate()[0]
            print output
            if p.returncode != 0:
                raise BuildException("Failed to sign application")

            # Zipalign it...
            p = subprocess.Popen([os.path.join(sdk_path,'tools','zipalign'),
                '-v', '4', apkfile,
                os.path.join(output_dir, apkfilename)],
                stdout=subprocess.PIPE)
            output = p.communicate()[0]
            print output
            if p.returncode != 0:
                raise BuildException("Failed to align application")
            os.remove(apkfile)

            # Move the source tarball into the output directory...
            tarfilename = apkfilename[:-4] + '_src.tar.gz'
            shutil.move(os.path.join(unsigned_dir, tarfilename),
                os.path.join(output_dir, tarfilename))

            print 'Published ' + apkfilename

if __name__ == "__main__":
    main()
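The key alias derivation above is easy to reproduce: take the MD5 of the application ID and keep the first eight hex characters, since (as the comment notes) only the first eight characters of the alias are significant. A small sketch using hashlib in place of the old md5 module, with a made-up application ID:

import hashlib

appid = "org.example.app"  # illustrative application ID
keyalias = hashlib.md5(appid.encode('utf-8')).hexdigest()[:8]
print("Key alias: " + keyalias)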
@@ -27,22 +27,26 @@ from optparse import OptionParser
import HTMLParser
import common

def main():

    #Read configuration...
    execfile('config.py')

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    (options, args) = parser.parse_args()

    # Get all apps...
    apps = common.read_metadata(options.verbose)

    for app in apps:
        print "Writing " + app['id']
        common.write_metadata(os.path.join('metadata', app['id']) + '.txt', app)

    print "Finished."

if __name__ == "__main__":
    main()
scanner.py (119)
@@ -31,80 +31,85 @@ import common
from common import BuildException
from common import VCSException

def main():

    # Read configuration...
    execfile('config.py')

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-p", "--package", default=None,
                      help="Scan only the specified package")
    (options, args) = parser.parse_args()

    # Get all apps...
    apps = common.read_metadata(options.verbose)

    html_parser = HTMLParser.HTMLParser()

    problems = []

    extlib_dir = os.path.join('build', 'extlib')

    for app in apps:

        skip = False
        if options.package and app['id'] != options.package:
            skip = True
        elif app['Disabled']:
            print "Skipping %s: disabled" % app['id']
            skip = True
        elif not app['builds']:
            print "Skipping %s: no builds specified" % app['id']
            skip = True

        if not skip:

            print "Processing " + app['id']

            try:

                build_dir = 'build/' + app['id']

                # Set up vcs interface and make sure we have the latest code...
                vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir)

                for thisbuild in app['builds']:

                    if thisbuild['commit'].startswith('!'):
                        print ("..skipping version " + thisbuild['version'] + " - " +
                                thisbuild['commit'][1:])
                    else:
                        print "..scanning version " + thisbuild['version']

                        # Prepare the source code...
                        root_dir = common.prepare_source(vcs, app, thisbuild,
                                build_dir, extlib_dir, sdk_path, ndk_path, javacc_path)

                        # Do the scan...
                        buildprobs = common.scan_source(build_dir, root_dir, thisbuild)
                        for problem in buildprobs:
                            problems.append(problem +
                                    ' in ' + app['id'] + ' ' + thisbuild['version'])

            except BuildException as be:
                msg = "Could not scan app %s due to BuildException: %s" % (app['id'], be)
                problems.append(msg)
            except VCSException as vcse:
                msg = "VCS error while scanning app %s: %s" % (app['id'], vcse)
                problems.append(msg)
            except Exception:
                msg = "Could not scan app %s due to unknown error: %s" % (app['id'], traceback.format_exc())
                problems.append(msg)

    print "Finished:"
    for problem in problems:
        print problem
    print str(len(problems)) + ' problems.'

if __name__ == "__main__":
    main()
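scanner.py collects failures instead of aborting, so one broken app does not stop the whole scan. A stripped-down sketch of that pattern, with a stand-in scan function and invented app data:

problems = []
apps = [{'id': 'org.example.good'}, {'id': 'org.example.bad'}]  # stand-in data


def scan(app):
    # Stand-in for common.scan_source(); fails for one app on purpose.
    if app['id'].endswith('bad'):
        raise Exception("simulated scan failure")
    return []


for app in apps:
    try:
        problems.extend(scan(app))
    except Exception as e:
        problems.append("Could not scan app %s: %s" % (app['id'], e))

print("Finished:")
for problem in problems:
    print(problem)
print(str(len(problems)) + ' problems.')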
update.py (899)
@@ -29,491 +29,496 @@ from xml.dom.minidom import Document
|
||||
from optparse import OptionParser
|
||||
import time
|
||||
|
||||
#Read configuration...
|
||||
repo_name = None
|
||||
repo_description = None
|
||||
repo_icon = None
|
||||
repo_url = None
|
||||
execfile('config.py')
|
||||
def main():
|
||||
|
||||
import common
|
||||
# Read configuration...
|
||||
repo_name = None
|
||||
repo_description = None
|
||||
repo_icon = None
|
||||
repo_url = None
|
||||
execfile('config.py')
|
||||
|
||||
# Parse command line...
|
||||
parser = OptionParser()
|
||||
parser.add_option("-c", "--createmeta", action="store_true", default=False,
|
||||
help="Create skeleton metadata files that are missing")
|
||||
parser.add_option("-v", "--verbose", action="store_true", default=False,
|
||||
help="Spew out even more information than normal")
|
||||
parser.add_option("-q", "--quiet", action="store_true", default=False,
|
||||
help="No output, except for warnings and errors")
|
||||
parser.add_option("-b", "--buildreport", action="store_true", default=False,
|
||||
help="Report on build data status")
|
||||
parser.add_option("-i", "--interactive", default=False, action="store_true",
|
||||
help="Interactively ask about things that need updating.")
|
||||
parser.add_option("-e", "--editor", default="/etc/alternatives/editor",
|
||||
help="Specify editor to use in interactive mode. Default "+
|
||||
"is /etc/alternatives/editor")
|
||||
parser.add_option("", "--pretty", action="store_true", default=False,
|
||||
help="Produce human-readable index.xml")
|
||||
(options, args) = parser.parse_args()
|
||||
import common
|
||||
|
||||
# Parse command line...
|
||||
parser = OptionParser()
|
||||
parser.add_option("-c", "--createmeta", action="store_true", default=False,
|
||||
help="Create skeleton metadata files that are missing")
|
||||
parser.add_option("-v", "--verbose", action="store_true", default=False,
|
||||
help="Spew out even more information than normal")
|
||||
parser.add_option("-q", "--quiet", action="store_true", default=False,
|
||||
help="No output, except for warnings and errors")
|
||||
parser.add_option("-b", "--buildreport", action="store_true", default=False,
|
||||
help="Report on build data status")
|
||||
parser.add_option("-i", "--interactive", default=False, action="store_true",
|
||||
help="Interactively ask about things that need updating.")
|
||||
parser.add_option("-e", "--editor", default="/etc/alternatives/editor",
|
||||
help="Specify editor to use in interactive mode. Default "+
|
||||
"is /etc/alternatives/editor")
|
||||
parser.add_option("", "--pretty", action="store_true", default=False,
|
||||
help="Produce human-readable index.xml")
|
||||
(options, args) = parser.parse_args()
|
||||
|
||||
|
||||
icon_dir=os.path.join('repo','icons')
|
||||
icon_dir=os.path.join('repo','icons')
|
||||
|
||||
# Delete and re-create the icon directory...
|
||||
if os.path.exists(icon_dir):
|
||||
shutil.rmtree(icon_dir)
|
||||
os.mkdir(icon_dir)
|
||||
# Delete and re-create the icon directory...
|
||||
if os.path.exists(icon_dir):
|
||||
shutil.rmtree(icon_dir)
|
||||
os.mkdir(icon_dir)
|
||||
|
||||
warnings = 0
|
||||
warnings = 0
|
||||
|
||||
#Make sure we have the repository description...
|
||||
if (repo_url is None or repo_name is None or
|
||||
repo_icon is None or repo_description is None):
|
||||
print "Repository description fields are required in config.py"
|
||||
print "See config.sample.py for details"
|
||||
sys.exit(1)
|
||||
|
||||
# Get all apps...
|
||||
apps = common.read_metadata(verbose=options.verbose)
|
||||
|
||||
# Generate a list of categories...
|
||||
categories = []
|
||||
for app in apps:
|
||||
if app['Category'] not in categories:
|
||||
categories.append(app['Category'])
|
||||
|
||||
# Gather information about all the apk files in the repo directory...
|
||||
apks = []
|
||||
for apkfile in glob.glob(os.path.join('repo','*.apk')):
|
||||
|
||||
apkfilename = apkfile[5:]
|
||||
if apkfilename.find(' ') != -1:
|
||||
print "No spaces in APK filenames!"
|
||||
# Make sure we have the repository description...
|
||||
if (repo_url is None or repo_name is None or
|
||||
repo_icon is None or repo_description is None):
|
||||
print "Repository description fields are required in config.py"
|
||||
print "See config.sample.py for details"
|
||||
sys.exit(1)
|
||||
srcfilename = apkfilename[:-4] + "_src.tar.gz"
|
||||
|
||||
if not options.quiet:
|
||||
print "Processing " + apkfilename
|
||||
thisinfo = {}
|
||||
thisinfo['apkname'] = apkfilename
|
||||
if os.path.exists(os.path.join('repo', srcfilename)):
|
||||
thisinfo['srcname'] = srcfilename
|
||||
thisinfo['size'] = os.path.getsize(apkfile)
|
||||
thisinfo['permissions'] = []
|
||||
thisinfo['features'] = []
|
||||
p = subprocess.Popen([os.path.join(sdk_path, 'platform-tools', 'aapt'),
|
||||
'dump', 'badging', apkfile],
|
||||
stdout=subprocess.PIPE)
|
||||
output = p.communicate()[0]
|
||||
if options.verbose:
|
||||
print output
|
||||
if p.returncode != 0:
|
||||
print "ERROR: Failed to get apk information"
|
||||
sys.exit(1)
|
||||
for line in output.splitlines():
|
||||
if line.startswith("package:"):
|
||||
pat = re.compile(".*name='([a-zA-Z0-9._]*)'.*")
|
||||
thisinfo['id'] = re.match(pat, line).group(1)
|
||||
pat = re.compile(".*versionCode='([0-9]*)'.*")
|
||||
thisinfo['versioncode'] = int(re.match(pat, line).group(1))
|
||||
pat = re.compile(".*versionName='([^']*)'.*")
|
||||
thisinfo['version'] = re.match(pat, line).group(1)
|
||||
if line.startswith("application:"):
|
||||
pat = re.compile(".*label='([^']*)'.*")
|
||||
thisinfo['name'] = re.match(pat, line).group(1)
|
||||
pat = re.compile(".*icon='([^']*)'.*")
|
||||
thisinfo['iconsrc'] = re.match(pat, line).group(1)
|
||||
if line.startswith("sdkVersion:"):
|
||||
pat = re.compile(".*'([0-9]*)'.*")
|
||||
thisinfo['sdkversion'] = re.match(pat, line).group(1)
|
||||
if line.startswith("native-code:"):
|
||||
pat = re.compile(".*'([^']*)'.*")
|
||||
thisinfo['nativecode'] = re.match(pat, line).group(1)
|
||||
if line.startswith("uses-permission:"):
|
||||
pat = re.compile(".*'([^']*)'.*")
|
||||
perm = re.match(pat, line).group(1)
|
||||
if perm.startswith("android.permission."):
|
||||
perm = perm[19:]
|
||||
thisinfo['permissions'].append(perm)
|
||||
if line.startswith("uses-feature:"):
|
||||
pat = re.compile(".*'([^']*)'.*")
|
||||
perm = re.match(pat, line).group(1)
|
||||
#Filter out this, it's only added with the latest SDK tools and
|
||||
#causes problems for lots of apps.
|
||||
if (perm != "android.hardware.screen.portrait" and
|
||||
perm != "android.hardware.screen.landscape"):
|
||||
if perm.startswith("android.feature."):
|
||||
perm = perm[16:]
|
||||
thisinfo['features'].append(perm)
|
||||
# Get all apps...
|
||||
apps = common.read_metadata(verbose=options.verbose)
|
||||
|
||||
if not thisinfo.has_key('sdkversion'):
|
||||
print " WARNING: no SDK version information found"
|
||||
thisinfo['sdkversion'] = 0
|
||||
|
||||
# Calculate the md5 and sha256...
|
||||
m = hashlib.md5()
|
||||
sha = hashlib.sha256()
|
||||
f = open(apkfile, 'rb')
|
||||
while True:
|
||||
t = f.read(1024)
|
||||
if len(t) == 0:
|
||||
break
|
||||
m.update(t)
|
||||
sha.update(t)
|
||||
thisinfo['md5'] = m.hexdigest()
|
||||
thisinfo['sha256'] = sha.hexdigest()
|
||||
f.close()
|
||||
|
||||
# Get the signature (or md5 of, to be precise)...
|
||||
p = subprocess.Popen(['java', 'getsig',
|
||||
os.path.join(os.getcwd(), apkfile)],
|
||||
cwd=os.path.join(sys.path[0], 'getsig'),
|
||||
stdout=subprocess.PIPE)
|
||||
output = p.communicate()[0]
|
||||
if options.verbose:
|
||||
print output
|
||||
if p.returncode != 0 or not output.startswith('Result:'):
|
||||
print "ERROR: Failed to get apk signature"
|
||||
sys.exit(1)
|
||||
thisinfo['sig'] = output[7:].strip()
|
||||
|
||||
# Extract the icon file...
|
||||
apk = zipfile.ZipFile(apkfile, 'r')
|
||||
thisinfo['icon'] = (thisinfo['id'] + '.' +
|
||||
str(thisinfo['versioncode']) + '.png')
|
||||
iconfilename = os.path.join(icon_dir, thisinfo['icon'])
|
||||
try:
|
||||
iconfile = open(iconfilename, 'wb')
|
||||
iconfile.write(apk.read(thisinfo['iconsrc']))
|
||||
iconfile.close()
|
||||
except:
|
||||
print "WARNING: Error retrieving icon file"
|
||||
warnings += 1
|
||||
apk.close()
|
||||
|
||||
apks.append(thisinfo)
|
||||
|
||||
# Some information from the apks needs to be applied up to the application
|
||||
# level. When doing this, we use the info from the most recent version's apk.
|
||||
for app in apps:
|
||||
bestver = 0
|
||||
for apk in apks:
|
||||
if apk['id'] == app['id']:
|
||||
if apk['versioncode'] > bestver:
|
||||
bestver = apk['versioncode']
|
||||
bestapk = apk
|
||||
|
||||
if bestver == 0:
|
||||
if app['Name'] is None:
|
||||
app['Name'] = app['id']
|
||||
app['icon'] = ''
|
||||
if app['Disabled'] is None:
|
||||
print "WARNING: Application " + app['id'] + " has no packages"
|
||||
else:
|
||||
if app['Name'] is None:
|
||||
app['Name'] = bestapk['name']
|
||||
app['icon'] = bestapk['icon']
|
||||
|
||||
# Generate warnings for apk's with no metadata (or create skeleton
|
||||
# metadata files, if requested on the command line)
|
||||
for apk in apks:
|
||||
found = False
|
||||
# Generate a list of categories...
|
||||
categories = []
|
||||
for app in apps:
|
||||
if app['id'] == apk['id']:
|
||||
found = True
|
||||
break
|
||||
if not found:
|
||||
if options.createmeta:
|
||||
f = open(os.path.join('metadata', apk['id'] + '.txt'), 'w')
|
||||
f.write("License:Unknown\n")
|
||||
f.write("Web Site:\n")
|
||||
f.write("Source Code:\n")
|
||||
f.write("Issue Tracker:\n")
|
||||
f.write("Summary:" + apk['name'] + "\n")
|
||||
f.write("Description:\n")
|
||||
f.write(apk['name'] + "\n")
|
||||
f.write(".\n")
|
||||
f.close()
|
||||
print "Generated skeleton metadata for " + apk['id']
|
||||
else:
|
||||
print "WARNING: " + apk['apkname'] + " (" + apk['id'] + ") has no metadata"
|
||||
print " " + apk['name'] + " - " + apk['version']
|
||||
if app['Category'] not in categories:
|
||||
categories.append(app['Category'])
|
||||
|
||||
#Sort the app list by name, then the web site doesn't have to by default:
|
||||
apps = sorted(apps, key=lambda app: app['Name'].upper())
|
||||
# Gather information about all the apk files in the repo directory...
|
||||
apks = []
|
||||
for apkfile in glob.glob(os.path.join('repo','*.apk')):
|
||||
|
||||
# Create the index
|
||||
doc = Document()
|
||||
|
||||
def addElement(name, value, doc, parent):
|
||||
el = doc.createElement(name)
|
||||
el.appendChild(doc.createTextNode(value))
|
||||
parent.appendChild(el)
|
||||
|
||||
root = doc.createElement("fdroid")
|
||||
doc.appendChild(root)
|
||||
|
||||
repoel = doc.createElement("repo")
|
||||
repoel.setAttribute("name", repo_name)
|
||||
repoel.setAttribute("icon", os.path.basename(repo_icon))
|
||||
repoel.setAttribute("url", repo_url)
|
||||
|
||||
if repo_keyalias != None:
|
||||
|
||||
# Generate a certificate fingerprint the same way keytool does it
|
||||
# (but with slightly different formatting)
|
||||
def cert_fingerprint(data):
|
||||
digest = hashlib.sha1(data).digest()
|
||||
ret = []
|
||||
for i in range(4):
|
||||
ret.append(":".join("%02X" % ord(b) for b in digest[i*5:i*5+5]))
|
||||
return " ".join(ret)
|
||||
|
||||
def extract_pubkey():
|
||||
p = subprocess.Popen(['keytool', '-exportcert',
|
||||
'-alias', repo_keyalias,
|
||||
'-keystore', keystore,
|
||||
'-storepass', keystorepass],
|
||||
stdout=subprocess.PIPE)
|
||||
cert = p.communicate()[0]
|
||||
if p.returncode != 0:
|
||||
print "ERROR: Failed to get repo pubkey"
|
||||
apkfilename = apkfile[5:]
|
||||
if apkfilename.find(' ') != -1:
|
||||
print "No spaces in APK filenames!"
|
||||
sys.exit(1)
|
||||
global repo_pubkey_fingerprint
|
||||
repo_pubkey_fingerprint = cert_fingerprint(cert)
|
||||
return "".join("%02x" % ord(b) for b in cert)
|
||||
srcfilename = apkfilename[:-4] + "_src.tar.gz"
|
||||
|
||||
repoel.setAttribute("pubkey", extract_pubkey())
|
||||
if not options.quiet:
|
||||
print "Processing " + apkfilename
|
||||
thisinfo = {}
|
||||
thisinfo['apkname'] = apkfilename
|
||||
if os.path.exists(os.path.join('repo', srcfilename)):
|
||||
thisinfo['srcname'] = srcfilename
|
||||
thisinfo['size'] = os.path.getsize(apkfile)
|
||||
thisinfo['permissions'] = []
|
||||
thisinfo['features'] = []
|
||||
p = subprocess.Popen([os.path.join(sdk_path, 'platform-tools', 'aapt'),
|
||||
'dump', 'badging', apkfile],
|
||||
stdout=subprocess.PIPE)
|
||||
output = p.communicate()[0]
|
||||
if options.verbose:
|
||||
print output
|
||||
if p.returncode != 0:
|
||||
print "ERROR: Failed to get apk information"
|
||||
sys.exit(1)
|
||||
for line in output.splitlines():
|
||||
if line.startswith("package:"):
|
||||
pat = re.compile(".*name='([a-zA-Z0-9._]*)'.*")
|
||||
thisinfo['id'] = re.match(pat, line).group(1)
|
||||
pat = re.compile(".*versionCode='([0-9]*)'.*")
|
||||
thisinfo['versioncode'] = int(re.match(pat, line).group(1))
|
||||
pat = re.compile(".*versionName='([^']*)'.*")
|
||||
thisinfo['version'] = re.match(pat, line).group(1)
|
||||
if line.startswith("application:"):
|
||||
pat = re.compile(".*label='([^']*)'.*")
|
||||
thisinfo['name'] = re.match(pat, line).group(1)
|
||||
pat = re.compile(".*icon='([^']*)'.*")
|
||||
thisinfo['iconsrc'] = re.match(pat, line).group(1)
|
||||
if line.startswith("sdkVersion:"):
|
||||
pat = re.compile(".*'([0-9]*)'.*")
|
||||
thisinfo['sdkversion'] = re.match(pat, line).group(1)
|
||||
if line.startswith("native-code:"):
|
||||
pat = re.compile(".*'([^']*)'.*")
|
||||
thisinfo['nativecode'] = re.match(pat, line).group(1)
|
||||
if line.startswith("uses-permission:"):
|
||||
pat = re.compile(".*'([^']*)'.*")
|
||||
perm = re.match(pat, line).group(1)
|
||||
if perm.startswith("android.permission."):
|
||||
perm = perm[19:]
|
||||
thisinfo['permissions'].append(perm)
|
||||
if line.startswith("uses-feature:"):
|
||||
pat = re.compile(".*'([^']*)'.*")
|
||||
perm = re.match(pat, line).group(1)
|
||||
#Filter out this, it's only added with the latest SDK tools and
|
||||
#causes problems for lots of apps.
|
||||
if (perm != "android.hardware.screen.portrait" and
|
||||
perm != "android.hardware.screen.landscape"):
|
||||
if perm.startswith("android.feature."):
|
||||
perm = perm[16:]
|
||||
thisinfo['features'].append(perm)
|
||||
|
||||
addElement('description', repo_description, doc, repoel)
|
||||
root.appendChild(repoel)
|
||||
if not thisinfo.has_key('sdkversion'):
|
||||
print " WARNING: no SDK version information found"
|
||||
thisinfo['sdkversion'] = 0
|
||||
|
||||
apps_inrepo = 0
|
||||
apps_disabled = 0
|
||||
apps_nopkg = 0
|
||||
# Calculate the md5 and sha256...
|
||||
m = hashlib.md5()
|
||||
sha = hashlib.sha256()
|
||||
f = open(apkfile, 'rb')
|
||||
while True:
|
||||
t = f.read(1024)
|
||||
if len(t) == 0:
|
||||
break
|
||||
m.update(t)
|
||||
sha.update(t)
|
||||
thisinfo['md5'] = m.hexdigest()
|
||||
thisinfo['sha256'] = sha.hexdigest()
|
||||
f.close()
|
||||
|
||||
for app in apps:
|
||||
# Get the signature (or md5 of, to be precise)...
|
||||
p = subprocess.Popen(['java', 'getsig',
|
||||
os.path.join(os.getcwd(), apkfile)],
|
||||
cwd=os.path.join(sys.path[0], 'getsig'),
|
||||
stdout=subprocess.PIPE)
|
||||
output = p.communicate()[0]
|
||||
if options.verbose:
|
||||
print output
|
||||
if p.returncode != 0 or not output.startswith('Result:'):
|
||||
print "ERROR: Failed to get apk signature"
|
||||
sys.exit(1)
|
||||
thisinfo['sig'] = output[7:].strip()
|
||||
|
||||
if app['Disabled'] is None:
|
||||
# Extract the icon file...
|
||||
apk = zipfile.ZipFile(apkfile, 'r')
|
||||
thisinfo['icon'] = (thisinfo['id'] + '.' +
|
||||
str(thisinfo['versioncode']) + '.png')
|
||||
iconfilename = os.path.join(icon_dir, thisinfo['icon'])
|
||||
try:
|
||||
iconfile = open(iconfilename, 'wb')
|
||||
iconfile.write(apk.read(thisinfo['iconsrc']))
|
||||
iconfile.close()
|
||||
except:
|
||||
print "WARNING: Error retrieving icon file"
|
||||
warnings += 1
|
||||
apk.close()
|
||||
|
||||
# Get a list of the apks for this app...
|
||||
gotcurrentver = False
|
||||
apklist = []
|
||||
apks.append(thisinfo)
|
||||
|
||||
# Some information from the apks needs to be applied up to the application
|
||||
# level. When doing this, we use the info from the most recent version's apk.
|
||||
for app in apps:
|
||||
bestver = 0
|
||||
for apk in apks:
|
||||
if apk['id'] == app['id']:
|
||||
if str(apk['versioncode']) == app['Current Version Code']:
|
||||
gotcurrentver = True
|
||||
apklist.append(apk)
|
||||
if apk['versioncode'] > bestver:
|
||||
bestver = apk['versioncode']
|
||||
bestapk = apk
|
||||
|
||||
if len(apklist) == 0:
|
||||
apps_nopkg += 1
|
||||
if bestver == 0:
|
||||
if app['Name'] is None:
|
||||
app['Name'] = app['id']
|
||||
app['icon'] = ''
|
||||
if app['Disabled'] is None:
|
||||
print "WARNING: Application " + app['id'] + " has no packages"
|
||||
else:
|
||||
apps_inrepo += 1
|
||||
apel = doc.createElement("application")
|
||||
apel.setAttribute("id", app['id'])
|
||||
root.appendChild(apel)
|
||||
if app['Name'] is None:
|
||||
app['Name'] = bestapk['name']
|
||||
app['icon'] = bestapk['icon']
|
||||
|
||||
addElement('id', app['id'], doc, apel)
|
||||
addElement('name', app['Name'], doc, apel)
|
||||
addElement('summary', app['Summary'], doc, apel)
|
||||
addElement('icon', app['icon'], doc, apel)
|
||||
addElement('description',
|
||||
common.parse_description(app['Description']), doc, apel)
|
||||
addElement('license', app['License'], doc, apel)
|
||||
if 'Category' in app:
|
||||
addElement('category', app['Category'], doc, apel)
|
||||
addElement('web', app['Web Site'], doc, apel)
|
||||
addElement('source', app['Source Code'], doc, apel)
|
||||
addElement('tracker', app['Issue Tracker'], doc, apel)
|
||||
if app['Donate'] != None:
|
||||
addElement('donate', app['Donate'], doc, apel)
|
||||
|
||||
# These elements actually refer to the current version (i.e. which
|
||||
# one is recommended. They are historically mis-named, and need
|
||||
# changing, but stay like this for now to support existing clients.
|
||||
addElement('marketversion', app['Current Version'], doc, apel)
|
||||
addElement('marketvercode', app['Current Version Code'], doc, apel)
|
||||
|
||||
if not (app['AntiFeatures'] is None):
|
||||
addElement('antifeatures', app['AntiFeatures'], doc, apel)
|
||||
if app['Requires Root']:
|
||||
addElement('requirements', 'root', doc, apel)
|
||||
|
||||
# Sort the apk list into version order, just so the web site
|
||||
# doesn't have to do any work by default...
|
||||
apklist = sorted(apklist, key=lambda apk: apk['versioncode'], reverse=True)
|
||||
|
||||
# Check for duplicates - they will make the client unhappy...
|
||||
for i in range(len(apklist) - 1):
|
||||
if apklist[i]['versioncode'] == apklist[i+1]['versioncode']:
|
||||
print "ERROR - duplicate versions"
|
||||
print apklist[i]['apkname']
|
||||
print apklist[i+1]['apkname']
|
||||
sys.exit(1)
|
||||
|
||||
for apk in apklist:
|
||||
apkel = doc.createElement("package")
|
||||
apel.appendChild(apkel)
|
||||
addElement('version', apk['version'], doc, apkel)
|
||||
addElement('versioncode', str(apk['versioncode']), doc, apkel)
|
||||
addElement('apkname', apk['apkname'], doc, apkel)
|
||||
if apk.has_key('srcname'):
|
||||
addElement('srcname', apk['srcname'], doc, apkel)
|
||||
for hash_type in ('sha256', 'md5'):
|
||||
if not hash_type in apk:
|
||||
continue
|
||||
hashel = doc.createElement("hash")
|
||||
hashel.setAttribute("type", hash_type)
|
||||
hashel.appendChild(doc.createTextNode(apk[hash_type]))
|
||||
apkel.appendChild(hashel)
|
||||
addElement('sig', apk['sig'], doc, apkel)
|
||||
addElement('size', str(apk['size']), doc, apkel)
|
||||
addElement('sdkver', str(apk['sdkversion']), doc, apkel)
|
||||
perms = ""
|
||||
for p in apk['permissions']:
|
||||
if len(perms) > 0:
|
||||
perms += ","
|
||||
perms += p
|
||||
if len(perms) > 0:
|
||||
addElement('permissions', perms, doc, apkel)
|
||||
features = ""
|
||||
for f in apk['features']:
|
||||
if len(features) > 0:
|
||||
features += ","
|
||||
features += f
|
||||
if len(features) > 0:
|
||||
addElement('features', features, doc, apkel)
|
||||
|
||||
if options.buildreport:
|
||||
if len(app['builds']) == 0:
|
||||
print ("WARNING: No builds defined for " + app['id'] +
|
||||
" Source: " + app['Source Code'])
|
||||
warnings += 1
|
||||
# Generate warnings for apk's with no metadata (or create skeleton
|
||||
# metadata files, if requested on the command line)
|
||||
for apk in apks:
|
||||
found = False
|
||||
for app in apps:
|
||||
if app['id'] == apk['id']:
|
||||
found = True
|
||||
break
|
||||
if not found:
|
||||
if options.createmeta:
|
||||
f = open(os.path.join('metadata', apk['id'] + '.txt'), 'w')
|
||||
f.write("License:Unknown\n")
|
||||
f.write("Web Site:\n")
|
||||
f.write("Source Code:\n")
|
||||
f.write("Issue Tracker:\n")
|
||||
f.write("Summary:" + apk['name'] + "\n")
|
||||
f.write("Description:\n")
|
||||
f.write(apk['name'] + "\n")
|
||||
f.write(".\n")
|
||||
f.close()
|
||||
print "Generated skeleton metadata for " + apk['id']
|
||||
else:
|
||||
if app['Current Version Code'] != '0':
|
||||
gotbuild = False
|
||||
for build in app['builds']:
|
||||
if build['vercode'] == app['Current Version Code']:
|
||||
gotbuild = True
|
||||
if not gotbuild:
|
||||
print ("WARNING: No build data for current version of "
|
||||
+ app['id'] + " (" + app['Current Version']
|
||||
+ ") " + app['Source Code'])
|
||||
warnings += 1
|
||||
print "WARNING: " + apk['apkname'] + " (" + apk['id'] + ") has no metadata"
|
||||
print " " + apk['name'] + " - " + apk['version']
|
||||
|
||||
# If we don't have the current version, check if there is a build
|
||||
# with a commit ID starting with '!' - this means we can't build it
|
||||
# for some reason, and don't want hassling about it...
|
||||
if not gotcurrentver and app['Current Version Code'] != '0':
|
||||
for build in app['builds']:
|
||||
if build['vercode'] == app['Current Version Code']:
|
||||
gotcurrentver = True
|
||||
#Sort the app list by name, then the web site doesn't have to by default:
|
||||
apps = sorted(apps, key=lambda app: app['Name'].upper())
|
||||
|
||||
# Output a message of harassment if we don't have the current version:
|
||||
if not gotcurrentver and app['Current Version Code'] != '0':
|
||||
addr = app['Source Code']
|
||||
print "WARNING: Don't have current version (" + app['Current Version'] + ") of " + app['Name']
|
||||
print " (" + app['id'] + ") " + addr
|
||||
warnings += 1
|
||||
if options.verbose:
|
||||
# A bit of extra debug info, basically for diagnosing
|
||||
# app developer mistakes:
|
||||
print " Current vercode:" + app['Current Version Code']
|
||||
print " Got:"
|
||||
for apk in apks:
|
||||
if apk['id'] == app['id']:
|
||||
print " " + str(apk['versioncode']) + " - " + apk['version']
|
||||
if options.interactive:
|
||||
print "Build data out of date for " + app['id']
|
||||
while True:
|
||||
answer = raw_input("[I]gnore, [E]dit or [Q]uit?").lower()
|
||||
if answer == 'i':
|
||||
break
|
||||
elif answer == 'e':
|
||||
subprocess.call([options.editor,
|
||||
os.path.join('metadata',
|
||||
app['id'] + '.txt')])
|
||||
break
|
||||
elif answer == 'q':
|
||||
sys.exit(0)
|
||||
else:
|
||||
apps_disabled += 1
|
||||
# Create the index
|
||||
doc = Document()
|
||||
|
||||
of = open(os.path.join('repo','index.xml'), 'wb')
|
||||
if options.pretty:
|
||||
output = doc.toprettyxml()
|
||||
else:
|
||||
output = doc.toxml()
|
||||
of.write(output)
|
||||
of.close()
|
||||
def addElement(name, value, doc, parent):
|
||||
el = doc.createElement(name)
|
||||
el.appendChild(doc.createTextNode(value))
|
||||
parent.appendChild(el)
|
||||
|
||||
if repo_keyalias != None:
|
||||
root = doc.createElement("fdroid")
|
||||
doc.appendChild(root)
|
||||
|
||||
if not options.quiet:
|
||||
print "Creating signed index."
|
||||
print "Key fingerprint:", repo_pubkey_fingerprint
|
||||
|
||||
#Create a jar of the index...
|
||||
p = subprocess.Popen(['jar', 'cf', 'index.jar', 'index.xml'],
|
||||
cwd='repo', stdout=subprocess.PIPE)
|
||||
output = p.communicate()[0]
|
||||
if options.verbose:
|
||||
print output
|
||||
if p.returncode != 0:
|
||||
print "ERROR: Failed to create jar file"
|
||||
sys.exit(1)
|
||||
repoel = doc.createElement("repo")
|
||||
repoel.setAttribute("name", repo_name)
|
||||
repoel.setAttribute("icon", os.path.basename(repo_icon))
|
||||
repoel.setAttribute("url", repo_url)
|
||||
|
||||
# Sign the index...
|
||||
p = subprocess.Popen(['jarsigner', '-keystore', keystore,
|
||||
'-storepass', keystorepass, '-keypass', keypass,
|
||||
os.path.join('repo', 'index.jar') , repo_keyalias], stdout=subprocess.PIPE)
|
||||
output = p.communicate()[0]
|
||||
if p.returncode != 0:
|
||||
print "Failed to sign index"
|
||||
print output
|
||||
sys.exit(1)
|
||||
if options.verbose:
|
||||
print output
|
||||
if repo_keyalias != None:

# Copy the repo icon into the repo directory...
iconfilename = os.path.join(icon_dir, os.path.basename(repo_icon))
shutil.copyfile(repo_icon, iconfilename)
# Generate a certificate fingerprint the same way keytool does it
# (but with slightly different formatting)
def cert_fingerprint(data):
digest = hashlib.sha1(data).digest()
ret = []
for i in range(4):
ret.append(":".join("%02X" % ord(b) for b in digest[i*5:i*5+5]))
return " ".join(ret)

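For reference, cert_fingerprint() above formats the 20-byte SHA-1 digest of the certificate as four space-separated groups of five colon-separated hex bytes. A small sketch of the expected shape (the input bytes here are purely hypothetical):

import hashlib
digest = hashlib.sha1('hypothetical-certificate-bytes').digest()  # always 20 bytes
groups = [":".join("%02X" % ord(b) for b in digest[i*5:i*5+5]) for i in range(4)]
print " ".join(groups)  # four groups of five bytes, e.g. "AB:01:... CD:02:... ..."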
# Write a category list in the repo to allow quick access...
catdata = ''
for cat in categories:
catdata += cat + '\n'
f = open('repo/categories.txt', 'w')
f.write(catdata)
f.close()
def extract_pubkey():
p = subprocess.Popen(['keytool', '-exportcert',
'-alias', repo_keyalias,
'-keystore', keystore,
'-storepass', keystorepass],
stdout=subprocess.PIPE)
cert = p.communicate()[0]
if p.returncode != 0:
print "ERROR: Failed to get repo pubkey"
sys.exit(1)
global repo_pubkey_fingerprint
repo_pubkey_fingerprint = cert_fingerprint(cert)
return "".join("%02x" % ord(b) for b in cert)
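The value extract_pubkey() returns is simply the hex-encoded DER certificate that keytool -exportcert emits, so it can be decoded back to bytes and re-fingerprinted as a consistency check. A minimal sketch, assuming the two helpers above are in scope:

pubkey_hex = extract_pubkey()
cert = pubkey_hex.decode('hex')  # Python 2 hex codec: back to the raw DER bytes
assert cert_fingerprint(cert) == repo_pubkey_fingerprint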

# Update known apks info...
knownapks = common.KnownApks()
for apk in apks:
knownapks.recordapk(apk['apkname'], apk['id'])
knownapks.writeifchanged()
repoel.setAttribute("pubkey", extract_pubkey())

addElement('description', repo_description, doc, repoel)
root.appendChild(repoel)

apps_inrepo = 0
apps_disabled = 0
apps_nopkg = 0

# Generate latest apps data for widget
data = ''
for line in file(os.path.join('stats', 'latestapps.txt')):
appid = line.rstrip()
data += appid + "\t"
for app in apps:
if app['id'] == appid:
data += app['Name'] + "\t"
data += app['icon'] + "\t"
data += app['License'] + "\n"
break
f = open('repo/latestapps.dat', 'w')
f.write(data)
f.close()

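Each record written to repo/latestapps.dat is therefore four tab-separated fields: app id, name, icon filename and licence. A sketch of one line with hypothetical values:

line = "org.example.app\tExample App\torg.example.app.png\tGPLv3\n"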
if app['Disabled'] is None:

# Get a list of the apks for this app...
gotcurrentver = False
apklist = []
for apk in apks:
if apk['id'] == app['id']:
if str(apk['versioncode']) == app['Current Version Code']:
gotcurrentver = True
apklist.append(apk)

if len(apklist) == 0:
apps_nopkg += 1
else:
apps_inrepo += 1
apel = doc.createElement("application")
apel.setAttribute("id", app['id'])
root.appendChild(apel)

addElement('id', app['id'], doc, apel)
addElement('name', app['Name'], doc, apel)
addElement('summary', app['Summary'], doc, apel)
addElement('icon', app['icon'], doc, apel)
addElement('description',
common.parse_description(app['Description']), doc, apel)
addElement('license', app['License'], doc, apel)
if 'Category' in app:
addElement('category', app['Category'], doc, apel)
addElement('web', app['Web Site'], doc, apel)
addElement('source', app['Source Code'], doc, apel)
addElement('tracker', app['Issue Tracker'], doc, apel)
if app['Donate'] != None:
addElement('donate', app['Donate'], doc, apel)

# These elements actually refer to the current version (i.e. which
# one is recommended. They are historically mis-named, and need
# changing, but stay like this for now to support existing clients.
addElement('marketversion', app['Current Version'], doc, apel)
addElement('marketvercode', app['Current Version Code'], doc, apel)

if not (app['AntiFeatures'] is None):
addElement('antifeatures', app['AntiFeatures'], doc, apel)
if app['Requires Root']:
addElement('requirements', 'root', doc, apel)

# Sort the apk list into version order, just so the web site
# doesn't have to do any work by default...
apklist = sorted(apklist, key=lambda apk: apk['versioncode'], reverse=True)

# Check for duplicates - they will make the client unhappy...
for i in range(len(apklist) - 1):
if apklist[i]['versioncode'] == apklist[i+1]['versioncode']:
print "ERROR - duplicate versions"
print apklist[i]['apkname']
print apklist[i+1]['apkname']
sys.exit(1)

for apk in apklist:
apkel = doc.createElement("package")
apel.appendChild(apkel)
addElement('version', apk['version'], doc, apkel)
addElement('versioncode', str(apk['versioncode']), doc, apkel)
addElement('apkname', apk['apkname'], doc, apkel)
if apk.has_key('srcname'):
addElement('srcname', apk['srcname'], doc, apkel)
for hash_type in ('sha256', 'md5'):
if not hash_type in apk:
continue
hashel = doc.createElement("hash")
hashel.setAttribute("type", hash_type)
hashel.appendChild(doc.createTextNode(apk[hash_type]))
apkel.appendChild(hashel)
addElement('sig', apk['sig'], doc, apkel)
addElement('size', str(apk['size']), doc, apkel)
addElement('sdkver', str(apk['sdkversion']), doc, apkel)
perms = ""
for p in apk['permissions']:
if len(perms) > 0:
perms += ","
perms += p
if len(perms) > 0:
addElement('permissions', perms, doc, apkel)
features = ""
for f in apk['features']:
if len(features) > 0:
features += ","
features += f
if len(features) > 0:
addElement('features', features, doc, apkel)

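Taken together, the addElement() calls above produce one application entry per app in index.xml, shaped roughly like this (all values hypothetical; optional elements such as category, donate, antifeatures and requirements are omitted when absent):

<application id="org.example.app">
  <id>org.example.app</id>
  <name>Example App</name>
  <summary>Does example things</summary>
  <icon>org.example.app.png</icon>
  <description>...</description>
  <license>GPLv3</license>
  <web>http://example.org/</web>
  <source>http://example.org/src</source>
  <tracker>http://example.org/issues</tracker>
  <marketversion>1.2</marketversion>
  <marketvercode>6</marketvercode>
  <package>
    <version>1.2</version>
    <versioncode>6</versioncode>
    <apkname>org.example.app_6.apk</apkname>
    <hash type="md5">d41d8cd98f00b204e9800998ecf8427e</hash>
    <sig>...</sig>
    <size>123456</size>
    <sdkver>7</sdkver>
    <permissions>INTERNET</permissions>
  </package>
</application>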
if options.buildreport:
if len(app['builds']) == 0:
print ("WARNING: No builds defined for " + app['id'] +
" Source: " + app['Source Code'])
warnings += 1
else:
if app['Current Version Code'] != '0':
gotbuild = False
for build in app['builds']:
if build['vercode'] == app['Current Version Code']:
gotbuild = True
if not gotbuild:
print ("WARNING: No build data for current version of "
+ app['id'] + " (" + app['Current Version']
+ ") " + app['Source Code'])
warnings += 1

# If we don't have the current version, check if there is a build
# with a commit ID starting with '!' - this means we can't build it
# for some reason, and don't want hassling about it...
if not gotcurrentver and app['Current Version Code'] != '0':
for build in app['builds']:
if build['vercode'] == app['Current Version Code']:
gotcurrentver = True

# Output a message of harassment if we don't have the current version:
if not gotcurrentver and app['Current Version Code'] != '0':
addr = app['Source Code']
print "WARNING: Don't have current version (" + app['Current Version'] + ") of " + app['Name']
print "         (" + app['id'] + ") " + addr
warnings += 1
if options.verbose:
# A bit of extra debug info, basically for diagnosing
# app developer mistakes:
print "         Current vercode:" + app['Current Version Code']
print "         Got:"
for apk in apks:
if apk['id'] == app['id']:
print "           " + str(apk['versioncode']) + " - " + apk['version']
if options.interactive:
print "Build data out of date for " + app['id']
while True:
answer = raw_input("[I]gnore, [E]dit or [Q]uit?").lower()
if answer == 'i':
break
elif answer == 'e':
subprocess.call([options.editor,
os.path.join('metadata',
app['id'] + '.txt')])
break
elif answer == 'q':
sys.exit(0)
else:
apps_disabled += 1

of = open(os.path.join('repo','index.xml'), 'wb')
if options.pretty:
output = doc.toprettyxml()
else:
output = doc.toxml()
of.write(output)
of.close()

if repo_keyalias != None:

if not options.quiet:
print "Creating signed index."
print "Key fingerprint:", repo_pubkey_fingerprint

#Create a jar of the index...
p = subprocess.Popen(['jar', 'cf', 'index.jar', 'index.xml'],
cwd='repo', stdout=subprocess.PIPE)
output = p.communicate()[0]
if options.verbose:
print output
if p.returncode != 0:
print "ERROR: Failed to create jar file"
sys.exit(1)

# Sign the index...
p = subprocess.Popen(['jarsigner', '-keystore', keystore,
'-storepass', keystorepass, '-keypass', keypass,
os.path.join('repo', 'index.jar') , repo_keyalias], stdout=subprocess.PIPE)
output = p.communicate()[0]
if p.returncode != 0:
print "Failed to sign index"
print output
sys.exit(1)
if options.verbose:
print output

# Copy the repo icon into the repo directory...
iconfilename = os.path.join(icon_dir, os.path.basename(repo_icon))
shutil.copyfile(repo_icon, iconfilename)

# Write a category list in the repo to allow quick access...
catdata = ''
for cat in categories:
catdata += cat + '\n'
f = open('repo/categories.txt', 'w')
f.write(catdata)
f.close()

# Update known apks info...
knownapks = common.KnownApks()
for apk in apks:
knownapks.recordapk(apk['apkname'], apk['id'])
knownapks.writeifchanged()

# Generate latest apps data for widget
data = ''
for line in file(os.path.join('stats', 'latestapps.txt')):
appid = line.rstrip()
data += appid + "\t"
for app in apps:
if app['id'] == appid:
data += app['Name'] + "\t"
data += app['icon'] + "\t"
data += app['License'] + "\n"
break
f = open('repo/latestapps.dat', 'w')
f.write(data)
f.close()

print "Finished."
|
||||
print str(apps_inrepo) + " apps in repo"
|
||||
print str(apps_disabled) + " disabled"
|
||||
print str(apps_nopkg) + " with no packages"
|
||||
print str(warnings) + " warnings"
|
||||
print "Finished."
|
||||
print str(apps_inrepo) + " apps in repo"
|
||||
print str(apps_disabled) + " disabled"
|
||||
print str(apps_nopkg) + " with no packages"
|
||||
print str(warnings) + " warnings"
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
|
218 updatestats.py
@ -30,121 +30,125 @@ import HTMLParser
import paramiko
import common

#Read configuration...
execfile('config.py')
def main():

# Read configuration...
execfile('config.py')

# Parse command line...
parser = OptionParser()
parser.add_option("-v", "--verbose", action="store_true", default=False,
help="Spew out even more information than normal")
parser.add_option("-d", "--download", action="store_true", default=False,
help="Download logs we don't have")
(options, args) = parser.parse_args()

# Parse command line...
parser = OptionParser()
parser.add_option("-v", "--verbose", action="store_true", default=False,
help="Spew out even more information than normal")
parser.add_option("-d", "--download", action="store_true", default=False,
help="Download logs we don't have")
(options, args) = parser.parse_args()
statsdir = 'stats'
logsdir = os.path.join(statsdir, 'logs')
logsarchivedir = os.path.join(logsdir, 'archive')
datadir = os.path.join(statsdir, 'data')
if not os.path.exists(statsdir):
os.mkdir(statsdir)
if not os.path.exists(logsdir):
os.mkdir(logsdir)
if not os.path.exists(datadir):
os.mkdir(datadir)

if options.download:
# Get any access logs we don't have...
ssh = None
ftp = None
try:
print 'Retrieving logs'
ssh = paramiko.SSHClient()
ssh.load_system_host_keys()
ssh.connect('f-droid.org', username='fdroid', timeout=10,
key_filename=webserver_keyfile)
ftp = ssh.open_sftp()
ftp.get_channel().settimeout(15)
print "...connected"

statsdir = 'stats'
logsdir = os.path.join(statsdir, 'logs')
logsarchivedir = os.path.join(logsdir, 'archive')
datadir = os.path.join(statsdir, 'data')
if not os.path.exists(statsdir):
os.mkdir(statsdir)
if not os.path.exists(logsdir):
os.mkdir(logsdir)
if not os.path.exists(datadir):
os.mkdir(datadir)
ftp.chdir('logs')
files = ftp.listdir()
for f in files:
if f.startswith('access-') and f.endswith('.log'):

if options.download:
# Get any access logs we don't have...
ssh = None
ftp = None
try:
print 'Retrieving logs'
ssh = paramiko.SSHClient()
ssh.load_system_host_keys()
ssh.connect('f-droid.org', username='fdroid', timeout=10,
key_filename=webserver_keyfile)
ftp = ssh.open_sftp()
ftp.get_channel().settimeout(15)
print "...connected"

ftp.chdir('logs')
files = ftp.listdir()
for f in files:
if f.startswith('access-') and f.endswith('.log'):

destpath = os.path.join(logsdir, f)
archivepath = os.path.join(logsarchivedir, f + '.gz')
if os.path.exists(archivepath):
if os.path.exists(destpath):
# Just in case we have it archived but failed to remove
# the original...
os.remove(destpath)
else:
destsize = ftp.stat(f).st_size
if (not os.path.exists(destpath) or
os.path.getsize(destpath) != destsize):
print "...retrieving " + f
ftp.get(f, destpath)
except Exception as e:
traceback.print_exc()
sys.exit(1)
finally:
#Disconnect
if ftp != None:
ftp.close()
if ssh != None:
ssh.close()

# Process logs
logexpr = '(?P<ip>[.:0-9a-fA-F]+) - - \[(?P<time>.*?)\] "GET (?P<uri>.*?) HTTP/1.\d" (?P<statuscode>\d+) \d+ "(?P<referral>.*?)" "(?P<useragent>.*?)"'
logsearch = re.compile(logexpr).search
apps = {}
unknownapks = []
knownapks = common.KnownApks()
for logfile in glob.glob(os.path.join(logsdir,'access-*.log')):
logdate = logfile[len(logsdir) + 1 + len('access-'):-4]
matches = (logsearch(line) for line in file(logfile))
for match in matches:
if match and match.group('statuscode') == '200':
uri = match.group('uri')
if uri.endswith('.apk'):
_, apkname = os.path.split(uri)
app = knownapks.getapp(apkname)
if app:
appid, _ = app
if appid in apps:
apps[appid] += 1
destpath = os.path.join(logsdir, f)
archivepath = os.path.join(logsarchivedir, f + '.gz')
if os.path.exists(archivepath):
if os.path.exists(destpath):
# Just in case we have it archived but failed to remove
# the original...
os.remove(destpath)
else:
apps[appid] = 1
else:
if not apkname in unknownapks:
unknownapks.append(apkname)
destsize = ftp.stat(f).st_size
if (not os.path.exists(destpath) or
os.path.getsize(destpath) != destsize):
print "...retrieving " + f
ftp.get(f, destpath)
except Exception as e:
traceback.print_exc()
sys.exit(1)
finally:
#Disconnect
if ftp != None:
ftp.close()
if ssh != None:
ssh.close()

# Calculate and write stats for total downloads...
f = open('stats/total_downloads_app.txt', 'w')
lst = []
alldownloads = 0
for app, count in apps.iteritems():
lst.append(app + " " + str(count))
alldownloads += count
lst.append("ALL " + str(alldownloads))
f.write('# Total downloads by application, since October 2011\n')
for line in sorted(lst):
f.write(line + '\n')
f.close()
# Process logs
logexpr = '(?P<ip>[.:0-9a-fA-F]+) - - \[(?P<time>.*?)\] "GET (?P<uri>.*?) HTTP/1.\d" (?P<statuscode>\d+) \d+ "(?P<referral>.*?)" "(?P<useragent>.*?)"'
logsearch = re.compile(logexpr).search
apps = {}
unknownapks = []
knownapks = common.KnownApks()
for logfile in glob.glob(os.path.join(logsdir,'access-*.log')):
logdate = logfile[len(logsdir) + 1 + len('access-'):-4]
matches = (logsearch(line) for line in file(logfile))
for match in matches:
if match and match.group('statuscode') == '200':
uri = match.group('uri')
if uri.endswith('.apk'):
_, apkname = os.path.split(uri)
app = knownapks.getapp(apkname)
if app:
appid, _ = app
if appid in apps:
apps[appid] += 1
else:
apps[appid] = 1
else:
if not apkname in unknownapks:
unknownapks.append(apkname)

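For reference, the expression above targets combined-format access log entries; a minimal sketch of what it extracts (the log line and values are hypothetical, and logsearch is the compiled search defined above):

line = '10.0.0.1 - - [01/Jan/2012:00:00:00 +0000] "GET /repo/org.example.app_6.apk HTTP/1.1" 200 123456 "-" "F-Droid"'
m = logsearch(line)
print m.group('uri'), m.group('statuscode')  # /repo/org.example.app_6.apk 200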
# Write list of latest apps added to the repo...
latest = knownapks.getlatest(10)
f = open('stats/latestapps.txt', 'w')
for app in latest:
f.write(app + '\n')
f.close()
# Calculate and write stats for total downloads...
f = open('stats/total_downloads_app.txt', 'w')
lst = []
alldownloads = 0
for app, count in apps.iteritems():
lst.append(app + " " + str(count))
alldownloads += count
lst.append("ALL " + str(alldownloads))
f.write('# Total downloads by application, since October 2011\n')
for line in sorted(lst):
f.write(line + '\n')
f.close()

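The resulting stats/total_downloads_app.txt is the comment header followed by alphabetically sorted "appid count" lines plus an ALL total; a hypothetical example of its contents:

# Total downloads by application, since October 2011
ALL 150
org.example.app 100
org.example.other 50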
if len(unknownapks) > 0:
print '\nUnknown apks:'
for apk in unknownapks:
print apk
# Write list of latest apps added to the repo...
latest = knownapks.getlatest(10)
f = open('stats/latestapps.txt', 'w')
for app in latest:
f.write(app + '\n')
f.close()

print "Finished."
if len(unknownapks) > 0:
print '\nUnknown apks:'
for apk in unknownapks:
print apk

print "Finished."

if __name__ == "__main__":
main()