1
0
mirror of https://gitlab.com/fdroid/fdroidserver.git synced 2024-11-04 14:30:11 +01:00

Map apps in memory from appid to appinfo

Instead of storing them in a list and doing linear searches by appinfo['id']
This commit is contained in:
Daniel Martí 2014-08-16 12:46:02 +02:00
parent 880f75110e
commit 94c29f9c37
11 changed files with 108 additions and 113 deletions

View File

@@ -564,7 +564,7 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d
if jni_components == ['yes']:
jni_components = ['']
cmd = [os.path.join(config['ndk_path'], "ndk-build"), "-j1"]
cmd = [os.path.join(config['ndk_path'], "ndk-build"), "-j4"]
for d in jni_components:
if d:
logging.info("Building native code in '%s'" % d)
@@ -990,14 +990,15 @@ def main():
allapps = metadata.read_metadata(xref=not options.onserver)
apps = common.read_app_args(args, allapps, True)
apps = [app for app in apps if (options.force or not app['Disabled']) and
len(app['Repo Type']) > 0 and len(app['builds']) > 0]
for appid, app in apps.items():
if (app['Disabled'] and not options.force) or not app['Repo Type'] or not app['builds']:
del apps[appid]
if len(apps) == 0:
if not apps:
raise FDroidException("No apps to process.")
if options.latest:
for app in apps:
for app in apps.itervalues():
for build in reversed(app['builds']):
if build['disable']:
continue
@@ -1013,7 +1014,7 @@
# Build applications...
failed_apps = {}
build_succeeded = []
for app in apps:
for appid, app in apps.iteritems():
first = True
@@ -1028,7 +1029,7 @@
if app['Repo Type'] == 'srclib':
build_dir = os.path.join('build', 'srclib', app['Repo'])
else:
build_dir = os.path.join('build', app['id'])
build_dir = os.path.join('build', appid)
# Set up vcs interface and make sure we have the latest code...
logging.debug("Getting {0} vcs interface for {1}"
@@ -1046,39 +1047,39 @@
build_succeeded.append(app)
wikilog = "Build succeeded"
except BuildException as be:
logfile = open(os.path.join(log_dir, app['id'] + '.log'), 'a+')
logfile = open(os.path.join(log_dir, appid + '.log'), 'a+')
logfile.write(str(be))
logfile.close()
print("Could not build app %s due to BuildException: %s" % (app['id'], be))
print("Could not build app %s due to BuildException: %s" % (appid, be))
if options.stop:
sys.exit(1)
failed_apps[app['id']] = be
failed_apps[appid] = be
wikilog = be.get_wikitext()
except VCSException as vcse:
reason = str(vcse).split('\n', 1)[0] if options.verbose else str(vcse)
logging.error("VCS error while building app %s: %s" % (
app['id'], reason))
appid, reason))
if options.stop:
sys.exit(1)
failed_apps[app['id']] = vcse
failed_apps[appid] = vcse
wikilog = str(vcse)
except Exception as e:
logging.error("Could not build app %s due to unknown error: %s" % (
app['id'], traceback.format_exc()))
appid, traceback.format_exc()))
if options.stop:
sys.exit(1)
failed_apps[app['id']] = e
failed_apps[appid] = e
wikilog = str(e)
if options.wiki and wikilog:
try:
# Write a page with the last build log for this version code
lastbuildpage = app['id'] + '/lastbuild_' + thisbuild['vercode']
lastbuildpage = appid + '/lastbuild_' + thisbuild['vercode']
newpage = site.Pages[lastbuildpage]
txt = "Build completed at " + time.strftime("%Y-%m-%d %H:%M:%SZ", time.gmtime()) + "\n\n" + wikilog
newpage.save(txt, summary='Build log')
# Redirect from /lastbuild to the most recent build log
newpage = site.Pages[app['id'] + '/lastbuild']
newpage = site.Pages[appid + '/lastbuild']
newpage.save('#REDIRECT [[' + lastbuildpage + ']]', summary='Update redirect')
except:
logging.error("Error while attempting to publish build log")

View File

@@ -570,13 +570,13 @@ def main():
.format(common.getappname(app), version))
return
for app in apps:
for appid, app in apps.iteritems():
if options.autoonly and app['Auto Update Mode'] in ('None', 'Static'):
logging.debug("Nothing to do for {0}...".format(app['id']))
logging.debug("Nothing to do for {0}...".format(appid))
continue
logging.info("Processing " + app['id'] + '...')
logging.info("Processing " + appid + '...')
checkupdates_app(app)

View File

@@ -266,7 +266,10 @@ def read_app_args(args, allapps, allow_vercodes=False):
if not vercodes:
return allapps
apps = [app for app in allapps if app['id'] in vercodes]
apps = {}
for appid, app in allapps.iteritems():
if appid in vercodes:
apps[appid] = app
if len(apps) != len(vercodes):
allids = [app["id"] for app in allapps]
@@ -278,17 +281,17 @@
raise FDroidException("No packages specified")
error = False
for app in apps:
vc = vercodes[app['id']]
for appid, app in apps.iteritems():
vc = vercodes[appid]
if not vc:
continue
app['builds'] = [b for b in app['builds'] if b['vercode'] in vc]
if len(app['builds']) != len(vercodes[app['id']]):
if len(app['builds']) != len(vercodes[appid]):
error = True
allvcs = [b['vercode'] for b in app['builds']]
for v in vercodes[app['id']]:
for v in vercodes[appid]:
if v not in allvcs:
logging.critical("No such vercode %s for app %s" % (v, app['id']))
logging.critical("No such vercode %s for app %s" % (v, appid))
if error:
raise FDroidException("Found invalid vercodes for some apps")
@@ -617,15 +620,13 @@ class vcs_gitsvn(vcs):
gitsvn_cmd += ' -t %s' % i[5:]
elif i.startswith('branches='):
gitsvn_cmd += ' -b %s' % i[9:]
p = SilentPopen([gitsvn_cmd + " %s %s" % (remote_split[0], self.local)], shell=True)
if p.returncode != 0:
if subprocess.call([gitsvn_cmd + " %s %s" % (remote_split[0], self.local)], shell=True) != 0:
self.clone_failed = True
raise VCSException("Git svn clone failed", p.output)
raise VCSException("Git svn clone failed")
else:
p = SilentPopen([gitsvn_cmd + " %s %s" % (self.remote, self.local)], shell=True)
if p.returncode != 0:
if subprocess.call([gitsvn_cmd + " %s %s" % (self.remote, self.local)], shell=True) != 0:
self.clone_failed = True
raise VCSException("Git svn clone failed", p.output)
raise VCSException("Git svn clone failed")
self.checkrepo()
else:
self.checkrepo()

View File

@@ -265,14 +265,12 @@ def main():
sys.exit(1)
# Make sure it's actually new...
for app in apps:
if app['id'] == package:
logging.error("Package " + package + " already exists")
sys.exit(1)
if package in apps:
logging.error("Package " + package + " already exists")
sys.exit(1)
# Construct the metadata...
app = metadata.parse_metadata(None)
app['id'] = package
app = metadata.parse_metadata(None)[1]
app['Web Site'] = website
app['Source Code'] = sourcecode
if issuetracker:

View File

@@ -164,8 +164,7 @@ def main():
allapps = metadata.read_metadata(xref=False)
apps = common.read_app_args(args, allapps, False)
for app in apps:
appid = app['id']
for appid, app in apps.iteritems():
lastcommit = ''
if app['Disabled']:

View File

@@ -480,30 +480,30 @@ def read_metadata(xref=True):
# their source repository.
read_srclibs()
apps = []
apps = {}
for basedir in ('metadata', 'tmp'):
if not os.path.exists(basedir):
os.makedirs(basedir)
for metafile in sorted(glob.glob(os.path.join('metadata', '*.txt'))):
appinfo = parse_metadata(metafile)
appid, appinfo = parse_metadata(metafile)
check_metadata(appinfo)
apps.append(appinfo)
apps[appid] = appinfo
if xref:
# Parse all descriptions at load time, just to ensure cross-referencing
# errors are caught early rather than when they hit the build server.
def linkres(link):
for app in apps:
if app['id'] == link:
return ("fdroid.app:" + link, "Dummy name - don't know yet")
raise MetaDataException("Cannot resolve app id " + link)
for app in apps:
def linkres(appid):
if appid in apps:
return ("fdroid:app" + appid, "Dummy name - don't know yet")
raise MetaDataException("Cannot resolve app id " + appid)
for appid, app in apps.iteritems():
try:
description_html(app['Description'], linkres)
except MetaDataException, e:
raise MetaDataException("Problem with description of " + app['id'] +
raise MetaDataException("Problem with description of " + appid +
" - " + str(e))
return apps
@@ -568,7 +568,6 @@ def fill_build_defaults(build):
#
# Known keys not originating from the metadata are:
#
# 'id' - the application's package ID
# 'builds' - a list of dictionaries containing build information
# for each defined build
# 'comments' - a list of comments from the metadata file. Each is
@@ -582,6 +581,7 @@ def fill_build_defaults(build):
#
def parse_metadata(metafile):
appid = None
linedesc = None
def add_buildflag(p, thisbuild):
@@ -654,11 +654,10 @@ def parse_metadata(metafile):
if metafile:
if not isinstance(metafile, file):
metafile = open(metafile, "r")
thisinfo['id'] = metafile.name[9:-4]
else:
thisinfo['id'] = None
appid = metafile.name[9:-4]
thisinfo.update(app_defaults)
thisinfo['id'] = appid
# General defaults...
thisinfo['builds'] = []
@@ -783,7 +782,7 @@ def parse_metadata(metafile):
for build in thisinfo['builds']:
fill_build_defaults(build)
return thisinfo
return (appid, thisinfo)
# Write a metadata file.

View File

@@ -88,9 +88,9 @@ def main():
allapps = metadata.read_metadata()
vercodes = common.read_pkg_args(args, True)
allaliases = []
for app in allapps:
for appid in allapps:
m = md5.new()
m.update(app['id'])
m.update(appid)
keyalias = m.hexdigest()[:8]
if keyalias in allaliases:
logging.error("There is a keyalias collision - publishing halted")

View File

@@ -45,9 +45,9 @@ def main():
allapps = metadata.read_metadata(xref=True)
apps = common.read_app_args(args, allapps, False)
for app in apps:
logging.info("Writing " + app['id'])
metadata.write_metadata(os.path.join('metadata', app['id']) + '.txt', app)
for appid, app in apps.iteritems():
logging.info("Writing " + appid)
metadata.write_metadata(os.path.join('metadata', appid) + '.txt', app)
logging.info("Finished.")

View File

@@ -57,20 +57,20 @@ def main():
srclib_dir = os.path.join(build_dir, 'srclib')
extlib_dir = os.path.join(build_dir, 'extlib')
for app in apps:
for appid, app in apps.iteritems():
if app['Disabled']:
logging.info("Skipping %s: disabled" % app['id'])
logging.info("Skipping %s: disabled" % appid)
continue
if not app['builds']:
logging.info("Skipping %s: no builds specified" % app['id'])
logging.info("Skipping %s: no builds specified" % appid)
continue
logging.info("Processing " + app['id'])
logging.info("Processing " + appid)
try:
build_dir = 'build/' + app['id']
build_dir = 'build/' + appid
# Set up vcs interface and make sure we have the latest code...
vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir)
@@ -91,17 +91,17 @@ def main():
# Do the scan...
buildprobs = common.scan_source(build_dir, root_dir, thisbuild)
for problem in buildprobs:
problems.append(problem + ' in ' + app['id']
problems.append(problem + ' in ' + appid
+ ' ' + thisbuild['version'])
except BuildException as be:
msg = "Could not scan app %s due to BuildException: %s" % (app['id'], be)
msg = "Could not scan app %s due to BuildException: %s" % (appid, be)
problems.append(msg)
except VCSException as vcse:
msg = "VCS error while scanning app %s: %s" % (app['id'], vcse)
msg = "VCS error while scanning app %s: %s" % (appid, vcse)
problems.append(msg)
except Exception:
msg = "Could not scan app %s due to unknown error: %s" % (app['id'], traceback.format_exc())
msg = "Could not scan app %s due to unknown error: %s" % (appid, traceback.format_exc())
problems.append(msg)
logging.info("Finished:")

View File

@@ -71,7 +71,7 @@ def main():
sys.exit(1)
# Get all metadata-defined apps...
metaapps = [a for a in metadata.read_metadata() if not a['Disabled']]
metaapps = [a for a in metadata.read_metadata().itervalues() if not a['Disabled']]
statsdir = 'stats'
logsdir = os.path.join(statsdir, 'logs')

View File

@@ -62,7 +62,7 @@ def get_icon_dirs(repodir):
yield os.path.join(repodir, "icons")
def update_wiki(apps, apks):
def update_wiki(apps, sortedids, apks):
"""Update the wiki
:param apps: fully populated list of all applications
@@ -77,7 +77,10 @@ def update_wiki(apps, apks):
site.login(config['wiki_user'], config['wiki_password'])
generated_pages = {}
generated_redirects = {}
for app in apps:
for appid in sortedids:
app = apps[appid]
wikidata = ''
if app['Disabled']:
wikidata += '{{Disabled|' + app['Disabled'] + '}}\n'
@@ -85,7 +88,7 @@
for af in app['AntiFeatures'].split(','):
wikidata += '{{AntiFeature|' + af + '}}\n'
wikidata += '{{App|id=%s|name=%s|added=%s|lastupdated=%s|source=%s|tracker=%s|web=%s|donate=%s|flattr=%s|bitcoin=%s|litecoin=%s|dogecoin=%s|license=%s|root=%s}}\n' % (
app['id'],
appid,
app['Name'],
time.strftime('%Y-%m-%d', app['added']) if 'added' in app else '',
time.strftime('%Y-%m-%d', app['lastupdated']) if 'lastupdated' in app else '',
@@ -104,7 +107,7 @@
wikidata += "This app provides: %s" % ', '.join(app['Summary'].split(','))
wikidata += app['Summary']
wikidata += " - [https://f-droid.org/repository/browse/?fdid=" + app['id'] + " view in repository]\n\n"
wikidata += " - [https://f-droid.org/repository/browse/?fdid=" + appid + " view in repository]\n\n"
wikidata += "=Description=\n"
wikidata += metadata.description_wiki(app['Description']) + "\n"
@@ -112,7 +115,7 @@
wikidata += "=Maintainer Notes=\n"
if 'Maintainer Notes' in app:
wikidata += metadata.description_wiki(app['Maintainer Notes']) + "\n"
wikidata += "\nMetadata: [https://gitlab.com/fdroid/fdroiddata/blob/master/metadata/{0}.txt current] [https://gitlab.com/fdroid/fdroiddata/commits/master/metadata/{0}.txt history]\n".format(app['id'])
wikidata += "\nMetadata: [https://gitlab.com/fdroid/fdroiddata/blob/master/metadata/{0}.txt current] [https://gitlab.com/fdroid/fdroiddata/commits/master/metadata/{0}.txt history]\n".format(appid)
# Get a list of all packages for this application...
apklist = []
@@ -120,7 +123,7 @@
cantupdate = False
buildfails = False
for apk in apks:
if apk['id'] == app['id']:
if apk['id'] == appid:
if str(apk['versioncode']) == app['Current Version Code']:
gotcurrentver = True
apklist.append(apk)
@@ -144,7 +147,7 @@
buildfails = True
apklist.append({'versioncode': int(thisbuild['vercode']),
'version': thisbuild['version'],
'buildproblem': "The build for this version appears to have failed. Check the [[{0}/lastbuild_{1}|build log]].".format(app['id'], thisbuild['vercode'])
'buildproblem': "The build for this version appears to have failed. Check the [[{0}/lastbuild_{1}|build log]].".format(appid, thisbuild['vercode'])
})
if app['Current Version Code'] == '0':
cantupdate = True
@@ -200,7 +203,7 @@
# We can't have underscores in the page name, even if they're in
# the package ID, because MediaWiki messes with them...
pagename = app['id'].replace('_', ' ')
pagename = appid.replace('_', ' ')
# Drop a trailing newline, because mediawiki is going to drop it anyway
# and it we don't we'll think the page has changed when it hasn't...
@@ -270,10 +273,10 @@ def delete_disabled_builds(apps, apkcache, repodirs):
:param apkcache: current apk cache information
:param repodirs: the repo directories to process
"""
for app in apps:
for appid, app in apps.iteritems():
for build in app['builds']:
if build['disable']:
apkfilename = app['id'] + '_' + str(build['vercode']) + '.apk'
apkfilename = appid + '_' + str(build['vercode']) + '.apk'
for repodir in repodirs:
apkpath = os.path.join(repodir, apkfilename)
ascpath = apkpath + ".asc"
@@ -605,7 +608,7 @@ def scan_apks(apps, apkcache, repodir, knownapks):
repo_pubkey_fingerprint = None
def make_index(apps, apks, repodir, archive, categories):
def make_index(apps, sortedids, apks, repodir, archive, categories):
"""Make a repo index.
:param apps: fully populated apps list
@@ -682,7 +685,8 @@ def make_index(apps, apks, repodir, archive, categories):
root.appendChild(repoel)
for app in apps:
for appid in sortedids:
app = apps[appid]
if app['Disabled'] is not None:
continue
@@ -690,7 +694,7 @@ def make_index(apps, apks, repodir, archive, categories):
# Get a list of the apks for this app...
apklist = []
for apk in apks:
if apk['id'] == app['id']:
if apk['id'] == appid:
apklist.append(apk)
if len(apklist) == 0:
@@ -710,11 +714,11 @@
if app['icon']:
addElement('icon', app['icon'], doc, apel)
def linkres(link):
for app in apps:
if app['id'] == link:
return ("fdroid.app:" + link, app['Name'])
raise MetaDataException("Cannot resolve app id " + link)
def linkres(appid):
if appid in apps:
return ("fdroid:app" + appid, apps[appid]['Name'])
raise MetaDataException("Cannot resolve app id " + appid)
addElement('desc',
metadata.description_html(app['Description'], linkres),
doc, apel)
@@ -855,12 +859,12 @@
def archive_old_apks(apps, apks, archapks, repodir, archivedir, defaultkeepversions):
for app in apps:
for appid, app in apps.iteritems():
# Get a list of the apks for this app...
apklist = []
for apk in apks:
if apk['id'] == app['id']:
if apk['id'] == appid:
apklist.append(apk)
# Sort the apk list into version order...
@@ -948,7 +952,7 @@ def main():
# Generate a list of categories...
categories = set()
for app in apps:
for app in apps.itervalues():
categories.update(app['Categories'])
# Read known apks data (will be updated and written back when we've finished)
@@ -975,12 +979,7 @@
# metadata files, if requested on the command line)
newmetadata = False
for apk in apks:
found = False
for app in apps:
if app['id'] == apk['id']:
found = True
break
if not found:
if apk['id'] not in apps:
if options.create_metadata:
if 'name' not in apk:
logging.error(apk['id'] + ' does not have a name! Skipping...')
@@ -1025,12 +1024,12 @@
# level. When doing this, we use the info from the most recent version's apk.
# We deal with figuring out when the app was added and last updated at the
# same time.
for app in apps:
for appid, app in apps.iteritems():
bestver = 0
added = None
lastupdated = None
for apk in apks + archapks:
if apk['id'] == app['id']:
if apk['id'] == appid:
if apk['versioncode'] > bestver:
bestver = apk['versioncode']
bestapk = apk
@@ -1044,17 +1043,17 @@
if added:
app['added'] = added
else:
logging.warn("Don't know when " + app['id'] + " was added")
logging.warn("Don't know when " + appid + " was added")
if lastupdated:
app['lastupdated'] = lastupdated
else:
logging.warn("Don't know when " + app['id'] + " was last updated")
logging.warn("Don't know when " + appid + " was last updated")
if bestver == 0:
if app['Name'] is None:
app['Name'] = app['id']
app['Name'] = appid
app['icon'] = None
logging.warn("Application " + app['id'] + " has no packages")
logging.warn("Application " + appid + " has no packages")
else:
if app['Name'] is None:
app['Name'] = bestapk['name']
@@ -1063,18 +1062,18 @@
# Sort the app list by name, then the web site doesn't have to by default.
# (we had to wait until we'd scanned the apks to do this, because mostly the
# name comes from there!)
apps = sorted(apps, key=lambda app: app['Name'].upper())
sortedids = sorted(apps.iterkeys(), key=lambda appid: apps[appid]['Name'].upper())
if len(repodirs) > 1:
archive_old_apks(apps, apks, archapks, repodirs[0], repodirs[1], config['archive_older'])
# Make the index for the main repo...
make_index(apps, apks, repodirs[0], False, categories)
make_index(apps, sortedids, apks, repodirs[0], False, categories)
# If there's an archive repo, make the index for it. We already scanned it
# earlier on.
if len(repodirs) > 1:
make_index(apps, archapks, repodirs[1], True, categories)
make_index(apps, sortedids, archapks, repodirs[1], True, categories)
if config['update_stats']:
@@ -1087,13 +1086,11 @@
for line in file(os.path.join('stats', 'latestapps.txt')):
appid = line.rstrip()
data += appid + "\t"
for app in apps:
if app['id'] == appid:
data += app['Name'] + "\t"
if app['icon'] is not None:
data += app['icon'] + "\t"
data += app['License'] + "\n"
break
app = apps[appid]
data += app['Name'] + "\t"
if app['icon'] is not None:
data += app['icon'] + "\t"
data += app['License'] + "\n"
f = open(os.path.join(repodirs[0], 'latestapps.dat'), 'w')
f.write(data)
f.close()
@@ -1104,7 +1101,7 @@
# Update the wiki...
if options.wiki:
update_wiki(apps, apks + archapks)
update_wiki(apps, sortedids, apks + archapks)
logging.info("Finished.")