2013-10-31 13:25:39 +01:00
|
|
|
#!/usr/bin/env python2
|
2011-01-26 17:26:51 +01:00
|
|
|
# -*- coding: utf-8 -*-
|
2010-10-22 00:26:38 +02:00
|
|
|
#
|
|
|
|
# update.py - part of the FDroid server tools
|
2013-05-09 21:09:17 +02:00
|
|
|
# Copyright (C) 2010-2013, Ciaran Gultnieks, ciaran@ciarang.com
|
2014-01-28 14:07:19 +01:00
|
|
|
# Copyright (C) 2013-2014 Daniel Martí <mvdan@mvdan.cc>
|
2010-10-22 00:26:38 +02:00
|
|
|
#
|
|
|
|
# This program is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU Affero General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU Affero General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU Affero General Public License
|
|
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
|
|
|
|
import sys
|
|
|
|
import os
|
|
|
|
import shutil
|
|
|
|
import glob
|
|
|
|
import re
|
|
|
|
import zipfile
|
2011-03-17 00:27:42 +01:00
|
|
|
import hashlib
|
2012-09-03 12:48:18 +02:00
|
|
|
import pickle
|
2010-10-22 00:26:38 +02:00
|
|
|
from xml.dom.minidom import Document
|
|
|
|
from optparse import OptionParser
|
2012-01-20 12:07:30 +01:00
|
|
|
import time
|
2014-08-30 04:53:55 +02:00
|
|
|
from pyasn1.error import PyAsn1Error
|
|
|
|
from pyasn1.codec.der import decoder, encoder
|
|
|
|
from pyasn1_modules import rfc2315
|
|
|
|
from hashlib import md5
|
|
|
|
|
2013-08-19 11:20:50 +02:00
|
|
|
from PIL import Image
|
2014-01-27 16:56:55 +01:00
|
|
|
import logging
|
2012-08-22 18:24:33 +02:00
|
|
|
|
2014-05-02 04:36:12 +02:00
|
|
|
import common
|
|
|
|
import metadata
|
2014-06-30 16:15:14 +02:00
|
|
|
from common import FDroidPopen, SilentPopen
|
2014-01-27 16:56:55 +01:00
|
|
|
from metadata import MetaDataException
|
2014-01-07 11:16:58 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2014-01-07 11:16:58 +01:00
|
|
|
def get_densities():
    """Return the supported icon densities (dpi) as strings, highest first."""
    return [str(dpi) for dpi in (640, 480, 320, 240, 160, 120)]
|
2014-01-07 11:16:58 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2014-01-07 17:01:51 +01:00
|
|
|
def dpi_to_px(density):
    """Convert an icon density (dpi) to its size in pixels.

    The baseline is a 48px icon at mdpi (160dpi); other densities scale
    linearly from that.
    """
    return 48 * int(density) / 160
|
2014-01-07 11:16:58 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2014-01-07 17:01:51 +01:00
|
|
|
def px_to_dpi(px):
    """Inverse of dpi_to_px: convert an icon size in pixels back to dpi."""
    return 160 * int(px) / 48
|
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2014-01-07 12:06:39 +01:00
|
|
|
def get_icon_dir(repodir, density):
    """Return the icon directory under repodir for the given density.

    A density of None selects the default, density-independent "icons"
    directory.
    """
    subdir = "icons" if density is None else "icons-%s" % density
    return os.path.join(repodir, subdir)
|
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2014-01-07 11:16:58 +01:00
|
|
|
def get_icon_dirs(repodir):
    """Yield the icon directory for every supported density, followed by
    the default density-independent "icons" directory.
    """
    for dpi in get_densities():
        yield get_icon_dir(repodir, dpi)
    yield os.path.join(repodir, "icons")
|
2014-01-07 11:16:58 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2014-08-16 12:46:02 +02:00
|
|
|
def update_wiki(apps, sortedids, apks):
    """Update the wiki.

    Builds one wiki page per application (plus a name->id redirect page
    where safe) and pushes them to the mediawiki site configured in the
    module-level config dict, deleting pages in the managed categories
    that are no longer published.

    :param apps: fully populated list of all applications
    :param sortedids: app ids, in the order the pages should be generated
    :param apks: all apks, except...
    """
    logging.info("Updating wiki")
    wikicat = 'Apps'
    wikiredircat = 'App Redirects'
    import mwclient
    site = mwclient.Site((config['wiki_protocol'], config['wiki_server']),
                         path=config['wiki_path'])
    site.login(config['wiki_user'], config['wiki_password'])
    generated_pages = {}
    generated_redirects = {}

    for appid in sortedids:
        app = apps[appid]

        wikidata = ''
        if app['Disabled']:
            wikidata += '{{Disabled|' + app['Disabled'] + '}}\n'
        if app['AntiFeatures']:
            for af in app['AntiFeatures'].split(','):
                wikidata += '{{AntiFeature|' + af + '}}\n'
        wikidata += '{{App|id=%s|name=%s|added=%s|lastupdated=%s|source=%s|tracker=%s|web=%s|donate=%s|flattr=%s|bitcoin=%s|litecoin=%s|dogecoin=%s|license=%s|root=%s}}\n' % (
            appid,
            app['Name'],
            time.strftime('%Y-%m-%d', app['added']) if 'added' in app else '',
            time.strftime('%Y-%m-%d', app['lastupdated']) if 'lastupdated' in app else '',
            app['Source Code'],
            app['Issue Tracker'],
            app['Web Site'],
            app['Donate'],
            app['FlattrID'],
            app['Bitcoin'],
            app['Litecoin'],
            app['Dogecoin'],
            app['License'],
            app.get('Requires Root', 'No'))

        if app['Provides']:
            # FIX: was joining app['Summary'] here, which printed the app's
            # summary text instead of the list of provided package ids.
            wikidata += "This app provides: %s" % ', '.join(app['Provides'].split(','))

        wikidata += app['Summary']
        wikidata += " - [https://f-droid.org/repository/browse/?fdid=" + appid + " view in repository]\n\n"

        wikidata += "=Description=\n"
        wikidata += metadata.description_wiki(app['Description']) + "\n"

        wikidata += "=Maintainer Notes=\n"
        if 'Maintainer Notes' in app:
            wikidata += metadata.description_wiki(app['Maintainer Notes']) + "\n"
        wikidata += "\nMetadata: [https://gitlab.com/fdroid/fdroiddata/blob/master/metadata/{0}.txt current] [https://gitlab.com/fdroid/fdroiddata/commits/master/metadata/{0}.txt history]\n".format(appid)

        # Get a list of all packages for this application...
        apklist = []
        gotcurrentver = False
        cantupdate = False
        buildfails = False
        for apk in apks:
            if apk['id'] == appid:
                if str(apk['versioncode']) == app['Current Version Code']:
                    gotcurrentver = True
                apklist.append(apk)
        # Include ones we can't build, as a special case...
        for thisbuild in app['builds']:
            if thisbuild['disable']:
                if thisbuild['vercode'] == app['Current Version Code']:
                    cantupdate = True
                # TODO: Nasty: vercode is a string in the build, and an int elsewhere
                apklist.append({'versioncode': int(thisbuild['vercode']),
                                'version': thisbuild['version'],
                                'buildproblem': thisbuild['disable']
                                })
            else:
                builtit = False
                for apk in apklist:
                    if apk['versioncode'] == int(thisbuild['vercode']):
                        builtit = True
                        break
                if not builtit:
                    buildfails = True
                    apklist.append({'versioncode': int(thisbuild['vercode']),
                                    'version': thisbuild['version'],
                                    'buildproblem': "The build for this version appears to have failed. Check the [[{0}/lastbuild_{1}|build log]].".format(appid, thisbuild['vercode'])
                                    })
        if app['Current Version Code'] == '0':
            cantupdate = True
        # Sort with most recent first...
        apklist = sorted(apklist, key=lambda apk: apk['versioncode'], reverse=True)

        wikidata += "=Versions=\n"
        if len(apklist) == 0:
            wikidata += "We currently have no versions of this app available."
        elif not gotcurrentver:
            wikidata += "We don't have the current version of this app."
        else:
            wikidata += "We have the current version of this app."
        wikidata += " (Check mode: " + app['Update Check Mode'] + ") "
        wikidata += " (Auto-update mode: " + app['Auto Update Mode'] + ")\n\n"
        if len(app['No Source Since']) > 0:
            wikidata += "This application has partially or entirely been missing source code since version " + app['No Source Since'] + ".\n\n"
        if len(app['Current Version']) > 0:
            wikidata += "The current (recommended) version is " + app['Current Version']
            wikidata += " (version code " + app['Current Version Code'] + ").\n\n"
        validapks = 0
        for apk in apklist:
            wikidata += "==" + apk['version'] + "==\n"

            if 'buildproblem' in apk:
                wikidata += "We can't build this version: " + apk['buildproblem'] + "\n\n"
            else:
                validapks += 1
                wikidata += "This version is built and signed by "
                if 'srcname' in apk:
                    wikidata += "F-Droid, and guaranteed to correspond to the source tarball published with it.\n\n"
                else:
                    wikidata += "the original developer.\n\n"
            wikidata += "Version code: " + str(apk['versioncode']) + '\n'

        wikidata += '\n[[Category:' + wikicat + ']]\n'
        if len(app['No Source Since']) > 0:
            wikidata += '\n[[Category:Apps missing source code]]\n'
        if validapks == 0 and not app['Disabled']:
            wikidata += '\n[[Category:Apps with no packages]]\n'
        if cantupdate and not app['Disabled']:
            wikidata += "\n[[Category:Apps we can't update]]\n"
        if buildfails and not app['Disabled']:
            wikidata += "\n[[Category:Apps with failing builds]]\n"
        elif not gotcurrentver and not cantupdate and not app['Disabled'] and app['Update Check Mode'] != "Static":
            wikidata += '\n[[Category:Apps to Update]]\n'
        if app['Disabled']:
            wikidata += '\n[[Category:Apps that are disabled]]\n'
        if app['Update Check Mode'] == 'None' and not app['Disabled']:
            wikidata += '\n[[Category:Apps with no update check]]\n'
        for appcat in app['Categories']:
            wikidata += '\n[[Category:{0}]]\n'.format(appcat)

        # We can't have underscores in the page name, even if they're in
        # the package ID, because MediaWiki messes with them...
        pagename = appid.replace('_', ' ')

        # Drop a trailing newline, because mediawiki is going to drop it anyway
        # and if we don't we'll think the page has changed when it hasn't...
        if wikidata.endswith('\n'):
            wikidata = wikidata[:-1]

        generated_pages[pagename] = wikidata

        # Make a redirect from the name to the ID too, unless there's
        # already an existing page with the name and it isn't a redirect.
        noclobber = False
        apppagename = app['Name'].replace('_', ' ')
        apppagename = apppagename.replace('{', '')
        apppagename = apppagename.replace('}', ' ')
        apppagename = apppagename.replace(':', ' ')
        # Drop double spaces caused mostly by replacing ':' above
        apppagename = apppagename.replace('  ', ' ')
        for expagename in site.allpages(prefix=apppagename,
                                        filterredir='nonredirects',
                                        generator=False):
            if expagename == apppagename:
                noclobber = True
        # Another reason not to make the redirect page is if the app name
        # is the same as it's ID, because that will overwrite the real page
        # with an redirect to itself! (Although it seems like an odd
        # scenario this happens a lot, e.g. where there is metadata but no
        # builds or binaries to extract a name from.
        if apppagename == pagename:
            noclobber = True
        if not noclobber:
            generated_redirects[apppagename] = "#REDIRECT [[" + pagename + "]]\n[[Category:" + wikiredircat + "]]"

    # Sync the generated pages and redirects to the wiki: update modified
    # pages, delete unpublished ones, then create any that are missing.
    for tcat, genp in [(wikicat, generated_pages),
                       (wikiredircat, generated_redirects)]:
        catpages = site.Pages['Category:' + tcat]
        existingpages = []
        for page in catpages:
            existingpages.append(page.name)
            if page.name in genp:
                pagetxt = page.edit()
                if pagetxt != genp[page.name]:
                    logging.debug("Updating modified page " + page.name)
                    page.save(genp[page.name], summary='Auto-updated')
                else:
                    logging.debug("Page " + page.name + " is unchanged")
            else:
                logging.warn("Deleting page " + page.name)
                page.delete('No longer published')
        for pagename, text in genp.items():
            logging.debug("Checking " + pagename)
            if pagename not in existingpages:
                logging.debug("Creating page " + pagename)
                try:
                    newpage = site.Pages[pagename]
                    newpage.save(text, summary='Auto-created')
                except Exception:
                    logging.error("...FAILED to create page")

    # Purge server cache to ensure counts are up to date
    site.pages['Repository Maintenance'].purge()
|
2012-08-22 18:24:33 +02:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-05-09 21:09:17 +02:00
|
|
|
def delete_disabled_builds(apps, apkcache, repodirs):
    """Delete disabled build outputs.

    :param apps: list of all applications, as per metadata.read_metadata
    :param apkcache: current apk cache information
    :param repodirs: the repo directories to process
    """
    for appid, app in apps.iteritems():
        for build in app['builds']:
            # Only disabled builds have their outputs removed.
            if not build['disable']:
                continue
            apkfilename = appid + '_' + str(build['vercode']) + '.apk'
            for repodir in repodirs:
                apkpath = os.path.join(repodir, apkfilename)
                # The apk itself, its source tarball, and its signature file.
                doomed = [apkpath,
                          os.path.join(repodir, apkfilename[:-4] + "_src.tar.gz"),
                          apkpath + ".asc"]
                for name in doomed:
                    if os.path.exists(name):
                        logging.warn("Deleting disabled build output " + apkfilename)
                        os.remove(name)
            if apkfilename in apkcache:
                del apkcache[apkfilename]
|
2012-02-26 15:18:58 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2014-01-07 12:06:39 +01:00
|
|
|
def resize_icon(iconpath, density):
    """Shrink the icon at iconpath to the maximum pixel size for density.

    Does nothing if the file does not exist or is already small enough.
    The image is resized in place and re-saved as PNG; any error (e.g. an
    unreadable image file) is logged rather than raised.
    """

    if not os.path.isfile(iconpath):
        return

    try:
        im = Image.open(iconpath)
        size = dpi_to_px(density)

        # Only shrink when either dimension exceeds the limit;
        # thumbnail() preserves the aspect ratio.
        if any(length > size for length in im.size):
            oldsize = im.size
            im.thumbnail((size, size), Image.ANTIALIAS)
            logging.debug("%s was too large at %s - new size is %s" % (
                iconpath, oldsize, im.size))
            im.save(iconpath, "PNG")

    except Exception, e:
        logging.error("Failed resizing {0} - {1}".format(iconpath, e))
|
2013-08-19 11:30:54 +02:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-08-19 11:30:54 +02:00
|
|
|
def resize_all_icons(repodirs):
    """Resize all icons that exceed the max size

    :param repodirs: the repo directories to process
    """
    for repodir in repodirs:
        for density in get_densities():
            # Every png in this density's icon dir gets checked.
            pattern = os.path.join(get_icon_dir(repodir, density), '*.png')
            for path in glob.glob(pattern):
                resize_icon(path, density)
|
2012-02-26 15:18:58 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2014-08-30 04:53:55 +02:00
|
|
|
# Matches the path of a signing-certificate entry inside an apk,
# e.g. "META-INF/CERT.RSA"
cert_path_regex = re.compile(r'^META-INF/.*\.RSA$')
|
|
|
|
|
|
|
|
|
|
|
|
def getsig(apkpath):
    """ Get the signing certificate of an apk. To get the same md5 hash that
    Android gets, we encode the .RSA certificate in a specific format and pass
    it hex-encoded to the md5 digest algorithm.

    :param apkpath: path to the apk
    :returns: A string containing the md5 of the signature of the apk or None
    if an error occurred.
    """

    cert = None

    # verify the jar signature is correct
    args = ['jarsigner', '-verify', apkpath]
    p = FDroidPopen(args)
    if p.returncode != 0:
        logging.critical(apkpath + " has a bad signature!")
        return None

    with zipfile.ZipFile(apkpath, 'r') as apk:

        # There must be exactly one .RSA entry under META-INF
        # (see cert_path_regex).
        certs = [n for n in apk.namelist() if cert_path_regex.match(n)]

        if len(certs) < 1:
            logging.error("Found no signing certificates on %s" % apkpath)
            return None
        if len(certs) > 1:
            logging.error("Found multiple signing certificates on %s" % apkpath)
            return None

        cert = apk.read(certs[0])

    # The .RSA entry is a PKCS#7 SignedData blob; pull the certificate set
    # out of it with pyasn1's DER decoder.
    content = decoder.decode(cert, asn1Spec=rfc2315.ContentInfo())[0]
    if content.getComponentByName('contentType') != rfc2315.signedData:
        logging.error("Unexpected format.")
        return None

    content = decoder.decode(content.getComponentByName('content'),
                             asn1Spec=rfc2315.SignedData())[0]
    try:
        certificates = content.getComponentByName('certificates')
    except PyAsn1Error:
        logging.error("Certificates not found.")
        return None

    # Re-encode the certificate set and skip the 4-byte DER header,
    # matching the bytes Android hashes.
    cert_encoded = encoder.encode(certificates)[4:]

    # NOTE: str.encode('hex') is Python 2 only.
    return md5(cert_encoded.encode('hex')).hexdigest()
|
|
|
|
|
|
|
|
|
2013-05-09 21:09:17 +02:00
|
|
|
def scan_apks(apps, apkcache, repodir, knownapks):
    """Scan the apks in the given repo directory.

    This also extracts the icons.

    Relies on the module-level config (for the aapt path) and options
    (for --clean and --delete-unknown) set by the caller.

    :param apps: list of all applications, as per metadata.read_metadata
    :param apkcache: current apk cache information
    :param repodir: repo directory to scan
    :param knownapks: known apks info
    :returns: (apks, cachechanged) where apks is a list of apk information,
              and cachechanged is True if the apkcache got changed.
    """

    cachechanged = False

    # (Re)create the icon directories, wiping them first with --clean.
    icon_dirs = get_icon_dirs(repodir)
    for icon_dir in icon_dirs:
        if os.path.exists(icon_dir):
            if options.clean:
                shutil.rmtree(icon_dir)
                os.makedirs(icon_dir)
        else:
            os.makedirs(icon_dir)

    apks = []
    # Patterns for picking fields out of `aapt dump badging` output lines.
    name_pat = re.compile(".*name='([a-zA-Z0-9._]*)'.*")
    vercode_pat = re.compile(".*versionCode='([0-9]*)'.*")
    vername_pat = re.compile(".*versionName='([^']*)'.*")
    label_pat = re.compile(".*label='(.*?)'(\n| [a-z]*?=).*")
    icon_pat = re.compile(".*application-icon-([0-9]+):'([^']+?)'.*")
    icon_pat_nodpi = re.compile(".*icon='([^']+?)'.*")
    sdkversion_pat = re.compile(".*'([0-9]*)'.*")
    string_pat = re.compile(".*'([^']*)'.*")
    for apkfile in glob.glob(os.path.join(repodir, '*.apk')):

        apkfilename = apkfile[len(repodir) + 1:]
        if ' ' in apkfilename:
            logging.critical("Spaces in filenames are not allowed.")
            sys.exit(1)

        if apkfilename in apkcache:
            # Already scanned on a previous run - reuse the cached info.
            logging.debug("Reading " + apkfilename + " from cache")
            thisinfo = apkcache[apkfilename]

        else:
            logging.debug("Processing " + apkfilename)
            thisinfo = {}
            thisinfo['apkname'] = apkfilename
            srcfilename = apkfilename[:-4] + "_src.tar.gz"
            if os.path.exists(os.path.join(repodir, srcfilename)):
                thisinfo['srcname'] = srcfilename
            thisinfo['size'] = os.path.getsize(apkfile)
            thisinfo['permissions'] = set()
            thisinfo['features'] = set()
            thisinfo['icons_src'] = {}
            thisinfo['icons'] = {}
            # Ask aapt for the apk's manifest summary.
            p = SilentPopen([config['aapt'], 'dump', 'badging', apkfile])
            if p.returncode != 0:
                if options.delete_unknown:
                    if os.path.exists(apkfile):
                        logging.error("Failed to get apk information, deleting " + apkfile)
                        os.remove(apkfile)
                    else:
                        logging.error("Could not find {0} to remove it".format(apkfile))
                else:
                    logging.error("Failed to get apk information, skipping " + apkfile)
                continue
            for line in p.output.splitlines():
                if line.startswith("package:"):
                    try:
                        thisinfo['id'] = re.match(name_pat, line).group(1)
                        thisinfo['versioncode'] = int(re.match(vercode_pat, line).group(1))
                        thisinfo['version'] = re.match(vername_pat, line).group(1)
                    except Exception, e:
                        logging.error("Package matching failed: " + str(e))
                        logging.info("Line was: " + line)
                        sys.exit(1)
                elif line.startswith("application:"):
                    thisinfo['name'] = re.match(label_pat, line).group(1)
                    # Keep path to non-dpi icon in case we need it
                    match = re.match(icon_pat_nodpi, line)
                    if match:
                        thisinfo['icons_src']['-1'] = match.group(1)
                elif line.startswith("launchable-activity:"):
                    # Only use launchable-activity as fallback to application
                    # NOTE(review): if aapt emits launchable-activity before
                    # any application: line, thisinfo['name'] is unset here
                    # and this raises KeyError - verify against aapt output.
                    if not thisinfo['name']:
                        thisinfo['name'] = re.match(label_pat, line).group(1)
                    if '-1' not in thisinfo['icons_src']:
                        match = re.match(icon_pat_nodpi, line)
                        if match:
                            thisinfo['icons_src']['-1'] = match.group(1)
                elif line.startswith("application-icon-"):
                    match = re.match(icon_pat, line)
                    if match:
                        density = match.group(1)
                        path = match.group(2)
                        thisinfo['icons_src'][density] = path
                elif line.startswith("sdkVersion:"):
                    m = re.match(sdkversion_pat, line)
                    if m is None:
                        logging.error(line.replace('sdkVersion:', '')
                                      + ' is not a valid minSdkVersion!')
                    else:
                        thisinfo['sdkversion'] = m.group(1)
                elif line.startswith("maxSdkVersion:"):
                    thisinfo['maxsdkversion'] = re.match(sdkversion_pat, line).group(1)
                elif line.startswith("native-code:"):
                    thisinfo['nativecode'] = []
                    # Each arch is quoted; strip the surrounding quotes.
                    for arch in line[13:].split(' '):
                        thisinfo['nativecode'].append(arch[1:-1])
                elif line.startswith("uses-permission:"):
                    perm = re.match(string_pat, line).group(1)
                    if perm.startswith("android.permission."):
                        perm = perm[19:]
                    thisinfo['permissions'].add(perm)
                elif line.startswith("uses-feature:"):
                    perm = re.match(string_pat, line).group(1)
                    # Filter out this, it's only added with the latest SDK tools and
                    # causes problems for lots of apps.
                    if perm != "android.hardware.screen.portrait" \
                            and perm != "android.hardware.screen.landscape":
                        if perm.startswith("android.feature."):
                            perm = perm[16:]
                        thisinfo['features'].add(perm)

            if 'sdkversion' not in thisinfo:
                logging.warn("No SDK version information found in {0}".format(apkfile))
                # NOTE(review): int 0 here, but the parsed value above is a
                # string - consumers comparing types may care; confirm.
                thisinfo['sdkversion'] = 0

            # Check for debuggable apks...
            if common.isApkDebuggable(apkfile, config):
                logging.warn('{0} is set to android:debuggable="true"'.format(apkfile))

            # Calculate the sha256...
            sha = hashlib.sha256()
            with open(apkfile, 'rb') as f:
                while True:
                    t = f.read(1024)
                    if len(t) == 0:
                        break
                    sha.update(t)
                thisinfo['sha256'] = sha.hexdigest()

            # Get the signature (or md5 of, to be precise)...
            thisinfo['sig'] = getsig(os.path.join(os.getcwd(), apkfile))
            if not thisinfo['sig']:
                logging.critical("Failed to get apk signature")
                sys.exit(1)

            apk = zipfile.ZipFile(apkfile, 'r')

            # Icon files are named <id>.<versioncode>.png in each icon dir.
            iconfilename = "%s.%s.png" % (
                thisinfo['id'],
                thisinfo['versioncode'])

            # Extract the icon file...
            densities = get_densities()
            empty_densities = []
            for density in densities:
                if density not in thisinfo['icons_src']:
                    empty_densities.append(density)
                    continue
                iconsrc = thisinfo['icons_src'][density]
                icon_dir = get_icon_dir(repodir, density)
                icondest = os.path.join(icon_dir, iconfilename)

                try:
                    iconfile = open(icondest, 'wb')
                    iconfile.write(apk.read(iconsrc))
                    iconfile.close()
                    thisinfo['icons'][density] = iconfilename

                except:
                    logging.warn("Error retrieving icon file")
                    del thisinfo['icons'][density]
                    del thisinfo['icons_src'][density]
                    empty_densities.append(density)

            # '-1' is the density-independent icon from the manifest; use it
            # to fill in the best matching density we don't already have.
            if '-1' in thisinfo['icons_src']:
                iconsrc = thisinfo['icons_src']['-1']
                iconpath = os.path.join(
                    get_icon_dir(repodir, None), iconfilename)
                iconfile = open(iconpath, 'wb')
                iconfile.write(apk.read(iconsrc))
                iconfile.close()
                try:
                    im = Image.open(iconpath)
                    dpi = px_to_dpi(im.size[0])
                    for density in densities:
                        if density in thisinfo['icons']:
                            break
                        if density == densities[-1] or dpi >= int(density):
                            thisinfo['icons'][density] = iconfilename
                            shutil.move(iconpath,
                                        os.path.join(get_icon_dir(repodir, density), iconfilename))
                            empty_densities.remove(density)
                            break
                except Exception, e:
                    logging.warn("Failed reading {0} - {1}".format(iconpath, e))

            if thisinfo['icons']:
                thisinfo['icon'] = iconfilename

            apk.close()

            # First try resizing down to not lose quality
            last_density = None
            for density in densities:
                if density not in empty_densities:
                    last_density = density
                    continue
                if last_density is None:
                    continue
                logging.debug("Density %s not available, resizing down from %s"
                              % (density, last_density))

                last_iconpath = os.path.join(
                    get_icon_dir(repodir, last_density), iconfilename)
                iconpath = os.path.join(
                    get_icon_dir(repodir, density), iconfilename)
                try:
                    im = Image.open(last_iconpath)
                except:
                    logging.warn("Invalid image file at %s" % last_iconpath)
                    continue

                size = dpi_to_px(density)

                im.thumbnail((size, size), Image.ANTIALIAS)
                im.save(iconpath, "PNG")
                empty_densities.remove(density)

            # Then just copy from the highest resolution available
            last_density = None
            for density in reversed(densities):
                if density not in empty_densities:
                    last_density = density
                    continue
                if last_density is None:
                    continue
                logging.debug("Density %s not available, copying from lower density %s"
                              % (density, last_density))

                shutil.copyfile(
                    os.path.join(get_icon_dir(repodir, last_density), iconfilename),
                    os.path.join(get_icon_dir(repodir, density), iconfilename))

                empty_densities.remove(density)

            # Finally make sure nothing exceeds its density's size limit.
            for density in densities:
                icon_dir = get_icon_dir(repodir, density)
                icondest = os.path.join(icon_dir, iconfilename)
                resize_icon(icondest, density)

            # Copy from icons-mdpi to icons since mdpi is the baseline density
            baseline = os.path.join(get_icon_dir(repodir, '160'), iconfilename)
            if os.path.isfile(baseline):
                shutil.copyfile(baseline,
                                os.path.join(get_icon_dir(repodir, None), iconfilename))

            # Record in known apks, getting the added date at the same time..
            added = knownapks.recordapk(thisinfo['apkname'], thisinfo['id'])
            if added:
                thisinfo['added'] = added

            apkcache[apkfilename] = thisinfo
            cachechanged = True

        apks.append(thisinfo)

    return apks, cachechanged
|
2012-09-03 12:48:18 +02:00
|
|
|
|
2012-02-26 15:18:58 +01:00
|
|
|
|
2013-10-31 23:16:05 +01:00
|
|
|
# Fingerprint of the repo's signing key; presumably assigned elsewhere in
# this file (outside this chunk) before index generation - TODO confirm.
repo_pubkey_fingerprint = None
|
|
|
|
|
def make_index(apps, sortedids, apks, repodir, archive, categories):
    """Make a repo index.

    Builds index.xml in repodir from the app metadata and apk info, signs
    it into index.jar when a signing key is configured, copies the repo
    icon into place and writes categories.txt.

    :param apps: fully populated apps list
    :param sortedids: app ids, sorted in the order they should appear
    :param apks: full populated apks list
    :param repodir: the repo directory
    :param archive: True if this is the archive repo, False if it's the
                    main one.
    :param categories: list of categories
    """

    doc = Document()

    # Append a child element containing a plain text node.
    def addElement(name, value, doc, parent):
        el = doc.createElement(name)
        el.appendChild(doc.createTextNode(value))
        parent.appendChild(el)

    # Append a child element containing a CDATA section.
    def addElementCDATA(name, value, doc, parent):
        el = doc.createElement(name)
        el.appendChild(doc.createCDATASection(value))
        parent.appendChild(el)

    root = doc.createElement("fdroid")
    doc.appendChild(root)

    repoel = doc.createElement("repo")

    if archive:
        repoel.setAttribute("name", config['archive_name'])
        if config['repo_maxage'] != 0:
            repoel.setAttribute("maxage", str(config['repo_maxage']))
        repoel.setAttribute("icon", os.path.basename(config['archive_icon']))
        repoel.setAttribute("url", config['archive_url'])
        addElement('description', config['archive_description'], doc, repoel)

    else:
        repoel.setAttribute("name", config['repo_name'])
        if config['repo_maxage'] != 0:
            repoel.setAttribute("maxage", str(config['repo_maxage']))
        repoel.setAttribute("icon", os.path.basename(config['repo_icon']))
        repoel.setAttribute("url", config['repo_url'])
        addElement('description', config['repo_description'], doc, repoel)

    repoel.setAttribute("version", "12")
    repoel.setAttribute("timestamp", str(int(time.time())))

    if 'repo_keyalias' in config:

        # Generate a certificate fingerprint the same way keytool does it
        # (but with slightly different formatting)
        def cert_fingerprint(data):
            digest = hashlib.sha256(data).digest()
            ret = []
            ret.append(' '.join("%02X" % ord(b) for b in digest))
            return " ".join(ret)

        # Export the signing certificate from the keystore, record its
        # fingerprint, and return it hex-encoded for the "pubkey" attribute.
        def extract_pubkey():
            p = FDroidPopen(['keytool', '-exportcert',
                             '-alias', config['repo_keyalias'],
                             '-keystore', config['keystore'],
                             '-storepass:file', config['keystorepassfile']]
                            + config['smartcardoptions'], output=False)
            if p.returncode != 0:
                msg = "Failed to get repo pubkey!"
                if config['keystore'] == 'NONE':
                    msg += ' Is your crypto smartcard plugged in?'
                logging.critical(msg)
                sys.exit(1)
            global repo_pubkey_fingerprint
            repo_pubkey_fingerprint = cert_fingerprint(p.output)
            return "".join("%02x" % ord(b) for b in p.output)

        repoel.setAttribute("pubkey", extract_pubkey())

    root.appendChild(repoel)

    for appid in sortedids:
        app = apps[appid]

        if app['Disabled'] is not None:
            continue

        # Get a list of the apks for this app...
        apklist = []
        for apk in apks:
            if apk['id'] == appid:
                apklist.append(apk)

        if len(apklist) == 0:
            continue

        apel = doc.createElement("application")
        apel.setAttribute("id", app['id'])
        root.appendChild(apel)

        addElement('id', app['id'], doc, apel)
        if 'added' in app:
            addElement('added', time.strftime('%Y-%m-%d', app['added']), doc, apel)
        if 'lastupdated' in app:
            addElement('lastupdated', time.strftime('%Y-%m-%d', app['lastupdated']), doc, apel)
        addElement('name', app['Name'], doc, apel)
        addElement('summary', app['Summary'], doc, apel)
        if app['icon']:
            addElement('icon', app['icon'], doc, apel)

        # Resolve "fdroid.app:" style links in descriptions to app names.
        def linkres(appid):
            if appid in apps:
                return ("fdroid.app:" + appid, apps[appid]['Name'])
            raise MetaDataException("Cannot resolve app id " + appid)

        addElement('desc',
                   metadata.description_html(app['Description'], linkres),
                   doc, apel)
        addElement('license', app['License'], doc, apel)
        if 'Categories' in app:
            addElement('categories', ','.join(app["Categories"]), doc, apel)
            # We put the first (primary) category in LAST, which will have
            # the desired effect of making clients that only understand one
            # category see that one.
            addElement('category', app["Categories"][0], doc, apel)
        addElement('web', app['Web Site'], doc, apel)
        addElement('source', app['Source Code'], doc, apel)
        addElement('tracker', app['Issue Tracker'], doc, apel)
        if app['Donate']:
            addElement('donate', app['Donate'], doc, apel)
        if app['Bitcoin']:
            addElement('bitcoin', app['Bitcoin'], doc, apel)
        if app['Litecoin']:
            addElement('litecoin', app['Litecoin'], doc, apel)
        if app['Dogecoin']:
            addElement('dogecoin', app['Dogecoin'], doc, apel)
        if app['FlattrID']:
            addElement('flattr', app['FlattrID'], doc, apel)

        # These elements actually refer to the current version (i.e. which
        # one is recommended. They are historically mis-named, and need
        # changing, but stay like this for now to support existing clients.
        addElement('marketversion', app['Current Version'], doc, apel)
        addElement('marketvercode', app['Current Version Code'], doc, apel)

        if app['AntiFeatures']:
            af = app['AntiFeatures'].split(',')
            # TODO: Temporarily not including UpstreamNonFree in the index,
            # because current F-Droid clients do not understand it, and also
            # look ugly when they encounter an unknown antifeature. This
            # filtering can be removed in time...
            if 'UpstreamNonFree' in af:
                af.remove('UpstreamNonFree')
            if af:
                addElement('antifeatures', ','.join(af), doc, apel)
        if app['Provides']:
            pv = app['Provides'].split(',')
            addElement('provides', ','.join(pv), doc, apel)
        if app['Requires Root']:
            addElement('requirements', 'root', doc, apel)

        # Sort the apk list into version order, just so the web site
        # doesn't have to do any work by default...
        apklist = sorted(apklist, key=lambda apk: apk['versioncode'], reverse=True)

        # Check for duplicates - they will make the client unhappy...
        for i in range(len(apklist) - 1):
            if apklist[i]['versioncode'] == apklist[i + 1]['versioncode']:
                logging.critical("duplicate versions: '%s' - '%s'" % (
                    apklist[i]['apkname'], apklist[i + 1]['apkname']))
                sys.exit(1)

        current_version_code = 0
        current_version_file = None
        for apk in apklist:
            # find the APK for the "Current Version"
            if current_version_code < apk['versioncode']:
                current_version_code = apk['versioncode']
            if current_version_code < int(app['Current Version Code']):
                current_version_file = apk['apkname']

            apkel = doc.createElement("package")
            apel.appendChild(apkel)
            addElement('version', apk['version'], doc, apkel)
            addElement('versioncode', str(apk['versioncode']), doc, apkel)
            addElement('apkname', apk['apkname'], doc, apkel)
            if 'srcname' in apk:
                addElement('srcname', apk['srcname'], doc, apkel)
            for hash_type in ['sha256']:
                if hash_type not in apk:
                    continue
                hashel = doc.createElement("hash")
                hashel.setAttribute("type", hash_type)
                hashel.appendChild(doc.createTextNode(apk[hash_type]))
                apkel.appendChild(hashel)
            addElement('sig', apk['sig'], doc, apkel)
            addElement('size', str(apk['size']), doc, apkel)
            addElement('sdkver', str(apk['sdkversion']), doc, apkel)
            if 'maxsdkversion' in apk:
                addElement('maxsdkver', str(apk['maxsdkversion']), doc, apkel)
            if 'added' in apk:
                addElement('added', time.strftime('%Y-%m-%d', apk['added']), doc, apkel)
            if app['Requires Root']:
                if 'ACCESS_SUPERUSER' not in apk['permissions']:
                    apk['permissions'].add('ACCESS_SUPERUSER')

            if len(apk['permissions']) > 0:
                addElement('permissions', ','.join(apk['permissions']), doc, apkel)
            if 'nativecode' in apk and len(apk['nativecode']) > 0:
                addElement('nativecode', ','.join(apk['nativecode']), doc, apkel)
            if len(apk['features']) > 0:
                addElement('features', ','.join(apk['features']), doc, apkel)

        # Maintain a constant-name symlink to the "Current Version" apk,
        # only for the main repo.
        if current_version_file is not None \
                and config['make_current_version_link'] \
                and repodir == 'repo':
            apklinkname = app[config['current_version_name_source']] + '.apk'
            # os.path.exists() returns False for a dangling symlink, so
            # check islink() too, otherwise os.symlink() below raises
            # OSError when a stale link is left behind.
            if os.path.islink(apklinkname) or os.path.exists(apklinkname):
                os.remove(apklinkname)
            os.symlink(os.path.join(repodir, current_version_file), apklinkname)

    # Write the index, closing the file even on error.
    with open(os.path.join(repodir, 'index.xml'), 'wb') as of:
        if options.pretty:
            output = doc.toprettyxml()
        else:
            output = doc.toxml()
        of.write(output)

    if 'repo_keyalias' in config:

        logging.info("Creating signed index with this key (SHA256):")
        logging.info("%s" % repo_pubkey_fingerprint)

        # Create a jar of the index...
        p = FDroidPopen(['jar', 'cf', 'index.jar', 'index.xml'], cwd=repodir)
        if p.returncode != 0:
            logging.critical("Failed to create jar file")
            sys.exit(1)

        # Sign the index...
        args = ['jarsigner', '-keystore', config['keystore'],
                '-storepass:file', config['keystorepassfile'],
                '-digestalg', 'SHA1', '-sigalg', 'MD5withRSA',
                os.path.join(repodir, 'index.jar'), config['repo_keyalias']]
        if config['keystore'] == 'NONE':
            args += config['smartcardoptions']
        else:  # smardcards never use -keypass
            args += ['-keypass:file', config['keypassfile']]
        p = FDroidPopen(args)
        # TODO keypass should be sent via stdin
        if p.returncode != 0:
            logging.critical("Failed to sign index")
            sys.exit(1)

    # Copy the repo icon into the repo directory...
    icon_dir = os.path.join(repodir, 'icons')
    iconfilename = os.path.join(icon_dir, os.path.basename(config['repo_icon']))
    shutil.copyfile(config['repo_icon'], iconfilename)

    # Write a category list in the repo to allow quick access...
    catdata = ''
    for cat in categories:
        catdata += cat + '\n'
    with open(os.path.join(repodir, 'categories.txt'), 'w') as f:
        f.write(catdata)
def archive_old_apks(apps, apks, archapks, repodir, archivedir, defaultkeepversions):
    """Move old versions of each app from the main repo into the archive.

    For every app, any apks beyond the number of versions to keep are
    moved (together with their source tarball and its GPG signature, if
    present) from repodir to archivedir, removed from `apks` and appended
    to `archapks`. Both lists are modified in place.

    :param apps: dict of appid -> app metadata
    :param apks: apk info dicts for the main repo (modified in place)
    :param archapks: apk info dicts for the archive repo (modified in place)
    :param repodir: the main repo directory
    :param archivedir: the archive repo directory
    :param defaultkeepversions: number of versions to keep for apps with
        no explicit 'Archive Policy'
    """

    # items() rather than the Python-2-only iteritems(), which was
    # removed in Python 3; behavior is identical here.
    for appid, app in apps.items():

        # Get a list of the apks for this app...
        apklist = []
        for apk in apks:
            if apk['id'] == appid:
                apklist.append(apk)

        # Sort the apk list into version order...
        apklist = sorted(apklist, key=lambda apk: apk['versioncode'], reverse=True)

        if app['Archive Policy']:
            # Policy strings have the form "<N> versions"
            keepversions = int(app['Archive Policy'][:-9])
        else:
            keepversions = defaultkeepversions

        if len(apklist) > keepversions:
            for apk in apklist[keepversions:]:
                logging.info("Moving " + apk['apkname'] + " to archive")
                shutil.move(os.path.join(repodir, apk['apkname']),
                            os.path.join(archivedir, apk['apkname']))
                if 'srcname' in apk:
                    shutil.move(os.path.join(repodir, apk['srcname']),
                                os.path.join(archivedir, apk['srcname']))
                    # Move GPG signature too...
                    sigfile = apk['srcname'] + '.asc'
                    sigsrc = os.path.join(repodir, sigfile)
                    if os.path.exists(sigsrc):
                        shutil.move(sigsrc, os.path.join(archivedir, sigfile))

                archapks.append(apk)
                apks.remove(apk)
# Shared module state, populated by main(): `config` from
# common.read_config() and `options` from the parsed command line.
config = None
options = None
def main():
    """Entry point for `fdroid update`.

    Parses the command line, scans the repo (and the archive, if enabled)
    for apks, reconciles them with the metadata, optionally archives old
    versions, writes the index files and updates stats and the wiki.
    """

    global config, options

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-c", "--create-metadata", action="store_true", default=False,
                      help="Create skeleton metadata files that are missing")
    parser.add_option("--delete-unknown", action="store_true", default=False,
                      help="Delete APKs without metadata from the repo")
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q", "--quiet", action="store_true", default=False,
                      help="Restrict output to warnings and errors")
    parser.add_option("-b", "--buildreport", action="store_true", default=False,
                      help="Report on build data status")
    parser.add_option("-i", "--interactive", default=False, action="store_true",
                      help="Interactively ask about things that need updating.")
    parser.add_option("-I", "--icons", action="store_true", default=False,
                      help="Resize all the icons exceeding the max pixel size and exit")
    parser.add_option("-e", "--editor", default="/etc/alternatives/editor",
                      help="Specify editor to use in interactive mode. Default " +
                      "is /etc/alternatives/editor")
    parser.add_option("-w", "--wiki", default=False, action="store_true",
                      help="Update the wiki")
    parser.add_option("", "--pretty", action="store_true", default=False,
                      help="Produce human-readable index.xml")
    parser.add_option("--clean", action="store_true", default=False,
                      help="Clean update - don't use caches, reprocess all apks")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    repodirs = ['repo']
    if config['archive_older'] != 0:
        repodirs.append('archive')
        if not os.path.exists('archive'):
            os.mkdir('archive')

    if options.icons:
        resize_all_icons(repodirs)
        sys.exit(0)

    # check that icons exist now, rather than fail at the end of `fdroid update`
    for k in ['repo_icon', 'archive_icon']:
        if k in config:
            if not os.path.exists(config[k]):
                logging.critical(k + ' "' + config[k] + '" does not exist! Correct it in config.py.')
                sys.exit(1)

    # Get all apps...
    apps = metadata.read_metadata()

    # Generate a list of categories...
    categories = set()
    for app in apps.values():
        categories.update(app['Categories'])

    # Read known apks data (will be updated and written back when we've finished)
    knownapks = common.KnownApks()

    # Gather information about all the apk files in the repo directory, using
    # cached data if possible.
    apkcachefile = os.path.join('tmp', 'apkcache')
    if not options.clean and os.path.exists(apkcachefile):
        with open(apkcachefile, 'rb') as cf:
            apkcache = pickle.load(cf)
    else:
        apkcache = {}
    cachechanged = False

    delete_disabled_builds(apps, apkcache, repodirs)

    # Scan all apks in the main repo
    apks, cc = scan_apks(apps, apkcache, repodirs[0], knownapks)
    if cc:
        cachechanged = True

    # Generate warnings for apk's with no metadata (or create skeleton
    # metadata files, if requested on the command line)
    newmetadata = False
    for apk in apks:
        if apk['id'] not in apps:
            if options.create_metadata:
                if 'name' not in apk:
                    logging.error(apk['id'] + ' does not have a name! Skipping...')
                    continue
                # `with` guarantees the skeleton file is closed even on error
                with open(os.path.join('metadata', apk['id'] + '.txt'), 'w') as f:
                    f.write("License:Unknown\n")
                    f.write("Web Site:\n")
                    f.write("Source Code:\n")
                    f.write("Issue Tracker:\n")
                    f.write("Summary:" + apk['name'] + "\n")
                    f.write("Description:\n")
                    f.write(apk['name'] + "\n")
                    f.write(".\n")
                logging.info("Generated skeleton metadata for " + apk['id'])
                newmetadata = True
            else:
                msg = apk['apkname'] + " (" + apk['id'] + ") has no metadata!"
                if options.delete_unknown:
                    logging.warn(msg + "\n\tdeleting: repo/" + apk['apkname'])
                    rmf = os.path.join(repodirs[0], apk['apkname'])
                    if not os.path.exists(rmf):
                        logging.error("Could not find {0} to remove it".format(rmf))
                    else:
                        os.remove(rmf)
                else:
                    logging.warn(msg + "\n\tUse `fdroid update -c` to create it.")

    # update the metadata with the newly created ones included
    if newmetadata:
        apps = metadata.read_metadata()

    # Scan the archive repo for apks as well
    if len(repodirs) > 1:
        archapks, cc = scan_apks(apps, apkcache, repodirs[1], knownapks)
        if cc:
            cachechanged = True
    else:
        archapks = []

    # Some information from the apks needs to be applied up to the application
    # level. When doing this, we use the info from the most recent version's apk.
    # We deal with figuring out when the app was added and last updated at the
    # same time.
    for appid, app in apps.items():
        bestver = 0
        added = None
        lastupdated = None
        for apk in apks + archapks:
            if apk['id'] == appid:
                if apk['versioncode'] > bestver:
                    bestver = apk['versioncode']
                    bestapk = apk

                if 'added' in apk:
                    if not added or apk['added'] < added:
                        added = apk['added']
                    if not lastupdated or apk['added'] > lastupdated:
                        lastupdated = apk['added']

        if added:
            app['added'] = added
        else:
            logging.warn("Don't know when " + appid + " was added")
        if lastupdated:
            app['lastupdated'] = lastupdated
        else:
            logging.warn("Don't know when " + appid + " was last updated")

        if bestver == 0:
            if app['Name'] is None:
                app['Name'] = app['Auto Name'] or appid
            app['icon'] = None
            logging.warn("Application " + appid + " has no packages")
        else:
            if app['Name'] is None:
                app['Name'] = bestapk['name']
            app['icon'] = bestapk['icon'] if 'icon' in bestapk else None

    # Sort the app list by name, then the web site doesn't have to by default.
    # (we had to wait until we'd scanned the apks to do this, because mostly the
    # name comes from there!)
    sortedids = sorted(apps.keys(), key=lambda appid: apps[appid]['Name'].upper())

    if len(repodirs) > 1:
        archive_old_apks(apps, apks, archapks, repodirs[0], repodirs[1], config['archive_older'])

    # Make the index for the main repo...
    make_index(apps, sortedids, apks, repodirs[0], False, categories)

    # If there's an archive repo, make the index for it. We already scanned it
    # earlier on.
    if len(repodirs) > 1:
        make_index(apps, sortedids, archapks, repodirs[1], True, categories)

    if config['update_stats']:

        # Update known apks info...
        knownapks.writeifchanged()

        # Generate latest apps data for widget
        if os.path.exists(os.path.join('stats', 'latestapps.txt')):
            data = ''
            # open() rather than the deprecated py2-only file() builtin
            with open(os.path.join('stats', 'latestapps.txt')) as latest:
                for line in latest:
                    appid = line.rstrip()
                    data += appid + "\t"
                    app = apps[appid]
                    data += app['Name'] + "\t"
                    if app['icon'] is not None:
                        data += app['icon'] + "\t"
                    data += app['License'] + "\n"
            with open(os.path.join(repodirs[0], 'latestapps.dat'), 'w') as f:
                f.write(data)

    if cachechanged:
        with open(apkcachefile, 'wb') as cf:
            pickle.dump(apkcache, cf)

    # Update the wiki...
    if options.wiki:
        update_wiki(apps, sortedids, apks + archapks)

    logging.info("Finished.")
# Script entry point: run main() only when executed directly, not on import.
if __name__ == "__main__":
    main()