2011-01-26 17:26:51 +01:00
|
|
|
# -*- coding: utf-8 -*-
|
2010-11-11 23:34:39 +01:00
|
|
|
#
|
2011-02-17 21:16:26 +01:00
|
|
|
# common.py - part of the FDroid server tools
|
2013-03-18 10:17:23 +01:00
|
|
|
# Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com
|
2013-09-28 21:06:33 +02:00
|
|
|
# Copyright (C) 2013 Daniel Martí <mvdan@mvdan.cc>
|
2010-11-11 23:34:39 +01:00
|
|
|
#
|
|
|
|
# This program is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU Affero General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU Affero General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU Affero General Public License
|
|
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
|
2011-03-01 01:11:07 +01:00
|
|
|
import glob, os, sys, re
|
2012-01-04 22:37:11 +01:00
|
|
|
import shutil
|
2013-11-05 23:27:08 +01:00
|
|
|
import stat
|
2011-08-07 17:14:54 +02:00
|
|
|
import subprocess
|
2012-01-20 00:03:35 +01:00
|
|
|
import time
|
2012-01-22 15:03:56 +01:00
|
|
|
import operator
|
2012-09-17 22:49:56 +02:00
|
|
|
import cgi
|
2013-10-16 23:17:51 +02:00
|
|
|
import Queue
|
|
|
|
import threading
|
2013-10-16 22:50:07 +02:00
|
|
|
import magic
|
2011-08-07 17:14:54 +02:00
|
|
|
|
2013-11-01 12:10:57 +01:00
|
|
|
# Module-level shared state, populated lazily by read_config().
config = None
options = None

# Metadata build-flag keys whose values can only contain 'yes' or 'no'
bool_keys = (
    'submodules',
    'oldsdkloc',
    'forceversion',
    'forcevercode',
    'fixtrans',
    'fixapos',
    'novcheck',
)
|
|
|
|
|
2013-11-04 10:22:22 +01:00
|
|
|
def read_config(opts, config_file='config.py'):
|
2013-10-31 16:37:39 +01:00
|
|
|
"""Read the repository config
|
|
|
|
|
2013-11-04 10:22:22 +01:00
|
|
|
The config is read from config_file, which is in the current directory when
|
2013-10-31 16:37:39 +01:00
|
|
|
any of the repo management commands are used.
|
|
|
|
"""
|
2013-11-01 12:10:57 +01:00
|
|
|
global config, options
|
|
|
|
|
|
|
|
if config is not None:
|
|
|
|
return config
|
2013-11-04 10:22:22 +01:00
|
|
|
if not os.path.isfile(config_file):
|
2013-10-31 16:37:39 +01:00
|
|
|
print "Missing config file - is this a repo directory?"
|
|
|
|
sys.exit(2)
|
2013-11-05 23:27:08 +01:00
|
|
|
st = os.stat(config_file)
|
|
|
|
if st.st_mode & stat.S_IRWXG or st.st_mode & stat.S_IRWXO:
|
|
|
|
print("WARNING: unsafe permissions on config.py (should be 0600)!")
|
2013-11-01 12:10:57 +01:00
|
|
|
|
|
|
|
options = opts
|
2013-11-02 22:52:52 +01:00
|
|
|
if not hasattr(options, 'verbose'):
|
|
|
|
options.verbose = False
|
2013-11-01 12:10:57 +01:00
|
|
|
|
|
|
|
config = {
|
|
|
|
'build_server_always': False,
|
|
|
|
'mvn3': "mvn3",
|
|
|
|
'archive_older': 0,
|
|
|
|
'gradle': 'gradle',
|
|
|
|
'update_stats': False,
|
|
|
|
'archive_older': 0,
|
|
|
|
'max_icon_size': 72,
|
|
|
|
'stats_to_carbon': False
|
|
|
|
}
|
2013-11-12 21:00:15 +01:00
|
|
|
if options.verbose:
|
|
|
|
print "Reading %s..." % config_file
|
2013-11-04 10:22:22 +01:00
|
|
|
execfile(config_file, config)
|
2013-11-01 12:10:57 +01:00
|
|
|
return config
|
2013-10-31 16:37:39 +01:00
|
|
|
|
|
|
|
|
2013-11-08 20:44:27 +01:00
|
|
|
def getvcs(vcstype, remote, local):
    """Instantiate the VCS wrapper matching vcstype for the given checkout."""
    handlers = {
        'git': vcs_git,
        'svn': vcs_svn,
        'git-svn': vcs_gitsvn,
        'hg': vcs_hg,
        'bzr': vcs_bzr,
    }
    if vcstype in handlers:
        return handlers[vcstype](remote, local)
    if vcstype == 'srclib':
        # Srclibs always live at a fixed path under build/srclib/
        if local != 'build/srclib/' + remote:
            raise VCSException("Error: srclib paths are hard-coded!")
        return getsrclib(remote, 'build/srclib', raw=True)
    raise VCSException("Invalid vcs type " + vcstype)
|
2011-08-07 17:14:54 +02:00
|
|
|
|
2013-05-24 23:35:56 +02:00
|
|
|
def getsrclibvcs(name):
    """Return the 'Repo Type' declared in the metadata for srclib 'name'."""
    path = os.path.join('srclibs', name + ".txt")
    if os.path.exists(path):
        return parse_srclib(path)['Repo Type']
    raise VCSException("Missing srclib " + name)
|
|
|
|
|
2011-08-07 17:14:54 +02:00
|
|
|
class vcs:
    # Base class for all the VCS wrappers. Subclasses implement repotype()
    # and gotorevisionx(), plus optional operations such as gettags().

    def __init__(self, remote, local):
        # svn, git-svn and bzr may require auth
        # Credentials, when present, are embedded in the remote URL as
        # "user:password@rest-of-url" and stripped out here.
        self.username = None
        # NOTE(review): repotype() is only defined on subclasses, so this
        # base class is not usable directly.
        if self.repotype() in ('svn', 'git-svn', 'bzr'):
            if '@' in remote:
                # Exactly one '@' is expected; split credentials off the URL.
                self.username, remote = remote.split('@')
                if ':' not in self.username:
                    raise VCSException("Password required with username")
                self.username, self.password = self.username.split(':')

        self.remote = remote      # remote URL, with any credentials removed
        self.local = local        # local checkout directory
        self.refreshed = False    # set once the remote has been fetched
        self.srclib = None        # (name, path) of srclib used, or None

    # Take the local repository to a clean version of the given revision, which
    # is specified in the VCS's native format. Beforehand, the repository can
    # be dirty, or even non-existent. If the repository does already exist
    # locally, it will be updated from the origin, but only once in the
    # lifetime of the vcs object.
    # None is acceptable for 'rev' if you know you are cloning a clean copy of
    # the repo - otherwise it must specify a valid revision.
    def gotorevision(self, rev):

        # The .fdroidvcs-id file for a repo tells us what VCS type
        # and remote that directory was created from, allowing us to drop it
        # automatically if either of those things changes.
        fdpath = os.path.join(self.local, '..',
                '.fdroidvcs-' + os.path.basename(self.local))
        cdata = self.repotype() + ' ' + self.remote
        writeback = True      # whether the marker file needs (re)writing
        deleterepo = False    # whether the stale checkout must be removed
        if os.path.exists(self.local):
            if os.path.exists(fdpath):
                with open(fdpath, 'r') as f:
                    fsdata = f.read()
                if fsdata == cdata:
                    # Marker matches the current type/remote - keep the repo.
                    writeback = False
                else:
                    deleterepo = True
                    print "*** Repository details changed - deleting ***"
            else:
                deleterepo = True
                print "*** Repository details missing - deleting ***"
        if deleterepo:
            shutil.rmtree(self.local)

        # Delegate the actual clone/update/checkout to the subclass.
        self.gotorevisionx(rev)

        # If necessary, write the .fdroidvcs file.
        if writeback:
            with open(fdpath, 'w') as f:
                f.write(cdata)

    # Derived classes need to implement this. It's called once basic checking
    # has been performed.
    def gotorevisionx(self, rev):
        raise VCSException("This VCS type doesn't define gotorevisionx")

    # Initialise and update submodules
    def initsubmodules(self):
        raise VCSException('Submodules not supported for this vcs type')

    # Get a list of all known tags
    def gettags(self):
        raise VCSException('gettags not supported for this vcs type')

    # Get current commit reference (hash, revision, etc)
    def getref(self):
        raise VCSException('getref not supported for this vcs type')

    # Returns the srclib (name, path) used in setting up the current
    # revision, or None.
    def getsrclib(self):
        return self.srclib
|
|
|
|
|
2011-08-07 17:14:54 +02:00
|
|
|
class vcs_git(vcs):

    def repotype(self):
        return 'git'

    # If the local directory exists, but is somehow not a git repository, git
    # will traverse up the directory tree until it finds one that is (i.e.
    # fdroidserver) and then we'll proceed to destroy it! This is called as
    # a safety check.
    def checkrepo(self):
        proc = subprocess.Popen(['git', 'rev-parse', '--show-toplevel'],
                stdout=subprocess.PIPE, cwd=self.local)
        toplevel = proc.communicate()[0].rstrip()
        if not toplevel.endswith(self.local):
            raise VCSException('Repository mismatch')

    def gotorevisionx(self, rev):
        """Clone or refresh the checkout, then force-checkout 'rev'."""
        if os.path.exists(self.local):
            self.checkrepo()
            # Discard any working tree changes...
            if subprocess.call(['git', 'reset', '--hard'], cwd=self.local) != 0:
                raise VCSException("Git reset failed")
            # Remove untracked files now, in case they're tracked in the target
            # revision (it happens!)...
            if subprocess.call(['git', 'clean', '-dffx'], cwd=self.local) != 0:
                raise VCSException("Git clean failed")
            if not self.refreshed:
                # Get latest commits and tags from remote...
                for fetchcmd in (['git', 'fetch', 'origin'],
                                 ['git', 'fetch', '--tags', 'origin']):
                    if subprocess.call(fetchcmd, cwd=self.local) != 0:
                        raise VCSException("Git fetch failed")
                self.refreshed = True
        else:
            # Brand new checkout...
            if subprocess.call(['git', 'clone', self.remote, self.local]) != 0:
                raise VCSException("Git clone failed")
            self.checkrepo()

        # Check out the appropriate revision...
        target = str(rev if rev else 'origin/master')
        if subprocess.call(['git', 'checkout', '-f', target], cwd=self.local) != 0:
            raise VCSException("Git checkout failed")
        # Get rid of any uncontrolled files left behind...
        if subprocess.call(['git', 'clean', '-dffx'], cwd=self.local) != 0:
            raise VCSException("Git clean failed")

    def initsubmodules(self):
        """Initialise, update and clean all submodules of the checkout."""
        self.checkrepo()
        steps = (
            (['git', 'submodule', 'init'],
             "Git submodule init failed"),
            (['git', 'submodule', 'update'],
             "Git submodule update failed"),
            (['git', 'submodule', 'foreach', 'git', 'reset', '--hard'],
             "Git submodule reset failed"),
            (['git', 'submodule', 'foreach', 'git', 'clean', '-dffx'],
             "Git submodule clean failed"),
        )
        for cmd, errmsg in steps:
            if subprocess.call(cmd, cwd=self.local) != 0:
                raise VCSException(errmsg)

    def gettags(self):
        """Return every tag name known to the local repository."""
        self.checkrepo()
        proc = subprocess.Popen(['git', 'tag'],
                stdout=subprocess.PIPE, cwd=self.local)
        return proc.communicate()[0].splitlines()
|
|
|
|
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2012-01-04 22:37:11 +01:00
|
|
|
class vcs_gitsvn(vcs):
|
|
|
|
|
2012-08-13 18:59:03 +02:00
|
|
|
def repotype(self):
|
|
|
|
return 'git-svn'
|
|
|
|
|
2013-10-31 11:53:12 +01:00
|
|
|
# Damn git-svn tries to use a graphical password prompt, so we have to
|
|
|
|
# trick it into taking the password from stdin
|
|
|
|
def userargs(self):
|
|
|
|
if self.username is None:
|
|
|
|
return ('', '')
|
|
|
|
return ('echo "%s" | DISPLAY="" ' % self.password, '--username "%s"' % self.username)
|
|
|
|
|
2012-01-23 15:15:40 +01:00
|
|
|
# If the local directory exists, but is somehow not a git repository, git
|
|
|
|
# will traverse up the directory tree until it finds one that is (i.e.
|
|
|
|
# fdroidserver) and then we'll proceed to destory it! This is called as
|
|
|
|
# a safety check.
|
2012-01-08 14:43:59 +01:00
|
|
|
def checkrepo(self):
|
2012-01-08 15:16:42 +01:00
|
|
|
p = subprocess.Popen(['git', 'rev-parse', '--show-toplevel'],
|
|
|
|
stdout=subprocess.PIPE, cwd=self.local)
|
2012-01-08 14:43:59 +01:00
|
|
|
result = p.communicate()[0].rstrip()
|
2012-01-08 15:16:42 +01:00
|
|
|
if not result.endswith(self.local):
|
2012-01-08 14:43:59 +01:00
|
|
|
raise VCSException('Repository mismatch')
|
|
|
|
|
2012-08-13 18:59:03 +02:00
|
|
|
def gotorevisionx(self, rev):
|
2012-01-23 15:15:40 +01:00
|
|
|
if not os.path.exists(self.local):
|
|
|
|
# Brand new checkout...
|
2013-10-31 11:53:12 +01:00
|
|
|
gitsvn_cmd = '%sgit svn clone %s' % self.userargs()
|
|
|
|
if ';' in self.remote:
|
|
|
|
remote_split = self.remote.split(';')
|
2013-04-05 21:55:34 +02:00
|
|
|
for i in remote_split[1:]:
|
|
|
|
if i.startswith('trunk='):
|
2013-10-31 15:54:52 +01:00
|
|
|
gitsvn_cmd += ' -T %s' % i[6:]
|
2013-04-05 21:55:34 +02:00
|
|
|
elif i.startswith('tags='):
|
2013-10-31 15:54:52 +01:00
|
|
|
gitsvn_cmd += ' -t %s' % i[5:]
|
2013-05-27 15:00:35 +02:00
|
|
|
elif i.startswith('branches='):
|
2013-10-31 15:54:52 +01:00
|
|
|
gitsvn_cmd += ' -b %s' % i[9:]
|
2013-10-31 11:53:12 +01:00
|
|
|
if subprocess.call([gitsvn_cmd + " %s %s" % (remote_split[0], self.local)],
|
|
|
|
shell=True) != 0:
|
2013-04-05 21:55:34 +02:00
|
|
|
raise VCSException("Git clone failed")
|
|
|
|
else:
|
2013-10-31 11:53:12 +01:00
|
|
|
if subprocess.call([gitsvn_cmd + " %s %s" % (self.remote, self.local)],
|
|
|
|
shell=True) != 0:
|
2013-04-05 21:55:34 +02:00
|
|
|
raise VCSException("Git clone failed")
|
2012-01-23 15:15:40 +01:00
|
|
|
self.checkrepo()
|
2012-01-04 22:37:11 +01:00
|
|
|
else:
|
2012-01-23 15:15:40 +01:00
|
|
|
self.checkrepo()
|
|
|
|
# Discard any working tree changes...
|
|
|
|
if subprocess.call(['git', 'reset', '--hard'], cwd=self.local) != 0:
|
|
|
|
raise VCSException("Git reset failed")
|
|
|
|
# Remove untracked files now, in case they're tracked in the target
|
|
|
|
# revision (it happens!)...
|
2012-01-29 22:45:34 +01:00
|
|
|
if subprocess.call(['git', 'clean', '-dffx'], cwd=self.local) != 0:
|
2012-01-23 15:15:40 +01:00
|
|
|
raise VCSException("Git clean failed")
|
|
|
|
if not self.refreshed:
|
|
|
|
# Get new commits and tags from repo...
|
2013-10-31 11:53:12 +01:00
|
|
|
if subprocess.call(['%sgit svn rebase %s' % self.userargs()],
|
|
|
|
cwd=self.local, shell=True) != 0:
|
2012-01-23 15:15:40 +01:00
|
|
|
raise VCSException("Git svn rebase failed")
|
|
|
|
self.refreshed = True
|
2013-09-15 23:20:27 +02:00
|
|
|
|
|
|
|
rev = str(rev if rev else 'master')
|
2012-02-05 12:02:01 +01:00
|
|
|
if rev:
|
2013-06-03 11:20:49 +02:00
|
|
|
nospaces_rev = rev.replace(' ', '%20')
|
2013-05-28 16:25:23 +02:00
|
|
|
# Try finding a svn tag
|
2013-06-03 11:20:49 +02:00
|
|
|
p = subprocess.Popen(['git', 'checkout', 'tags/' + nospaces_rev],
|
|
|
|
cwd=self.local, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
|
|
|
out, err = p.communicate()
|
2013-05-28 16:25:23 +02:00
|
|
|
if p.returncode == 0:
|
2013-06-03 11:20:49 +02:00
|
|
|
print out
|
2013-04-05 15:43:12 +02:00
|
|
|
else:
|
2013-05-28 16:25:23 +02:00
|
|
|
# No tag found, normal svn rev translation
|
|
|
|
# Translate svn rev into git format
|
2013-11-04 10:43:28 +01:00
|
|
|
p = subprocess.Popen(['git', 'svn', 'find-rev', 'r' + rev],
|
2013-05-28 16:25:23 +02:00
|
|
|
cwd=self.local, stdout=subprocess.PIPE)
|
2013-06-03 10:55:58 +02:00
|
|
|
git_rev = p.communicate()[0].rstrip()
|
2013-11-05 19:42:29 +01:00
|
|
|
if p.returncode != 0 or not git_rev:
|
2013-05-28 16:36:52 +02:00
|
|
|
# Try a plain git checkout as a last resort
|
2013-06-03 11:20:49 +02:00
|
|
|
p = subprocess.Popen(['git', 'checkout', rev], cwd=self.local,
|
|
|
|
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
|
|
|
out, err = p.communicate()
|
|
|
|
if p.returncode == 0:
|
|
|
|
print out
|
|
|
|
else:
|
2013-05-28 16:25:23 +02:00
|
|
|
raise VCSException("No git treeish found and direct git checkout failed")
|
2013-06-03 11:20:49 +02:00
|
|
|
else:
|
|
|
|
# Check out the git rev equivalent to the svn rev
|
|
|
|
p = subprocess.Popen(['git', 'checkout', git_rev], cwd=self.local,
|
|
|
|
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
|
|
|
out, err = p.communicate()
|
|
|
|
if p.returncode == 0:
|
|
|
|
print out
|
|
|
|
else:
|
|
|
|
raise VCSException("Git svn checkout failed")
|
2012-01-23 15:15:40 +01:00
|
|
|
# Get rid of any uncontrolled files left behind...
|
2012-01-29 22:45:34 +01:00
|
|
|
if subprocess.call(['git', 'clean', '-dffx'], cwd=self.local) != 0:
|
2012-01-04 22:37:11 +01:00
|
|
|
raise VCSException("Git clean failed")
|
|
|
|
|
2013-04-05 15:43:12 +02:00
|
|
|
def gettags(self):
|
|
|
|
self.checkrepo()
|
2013-06-03 10:55:58 +02:00
|
|
|
return os.listdir(os.path.join(self.local, '.git/svn/refs/remotes/tags'))
|
2013-04-05 15:43:12 +02:00
|
|
|
|
2013-10-17 23:27:55 +02:00
|
|
|
def getref(self):
|
|
|
|
self.checkrepo()
|
|
|
|
p = subprocess.Popen(['git', 'svn', 'find-rev', 'HEAD'],
|
|
|
|
stdout=subprocess.PIPE, cwd=self.local)
|
2013-10-30 21:40:48 +01:00
|
|
|
return p.communicate()[0].strip()
|
2013-10-17 23:27:55 +02:00
|
|
|
|
2011-08-07 17:14:54 +02:00
|
|
|
class vcs_svn(vcs):

    def repotype(self):
        return 'svn'

    def userargs(self):
        """Extra svn arguments carrying credentials, when configured."""
        extra = ['--non-interactive']
        if self.username is None:
            return extra
        return ['--username', self.username,
                '--password', self.password] + extra

    def gotorevisionx(self, rev):
        """Check out (or clean and update) the working copy at 'rev'."""
        if os.path.exists(self.local):
            # Revert local edits, then delete anything svn doesn't track.
            resetcmds = (
                'svn revert -R .',
                r"svn status | awk '/\?/ {print $2}' | xargs rm -rf",
            )
            for svncommand in resetcmds:
                if subprocess.call(svncommand, cwd=self.local, shell=True) != 0:
                    raise VCSException("Svn reset ({0}) failed in {1}".format(svncommand, self.local))
            if not self.refreshed:
                if subprocess.call(['svn', 'update'] +
                        self.userargs(), cwd=self.local) != 0:
                    raise VCSException("Svn update failed")
                self.refreshed = True
        else:
            if subprocess.call(['svn', 'checkout', self.remote, self.local] +
                    self.userargs()) != 0:
                raise VCSException("Svn checkout failed")

        revargs = ['-r', rev] if rev else []
        if subprocess.call(['svn', 'update', '--force'] + revargs +
                self.userargs(), cwd=self.local) != 0:
            raise VCSException("Svn update failed")

    def getref(self):
        """Return the last-changed revision reported by 'svn info'."""
        proc = subprocess.Popen(['svn', 'info'],
                stdout=subprocess.PIPE, cwd=self.local)
        for infoline in proc.communicate()[0].splitlines():
            if infoline is not None and infoline.startswith('Last Changed Rev: '):
                return infoline[18:]
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2011-08-07 17:14:54 +02:00
|
|
|
class vcs_hg(vcs):

    def repotype(self):
        return 'hg'

    def gotorevisionx(self, rev):
        """Clone or clean/refresh the repo, then update to 'rev'.

        Falls back to the 'default' branch when rev is empty/None.
        """
        if not os.path.exists(self.local):
            if subprocess.call(['hg', 'clone', self.remote, self.local]) != 0:
                raise VCSException("Hg clone failed")
        else:
            # Delete everything hg reports as untracked (including in
            # subrepos, via -S).
            if subprocess.call('hg status -uS | xargs rm -rf',
                    cwd=self.local, shell=True) != 0:
                raise VCSException("Hg clean failed")
            if not self.refreshed:
                if subprocess.call(['hg', 'pull'],
                        cwd=self.local) != 0:
                    raise VCSException("Hg pull failed")
                self.refreshed = True

        rev = str(rev if rev else 'default')
        # (Fixed: removed the unreachable 'if not rev: return' guard -
        # rev is always non-empty after the line above.)
        if subprocess.call(['hg', 'update', '-C', rev],
                cwd=self.local) != 0:
            raise VCSException("Hg checkout failed")

    def gettags(self):
        # The first line of 'hg tags' output is deliberately skipped
        # (presumably the 'tip' pseudo-tag - TODO confirm).
        p = subprocess.Popen(['hg', 'tags', '-q'],
                stdout=subprocess.PIPE, cwd=self.local)
        return p.communicate()[0].splitlines()[1:]
|
|
|
|
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2011-08-07 17:14:54 +02:00
|
|
|
class vcs_bzr(vcs):

    def repotype(self):
        return 'bzr'

    def gotorevisionx(self, rev):
        """Branch or clean/refresh the checkout, then revert to 'rev'."""
        if os.path.exists(self.local):
            # Drop unknown and ignored files from the working tree.
            if subprocess.call(['bzr', 'clean-tree', '--force',
                    '--unknown', '--ignored'], cwd=self.local) != 0:
                raise VCSException("Bzr revert failed")
            if not self.refreshed:
                if subprocess.call(['bzr', 'pull'],
                        cwd=self.local) != 0:
                    raise VCSException("Bzr update failed")
                self.refreshed = True
        else:
            if subprocess.call(['bzr', 'branch', self.remote, self.local]) != 0:
                raise VCSException("Bzr branch failed")

        revargs = ['-r', rev] if rev else []
        if subprocess.call(['bzr', 'revert'] + revargs,
                cwd=self.local) != 0:
            raise VCSException("Bzr revert failed")

    def gettags(self):
        """Return tag names, stripped of the revision column bzr prints."""
        proc = subprocess.Popen(['bzr', 'tags'],
                stdout=subprocess.PIPE, cwd=self.local)
        tagnames = []
        for tagline in proc.communicate()[0].splitlines():
            tagnames.append(tagline.split(' ')[0].strip())
        return tagnames
|
|
|
|
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2012-01-10 19:57:07 +01:00
|
|
|
# Get the type expected for a given metadata field.
|
|
|
|
def metafieldtype(name):
    """Return the value type expected for the named metadata field.

    Unrecognised fields default to 'string'.
    """
    if name in ('Description', 'Maintainer Notes'):
        return 'multiline'
    fieldtypes = {
        'Requires Root': 'flag',
        'Build Version': 'build',
        'Build': 'buildv2',
        'Use Built': 'obsolete',
    }
    return fieldtypes.get(name, 'string')
|
2011-02-17 21:16:26 +01:00
|
|
|
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2012-01-10 19:57:07 +01:00
|
|
|
# Parse metadata for a single application.
|
|
|
|
#
|
|
|
|
# 'metafile' - the filename to read. The package id for the application comes
|
2012-02-05 12:02:01 +01:00
|
|
|
# from this filename. Pass None to get a blank entry.
|
2012-01-10 19:57:07 +01:00
|
|
|
#
|
|
|
|
# Returns a dictionary containing all the details of the application. There are
|
|
|
|
# two major kinds of information in the dictionary. Keys beginning with capital
|
|
|
|
# letters correspond directory to identically named keys in the metadata file.
|
|
|
|
# Keys beginning with lower case letters are generated in one way or another,
|
|
|
|
# and are not found verbatim in the metadata.
|
|
|
|
#
|
|
|
|
# Known keys not originating from the metadata are:
|
|
|
|
#
|
|
|
|
# 'id' - the application's package ID
|
|
|
|
# 'builds' - a list of dictionaries containing build information
|
|
|
|
# for each defined build
|
|
|
|
# 'comments' - a list of comments from the metadata file. Each is
|
|
|
|
# a tuple of the form (field, comment) where field is
|
|
|
|
# the name of the field it preceded in the metadata
|
2012-01-11 00:24:28 +01:00
|
|
|
# file. Where field is None, the comment goes at the
|
|
|
|
# end of the file. Alternatively, 'build:version' is
|
|
|
|
# for a comment before a particular build version.
|
|
|
|
# 'descriptionlines' - original lines of description as formatted in the
|
|
|
|
# metadata file.
|
2012-01-10 19:57:07 +01:00
|
|
|
#
|
2013-11-01 12:10:57 +01:00
|
|
|
def parse_metadata(metafile):
|
2010-11-11 23:34:39 +01:00
|
|
|
|
2012-01-10 19:57:07 +01:00
|
|
|
    def parse_buildline(lines):
        # Parse an old-style single-line 'Build Version' entry into a build
        # dict. 'lines' is the list of raw (possibly continued) source lines;
        # the joined value is comma-separated, with '\,' escaping a literal
        # comma: version,vercode,commit[,flag=value...]
        value = "".join(lines)
        # Split on unescaped commas only, then unescape the rest.
        parts = [p.replace("\\,", ",")
                 for p in re.split(r"(?<!\\),", value)]
        if len(parts) < 3:
            raise MetaDataException("Invalid build format: " + value + " in " + metafile.name)
        thisbuild = {}
        # Keep the raw lines so the metadata can be re-written verbatim.
        thisbuild['origlines'] = lines
        thisbuild['version'] = parts[0]
        thisbuild['vercode'] = parts[1]
        try:
            # Validate only - vercode stays a string.
            int(thisbuild['vercode'])
        except:
            raise MetaDataException("Invalid version code for build in " + metafile.name)
        if parts[2].startswith('!'):
            # For backwards compatibility, handle old-style disabling,
            # including attempting to extract the commit from the message
            thisbuild['disable'] = parts[2][1:]
            commit = 'unknown - see disabled'
            index = parts[2].rfind('at ')
            if index != -1:
                commit = parts[2][index+3:]
                # Strip a trailing ')' when the commit was written as
                # "... (at <commit>)".
                if commit.endswith(')'):
                    commit = commit[:-1]
            thisbuild['commit'] = commit
        else:
            thisbuild['commit'] = parts[2]
        # Remaining parts are free-form key=value build flags.
        for p in parts[3:]:
            pk, pv = p.split('=', 1)
            thisbuild[pk.strip()] = pv

        return thisbuild
|
2010-11-11 23:34:39 +01:00
|
|
|
|
2012-01-11 00:24:28 +01:00
|
|
|
def add_comments(key):
|
2013-11-05 19:42:29 +01:00
|
|
|
if not curcomments:
|
2013-10-27 15:06:46 +01:00
|
|
|
return
|
2012-01-11 00:24:28 +01:00
|
|
|
for comment in curcomments:
|
|
|
|
thisinfo['comments'].append((key, comment))
|
|
|
|
del curcomments[:]
|
|
|
|
|
2013-11-09 12:21:43 +01:00
|
|
|
|
2011-03-01 01:11:07 +01:00
|
|
|
thisinfo = {}
|
2012-02-05 12:02:01 +01:00
|
|
|
if metafile:
|
|
|
|
if not isinstance(metafile, file):
|
|
|
|
metafile = open(metafile, "r")
|
|
|
|
thisinfo['id'] = metafile.name[9:-4]
|
|
|
|
else:
|
|
|
|
thisinfo['id'] = None
|
2012-01-10 19:57:07 +01:00
|
|
|
|
|
|
|
# Defaults for fields that come from metadata...
|
2012-01-11 00:24:28 +01:00
|
|
|
thisinfo['Name'] = None
|
2013-06-11 00:30:30 +02:00
|
|
|
thisinfo['Auto Name'] = ''
|
2013-11-02 01:14:01 +01:00
|
|
|
thisinfo['Categories'] = 'None'
|
2012-01-11 00:24:28 +01:00
|
|
|
thisinfo['Description'] = []
|
2012-01-10 19:57:07 +01:00
|
|
|
thisinfo['Summary'] = ''
|
|
|
|
thisinfo['License'] = 'Unknown'
|
|
|
|
thisinfo['Web Site'] = ''
|
|
|
|
thisinfo['Source Code'] = ''
|
|
|
|
thisinfo['Issue Tracker'] = ''
|
|
|
|
thisinfo['Donate'] = None
|
2012-06-28 13:51:48 +02:00
|
|
|
thisinfo['FlattrID'] = None
|
2012-08-22 18:26:19 +02:00
|
|
|
thisinfo['Bitcoin'] = None
|
2013-10-13 00:03:11 +02:00
|
|
|
thisinfo['Litecoin'] = None
|
2012-01-10 19:57:07 +01:00
|
|
|
thisinfo['Disabled'] = None
|
|
|
|
thisinfo['AntiFeatures'] = None
|
2013-10-14 17:16:34 +02:00
|
|
|
thisinfo['Archive Policy'] = None
|
2013-08-08 16:47:26 +02:00
|
|
|
thisinfo['Update Check Mode'] = 'None'
|
2013-11-01 13:25:50 +01:00
|
|
|
thisinfo['Vercode Operation'] = None
|
2012-09-20 15:16:55 +02:00
|
|
|
thisinfo['Auto Update Mode'] = 'None'
|
2012-01-26 18:53:59 +01:00
|
|
|
thisinfo['Current Version'] = ''
|
|
|
|
thisinfo['Current Version Code'] = '0'
|
2012-01-10 19:57:07 +01:00
|
|
|
thisinfo['Repo Type'] = ''
|
|
|
|
thisinfo['Repo'] = ''
|
|
|
|
thisinfo['Requires Root'] = False
|
2013-05-16 21:45:25 +02:00
|
|
|
thisinfo['No Source Since'] = ''
|
2012-01-10 19:57:07 +01:00
|
|
|
|
|
|
|
# General defaults...
|
2011-03-01 01:11:07 +01:00
|
|
|
thisinfo['builds'] = []
|
2012-01-10 19:57:07 +01:00
|
|
|
thisinfo['comments'] = []
|
|
|
|
|
2012-02-05 12:02:01 +01:00
|
|
|
if metafile is None:
|
|
|
|
return thisinfo
|
|
|
|
|
2011-03-01 01:11:07 +01:00
|
|
|
mode = 0
|
2012-01-10 19:57:07 +01:00
|
|
|
buildlines = []
|
|
|
|
curcomments = []
|
2013-10-27 15:06:46 +01:00
|
|
|
curbuild = None
|
2012-01-10 19:57:07 +01:00
|
|
|
|
2011-03-01 01:11:07 +01:00
|
|
|
for line in metafile:
|
|
|
|
line = line.rstrip('\r\n')
|
2013-10-27 15:06:46 +01:00
|
|
|
if mode == 3:
|
2013-10-27 22:16:41 +01:00
|
|
|
if not any(line.startswith(s) for s in (' ', '\t')):
|
2013-10-28 09:53:18 +01:00
|
|
|
if 'commit' not in curbuild and 'disable' not in curbuild:
|
2013-10-27 15:06:46 +01:00
|
|
|
raise MetaDataException("No commit specified for {0} in {1}".format(
|
|
|
|
curbuild['version'], metafile.name))
|
|
|
|
thisinfo['builds'].append(curbuild)
|
|
|
|
add_comments('build:' + curbuild['version'])
|
|
|
|
mode = 0
|
|
|
|
else:
|
|
|
|
if line.endswith('\\'):
|
|
|
|
buildlines.append(line[:-1].lstrip())
|
|
|
|
else:
|
|
|
|
buildlines.append(line.lstrip())
|
|
|
|
bl = ''.join(buildlines)
|
|
|
|
bv = bl.split('=', 1)
|
|
|
|
if len(bv) != 2:
|
|
|
|
raise MetaDataException("Invalid build flag at {0} in {1}".
|
|
|
|
format(buildlines[0], metafile.name))
|
|
|
|
name, val = bv
|
|
|
|
if name in curbuild:
|
|
|
|
raise MetaDataException("Duplicate definition on {0} in version {1} of {2}".
|
|
|
|
format(name, curbuild['version'], metafile.name))
|
|
|
|
curbuild[name] = val.lstrip()
|
|
|
|
buildlines = []
|
|
|
|
|
2012-01-11 00:24:28 +01:00
|
|
|
if mode == 0:
|
2013-11-05 19:42:29 +01:00
|
|
|
if not line:
|
2011-03-01 01:11:07 +01:00
|
|
|
continue
|
2012-01-11 00:24:28 +01:00
|
|
|
if line.startswith("#"):
|
|
|
|
curcomments.append(line)
|
|
|
|
continue
|
2011-03-01 01:11:07 +01:00
|
|
|
index = line.find(':')
|
|
|
|
if index == -1:
|
2012-01-02 12:51:14 +01:00
|
|
|
raise MetaDataException("Invalid metadata in " + metafile.name + " at: " + line)
|
2011-03-01 01:11:07 +01:00
|
|
|
field = line[:index]
|
|
|
|
value = line[index+1:]
|
2012-01-10 19:57:07 +01:00
|
|
|
|
2012-01-26 18:53:59 +01:00
|
|
|
# Translate obsolete fields...
|
|
|
|
if field == 'Market Version':
|
|
|
|
field = 'Current Version'
|
|
|
|
if field == 'Market Version Code':
|
|
|
|
field = 'Current Version Code'
|
2012-01-10 19:57:07 +01:00
|
|
|
|
|
|
|
fieldtype = metafieldtype(field)
|
2013-10-27 15:06:46 +01:00
|
|
|
if fieldtype not in ['build', 'buildv2']:
|
2012-01-11 00:24:28 +01:00
|
|
|
add_comments(field)
|
2012-01-10 19:57:07 +01:00
|
|
|
if fieldtype == 'multiline':
|
2011-03-01 01:11:07 +01:00
|
|
|
mode = 1
|
2012-01-11 00:24:28 +01:00
|
|
|
thisinfo[field] = []
|
2013-11-05 19:42:29 +01:00
|
|
|
if value:
|
2012-01-10 19:57:07 +01:00
|
|
|
raise MetaDataException("Unexpected text on same line as " + field + " in " + metafile.name)
|
|
|
|
elif fieldtype == 'string':
|
2013-11-02 12:04:02 +01:00
|
|
|
if field == 'Category' and thisinfo['Categories'] == 'None':
|
|
|
|
thisinfo['Categories'] = value.replace(';',',')
|
2012-01-10 19:57:07 +01:00
|
|
|
thisinfo[field] = value
|
|
|
|
elif fieldtype == 'flag':
|
|
|
|
if value == 'Yes':
|
|
|
|
thisinfo[field] = True
|
|
|
|
elif value == 'No':
|
|
|
|
thisinfo[field] = False
|
|
|
|
else:
|
|
|
|
raise MetaDataException("Expected Yes or No for " + field + " in " + metafile.name)
|
|
|
|
elif fieldtype == 'build':
|
2011-03-01 01:11:07 +01:00
|
|
|
if value.endswith("\\"):
|
|
|
|
mode = 2
|
2012-01-10 19:57:07 +01:00
|
|
|
buildlines = [value[:-1]]
|
2011-03-01 01:11:07 +01:00
|
|
|
else:
|
2012-01-10 19:57:07 +01:00
|
|
|
thisinfo['builds'].append(parse_buildline([value]))
|
2012-01-11 00:24:28 +01:00
|
|
|
add_comments('build:' + thisinfo['builds'][-1]['version'])
|
2013-10-27 15:06:46 +01:00
|
|
|
elif fieldtype == 'buildv2':
|
|
|
|
curbuild = {}
|
|
|
|
vv = value.split(',')
|
|
|
|
if len(vv) != 2:
|
|
|
|
raise MetaDataException('Build should have comma-separated version and vercode, not "{0}", in {1}'.
|
|
|
|
format(value, metafile.name))
|
|
|
|
curbuild['version'] = vv[0]
|
|
|
|
curbuild['vercode'] = vv[1]
|
|
|
|
try:
|
2013-10-31 16:46:32 +01:00
|
|
|
int(curbuild['vercode'])
|
2013-10-27 15:06:46 +01:00
|
|
|
except:
|
|
|
|
raise MetaDataException("Invalid version code for build in " + metafile.name)
|
|
|
|
buildlines = []
|
|
|
|
mode = 3
|
2012-01-10 19:57:07 +01:00
|
|
|
elif fieldtype == 'obsolete':
|
|
|
|
pass # Just throw it away!
|
2011-03-01 01:11:07 +01:00
|
|
|
else:
|
2012-01-10 19:57:07 +01:00
|
|
|
raise MetaDataException("Unrecognised field type for " + field + " in " + metafile.name)
|
|
|
|
elif mode == 1: # Multiline field
|
2011-03-01 01:11:07 +01:00
|
|
|
if line == '.':
|
|
|
|
mode = 0
|
|
|
|
else:
|
2012-01-11 00:24:28 +01:00
|
|
|
thisinfo[field].append(line)
|
2012-01-10 19:57:07 +01:00
|
|
|
elif mode == 2: # Line continuation mode in Build Version
|
2011-03-01 01:11:07 +01:00
|
|
|
if line.endswith("\\"):
|
2012-01-10 19:57:07 +01:00
|
|
|
buildlines.append(line[:-1])
|
2011-03-01 01:11:07 +01:00
|
|
|
else:
|
2012-01-10 19:57:07 +01:00
|
|
|
buildlines.append(line)
|
2011-03-01 01:11:07 +01:00
|
|
|
thisinfo['builds'].append(
|
2012-01-10 19:57:07 +01:00
|
|
|
parse_buildline(buildlines))
|
2012-01-11 00:24:28 +01:00
|
|
|
add_comments('build:' + thisinfo['builds'][-1]['version'])
|
2011-03-01 01:11:07 +01:00
|
|
|
mode = 0
|
2012-01-11 00:24:28 +01:00
|
|
|
add_comments(None)
|
2012-01-10 19:57:07 +01:00
|
|
|
|
2013-11-09 12:48:39 +01:00
|
|
|
for key in bool_keys:
|
2013-11-09 12:21:43 +01:00
|
|
|
for build in thisinfo['builds']:
|
|
|
|
if key not in build:
|
|
|
|
build[key] = False
|
|
|
|
continue
|
|
|
|
if build[key] == 'yes':
|
|
|
|
build[key] = True
|
|
|
|
elif build[key] == 'no':
|
|
|
|
build[key] = False
|
|
|
|
else:
|
|
|
|
raise MetaDataException("Invalid value %s assigned to boolean build flag %s"
|
|
|
|
% (build[key], key))
|
|
|
|
|
2012-01-11 00:24:28 +01:00
|
|
|
# Mode at end of file should always be 0...
|
2011-03-01 01:11:07 +01:00
|
|
|
if mode == 1:
|
2012-01-10 19:57:07 +01:00
|
|
|
raise MetaDataException(field + " not terminated in " + metafile.name)
|
|
|
|
elif mode == 2:
|
|
|
|
raise MetaDataException("Unterminated continuation in " + metafile.name)
|
2013-10-27 15:06:46 +01:00
|
|
|
elif mode == 3:
|
|
|
|
raise MetaDataException("Unterminated build in " + metafile.name)
|
2012-01-10 19:57:07 +01:00
|
|
|
|
2013-11-05 19:42:29 +01:00
|
|
|
if not thisinfo['Description']:
|
2012-01-11 00:24:28 +01:00
|
|
|
thisinfo['Description'].append('No description available')
|
2012-01-10 19:57:07 +01:00
|
|
|
|
2013-10-14 17:16:34 +02:00
|
|
|
# Validate archive policy...
|
|
|
|
if thisinfo['Archive Policy']:
|
|
|
|
if not thisinfo['Archive Policy'].endswith(' versions'):
|
|
|
|
raise MetaDataException("Invalid archive policy")
|
|
|
|
try:
|
|
|
|
versions = int(thisinfo['Archive Policy'][:-9])
|
|
|
|
if versions < 1 or versions > 20:
|
2013-10-15 16:52:53 +02:00
|
|
|
raise MetaDataException("Silly number of versions for archive policy")
|
2013-10-14 17:16:34 +02:00
|
|
|
except:
|
2013-10-15 16:52:53 +02:00
|
|
|
raise MetaDataException("Incomprehensible number of versions for archive policy")
|
2013-10-14 17:16:34 +02:00
|
|
|
|
2012-01-10 19:57:07 +01:00
|
|
|
# Ensure all AntiFeatures are recognised...
|
|
|
|
if thisinfo['AntiFeatures']:
|
|
|
|
parts = thisinfo['AntiFeatures'].split(",")
|
|
|
|
for part in parts:
|
|
|
|
if (part != "Ads" and
|
|
|
|
part != "Tracking" and
|
|
|
|
part != "NonFreeNet" and
|
|
|
|
part != "NonFreeDep" and
|
|
|
|
part != "NonFreeAdd"):
|
|
|
|
raise MetaDataException("Unrecognised antifeature '" + part + "' in " \
|
|
|
|
+ metafile.name)
|
|
|
|
|
2011-03-01 01:11:07 +01:00
|
|
|
return thisinfo
|
2010-11-11 23:34:39 +01:00
|
|
|
|
2013-10-19 12:18:48 +02:00
|
|
|
def getvercode(build):
    """Return the version code of a build entry, always as a string."""
    return str(build['vercode'])
|
2013-10-19 12:18:48 +02:00
|
|
|
|
|
|
|
def getapkname(app, build):
    """Filename under which the apk for this app/build version is published."""
    return "{0}_{1}.apk".format(app['id'], getvercode(build))
|
|
|
|
|
|
|
|
def getsrcname(app, build):
    """Filename of the source tarball published for this app/build version."""
    return "{0}_{1}_src.tar.gz".format(app['id'], getvercode(build))
|
|
|
|
|
2012-01-11 00:24:28 +01:00
|
|
|
# Write a metadata file.
|
|
|
|
#
|
|
|
|
# 'dest' - The path to the output file
|
|
|
|
# 'app' - The app data
|
2013-11-01 12:10:57 +01:00
|
|
|
def write_metadata(dest, app):
    """Write a metadata file.

    'dest' - The path to the output file
    'app'  - The app data, as returned by parse_metadata()

    Fields are written in the canonical order, with any comments collected
    by parse_metadata() re-emitted before the field they preceded.
    """

    def writecomments(key):
        # Emit every stored comment that was attached to 'key' (None means
        # end-of-file comments).
        written = 0
        for pf, comment in app['comments']:
            if pf == key:
                mf.write(comment + '\n')
                written += 1
        if options.verbose and written > 0:
            print "...writing comments for " + (key if key else 'EOF')

    def writefield(field, value=None):
        # Write a "Field:value" line, preceded by its comments. An explicit
        # 'value' overrides app[field] (e.g. '' for multiline headers).
        writecomments(field)
        if value is None:
            value = app[field]
        mf.write(field + ':' + value + '\n')

    mf = open(dest, 'w')
    # Optional fields are only written when set.
    if app['Disabled']:
        writefield('Disabled')
    if app['AntiFeatures']:
        writefield('AntiFeatures')
    writefield('Categories')
    writefield('License')
    writefield('Web Site')
    writefield('Source Code')
    writefield('Issue Tracker')
    if app['Donate']:
        writefield('Donate')
    if app['FlattrID']:
        writefield('FlattrID')
    if app['Bitcoin']:
        writefield('Bitcoin')
    if app['Litecoin']:
        writefield('Litecoin')
    mf.write('\n')
    if app['Name']:
        writefield('Name')
    if app['Auto Name']:
        writefield('Auto Name')
    writefield('Summary')
    # Description is multiline: header line, body lines, '.' terminator.
    writefield('Description', '')
    for line in app['Description']:
        mf.write(line + '\n')
    mf.write('.\n')
    mf.write('\n')
    if app['Requires Root']:
        writefield('Requires Root', 'Yes')
        mf.write('\n')
    if app['Repo Type']:
        writefield('Repo Type')
        writefield('Repo')
        mf.write('\n')
    for build in app['builds']:
        writecomments('build:' + build['version'])
        mf.write('Build:')
        mf.write("%s,%s\n" % (
            build['version'],
            getvercode(build)))

        # This defines the preferred order for the build items - as in the
        # manual, they're roughly in order of application.
        keyorder = ['disable', 'commit', 'subdir', 'submodules', 'init',
                    'gradle', 'maven', 'oldsdkloc', 'target', 'compilesdk',
                    'update', 'encoding', 'forceversion', 'forcevercode', 'rm',
                    'fixtrans', 'fixapos', 'extlibs', 'srclibs', 'patch',
                    'prebuild', 'scanignore', 'scandelete', 'build', 'buildjni',
                    'preassemble', 'bindir', 'antcommand', 'novcheck']

        def write_builditem(key, value):
            # Emit one indented flag line; booleans are written as 'yes' or
            # omitted entirely, and '&& '-joined commands are split across
            # continuation lines for readability.
            if key not in ['version', 'vercode', 'origlines']:
                if key in bool_keys:
                    if not value:
                        return
                    value = 'yes'
                if options.verbose:
                    print "...writing {0} : {1}".format(key, value)
                outline = '    %s=' % key
                outline += '&& \\\n        '.join([s.lstrip() for s in value.split('&& ')])
                outline += '\n'
                mf.write(outline)

        # Known keys in preferred order first, then anything else.
        for key in keyorder:
            if key in build:
                write_builditem(key, build[key])
        for key, value in build.iteritems():
            if not key in keyorder:
                write_builditem(key, value)
        mf.write('\n')

    if 'Maintainer Notes' in app:
        # Another multiline field, same '.'-terminated format as Description.
        writefield('Maintainer Notes', '')
        for line in app['Maintainer Notes']:
            mf.write(line + '\n')
        mf.write('.\n')
        mf.write('\n')

    if app['Archive Policy']:
        writefield('Archive Policy')
    writefield('Auto Update Mode')
    writefield('Update Check Mode')
    if app['Vercode Operation']:
        writefield('Vercode Operation')
    if 'Update Check Data' in app:
        writefield('Update Check Data')
    if app['Current Version']:
        writefield('Current Version')
        writefield('Current Version Code')
    mf.write('\n')
    if app['No Source Since']:
        writefield('No Source Since')
        mf.write('\n')
    # Comments not attached to any field go at the very end.
    writecomments(None)
    mf.close()
|
|
|
|
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2012-01-11 00:24:28 +01:00
|
|
|
# Read all metadata. Returns a list of 'app' objects (which are dictionaries as
|
|
|
|
# returned by the parse_metadata function.
|
2013-11-01 12:10:57 +01:00
|
|
|
def read_metadata(xref=True, package=None):
    """Read all metadata, returning a list of 'app' dictionaries as produced
    by parse_metadata(). If 'package' is given, only that app's file is
    read. If 'xref' is true, every description is parsed up front so that
    cross-referencing errors surface immediately rather than on the build
    server."""

    # The directories we work in must exist before scanning.
    for workdir in ('metadata', 'tmp'):
        if not os.path.exists(workdir):
            os.makedirs(workdir)

    apps = []
    wanted = None if package is None else os.path.join('metadata', package + '.txt')
    for metafile in sorted(glob.glob(os.path.join('metadata', '*.txt'))):
        if wanted is not None and metafile != wanted:
            continue
        try:
            appinfo = parse_metadata(metafile)
        except Exception as e:
            raise MetaDataException("Problem reading metadata file %s: - %s" % (metafile, str(e)))
        apps.append(appinfo)

    if xref:
        # Parse all descriptions at load time, just to ensure cross-referencing
        # errors are caught early rather than when they hit the build server.
        def resolve_link(link):
            for app in apps:
                if app['id'] == link:
                    return ("fdroid.app:" + link, "Dummy name - don't know yet")
            raise MetaDataException("Cannot resolve app id " + link)

        for app in apps:
            try:
                description_html(app['Description'], resolve_link)
            except Exception as e:
                raise MetaDataException("Problem with description of " + app['id'] +
                        " - " + str(e))

    return apps
|
2012-01-02 12:51:14 +01:00
|
|
|
|
2012-09-17 22:49:56 +02:00
|
|
|
# Formatter for descriptions. Create an instance, and call parseline() with
|
|
|
|
# each line of the description source from the metadata. At the end, call
|
|
|
|
# end() and then text_plain, text_wiki and text_html will contain the result.
|
|
|
|
class DescriptionFormatter:
    """Formatter for metadata descriptions. Feed lines to parseline(), call
    end(), then read text_plain, text_wiki and text_html. Supports blank-line
    separated paragraphs, '*'/'#' lists, ''italic''/'''bold''' markup and
    [url]/[[app-id]] links."""

    # Parser states: outside any block, inside a paragraph, inside an
    # unordered list, inside an ordered list.
    stNONE = 0
    stPARA = 1
    stUL = 2
    stOL = 3
    # Class-level defaults; each instance gets its own values on first
    # assignment through self.
    bold = False
    ital = False
    state = stNONE
    text_plain = ''
    text_wiki = ''
    text_html = ''
    # Optional callable mapping a link target to (url, link text).
    linkResolver = None
    def __init__(self, linkres):
        self.linkResolver = linkres
    def endcur(self, notstates=None):
        # Close whatever block is currently open, unless its state is listed
        # in 'notstates' (used to keep a list open across its items).
        if notstates and self.state in notstates:
            return
        if self.state == self.stPARA:
            self.endpara()
        elif self.state == self.stUL:
            self.endul()
        elif self.state == self.stOL:
            self.endol()
    def endpara(self):
        self.text_plain += '\n'
        self.text_html += '</p>'
        self.state = self.stNONE
    def endul(self):
        self.text_html += '</ul>'
        self.state = self.stNONE
    def endol(self):
        self.text_html += '</ol>'
        self.state = self.stNONE

    def formatted(self, txt, html):
        # Apply ''italic'' and '''bold''' wiki markup; the bold/ital flags
        # persist across calls so markup may span chunks. HTML output is
        # escaped first (NOTE: cgi.escape does not escape quotes by default).
        formatted = ''
        if html:
            txt = cgi.escape(txt)
        while True:
            index = txt.find("''")
            if index == -1:
                return formatted + txt
            formatted += txt[:index]
            txt = txt[index:]
            if txt.startswith("'''"):
                if html:
                    if self.bold:
                        formatted += '</b>'
                    else:
                        formatted += '<b>'
                self.bold = not self.bold
                txt = txt[3:]
            else:
                if html:
                    if self.ital:
                        formatted += '</i>'
                    else:
                        formatted += '<i>'
                self.ital = not self.ital
                txt = txt[2:]


    def linkify(self, txt):
        # Expand [url text] and [[target]] links, returning the text rendered
        # both as plain text and as HTML.
        linkified_plain = ''
        linkified_html = ''
        while True:
            index = txt.find("[")
            if index == -1:
                return (linkified_plain + self.formatted(txt, False), linkified_html + self.formatted(txt, True))
            linkified_plain += self.formatted(txt[:index], False)
            linkified_html += self.formatted(txt[:index], True)
            txt = txt[index:]
            if txt.startswith("[["):
                # [[target]] - resolved via linkResolver when available.
                index = txt.find("]]")
                if index == -1:
                    raise MetaDataException("Unterminated ]]")
                url = txt[2:index]
                if self.linkResolver:
                    url, urltext = self.linkResolver(url)
                else:
                    urltext = url
                linkified_html += '<a href="' + url + '">' + cgi.escape(urltext) + '</a>'
                linkified_plain += urltext
                txt = txt[index+2:]
            else:
                # [url] or [url link text].
                index = txt.find("]")
                if index == -1:
                    raise MetaDataException("Unterminated ]")
                url = txt[1:index]
                index2 = url.find(' ')
                if index2 == -1:
                    urltxt = url
                else:
                    urltxt = url[index2 + 1:]
                    url = url[:index2]
                linkified_html += '<a href="' + url + '">' + cgi.escape(urltxt) + '</a>'
                linkified_plain += urltxt
                if urltxt != url:
                    linkified_plain += ' (' + url + ')'
                txt = txt[index+1:]

    def addtext(self, txt):
        # Append one chunk of inline text to the plain and HTML outputs.
        p, h = self.linkify(txt)
        self.text_plain += p
        self.text_html += h

    def parseline(self, line):
        # Process one source line. The wiki output is the source verbatim;
        # plain and HTML outputs are built from the parsed structure.
        self.text_wiki += line + '\n'
        if not line:
            self.endcur()
        elif line.startswith('*'):
            self.endcur([self.stUL])
            if self.state != self.stUL:
                self.text_html += '<ul>'
                self.state = self.stUL
            self.text_html += '<li>'
            self.text_plain += '*'
            self.addtext(line[1:])
            self.text_html += '</li>'
        elif line.startswith('#'):
            self.endcur([self.stOL])
            if self.state != self.stOL:
                self.text_html += '<ol>'
                self.state = self.stOL
            self.text_html += '<li>'
            self.text_plain += '*' #TODO: lazy - put the numbers in!
            self.addtext(line[1:])
            self.text_html += '</li>'
        else:
            # Ordinary text: start or continue a paragraph.
            self.endcur([self.stPARA])
            if self.state == self.stNONE:
                self.text_html += '<p>'
                self.state = self.stPARA
            elif self.state == self.stPARA:
                self.text_html += ' '
                self.text_plain += ' '
            self.addtext(line)

    def end(self):
        # Flush any block still open at end of input.
        self.endcur()
|
2012-01-11 00:24:28 +01:00
|
|
|
|
|
|
|
# Parse multiple lines of description as written in a metadata file, returning
|
2012-09-17 22:49:56 +02:00
|
|
|
# a single string in plain text format.
|
|
|
|
def description_plain(lines, linkres):
    """Parse multiple lines of description as written in a metadata file,
    returning a single string in plain text format."""
    fmt = DescriptionFormatter(linkres)
    for ln in lines:
        fmt.parseline(ln)
    fmt.end()
    return fmt.text_plain
|
|
|
|
|
|
|
|
# Parse multiple lines of description as written in a metadata file, returning
|
2013-11-05 17:39:45 +01:00
|
|
|
# a single string in wiki format. Used for the Maintainer Notes field as well,
|
|
|
|
# because it's the same format.
|
2012-09-17 22:49:56 +02:00
|
|
|
def description_wiki(lines):
    """Parse multiple lines of description as written in a metadata file,
    returning a single string in wiki format. Used for the Maintainer Notes
    field as well, because it's the same format."""
    fmt = DescriptionFormatter(None)
    for ln in lines:
        fmt.parseline(ln)
    fmt.end()
    return fmt.text_wiki
|
|
|
|
|
|
|
|
# Parse multiple lines of description as written in a metadata file, returning
|
|
|
|
# a single string in HTML format.
|
|
|
|
def description_html(lines,linkres):
    """Parse multiple lines of description as written in a metadata file,
    returning a single string in HTML format."""
    fmt = DescriptionFormatter(linkres)
    for ln in lines:
        fmt.parseline(ln)
    fmt.end()
    return fmt.text_html
|
|
|
|
|
2013-08-13 15:58:43 +02:00
|
|
|
def retrieve_string(xml_dir, string):
    """Resolve an Android string value.

    If 'string' is a '@string/name' reference, scan the .xml files in
    'xml_dir' for a resource with that name and resolve it (following
    chained references recursively). Otherwise 'string' is a literal and is
    returned with escaped apostrophes unescaped. Returns '' if a reference
    cannot be resolved.
    """
    if not string.startswith('@string/'):
        return string.replace("\\'","'")
    string_search = re.compile(r'.*"'+string[8:]+'".*>([^<]+?)<.*').search
    for xmlfile in glob.glob(os.path.join(xml_dir, '*.xml')):
        # Fix: use a context manager so each scanned file is closed (the
        # previous file(xmlfile) iteration leaked a handle per file).
        with open(xmlfile) as xf:
            for line in xf:
                matches = string_search(line)
                if matches:
                    # The value may itself be another @string/ reference.
                    return retrieve_string(xml_dir, matches.group(1))
    return ''
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2013-08-13 15:58:43 +02:00
|
|
|
# Return list of existing files that will be used to find the highest vercode
|
2013-08-13 12:02:48 +02:00
|
|
|
def manifest_paths(app_dir, flavour):
    """Return list of existing files that will be used to find the highest
    vercode: the standard and gradle-layout manifests, build.gradle, and the
    flavour-specific manifest when a flavour is given."""

    candidates = [
        os.path.join(app_dir, 'AndroidManifest.xml'),
        os.path.join(app_dir, 'src', 'main', 'AndroidManifest.xml'),
        os.path.join(app_dir, 'build.gradle'),
    ]

    if flavour is not None:
        candidates.append(
                os.path.join(app_dir, 'src', flavour, 'AndroidManifest.xml'))

    return [candidate for candidate in candidates if os.path.isfile(candidate)]
|
2013-08-03 16:44:14 +02:00
|
|
|
|
2013-06-11 00:30:30 +02:00
|
|
|
# Retrieve the package name
|
2013-08-13 15:58:43 +02:00
|
|
|
def fetch_real_name(app_dir, flavour):
    """Retrieve the app's display name.

    Scans the manifests for the first android:label occurring at or after
    the <application> element and resolves it via retrieve_string() against
    the project's res/values directory. Returns '' if no label is found.
    """
    app_search = re.compile(r'.*<application.*').search
    name_search = re.compile(r'.*android:label="([^"]+)".*').search
    app_found = False
    for f in manifest_paths(app_dir, flavour):
        if not f.endswith(".xml"):
            continue
        # Strip the manifest's path suffix to get back to the project root,
        # then look in res/values for string resources.
        xml_dir = os.path.join(f[:-19], 'res', 'values')
        # Fix: use a context manager so the manifest is closed (the previous
        # file(f) iteration leaked a handle per manifest).
        with open(f) as manifest:
            for line in manifest:
                if not app_found:
                    if app_search(line):
                        app_found = True
                if app_found:
                    matches = name_search(line)
                    if matches:
                        return retrieve_string(xml_dir, matches.group(1))
    return ''
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2013-08-15 12:15:44 +02:00
|
|
|
# Retrieve the version name
|
|
|
|
def version_name(original, app_dir, flavour):
    """Retrieve the version name: resolve 'original' against each manifest's
    res/values string resources, falling back to 'original' itself."""
    for manifest in manifest_paths(app_dir, flavour):
        if not manifest.endswith(".xml"):
            continue
        values_dir = os.path.join(manifest[:-19], 'res', 'values')
        resolved = retrieve_string(values_dir, original)
        if resolved:
            return resolved
    return original
|
|
|
|
|
2013-11-04 17:03:43 +01:00
|
|
|
def ant_subprojects(root_dir):
    """Return the relative paths of the ant library subprojects referenced
    from root_dir's project.properties, skipping any whose directory does
    not exist."""
    subprojects = []
    proppath = os.path.join(root_dir, 'project.properties')
    if not os.path.isfile(proppath):
        return subprojects
    with open(proppath) as propfile:
        proplines = propfile.readlines()
    for propline in proplines:
        if not propline.startswith('android.library.reference.'):
            continue
        path = propline.split('=')[1].strip()
        if not os.path.isdir(os.path.join(root_dir, path)):
            continue
        if options.verbose:
            print("Found subproject %s..." % path)
        subprojects.append(path)
    return subprojects
|
|
|
|
|
2012-03-10 13:50:34 +01:00
|
|
|
# Extract some information from the AndroidManifest.xml at the given path.
|
|
|
|
# Returns (version, vercode, package), any or all of which might be None.
|
2012-08-23 15:25:39 +02:00
|
|
|
# All values returned are strings.
|
2013-08-13 12:02:48 +02:00
|
|
|
def parse_androidmanifests(paths):
    """Extract some information from the AndroidManifest.xml / build.gradle
    files at the given paths.

    Returns (version, vercode, package), any or all of which might be None,
    taken from whichever file declares the highest version code. All values
    returned are strings.
    """

    if not paths:
        return (None, None, None)

    # Patterns for AndroidManifest.xml...
    vcsearch = re.compile(r'.*android:versionCode="([0-9]+?)".*').search
    vnsearch = re.compile(r'.*android:versionName="([^"]+?)".*').search
    psearch = re.compile(r'.*package="([^"]+)".*').search

    # ...and their gradle equivalents.
    vcsearch_g = re.compile(r'.*versionCode[ =]*([0-9]+?)[^\d].*').search
    vnsearch_g = re.compile(r'.*versionName[ =]*"([^"]+?)".*').search
    psearch_g = re.compile(r'.*packageName[ =]*"([^"]+)".*').search

    max_version = None
    max_vercode = None
    max_package = None

    for path in paths:

        gradle = path.endswith("gradle")
        version = None
        vercode = None
        # Remember package name, may be defined separately from version+vercode
        package = max_package

        # Fix: use a context manager so each file is closed (the previous
        # file(path) iteration leaked a handle per file).
        with open(path) as mfile:
            for line in mfile:
                if not package:
                    matches = psearch_g(line) if gradle else psearch(line)
                    if matches:
                        package = matches.group(1)
                if not version:
                    matches = vnsearch_g(line) if gradle else vnsearch(line)
                    if matches:
                        version = matches.group(1)
                if not vercode:
                    matches = vcsearch_g(line) if gradle else vcsearch(line)
                    if matches:
                        vercode = matches.group(1)

        # Better some package name than nothing
        if max_package is None:
            max_package = package

        # Fix: compare version codes numerically. The previous string
        # comparison made e.g. "9" > "10", so the wrong manifest could win.
        if max_vercode is None or (vercode is not None and int(vercode) > int(max_vercode)):
            max_version = version
            max_vercode = vercode
            max_package = package

    if max_version is None:
        max_version = "Unknown"

    return (max_version, max_vercode, max_package)
|
2012-03-10 13:50:34 +01:00
|
|
|
|
2012-01-02 12:51:14 +01:00
|
|
|
class BuildException(Exception):
    """Raised when building an app fails; optionally carries the captured
    stdout/stderr of the failed step."""

    def __init__(self, value, stdout = None, stderr = None):
        self.value = value
        self.stdout = stdout
        self.stderr = stderr

    def get_wikitext(self):
        # Render the error plus any captured output as mediawiki markup.
        chunks = [repr(self.value) + "\n"]
        if self.stdout:
            chunks.append("=stdout=\n")
            chunks.append("<pre>\n")
            chunks.append(str(self.stdout))
            chunks.append("</pre>\n")
        if self.stderr:
            chunks.append("=stderr=\n")
            chunks.append("<pre>\n")
            chunks.append(str(self.stderr))
            chunks.append("</pre>\n")
        return ''.join(chunks)

    def __str__(self):
        pieces = [repr(self.value)]
        if self.stdout:
            pieces.append("\n==== stdout begin ====\n%s\n==== stdout end ====" % self.stdout.strip())
        if self.stderr:
            pieces.append("\n==== stderr begin ====\n%s\n==== stderr end ====" % self.stderr.strip())
        return ''.join(pieces)
|
2012-01-02 12:51:14 +01:00
|
|
|
|
|
|
|
class VCSException(Exception):
    """Raised when a version control operation fails."""

    def __init__(self, value):
        self.value = value

    def __str__(self):
        return "%r" % (self.value,)
|
|
|
|
|
|
|
|
class MetaDataException(Exception):
    """Raised when an application metadata file is malformed."""

    def __init__(self, value):
        self.value = value

    def __str__(self):
        return "%r" % (self.value,)
|
|
|
|
|
2013-05-20 13:16:06 +02:00
|
|
|
def parse_srclib(metafile, **kw):
    """Parse a srclib description file. 'metafile' may be an open file, a
    path, or None; returns a dict of fields with defaults for anything the
    file doesn't set."""

    # Defaults for fields that come from metadata
    thisinfo = {
        'Repo Type': '',
        'Repo': '',
        'Subdir': None,
        'Prepare': None,
        'Srclibs': None,
        'Update Project': None,
    }

    if metafile and not isinstance(metafile, file):
        metafile = open(metafile, "r")

    if metafile is None:
        return thisinfo

    for line in metafile:
        line = line.rstrip('\r\n')
        # Skip blanks and comments.
        if not line or line.startswith("#"):
            continue

        colon = line.find(':')
        if colon == -1:
            raise MetaDataException("Invalid metadata in " + metafile.name + " at: " + line)
        field = line[:colon]
        value = line[colon+1:]

        if field == "Subdir":
            # Subdir is a comma-separated list of candidate directories.
            thisinfo[field] = value.split(',')
        else:
            thisinfo[field] = value

    return thisinfo
|
2013-02-27 15:25:22 +01:00
|
|
|
|
2013-05-20 13:16:06 +02:00
|
|
|
# Get the specified source library.
|
|
|
|
# Returns the path to it. Normally this is the path to be used when referencing
|
|
|
|
# it, which may be a subdirectory of the actual project. If you want the base
|
|
|
|
# directory of the project, pass 'basepath=True'.
|
2013-11-15 20:42:17 +01:00
|
|
|
def getsrclib(spec, srclib_dir, srclibpaths=[], subdir=None, basepath=False,
|
|
|
|
raw=False, prepare=True, preponly=False):
|
2013-03-01 18:59:01 +01:00
|
|
|
|
2013-05-24 23:35:56 +02:00
|
|
|
if raw:
|
|
|
|
name = spec
|
|
|
|
ref = None
|
|
|
|
else:
|
|
|
|
name, ref = spec.split('@')
|
2013-03-01 20:39:30 +01:00
|
|
|
|
2013-05-20 13:16:06 +02:00
|
|
|
srclib_path = os.path.join('srclibs', name + ".txt")
|
2013-03-18 10:17:23 +01:00
|
|
|
|
2013-05-20 13:16:06 +02:00
|
|
|
if not os.path.exists(srclib_path):
|
|
|
|
raise BuildException('srclib ' + name + ' not found.')
|
2013-03-15 16:29:29 +01:00
|
|
|
|
2013-05-20 13:16:06 +02:00
|
|
|
srclib = parse_srclib(srclib_path)
|
2013-04-07 20:39:53 +02:00
|
|
|
|
2013-05-20 13:34:03 +02:00
|
|
|
sdir = os.path.join(srclib_dir, name)
|
2013-04-23 21:11:10 +02:00
|
|
|
|
2013-06-04 23:42:18 +02:00
|
|
|
if not preponly:
|
2013-11-08 20:44:27 +01:00
|
|
|
vcs = getvcs(srclib["Repo Type"], srclib["Repo"], sdir)
|
2013-06-25 20:13:36 +02:00
|
|
|
vcs.srclib = (name, sdir)
|
2013-10-23 16:57:02 +02:00
|
|
|
if ref:
|
|
|
|
vcs.gotorevision(ref)
|
2013-06-04 23:42:18 +02:00
|
|
|
|
|
|
|
if raw:
|
|
|
|
return vcs
|
2013-05-24 23:35:56 +02:00
|
|
|
|
2013-05-20 13:16:06 +02:00
|
|
|
libdir = None
|
2013-11-15 20:42:17 +01:00
|
|
|
if subdir is not None:
|
2013-11-16 12:54:23 +01:00
|
|
|
libdir = os.path.join(sdir, subdir)
|
2013-11-15 20:42:17 +01:00
|
|
|
elif srclib["Subdir"] is not None:
|
2013-05-20 13:16:06 +02:00
|
|
|
for subdir in srclib["Subdir"]:
|
|
|
|
libdir_candidate = os.path.join(sdir, subdir)
|
|
|
|
if os.path.exists(libdir_candidate):
|
|
|
|
libdir = libdir_candidate
|
|
|
|
break
|
2013-04-28 19:52:27 +02:00
|
|
|
|
2013-05-20 13:16:06 +02:00
|
|
|
if libdir is None:
|
|
|
|
libdir = sdir
|
2013-05-03 16:53:37 +02:00
|
|
|
|
2013-11-15 20:42:17 +01:00
|
|
|
if srclib["Srclibs"]:
|
|
|
|
n=1
|
|
|
|
for lib in srclib["Srclibs"].split(','):
|
|
|
|
s_tuple = None
|
|
|
|
for t in srclibpaths:
|
|
|
|
if t[0] == lib:
|
|
|
|
s_tuple = t
|
|
|
|
break
|
|
|
|
if s_tuple is None:
|
|
|
|
raise BuildException('Missing recursive srclib %s for %s' % (
|
|
|
|
lib, name))
|
|
|
|
place_srclib(libdir, n, s_tuple[2])
|
|
|
|
n+=1
|
|
|
|
|
2013-06-04 23:42:18 +02:00
|
|
|
if prepare:
|
|
|
|
|
|
|
|
if srclib["Prepare"] is not None:
|
2013-11-08 20:44:27 +01:00
|
|
|
cmd = replace_config_vars(srclib["Prepare"])
|
2013-06-09 23:15:46 +02:00
|
|
|
|
2013-10-16 23:31:02 +02:00
|
|
|
p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=libdir)
|
2013-06-04 23:42:18 +02:00
|
|
|
if p.returncode != 0:
|
2013-10-16 23:17:51 +02:00
|
|
|
raise BuildException("Error running prepare command for srclib %s"
|
|
|
|
% name, p.stdout, p.stderr)
|
2013-06-04 23:42:18 +02:00
|
|
|
|
|
|
|
if srclib["Update Project"] == "Yes":
|
2013-08-03 18:18:10 +02:00
|
|
|
print "Updating srclib %s at path %s" % (name, libdir)
|
2013-11-15 10:47:12 +01:00
|
|
|
p = FDroidPopen([
|
|
|
|
os.path.join(config['sdk_path'], 'tools', 'android'),
|
|
|
|
'update', 'project', '-p', libdir])
|
|
|
|
# Check to see whether an error was returned without a proper exit
|
|
|
|
# code (this is the case for the 'no target set or target invalid'
|
|
|
|
# error)
|
|
|
|
if p.returncode != 0 or (p.stderr != "" and
|
|
|
|
p.stderr.startswith("Error: ")):
|
|
|
|
raise BuildException("Failed to update srclib project {0}"
|
|
|
|
.format(name), p.stdout, p.stderr)
|
2013-05-06 21:41:42 +02:00
|
|
|
|
2013-11-14 14:09:37 +01:00
|
|
|
remove_signing_keys(libdir)
|
|
|
|
|
2013-05-20 13:16:06 +02:00
|
|
|
if basepath:
|
2013-05-14 19:31:59 +02:00
|
|
|
return sdir
|
2013-05-20 13:16:06 +02:00
|
|
|
return libdir
|
2012-01-28 01:05:30 +01:00
|
|
|
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2012-01-03 22:39:30 +01:00
|
|
|
# Prepare the source code for a particular build
|
2012-01-04 22:37:11 +01:00
|
|
|
# 'vcs' - the appropriate vcs object for the application
|
|
|
|
# 'app' - the application details from the metadata
|
|
|
|
# 'build' - the build details from the metadata
|
2012-01-27 23:10:08 +01:00
|
|
|
# 'build_dir' - the path to the build directory, usually
|
|
|
|
# 'build/app.id'
|
2013-05-20 13:34:03 +02:00
|
|
|
# 'srclib_dir' - the path to the source libraries directory, usually
|
|
|
|
# 'build/srclib'
|
2012-01-27 23:10:08 +01:00
|
|
|
# 'extlib_dir' - the path to the external libraries directory, usually
|
|
|
|
# 'build/extlib'
|
2013-03-20 10:30:56 +01:00
|
|
|
# Returns the (root, srclibpaths) where:
|
|
|
|
# 'root' is the root directory, which may be the same as 'build_dir' or may
|
|
|
|
# be a subdirectory of it.
|
|
|
|
# 'srclibpaths' is information on the srclibs being used
|
2013-11-08 20:44:27 +01:00
|
|
|
def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=False):
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2012-01-03 22:39:30 +01:00
|
|
|
# Optionally, the actual app source can be in a subdirectory...
|
2013-03-13 17:56:17 +01:00
|
|
|
if 'subdir' in build:
|
2012-01-03 22:39:30 +01:00
|
|
|
root_dir = os.path.join(build_dir, build['subdir'])
|
|
|
|
else:
|
|
|
|
root_dir = build_dir
|
|
|
|
|
|
|
|
# Get a working copy of the right revision...
|
2012-01-23 15:15:40 +01:00
|
|
|
print "Getting source for revision " + build['commit']
|
|
|
|
vcs.gotorevision(build['commit'])
|
2012-01-03 22:39:30 +01:00
|
|
|
|
2012-01-13 13:28:11 +01:00
|
|
|
# Check that a subdir (if we're using one) exists. This has to happen
|
|
|
|
# after the checkout, since it might not exist elsewhere...
|
|
|
|
if not os.path.exists(root_dir):
|
|
|
|
raise BuildException('Missing subdir ' + root_dir)
|
|
|
|
|
2012-01-03 22:39:30 +01:00
|
|
|
# Initialise submodules if requred...
|
2013-11-09 12:21:43 +01:00
|
|
|
if build['submodules']:
|
2013-11-01 12:10:57 +01:00
|
|
|
if options.verbose:
|
|
|
|
print "Initialising submodules..."
|
2012-01-03 22:39:30 +01:00
|
|
|
vcs.initsubmodules()
|
|
|
|
|
2012-02-04 22:19:07 +01:00
|
|
|
# Run an init command if one is required...
|
2013-03-13 17:56:17 +01:00
|
|
|
if 'init' in build:
|
2013-11-08 20:44:27 +01:00
|
|
|
cmd = replace_config_vars(build['init'])
|
2013-11-01 12:10:57 +01:00
|
|
|
if options.verbose:
|
2013-10-09 23:36:24 +02:00
|
|
|
print "Running 'init' commands in %s" % root_dir
|
|
|
|
|
2013-11-01 12:10:57 +01:00
|
|
|
p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=root_dir)
|
2013-10-09 23:36:24 +02:00
|
|
|
if p.returncode != 0:
|
|
|
|
raise BuildException("Error running init command for %s:%s" %
|
2013-10-16 23:17:51 +02:00
|
|
|
(app['id'], build['version']), p.stdout, p.stderr)
|
2012-02-04 22:19:07 +01:00
|
|
|
|
2012-01-03 22:39:30 +01:00
|
|
|
# Generate (or update) the ant build file, build.xml...
|
2013-11-03 13:44:33 +01:00
|
|
|
updatemode = build.get('update', 'auto')
|
|
|
|
if (updatemode != 'no'
|
|
|
|
and build.get('maven', 'no') == 'no'
|
|
|
|
and build.get('gradle', 'no') == 'no'):
|
2013-11-08 20:44:27 +01:00
|
|
|
parms = [os.path.join(config['sdk_path'], 'tools', 'android'),
|
2013-11-03 13:44:33 +01:00
|
|
|
'update', 'project']
|
2013-11-04 20:19:31 +01:00
|
|
|
if 'target' in build and build['target']:
|
|
|
|
parms += ['-t', build['target']]
|
2013-11-03 13:44:33 +01:00
|
|
|
update_dirs = None
|
|
|
|
if updatemode == 'auto':
|
2013-11-04 17:03:43 +01:00
|
|
|
update_dirs = ['.'] + ant_subprojects(root_dir)
|
2013-11-03 13:44:33 +01:00
|
|
|
else:
|
|
|
|
update_dirs = [d.strip() for d in updatemode.split(';')]
|
2012-02-18 22:30:18 +01:00
|
|
|
# Force build.xml update if necessary...
|
2013-03-13 17:56:17 +01:00
|
|
|
if updatemode == 'force' or 'target' in build:
|
2012-08-26 12:58:18 +02:00
|
|
|
if updatemode == 'force':
|
|
|
|
update_dirs = ['.']
|
2012-01-04 23:58:37 +01:00
|
|
|
buildxml = os.path.join(root_dir, 'build.xml')
|
|
|
|
if os.path.exists(buildxml):
|
2012-02-09 10:06:51 +01:00
|
|
|
print 'Force-removing old build.xml'
|
2012-01-04 23:58:37 +01:00
|
|
|
os.remove(buildxml)
|
2013-10-25 12:59:55 +02:00
|
|
|
|
2012-03-07 07:46:56 +01:00
|
|
|
for d in update_dirs:
|
2013-08-21 16:20:27 +02:00
|
|
|
# Remove gen and bin dirs in libraries
|
|
|
|
# rid of them...
|
2013-10-10 15:48:39 +02:00
|
|
|
for baddir in [
|
|
|
|
'gen', 'bin', 'obj', # ant
|
|
|
|
'libs/armeabi-v7a', 'libs/armeabi', # jni
|
|
|
|
'libs/mips', 'libs/x86']:
|
2013-11-03 13:44:33 +01:00
|
|
|
badpath = os.path.join(root_dir, d, baddir)
|
2013-08-21 16:20:27 +02:00
|
|
|
if os.path.exists(badpath):
|
2013-10-10 15:48:39 +02:00
|
|
|
print "Removing '%s'" % badpath
|
2013-08-21 16:20:27 +02:00
|
|
|
shutil.rmtree(badpath)
|
2013-11-03 13:44:33 +01:00
|
|
|
dparms = parms + ['-p', d]
|
2013-11-01 12:10:57 +01:00
|
|
|
if options.verbose:
|
2013-11-03 13:44:33 +01:00
|
|
|
if d == '.':
|
|
|
|
print "Updating main project..."
|
|
|
|
else:
|
|
|
|
print "Updating subproject %s..." % d
|
2013-11-03 14:00:09 +01:00
|
|
|
p = FDroidPopen(dparms, cwd=root_dir)
|
2013-11-15 10:47:12 +01:00
|
|
|
# Check to see whether an error was returned without a proper exit
|
|
|
|
# code (this is the case for the 'no target set or target invalid'
|
|
|
|
# error)
|
|
|
|
if p.returncode != 0 or (p.stderr != "" and
|
|
|
|
p.stderr.startswith("Error: ")):
|
2013-11-03 13:54:55 +01:00
|
|
|
raise BuildException("Failed to update project at %s" % d,
|
2013-10-16 23:17:51 +02:00
|
|
|
p.stdout, p.stderr)
|
2012-01-03 22:39:30 +01:00
|
|
|
|
2013-11-14 14:09:37 +01:00
|
|
|
remove_signing_keys(build_dir)
|
2012-01-03 22:39:30 +01:00
|
|
|
|
|
|
|
# Update the local.properties file...
|
2013-11-03 11:49:11 +01:00
|
|
|
localprops = [ os.path.join(build_dir, 'local.properties') ]
|
|
|
|
if 'subdir' in build:
|
|
|
|
localprops += [ os.path.join(root_dir, 'local.properties') ]
|
|
|
|
for path in localprops:
|
|
|
|
if not os.path.isfile(path):
|
|
|
|
continue
|
|
|
|
if options.verbose:
|
|
|
|
print "Updating properties file at %s" % path
|
|
|
|
f = open(path, 'r')
|
2012-01-03 22:39:30 +01:00
|
|
|
props = f.read()
|
|
|
|
f.close()
|
2013-11-03 11:49:11 +01:00
|
|
|
props += '\n'
|
2012-01-03 22:39:30 +01:00
|
|
|
# Fix old-fashioned 'sdk-location' by copying
|
|
|
|
# from sdk.dir, if necessary...
|
2013-11-09 12:21:43 +01:00
|
|
|
if build['oldsdkloc']:
|
2012-01-03 22:39:30 +01:00
|
|
|
sdkloc = re.match(r".*^sdk.dir=(\S+)$.*", props,
|
|
|
|
re.S|re.M).group(1)
|
2013-11-03 11:49:11 +01:00
|
|
|
props += "sdk-location=%s\n" % sdkloc
|
|
|
|
else:
|
2013-11-08 20:44:27 +01:00
|
|
|
props += "sdk.dir=%s\n" % config['sdk_path']
|
|
|
|
props += "sdk-location=%s\n" % ['sdk_path']
|
2012-01-03 22:39:30 +01:00
|
|
|
# Add ndk location...
|
2013-11-08 20:44:27 +01:00
|
|
|
props += "ndk.dir=%s\n" % config['ndk_path']
|
|
|
|
props += "ndk-location=%s\n" % config['ndk_path']
|
2012-01-03 22:39:30 +01:00
|
|
|
# Add java.encoding if necessary...
|
2013-03-13 17:56:17 +01:00
|
|
|
if 'encoding' in build:
|
2013-11-03 11:49:11 +01:00
|
|
|
props += "java.encoding=%s\n" % build['encoding']
|
|
|
|
f = open(path, 'w')
|
2012-01-03 22:39:30 +01:00
|
|
|
f.write(props)
|
|
|
|
f.close()
|
|
|
|
|
2013-10-20 22:27:34 +02:00
|
|
|
flavour = None
|
2013-11-09 12:21:43 +01:00
|
|
|
if build.get('gradle', 'no') != 'no':
|
2013-10-20 22:27:34 +02:00
|
|
|
flavour = build['gradle'].split('@')[0]
|
|
|
|
if flavour in ['main', 'yes', '']:
|
|
|
|
flavour = None
|
2013-10-30 17:17:44 +01:00
|
|
|
|
|
|
|
# Remove forced debuggable flags
|
|
|
|
print "Removing debuggable flags..."
|
|
|
|
for path in manifest_paths(root_dir, flavour):
|
|
|
|
if not os.path.isfile(path):
|
|
|
|
continue
|
|
|
|
if subprocess.call(['sed','-i',
|
|
|
|
's/android:debuggable="[^"]*"//g', path]) != 0:
|
|
|
|
raise BuildException("Failed to remove debuggable flags")
|
|
|
|
|
|
|
|
# Insert version code and number into the manifest if necessary...
|
2013-11-09 12:21:43 +01:00
|
|
|
if build['forceversion']:
|
2013-10-30 17:17:44 +01:00
|
|
|
print "Changing the version name..."
|
2013-10-20 22:27:34 +02:00
|
|
|
for path in manifest_paths(root_dir, flavour):
|
2013-10-20 13:43:15 +02:00
|
|
|
if not os.path.isfile(path):
|
|
|
|
continue
|
2013-11-04 16:09:23 +01:00
|
|
|
if path.endswith('.xml'):
|
|
|
|
if subprocess.call(['sed','-i',
|
|
|
|
's/android:versionName="[^"]*"/android:versionName="' + build['version'] + '"/g',
|
|
|
|
path]) != 0:
|
|
|
|
raise BuildException("Failed to amend manifest")
|
|
|
|
elif path.endswith('.gradle'):
|
|
|
|
if subprocess.call(['sed','-i',
|
|
|
|
's/versionName[ ]*=[ ]*"[^"]*"/versionName = "' + build['version'] + '"/g',
|
|
|
|
path]) != 0:
|
|
|
|
raise BuildException("Failed to amend build.gradle")
|
2013-11-09 12:21:43 +01:00
|
|
|
if build['forcevercode']:
|
2013-10-30 17:17:44 +01:00
|
|
|
print "Changing the version code..."
|
2013-10-20 22:27:34 +02:00
|
|
|
for path in manifest_paths(root_dir, flavour):
|
2013-10-20 13:43:15 +02:00
|
|
|
if not os.path.isfile(path):
|
|
|
|
continue
|
2013-11-04 16:09:23 +01:00
|
|
|
if path.endswith('.xml'):
|
|
|
|
if subprocess.call(['sed','-i',
|
|
|
|
's/android:versionCode="[^"]*"/android:versionCode="' + build['vercode'] + '"/g',
|
|
|
|
path]) != 0:
|
|
|
|
raise BuildException("Failed to amend manifest")
|
|
|
|
elif path.endswith('.gradle'):
|
|
|
|
if subprocess.call(['sed','-i',
|
|
|
|
's/versionCode[ ]*=[ ]*[0-9]*/versionCode = ' + build['vercode'] + '/g',
|
|
|
|
path]) != 0:
|
|
|
|
raise BuildException("Failed to amend build.gradle")
|
2012-01-03 22:39:30 +01:00
|
|
|
|
2013-10-27 18:05:53 +01:00
|
|
|
# Delete unwanted files...
|
2013-03-13 17:56:17 +01:00
|
|
|
if 'rm' in build:
|
2013-03-23 14:01:03 +01:00
|
|
|
for part in build['rm'].split(';'):
|
2013-09-11 13:45:02 +02:00
|
|
|
dest = os.path.join(build_dir, part.strip())
|
2013-11-15 10:46:04 +01:00
|
|
|
rdest = os.path.realpath(dest)
|
|
|
|
if not rdest.startswith(os.path.realpath(build_dir)):
|
2013-10-27 18:16:04 +01:00
|
|
|
raise BuildException("rm for {0} is outside build root {1}".format(
|
|
|
|
os.path.realpath(build_dir),os.path.realpath(dest)))
|
2013-11-15 10:46:04 +01:00
|
|
|
if rdest == os.path.realpath(build_dir):
|
|
|
|
raise BuildException("rm removes whole build directory")
|
|
|
|
if os.path.exists(rdest):
|
|
|
|
subprocess.call('rm -rf ' + rdest, shell=True)
|
2012-01-03 22:39:30 +01:00
|
|
|
|
|
|
|
# Fix apostrophes translation files if necessary...
|
2013-11-09 12:21:43 +01:00
|
|
|
if build['fixapos']:
|
2012-01-04 22:37:11 +01:00
|
|
|
for root, dirs, files in os.walk(os.path.join(root_dir, 'res')):
|
2012-01-03 22:39:30 +01:00
|
|
|
for filename in files:
|
|
|
|
if filename.endswith('.xml'):
|
|
|
|
if subprocess.call(['sed','-i','s@' +
|
|
|
|
r"\([^\\]\)'@\1\\'" +
|
|
|
|
'@g',
|
|
|
|
os.path.join(root, filename)]) != 0:
|
|
|
|
raise BuildException("Failed to amend " + filename)
|
|
|
|
|
|
|
|
# Fix translation files if necessary...
|
2013-11-09 12:21:43 +01:00
|
|
|
if build['fixtrans']:
|
2012-01-04 22:37:11 +01:00
|
|
|
for root, dirs, files in os.walk(os.path.join(root_dir, 'res')):
|
2012-01-03 22:39:30 +01:00
|
|
|
for filename in files:
|
|
|
|
if filename.endswith('.xml'):
|
|
|
|
f = open(os.path.join(root, filename))
|
|
|
|
changed = False
|
|
|
|
outlines = []
|
|
|
|
for line in f:
|
|
|
|
num = 1
|
|
|
|
index = 0
|
|
|
|
oldline = line
|
|
|
|
while True:
|
|
|
|
index = line.find("%", index)
|
|
|
|
if index == -1:
|
|
|
|
break
|
|
|
|
next = line[index+1:index+2]
|
|
|
|
if next == "s" or next == "d":
|
|
|
|
line = (line[:index+1] +
|
|
|
|
str(num) + "$" +
|
|
|
|
line[index+1:])
|
|
|
|
num += 1
|
|
|
|
index += 3
|
|
|
|
else:
|
|
|
|
index += 1
|
|
|
|
# We only want to insert the positional arguments
|
|
|
|
# when there is more than one argument...
|
|
|
|
if oldline != line:
|
|
|
|
if num > 2:
|
|
|
|
changed = True
|
|
|
|
else:
|
|
|
|
line = oldline
|
|
|
|
outlines.append(line)
|
|
|
|
f.close()
|
|
|
|
if changed:
|
|
|
|
f = open(os.path.join(root, filename), 'w')
|
|
|
|
f.writelines(outlines)
|
|
|
|
f.close()
|
|
|
|
|
2012-01-27 23:10:08 +01:00
|
|
|
# Add required external libraries...
|
2013-03-13 17:56:17 +01:00
|
|
|
if 'extlibs' in build:
|
2013-08-26 23:52:04 +02:00
|
|
|
print "Collecting prebuilt libraries..."
|
2012-01-27 23:10:08 +01:00
|
|
|
libsdir = os.path.join(root_dir, 'libs')
|
|
|
|
if not os.path.exists(libsdir):
|
|
|
|
os.mkdir(libsdir)
|
2012-01-28 01:05:30 +01:00
|
|
|
for lib in build['extlibs'].split(';'):
|
2013-09-11 13:45:02 +02:00
|
|
|
lib = lib.strip()
|
2013-11-12 21:14:16 +01:00
|
|
|
if options.verbose:
|
|
|
|
print "...installing extlib {0}".format(lib)
|
2012-01-27 23:10:08 +01:00
|
|
|
libf = os.path.basename(lib)
|
2013-11-12 21:14:16 +01:00
|
|
|
libsrc = os.path.join(extlib_dir, lib)
|
|
|
|
if not os.path.exists(libsrc):
|
|
|
|
raise BuildException("Missing extlib file {0}".format(libsrc))
|
|
|
|
shutil.copyfile(libsrc, os.path.join(libsdir, libf))
|
2012-01-27 23:10:08 +01:00
|
|
|
|
2012-01-28 01:05:30 +01:00
|
|
|
# Get required source libraries...
|
|
|
|
srclibpaths = []
|
2013-03-13 17:56:17 +01:00
|
|
|
if 'srclibs' in build:
|
2013-08-26 23:52:04 +02:00
|
|
|
print "Collecting source libraries..."
|
2012-01-28 01:05:30 +01:00
|
|
|
for lib in build['srclibs'].split(';'):
|
2013-11-15 20:42:17 +01:00
|
|
|
number = None
|
|
|
|
subdir = None
|
2013-09-11 13:45:02 +02:00
|
|
|
lib = lib.strip()
|
2013-11-15 20:42:17 +01:00
|
|
|
name, ref = lib.split('@')
|
|
|
|
if ':' in name:
|
|
|
|
number, name = name.split(':', 1)
|
|
|
|
if '/' in name:
|
|
|
|
name, subdir = name.split('/',1)
|
|
|
|
libpath = getsrclib(name+'@'+ref, srclib_dir, srclibpaths, subdir, preponly=onserver)
|
|
|
|
srclibpaths.append((name, number, libpath))
|
2013-11-18 08:41:29 +01:00
|
|
|
place_srclib(root_dir, int(number) if number else None, libpath)
|
2013-11-15 20:42:17 +01:00
|
|
|
|
2012-01-30 22:11:50 +01:00
|
|
|
basesrclib = vcs.getsrclib()
|
|
|
|
# If one was used for the main source, add that too.
|
|
|
|
if basesrclib:
|
|
|
|
srclibpaths.append(basesrclib)
|
2012-01-28 01:05:30 +01:00
|
|
|
|
2012-02-24 02:21:00 +01:00
|
|
|
# Apply patches if any
|
|
|
|
if 'patch' in build:
|
|
|
|
for patch in build['patch'].split(';'):
|
2013-09-11 13:45:02 +02:00
|
|
|
patch = patch.strip()
|
2012-02-24 02:21:00 +01:00
|
|
|
print "Applying " + patch
|
|
|
|
patch_path = os.path.join('metadata', app['id'], patch)
|
|
|
|
if subprocess.call(['patch', '-p1',
|
|
|
|
'-i', os.path.abspath(patch_path)], cwd=build_dir) != 0:
|
|
|
|
raise BuildException("Failed to apply patch %s" % patch_path)
|
|
|
|
|
2012-01-03 22:39:30 +01:00
|
|
|
# Run a pre-build command if one is required...
|
2013-03-13 17:56:17 +01:00
|
|
|
if 'prebuild' in build:
|
2013-11-08 20:44:27 +01:00
|
|
|
cmd = replace_config_vars(build['prebuild'])
|
2013-08-26 23:52:04 +02:00
|
|
|
|
2012-01-28 01:05:30 +01:00
|
|
|
# Substitute source library paths into prebuild commands...
|
2013-11-15 20:42:17 +01:00
|
|
|
for name, number, libpath in srclibpaths:
|
2012-01-28 01:05:30 +01:00
|
|
|
libpath = os.path.relpath(libpath, root_dir)
|
2013-10-09 23:36:24 +02:00
|
|
|
cmd = cmd.replace('$$' + name + '$$', libpath)
|
2013-11-08 20:44:27 +01:00
|
|
|
|
2013-11-01 12:10:57 +01:00
|
|
|
if options.verbose:
|
2013-10-09 23:36:24 +02:00
|
|
|
print "Running 'prebuild' commands in %s" % root_dir
|
|
|
|
|
2013-11-01 12:10:57 +01:00
|
|
|
p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=root_dir)
|
2012-09-24 15:06:15 +02:00
|
|
|
if p.returncode != 0:
|
2013-10-09 23:36:24 +02:00
|
|
|
raise BuildException("Error running prebuild command for %s:%s" %
|
2013-10-16 23:17:51 +02:00
|
|
|
(app['id'], build['version']), p.stdout, p.stderr)
|
2012-01-03 22:39:30 +01:00
|
|
|
|
2013-03-20 10:30:56 +01:00
|
|
|
return (root_dir, srclibpaths)
|
2012-01-03 22:39:30 +01:00
|
|
|
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2012-02-02 23:13:31 +01:00
|
|
|
# Scan the source code in the given directory (and all subdirectories)
|
|
|
|
# and return a list of potential problems.
|
2012-02-03 17:01:35 +01:00
|
|
|
def scan_source(build_dir, root_dir, thisbuild):
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2012-02-02 23:13:31 +01:00
|
|
|
problems = []
|
|
|
|
|
2013-10-09 10:43:02 +02:00
|
|
|
# Common known non-free blobs (always lower case):
|
2012-02-02 23:13:31 +01:00
|
|
|
usual_suspects = ['flurryagent',
|
|
|
|
'paypal_mpl',
|
|
|
|
'libgoogleanalytics',
|
|
|
|
'admob-sdk-android',
|
|
|
|
'googleadview',
|
2012-03-02 12:46:41 +01:00
|
|
|
'googleadmobadssdk',
|
2013-06-13 09:37:42 +02:00
|
|
|
'google-play-services',
|
|
|
|
'crittercism',
|
2012-08-31 15:49:07 +02:00
|
|
|
'heyzap',
|
2013-10-09 10:43:02 +02:00
|
|
|
'jpct-ae',
|
2013-10-26 20:30:17 +02:00
|
|
|
'youtubeandroidplayerapi',
|
2013-11-17 21:58:33 +01:00
|
|
|
'bugsense',
|
|
|
|
'crashlytics']
|
2012-03-06 20:50:19 +01:00
|
|
|
|
2013-11-01 13:46:19 +01:00
|
|
|
def getpaths(field):
|
|
|
|
paths = []
|
|
|
|
if field not in thisbuild:
|
|
|
|
return paths
|
|
|
|
for p in thisbuild[field].split(';'):
|
|
|
|
p = p.strip()
|
|
|
|
if p == '.':
|
|
|
|
p = '/'
|
|
|
|
elif p.startswith('./'):
|
|
|
|
p = p[1:]
|
|
|
|
elif not p.startswith('/'):
|
|
|
|
p = '/' + p;
|
|
|
|
if p not in paths:
|
|
|
|
paths.append(p)
|
|
|
|
return paths
|
|
|
|
|
|
|
|
scanignore = getpaths('scanignore')
|
|
|
|
scandelete = getpaths('scandelete')
|
|
|
|
|
2013-10-16 22:50:07 +02:00
|
|
|
ms = magic.open(magic.MIME_TYPE)
|
|
|
|
ms.load()
|
2013-03-20 10:46:30 +01:00
|
|
|
|
2013-11-01 13:46:19 +01:00
|
|
|
def toignore(fd):
|
|
|
|
for i in scanignore:
|
|
|
|
if fd.startswith(i):
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
|
|
|
def todelete(fd):
|
|
|
|
for i in scandelete:
|
|
|
|
if fd.startswith(i):
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
|
|
|
def removeproblem(what, fd, fp):
|
|
|
|
print 'Removing %s at %s' % (what, fd)
|
|
|
|
os.remove(fp)
|
|
|
|
|
|
|
|
def handleproblem(what, fd, fp):
|
|
|
|
if todelete(fd):
|
|
|
|
removeproblem(what, fd, fp)
|
|
|
|
else:
|
|
|
|
problems.append('Found %s at %s' % (what, fd))
|
|
|
|
|
2012-03-06 20:50:19 +01:00
|
|
|
# Iterate through all files in the source code...
|
2013-10-16 22:50:07 +02:00
|
|
|
for r,d,f in os.walk(build_dir):
|
|
|
|
for curfile in f:
|
|
|
|
|
|
|
|
if '/.hg' in r or '/.git' in r or '/.svn' in r:
|
|
|
|
continue
|
|
|
|
|
|
|
|
# Path (relative) to the file...
|
|
|
|
fp = os.path.join(r, curfile)
|
2013-11-01 13:46:19 +01:00
|
|
|
fd = fp[len(build_dir):]
|
2013-10-16 22:50:07 +02:00
|
|
|
|
|
|
|
# Check if this file has been explicitly excluded from scanning...
|
2013-11-01 13:46:19 +01:00
|
|
|
if toignore(fd):
|
2013-10-16 22:50:07 +02:00
|
|
|
continue
|
|
|
|
|
|
|
|
for suspect in usual_suspects:
|
|
|
|
if suspect in curfile.lower():
|
2013-11-01 13:46:19 +01:00
|
|
|
handleproblem('usual supect', fd, fp)
|
2013-10-16 22:50:07 +02:00
|
|
|
|
|
|
|
mime = ms.file(fp)
|
|
|
|
if mime == 'application/x-sharedlib':
|
2013-11-01 13:46:19 +01:00
|
|
|
handleproblem('shared library', fd, fp)
|
2013-10-16 22:50:07 +02:00
|
|
|
elif mime == 'application/x-archive':
|
2013-11-01 13:46:19 +01:00
|
|
|
handleproblem('static library', fd, fp)
|
2013-10-16 22:50:07 +02:00
|
|
|
elif mime == 'application/x-executable':
|
2013-11-01 13:46:19 +01:00
|
|
|
handleproblem('binary executable', fd, fp)
|
2013-10-29 12:03:37 +01:00
|
|
|
elif mime == 'application/jar' and fp.endswith('.apk'):
|
2013-11-01 13:46:19 +01:00
|
|
|
removeproblem('APK file', fd, fp)
|
2013-10-16 22:50:07 +02:00
|
|
|
|
|
|
|
elif curfile.endswith('.java'):
|
|
|
|
for line in file(fp):
|
|
|
|
if 'DexClassLoader' in line:
|
2013-11-01 13:46:19 +01:00
|
|
|
handleproblem('DexClassLoader', fd, fp)
|
2013-10-15 23:24:20 +02:00
|
|
|
break
|
2013-10-16 22:50:07 +02:00
|
|
|
ms.close()
|
2012-03-06 20:54:37 +01:00
|
|
|
|
2012-02-02 23:13:31 +01:00
|
|
|
# Presence of a jni directory without buildjni=yes might
|
2012-08-13 18:59:03 +02:00
|
|
|
# indicate a problem... (if it's not a problem, explicitly use
|
|
|
|
# buildjni=no to bypass this check)
|
2012-02-02 23:13:31 +01:00
|
|
|
if (os.path.exists(os.path.join(root_dir, 'jni')) and
|
2012-03-07 07:46:56 +01:00
|
|
|
thisbuild.get('buildjni') is None):
|
2012-02-02 23:13:31 +01:00
|
|
|
msg = 'Found jni directory, but buildjni is not enabled'
|
|
|
|
problems.append(msg)
|
|
|
|
|
|
|
|
return problems
|
|
|
|
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2012-01-17 18:25:28 +01:00
|
|
|
class KnownApks:
    """Tracks every apk the repo has ever seen, with the date each was
    first added, persisted in stats/known_apks.txt."""

    def __init__(self):
        self.path = os.path.join('stats', 'known_apks.txt')
        self.apks = {}
        self.changed = False
        if not os.path.exists(self.path):
            return
        # Each line is "apkname appid [YYYY-MM-DD]".
        for line in file( self.path):
            fields = line.rstrip().split(' ')
            if len(fields) == 2:
                apkname, appid = fields
                self.apks[apkname] = (appid, None)
            else:
                apkname = fields[0]
                self.apks[apkname] = (fields[1], time.strptime(fields[2], '%Y-%m-%d'))

    def writeifchanged(self):
        """Persist the apk list, but only when something was added."""
        if not self.changed:
            return
        if not os.path.exists('stats'):
            os.mkdir('stats')
        f = open(self.path, 'w')
        entries = []
        for apkname, (appid, added) in self.apks.iteritems():
            entry = apkname + ' ' + appid
            if added:
                entry += ' ' + time.strftime('%Y-%m-%d', added)
            entries.append(entry)
        for entry in sorted(entries):
            f.write(entry + '\n')
        f.close()

    # Record an apk (if it's new, otherwise does nothing)
    # Returns the date it was added.
    def recordapk(self, apk, app):
        if apk not in self.apks:
            self.apks[apk] = (app, time.gmtime(time.time()))
            self.changed = True
        return self.apks[apk][1]

    # Look up information - given the 'apkname', returns (app id, date added/None).
    # Or returns None for an unknown apk.
    def getapp(self, apkname):
        return self.apks.get(apkname)

    # Get the most recent 'num' apps added to the repo, as a list of package ids
    # with the most recent first.
    def getlatest(self, num):
        # An app's "added" date is the earliest date of any of its apks.
        firstseen = {}
        for apkname, (appid, added) in self.apks.iteritems():
            if not added:
                continue
            if appid not in firstseen or firstseen[appid] > added:
                firstseen[appid] = added
        bydate = sorted(firstseen.iteritems(), key=operator.itemgetter(1))
        latest = [appid for appid, added in bydate[-num:]]
        latest.reverse()
        return latest
|
|
|
|
|
2013-10-31 16:37:39 +01:00
|
|
|
def isApkDebuggable(apkfile, config):
|
2013-04-15 16:07:23 +02:00
|
|
|
"""Returns True if the given apk file is debuggable
|
|
|
|
|
2013-07-31 19:35:57 +02:00
|
|
|
:param apkfile: full path to the apk to check"""
|
2013-04-15 14:04:13 +02:00
|
|
|
|
2013-10-31 16:37:39 +01:00
|
|
|
p = subprocess.Popen([os.path.join(config['sdk_path'],
|
|
|
|
'build-tools', config['build_tools'], 'aapt'),
|
|
|
|
'dump', 'xmltree', apkfile, 'AndroidManifest.xml'],
|
|
|
|
stdout=subprocess.PIPE)
|
2013-04-15 14:04:13 +02:00
|
|
|
output = p.communicate()[0]
|
|
|
|
if p.returncode != 0:
|
|
|
|
print "ERROR: Failed to get apk manifest information"
|
|
|
|
sys.exit(1)
|
|
|
|
for line in output.splitlines():
|
|
|
|
if line.find('android:debuggable') != -1 and not line.endswith('0x0'):
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
|
|
|
|
2013-10-16 23:17:51 +02:00
|
|
|
class AsynchronousFileReader(threading.Thread):
    '''
    Reads lines from a file object on a background thread, pushing
    each line onto a queue so another thread can consume the output
    without ever blocking on the pipe.
    '''

    def __init__(self, fd, queue):
        assert callable(fd.readline)
        assert isinstance(queue, Queue.Queue)
        threading.Thread.__init__(self)
        self._source = fd
        self._lines = queue

    def run(self):
        '''Thread body: push each line read onto the queue.'''
        while True:
            line = self._source.readline()
            if line == '':
                break
            self._lines.put(line)

    def eof(self):
        '''Check whether there is no more content to expect.'''
        if self.is_alive():
            return False
        return self._lines.empty()
|
|
|
|
|
|
|
|
class PopenResult:
    """Outcome of an FDroidPopen invocation."""
    # Exit status of the child process (None until it finishes).
    returncode = None
    # Accumulated standard output / standard error text.
    stdout = ''
    stderr = ''
    # Presumably apk-related stdout filled in by callers; not set here.
    stdout_apk = ''
|
|
|
|
|
2013-11-05 09:26:26 +01:00
|
|
|
def FDroidPopen(commands, cwd=None):
    """
    Runs a command the FDroid way and returns return code and output

    Output from both pipes is captured into the returned PopenResult;
    when the global 'options.verbose' is set it is also echoed to the
    console as it arrives.

    :param commands, cwd: like subprocess.Popen
    """

    if options.verbose:
        if cwd is not None:
            print "Directory: %s" % cwd
        print " > %s" % ' '.join(commands)

    result = PopenResult()
    p = subprocess.Popen(commands, cwd=cwd,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    # Drain both pipes on background threads so neither can fill up
    # and deadlock the child process.
    stdout_queue = Queue.Queue()
    stdout_reader = AsynchronousFileReader(p.stdout, stdout_queue)
    stdout_reader.start()
    stderr_queue = Queue.Queue()
    stderr_reader = AsynchronousFileReader(p.stderr, stderr_queue)
    stderr_reader.start()

    # Check the queues for output (until there is no more to get)
    while not stdout_reader.eof() or not stderr_reader.eof():
        # Show what we received from standard output
        while not stdout_queue.empty():
            line = stdout_queue.get()
            if options.verbose:
                # Output directly to console
                sys.stdout.write(line)
                sys.stdout.flush()
            result.stdout += line

        # Show what we received from standard error
        while not stderr_queue.empty():
            line = stderr_queue.get()
            if options.verbose:
                # Output directly to console
                sys.stderr.write(line)
                sys.stderr.flush()
            result.stderr += line
        # Poll rather than block so both streams keep getting drained.
        time.sleep(0.2)

    # Reap the child and collect its exit status.
    p.communicate()
    result.returncode = p.returncode
    return result
|
2013-10-27 23:43:38 +01:00
|
|
|
|
2013-11-14 14:09:37 +01:00
|
|
|
def remove_signing_keys(build_dir):
|
|
|
|
for root, dirs, files in os.walk(build_dir):
|
|
|
|
if 'build.gradle' in files:
|
|
|
|
path = os.path.join(root, 'build.gradle')
|
2013-11-15 12:42:39 +01:00
|
|
|
changed = False
|
2013-10-27 23:43:38 +01:00
|
|
|
|
2013-11-14 14:09:37 +01:00
|
|
|
if options.verbose:
|
2013-11-15 12:42:39 +01:00
|
|
|
print "Cleaned build.gradle of keysigning configs at %s" % path
|
2013-11-14 14:09:37 +01:00
|
|
|
|
|
|
|
with open(path, "r") as o:
|
|
|
|
lines = o.readlines()
|
|
|
|
|
|
|
|
opened = 0
|
2013-11-16 12:54:35 +01:00
|
|
|
with open(path, "w") as o:
|
|
|
|
for line in lines:
|
2013-11-14 14:09:37 +01:00
|
|
|
if 'signingConfigs ' in line:
|
|
|
|
opened = 1
|
2013-11-15 12:42:39 +01:00
|
|
|
changed = True
|
2013-11-14 14:09:37 +01:00
|
|
|
elif opened > 0:
|
|
|
|
if '{' in line:
|
|
|
|
opened += 1
|
|
|
|
elif '}' in line:
|
|
|
|
opened -=1
|
2013-11-15 12:42:39 +01:00
|
|
|
elif any(s in line for s in (
|
2013-11-14 14:09:37 +01:00
|
|
|
' signingConfig ',
|
|
|
|
'android.signingConfigs.',
|
2013-11-17 21:58:43 +01:00
|
|
|
'variant.outputFile = ',
|
|
|
|
'.readLine(')):
|
2013-11-15 12:42:39 +01:00
|
|
|
changed = True
|
|
|
|
else:
|
2013-11-14 14:09:37 +01:00
|
|
|
o.write(line)
|
|
|
|
|
|
|
|
for propfile in ('build.properties', 'default.properties', 'ant.properties'):
|
|
|
|
if propfile in files:
|
|
|
|
path = os.path.join(root, propfile)
|
2013-11-15 12:42:39 +01:00
|
|
|
changed = False
|
|
|
|
|
2013-11-16 12:54:35 +01:00
|
|
|
with open(path, "r") as o:
|
|
|
|
lines = o.readlines()
|
|
|
|
|
|
|
|
with open(path, "w") as o:
|
|
|
|
for line in lines:
|
2013-11-15 12:42:39 +01:00
|
|
|
if line.startswith('key.store'):
|
|
|
|
changed = True
|
|
|
|
else:
|
2013-11-14 14:09:37 +01:00
|
|
|
o.write(line)
|
2013-10-27 23:43:38 +01:00
|
|
|
|
2013-11-15 12:42:39 +01:00
|
|
|
if changed and options.verbose:
|
|
|
|
print "Cleaned %s of keysigning configs at %s" % (propfile,path)
|
|
|
|
|
2013-11-08 20:44:27 +01:00
|
|
|
def replace_config_vars(cmd):
    """Expand the config placeholders in a build command string.

    Substitutes $$SDK$$, $$NDK$$ and $$MVN3$$ with the corresponding
    paths from the loaded configuration and returns the resulting
    command string.
    """
    substitutions = (
        ('$$SDK$$', 'sdk_path'),
        ('$$NDK$$', 'ndk_path'),
        ('$$MVN3$$', 'mvn3'),
    )
    for placeholder, cfgkey in substitutions:
        cmd = cmd.replace(placeholder, config[cfgkey])
    return cmd
|
|
|
|
|
2013-11-15 20:42:17 +01:00
|
|
|
def place_srclib(root_dir, number, libpath):
    """Record a source library reference in an Android project.

    Updates (or appends) the 'android.library.reference.<number>' entry
    in root_dir/project.properties so that it points at libpath,
    expressed relative to root_dir. Other lines are preserved as-is.

    :param root_dir: project directory containing project.properties
    :param number: 1-based reference slot; a falsy value is a no-op
    :param libpath: path of the library project to reference
    """
    if not number:
        return
    relpath = os.path.relpath(libpath, root_dir)
    proppath = os.path.join(root_dir, 'project.properties')

    with open(proppath, "r") as o:
        lines = o.readlines()

    with open(proppath, "w") as o:
        placed = False
        for line in lines:
            if line.startswith('android.library.reference.%d=' % number):
                # Replace the existing entry for this slot in place
                o.write('android.library.reference.%d=%s\n' % (number,relpath))
                placed = True
            else:
                o.write(line)
        if not placed:
            # If the file did not end with a newline, add one so the new
            # reference does not get fused onto the last existing line
            if lines and not lines[-1].endswith('\n'):
                o.write('\n')
            o.write('android.library.reference.%d=%s\n' % (number,relpath))
|
|
|
|
|