2011-01-26 17:26:51 +01:00
|
|
|
# -*- coding: utf-8 -*-
|
2010-11-11 23:34:39 +01:00
|
|
|
#
|
2011-02-17 21:16:26 +01:00
|
|
|
# common.py - part of the FDroid server tools
|
2013-03-18 10:17:23 +01:00
|
|
|
# Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com
|
2014-01-28 14:07:19 +01:00
|
|
|
# Copyright (C) 2013-2014 Daniel Martí <mvdan@mvdan.cc>
|
2010-11-11 23:34:39 +01:00
|
|
|
#
|
|
|
|
# This program is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU Affero General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU Affero General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU Affero General Public License
|
|
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
|
2014-05-02 04:36:12 +02:00
|
|
|
import os
|
|
|
|
import sys
|
|
|
|
import re
|
2012-01-04 22:37:11 +01:00
|
|
|
import shutil
|
2014-04-15 23:53:44 +02:00
|
|
|
import glob
|
2013-11-05 23:27:08 +01:00
|
|
|
import stat
|
2011-08-07 17:14:54 +02:00
|
|
|
import subprocess
|
2012-01-20 00:03:35 +01:00
|
|
|
import time
|
2012-01-22 15:03:56 +01:00
|
|
|
import operator
|
2013-10-16 23:17:51 +02:00
|
|
|
import Queue
|
|
|
|
import threading
|
2013-10-16 22:50:07 +02:00
|
|
|
import magic
|
2014-01-27 15:59:40 +01:00
|
|
|
import logging
|
2014-06-12 10:00:46 +02:00
|
|
|
from distutils.version import LooseVersion
|
2011-08-07 17:14:54 +02:00
|
|
|
|
2013-11-19 15:35:16 +01:00
|
|
|
import metadata
|
|
|
|
|
2013-11-01 12:10:57 +01:00
|
|
|
config = None
|
|
|
|
options = None
|
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2014-04-23 18:44:37 +02:00
|
|
|
def get_default_config():
    """Return the built-in default configuration values.

    read_config() merges these into the user's config.py for any keys
    the user did not set.  'sdk_path'/'ndk_path' default to the
    ANDROID_HOME / ANDROID_NDK environment variables (None if unset).
    """
    # NOTE(review): os.getenv("HOME") may be None on systems without HOME
    # set, which would make os.path.join raise TypeError — confirm intended.
    defaults = {
        'sdk_path': os.getenv("ANDROID_HOME"),
        'ndk_path': os.getenv("ANDROID_NDK"),
        'build_tools': "19.1.0",
        'ant': "ant",
        'mvn3': "mvn",
        'gradle': 'gradle',
        'archive_older': 0,
        'update_stats': False,
        'stats_to_carbon': False,
        'repo_maxage': 0,
        'build_server_always': False,
        'keystore': os.path.join(os.getenv("HOME"), '.local', 'share', 'fdroidserver', 'keystore.jks'),
        'smartcardoptions': [],
        'char_limits': {
            'Summary': 50,
            'Description': 1500
        },
        'keyaliases': {},
    }
    return defaults
|
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-11-04 10:22:22 +01:00
|
|
|
def read_config(opts, config_file='config.py'):
    """Read the repository config

    The config is read from config_file, which is in the current directory when
    any of the repo management commands are used.

    Populates the module-level `config` and `options` globals and returns
    the config dict.  Subsequent calls return the cached dict unchanged.
    Exits the process with status 2 when config_file is missing, or 3 when
    the Android SDK cannot be found.
    """
    global config, options

    # Only parse the config once per process; later callers get the cache.
    if config is not None:
        return config
    if not os.path.isfile(config_file):
        logging.critical("Missing config file - is this a repo directory?")
        sys.exit(2)

    options = opts

    config = {}

    logging.debug("Reading %s" % config_file)
    # NOTE(review): execfile (Python 2 only) runs config.py as arbitrary
    # code with `config` as its globals — the config file is trusted input.
    execfile(config_file, config)

    # smartcardoptions must be a list since its command line args for Popen
    if 'smartcardoptions' in config:
        config['smartcardoptions'] = config['smartcardoptions'].split(' ')
    elif 'keystore' in config and config['keystore'] == 'NONE':
        # keystore='NONE' means use smartcard, these are required defaults
        config['smartcardoptions'] = ['-storetype', 'PKCS11', '-providerName',
                                      'SunPKCS11-OpenSC', '-providerClass',
                                      'sun.security.pkcs11.SunPKCS11',
                                      '-providerArg', 'opensc-fdroid.cfg']

    # Warn when a config file holding keystore secrets is readable by
    # group or other users.
    if any(k in config for k in ["keystore", "keystorepass", "keypass"]):
        st = os.stat(config_file)
        if st.st_mode & stat.S_IRWXG or st.st_mode & stat.S_IRWXO:
            logging.warn("unsafe permissions on {0} (should be 0600)!".format(config_file))

    # Fill in defaults for any keys the user did not set.
    defconfig = get_default_config()
    for k, v in defconfig.items():
        if k not in config:
            config[k] = v

    # Expand environment variables (and ~) in all string values.
    for k, v in config.items():
        if type(v) != str:
            continue
        v = os.path.expanduser(v)
        config[k] = os.path.expandvars(v)

    if not test_sdk_exists(config):
        sys.exit(3)

    # Passwords are handed to other tools via 0600 files, never on the
    # command line (see write_password_file).
    for k in ["keystorepass", "keypass"]:
        if k in config:
            write_password_file(k)

    # since this is used with rsync, where trailing slashes have meaning,
    # ensure there is always a trailing slash
    if 'serverwebroot' in config:
        if config['serverwebroot'][-1] != '/':
            config['serverwebroot'] += '/'
        config['serverwebroot'] = config['serverwebroot'].replace('//', '/')

    return config
|
2013-10-31 16:37:39 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2014-04-23 18:44:37 +02:00
|
|
|
def test_sdk_exists(c):
|
2014-05-02 04:02:40 +02:00
|
|
|
if c['sdk_path'] is None:
|
2014-04-23 18:44:37 +02:00
|
|
|
# c['sdk_path'] is set to the value of ANDROID_HOME by default
|
2014-04-24 01:21:22 +02:00
|
|
|
logging.critical('No Android SDK found! ANDROID_HOME is not set and sdk_path is not in config.py!')
|
|
|
|
logging.info('You can use ANDROID_HOME to set the path to your SDK, i.e.:')
|
2014-04-23 18:44:37 +02:00
|
|
|
logging.info('\texport ANDROID_HOME=/opt/android-sdk')
|
|
|
|
return False
|
|
|
|
if not os.path.exists(c['sdk_path']):
|
|
|
|
logging.critical('Android SDK path "' + c['sdk_path'] + '" does not exist!')
|
|
|
|
return False
|
|
|
|
if not os.path.isdir(c['sdk_path']):
|
|
|
|
logging.critical('Android SDK path "' + c['sdk_path'] + '" is not a directory!')
|
|
|
|
return False
|
2014-04-24 01:21:22 +02:00
|
|
|
if not os.path.isdir(os.path.join(c['sdk_path'], 'build-tools')):
|
|
|
|
logging.critical('Android SDK path "' + c['sdk_path'] + '" does not contain "build-tools/"!')
|
|
|
|
return False
|
2014-06-16 12:40:04 +02:00
|
|
|
if not os.path.isdir(os.path.join(c['sdk_path'], 'build-tools', c['build_tools'])):
|
|
|
|
logging.critical('Configured build-tools version "' + c['build_tools'] + '" not found in the SDK!')
|
|
|
|
return False
|
2014-04-23 18:44:37 +02:00
|
|
|
return True
|
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2014-05-30 03:43:16 +02:00
|
|
|
def test_build_tools_exists(c):
    """Check that the configured build-tools version, including aapt, exists.

    Implies test_sdk_exists(c) first.  Logs a critical message and
    returns False on the first failed check; returns True otherwise.
    """
    if not test_sdk_exists(c):
        return False
    build_tools = os.path.join(c['sdk_path'], 'build-tools')
    versioned_build_tools = os.path.join(build_tools, c['build_tools'])
    if not os.path.isdir(versioned_build_tools):
        logging.critical('Android Build Tools path "'
                         + versioned_build_tools + '" does not exist!')
        return False
    # Reuse the path computed above instead of re-joining its components
    # (the original rebuilt sdk_path/build-tools/build_tools here).
    if not os.path.exists(os.path.join(versioned_build_tools, 'aapt')):
        logging.critical('Android Build Tools "'
                         + versioned_build_tools
                         + '" does not contain "aapt"!')
        return False
    return True
|
|
|
|
|
|
|
|
|
2014-04-01 03:02:42 +02:00
|
|
|
def write_password_file(pwtype, password=None):
    '''
    writes out passwords to a protected file instead of passing passwords as
    command line arguments

    The file is created with mode 0o600 (owner read/write only).  When
    password is None the value is taken from config[pwtype].  The filename
    is recorded in config[pwtype + 'file'] for later use.
    '''
    filename = '.fdroid.' + pwtype + '.txt'
    # 0o600 replaces the old 0600 literal: identical value, but the 0o form
    # is valid in Python 2.6+ AND Python 3 (plain 0600 is a Py3 SyntaxError).
    fd = os.open(filename, os.O_CREAT | os.O_TRUNC | os.O_WRONLY, 0o600)
    if password is None:
        os.write(fd, config[pwtype])
    else:
        os.write(fd, password)
    os.close(fd)
    config[pwtype + 'file'] = filename
|
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-12-19 22:28:38 +01:00
|
|
|
# Given the arguments in the form of multiple appid:[vc] strings, this returns
|
|
|
|
# a dictionary with the set of vercodes specified for each package.
|
|
|
|
def read_pkg_args(args, allow_vercodes=False):
    """Parse appid[:vercode] argument strings into a dict.

    Returns a mapping of appid -> list of version codes.  An appid given
    without a vercode (or with allow_vercodes=False) maps to an empty
    list; duplicate vercodes are recorded only once.
    """
    vercodes = {}
    if not args:
        return vercodes

    for arg in args:
        # Only split off a vercode when the caller allows it.
        if allow_vercodes and ':' in arg:
            appid, vc = arg.split(':')
        else:
            appid, vc = arg, None
        codes = vercodes.setdefault(appid, [])
        if vc and vc not in codes:
            codes.append(vc)

    return vercodes
|
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-12-19 22:28:38 +01:00
|
|
|
# On top of what read_pkg_args does, this returns the whole app metadata, but
|
|
|
|
# limiting the builds list to the builds matching the vercodes specified.
|
|
|
|
def read_app_args(args, allapps, allow_vercodes=False):
    """Resolve appid[:vercode] arguments to app metadata dicts.

    On top of what read_pkg_args does, this returns the whole app metadata,
    but limits each app's 'builds' list to the builds matching the vercodes
    specified.  With no args at all, returns allapps unchanged.  Raises
    Exception when an unknown appid or vercode is requested.
    """

    vercodes = read_pkg_args(args, allow_vercodes)

    # No filtering requested - everything is selected.
    if not vercodes:
        return allapps

    apps = [app for app in allapps if app['id'] in vercodes]

    # Fewer matches than requested ids means some ids are unknown;
    # report each missing one before failing.
    if len(apps) != len(vercodes):
        allids = [app["id"] for app in allapps]
        for p in vercodes:
            if p not in allids:
                logging.critical("No such package: %s" % p)
        raise Exception("Found invalid app ids in arguments")
    if not apps:
        raise Exception("No packages specified")

    error = False
    for app in apps:
        vc = vercodes[app['id']]
        if not vc:
            continue
        # NOTE: mutates the app dict in place, dropping non-matching builds.
        app['builds'] = [b for b in app['builds'] if b['vercode'] in vc]
        if len(app['builds']) != len(vercodes[app['id']]):
            error = True
            allvcs = [b['vercode'] for b in app['builds']]
            for v in vercodes[app['id']]:
                if v not in allvcs:
                    logging.critical("No such vercode %s for app %s" % (v, app['id']))

    # All missing vercodes were logged above; fail once at the end.
    if error:
        raise Exception("Found invalid vercodes for some apps")

    return apps
|
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-12-30 11:33:37 +01:00
|
|
|
def has_extension(filename, extension):
    """Return True if filename's extension equals `extension`.

    The comparison is case-insensitive and `extension` is given without
    the leading dot.
    """
    return os.path.splitext(filename)[1].lower()[1:] == extension
|
|
|
|
|
2013-12-19 17:51:16 +01:00
|
|
|
apk_regex = None
|
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-12-19 17:58:10 +01:00
|
|
|
def apknameinfo(filename):
    """Split an apk filename of the form <appid>_<vercode>.apk.

    Returns the (appid, vercode) tuple; raises Exception when the
    basename does not match that pattern.
    """
    global apk_regex
    basename = os.path.basename(filename)
    if apk_regex is None:
        # Compile once and cache in the module-level apk_regex.
        apk_regex = re.compile(r"^(.+)_([0-9]+)\.apk$")
    match = apk_regex.match(basename)
    try:
        return (match.group(1), match.group(2))
    except AttributeError:
        # match is None when the name doesn't fit the pattern.
        raise Exception("Invalid apk name: %s" % basename)
|
2013-12-11 17:29:38 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-11-19 15:35:16 +01:00
|
|
|
def getapkname(app, build):
    """Return the canonical apk filename for a build: <id>_<vercode>.apk."""
    return "{0}_{1}.apk".format(app['id'], build['vercode'])
|
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-11-19 15:35:16 +01:00
|
|
|
def getsrcname(app, build):
    """Return the canonical source tarball name: <id>_<vercode>_src.tar.gz."""
    return "{0}_{1}_src.tar.gz".format(app['id'], build['vercode'])
|
2013-10-31 16:37:39 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-12-02 15:28:30 +01:00
|
|
|
def getappname(app):
    """Return the best display name for an app.

    Prefers the explicit 'Name', then 'Auto Name', falling back to the
    app id when both are empty/unset.
    """
    for field in ('Name', 'Auto Name'):
        if app[field]:
            return app[field]
    return app['id']
|
2013-12-02 15:28:30 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-12-02 15:28:30 +01:00
|
|
|
def getcvname(app):
    """Return 'Current Version (Current Version Code)' for display."""
    return '{0} ({1})'.format(app['Current Version'], app['Current Version Code'])
|
2013-12-02 15:28:30 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-11-08 20:44:27 +01:00
|
|
|
def getvcs(vcstype, remote, local):
    """Instantiate the vcs backend matching vcstype.

    'srclib' is special: the local path must be the hard-coded
    build/srclib/<remote> location and the srclib machinery is used.
    Raises VCSException for an unrecognised type.
    """
    if vcstype == 'srclib':
        if local != 'build/srclib/' + remote:
            raise VCSException("Error: srclib paths are hard-coded!")
        return getsrclib(remote, 'build/srclib', raw=True)
    # Dispatch table instead of an if-chain; names resolve at call time.
    backends = {
        'git': vcs_git,
        'svn': vcs_svn,
        'git-svn': vcs_gitsvn,
        'hg': vcs_hg,
        'bzr': vcs_bzr,
    }
    if vcstype in backends:
        return backends[vcstype](remote, local)
    raise VCSException("Invalid vcs type " + vcstype)
|
2011-08-07 17:14:54 +02:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-05-24 23:35:56 +02:00
|
|
|
def getsrclibvcs(name):
    """Return the 'Repo Type' of the named srclib.

    Raises VCSException when the srclib is unknown to the metadata.
    """
    if name not in metadata.srclibs:
        raise VCSException("Missing srclib " + name)
    return metadata.srclibs[name]['Repo Type']
|
2013-05-24 23:35:56 +02:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2011-08-07 17:14:54 +02:00
|
|
|
class vcs:
    """Base class for version control backends.

    Extracts username:password credentials from the remote URL for VCS
    types that may need auth, and maintains the .fdroidvcs bookkeeping
    file that lets gotorevision() detect when a local checkout no longer
    matches its configured type/remote.
    """

    def __init__(self, remote, local):

        # svn, git-svn and bzr may require auth
        self.username = None
        if self.repotype() in ('svn', 'git-svn', 'bzr'):
            if '@' in remote:
                # remote is username:password@url; strip the credentials off
                self.username, remote = remote.split('@')
                if ':' not in self.username:
                    raise VCSException("Password required with username")
                self.username, self.password = self.username.split(':')

        self.remote = remote
        self.local = local
        # Remote refresh (fetch/update) happens at most once per object.
        self.refreshed = False
        self.srclib = None

    def repotype(self):
        """Identifier for this VCS type; overridden by subclasses."""
        return None

    # Take the local repository to a clean version of the given revision, which
    # is specified in the VCS's native format. Beforehand, the repository can
    # be dirty, or even non-existent. If the repository does already exist
    # locally, it will be updated from the origin, but only once in the
    # lifetime of the vcs object.
    # None is acceptable for 'rev' if you know you are cloning a clean copy of
    # the repo - otherwise it must specify a valid revision.
    def gotorevision(self, rev):

        # The .fdroidvcs-id file for a repo tells us what VCS type
        # and remote that directory was created from, allowing us to drop it
        # automatically if either of those things changes.
        fdpath = os.path.join(self.local, '..',
                              '.fdroidvcs-' + os.path.basename(self.local))
        cdata = self.repotype() + ' ' + self.remote
        writeback = True
        deleterepo = False
        if os.path.exists(self.local):
            if os.path.exists(fdpath):
                with open(fdpath, 'r') as f:
                    fsdata = f.read().strip()
                if fsdata == cdata:
                    # Existing checkout matches type+remote; keep it.
                    writeback = False
                else:
                    deleterepo = True
                    logging.info(
                        "Repository details for {0} changed - deleting"
                        .format(self.local))
            else:
                deleterepo = True
                logging.info("Repository details missing - deleting")
        if deleterepo:
            shutil.rmtree(self.local)

        self.gotorevisionx(rev)

        # If necessary, write the .fdroidvcs file.
        if writeback:
            with open(fdpath, 'w') as f:
                f.write(cdata)

    # Derived classes need to implement this. It's called once basic checking
    # has been performed.
    def gotorevisionx(self, rev):
        raise VCSException("This VCS type doesn't define gotorevisionx")

    # Initialise and update submodules
    def initsubmodules(self):
        raise VCSException('Submodules not supported for this vcs type')

    # Get a list of all known tags
    def gettags(self):
        raise VCSException('gettags not supported for this vcs type')

    # Get a list of latest number tags
    def latesttags(self, number):
        raise VCSException('latesttags not supported for this vcs type')

    # Get current commit reference (hash, revision, etc)
    def getref(self):
        raise VCSException('getref not supported for this vcs type')

    # Returns the srclib (name, path) used in setting up the current
    # revision, or None.
    def getsrclib(self):
        return self.srclib
|
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2011-08-07 17:14:54 +02:00
|
|
|
class vcs_git(vcs):
    """Git backend: clone/fetch/checkout via the git command line."""

    def repotype(self):
        return 'git'

    # If the local directory exists, but is somehow not a git repository, git
    # will traverse up the directory tree until it finds one that is (i.e.
    # fdroidserver) and then we'll proceed to destroy it! This is called as
    # a safety check.
    def checkrepo(self):
        p = SilentPopen(['git', 'rev-parse', '--show-toplevel'], cwd=self.local)
        result = p.stdout.rstrip()
        if not result.endswith(self.local):
            raise VCSException('Repository mismatch')

    def gotorevisionx(self, rev):
        """Clone (or clean + refresh) the repo and check out rev."""
        if not os.path.exists(self.local):
            # Brand new checkout
            p = FDroidPopen(['git', 'clone', self.remote, self.local])
            if p.returncode != 0:
                raise VCSException("Git clone failed")
            self.checkrepo()
        else:
            self.checkrepo()
            # Discard any working tree changes
            p = SilentPopen(['git', 'reset', '--hard'], cwd=self.local)
            if p.returncode != 0:
                raise VCSException("Git reset failed")
            # Remove untracked files now, in case they're tracked in the target
            # revision (it happens!)
            p = SilentPopen(['git', 'clean', '-dffx'], cwd=self.local)
            if p.returncode != 0:
                raise VCSException("Git clean failed")
            if not self.refreshed:
                # Get latest commits and tags from remote
                p = FDroidPopen(['git', 'fetch', 'origin'], cwd=self.local)
                if p.returncode != 0:
                    raise VCSException("Git fetch failed")
                p = SilentPopen(['git', 'fetch', '--prune', '--tags', 'origin'], cwd=self.local)
                if p.returncode != 0:
                    raise VCSException("Git fetch failed")
                self.refreshed = True
        # origin/HEAD is the HEAD of the remote, e.g. the "default branch" on
        # a github repo. Most of the time this is the same as origin/master.
        rev = str(rev if rev else 'origin/HEAD')
        p = SilentPopen(['git', 'checkout', '-f', rev], cwd=self.local)
        if p.returncode != 0:
            raise VCSException("Git checkout failed")
        # Get rid of any uncontrolled files left behind
        p = SilentPopen(['git', 'clean', '-dffx'], cwd=self.local)
        if p.returncode != 0:
            raise VCSException("Git clean failed")

    def initsubmodules(self):
        """Initialise and update this repo's submodules, resetting them
        to a clean state first."""
        self.checkrepo()
        submfile = os.path.join(self.local, '.gitmodules')
        if not os.path.isfile(submfile):
            raise VCSException("No git submodules available")

        # fix submodules not accessible without an account and public key auth
        with open(submfile, 'r') as f:
            lines = f.readlines()
        with open(submfile, 'w') as f:
            for line in lines:
                if 'git@github.com' in line:
                    line = line.replace('git@github.com:', 'https://github.com/')
                f.write(line)

        # Clean each submodule working tree before updating.
        for cmd in [
                ['git', 'reset', '--hard'],
                ['git', 'clean', '-dffx'],
                ]:
            p = SilentPopen(['git', 'submodule', 'foreach', '--recursive'] + cmd, cwd=self.local)
            if p.returncode != 0:
                raise VCSException("Git submodule reset failed")
        p = FDroidPopen(['git', 'submodule', 'sync'], cwd=self.local)
        if p.returncode != 0:
            raise VCSException("Git submodule sync failed")
        p = FDroidPopen(['git', 'submodule', 'update', '--init', '--force', '--recursive'], cwd=self.local)
        if p.returncode != 0:
            raise VCSException("Git submodule update failed")

    def gettags(self):
        """Return the list of tag names in the local repo."""
        self.checkrepo()
        p = SilentPopen(['git', 'tag'], cwd=self.local)
        return p.stdout.splitlines()

    def latesttags(self, alltags, number):
        """Return the `number` most recently committed tags from alltags,
        oldest first (sorted by commit timestamp via a shell pipeline)."""
        self.checkrepo()
        p = SilentPopen(['echo "' + '\n'.join(alltags) + '" | '
                        + 'xargs -I@ git log --format=format:"%at @%n" -1 @ | '
                        + 'sort -n | awk \'{print $2}\''],
                        cwd=self.local, shell=True)
        return p.stdout.splitlines()[-number:]
|
|
|
|
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2012-01-04 22:37:11 +01:00
|
|
|
class vcs_gitsvn(vcs):
    """git-svn backend: mirrors a Subversion repo through git-svn."""

    def repotype(self):
        return 'git-svn'

    # Damn git-svn tries to use a graphical password prompt, so we have to
    # trick it into taking the password from stdin
    def userargs(self):
        # Returns (command_prefix, command_suffix) strings to wrap around
        # a git-svn shell command.
        if self.username is None:
            return ('', '')
        return ('echo "%s" | DISPLAY="" ' % self.password, ' --username "%s"' % self.username)

    # If the local directory exists, but is somehow not a git repository, git
    # will traverse up the directory tree until it finds one that is (i.e.
    # fdroidserver) and then we'll proceed to destroy it! This is called as
    # a safety check.
    def checkrepo(self):
        p = SilentPopen(['git', 'rev-parse', '--show-toplevel'], cwd=self.local)
        result = p.stdout.rstrip()
        if not result.endswith(self.local):
            raise VCSException('Repository mismatch')

    def gotorevisionx(self, rev):
        """Clone (or clean + refresh) the git-svn mirror and check out rev.

        rev may be an svn revision number, 'branch/rev', a tag name, or a
        plain git ref as a last resort; defaults to 'master'.
        """
        if not os.path.exists(self.local):
            # Brand new checkout
            gitsvn_cmd = '%sgit svn clone%s' % self.userargs()
            # remote may carry layout hints after ';', e.g.
            # url;trunk=...;tags=...;branches=...
            if ';' in self.remote:
                remote_split = self.remote.split(';')
                for i in remote_split[1:]:
                    if i.startswith('trunk='):
                        gitsvn_cmd += ' -T %s' % i[6:]
                    elif i.startswith('tags='):
                        gitsvn_cmd += ' -t %s' % i[5:]
                    elif i.startswith('branches='):
                        gitsvn_cmd += ' -b %s' % i[9:]
                p = SilentPopen([gitsvn_cmd + " %s %s" % (remote_split[0], self.local)], shell=True)
                if p.returncode != 0:
                    raise VCSException("Git clone failed")
            else:
                p = SilentPopen([gitsvn_cmd + " %s %s" % (self.remote, self.local)], shell=True)
                if p.returncode != 0:
                    raise VCSException("Git clone failed")
            self.checkrepo()
        else:
            self.checkrepo()
            # Discard any working tree changes
            p = SilentPopen(['git', 'reset', '--hard'], cwd=self.local)
            if p.returncode != 0:
                raise VCSException("Git reset failed")
            # Remove untracked files now, in case they're tracked in the target
            # revision (it happens!)
            p = SilentPopen(['git', 'clean', '-dffx'], cwd=self.local)
            if p.returncode != 0:
                raise VCSException("Git clean failed")
            if not self.refreshed:
                # Get new commits, branches and tags from repo
                p = SilentPopen(['%sgit svn fetch %s' % self.userargs()], cwd=self.local, shell=True)
                if p.returncode != 0:
                    raise VCSException("Git svn fetch failed")
                p = SilentPopen(['%sgit svn rebase %s' % self.userargs()], cwd=self.local, shell=True)
                if p.returncode != 0:
                    raise VCSException("Git svn rebase failed")
                self.refreshed = True

        rev = str(rev if rev else 'master')
        if rev:
            nospaces_rev = rev.replace(' ', '%20')
            # Try finding a svn tag
            p = SilentPopen(['git', 'checkout', 'tags/' + nospaces_rev], cwd=self.local)
            if p.returncode != 0:
                # No tag found, normal svn rev translation
                # Translate svn rev into git format
                rev_split = rev.split('/')
                if len(rev_split) > 1:
                    treeish = rev_split[0]
                    svn_rev = rev_split[1]

                else:
                    # if no branch is specified, then assume trunk (ie. 'master'
                    # branch):
                    treeish = 'master'
                    svn_rev = rev

                p = SilentPopen(['git', 'svn', 'find-rev', 'r' + svn_rev, treeish], cwd=self.local)
                git_rev = p.stdout.rstrip()

                if p.returncode != 0 or not git_rev:
                    # Try a plain git checkout as a last resort
                    p = SilentPopen(['git', 'checkout', rev], cwd=self.local)
                    if p.returncode != 0:
                        raise VCSException("No git treeish found and direct git checkout failed")
                else:
                    # Check out the git rev equivalent to the svn rev
                    p = SilentPopen(['git', 'checkout', git_rev], cwd=self.local)
                    if p.returncode != 0:
                        raise VCSException("Git svn checkout failed")

        # Get rid of any uncontrolled files left behind
        p = SilentPopen(['git', 'clean', '-dffx'], cwd=self.local)
        if p.returncode != 0:
            raise VCSException("Git clean failed")

    def gettags(self):
        """Return svn tag names as mirrored under .git/svn refs."""
        self.checkrepo()
        return os.listdir(os.path.join(self.local, '.git/svn/refs/remotes/tags'))

    def getref(self):
        """Return the svn revision of HEAD, or None on failure."""
        self.checkrepo()
        p = SilentPopen(['git', 'svn', 'find-rev', 'HEAD'], cwd=self.local)
        if p.returncode != 0:
            return None
        return p.stdout.strip()
|
2013-10-17 23:27:55 +02:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2011-08-07 17:14:54 +02:00
|
|
|
class vcs_svn(vcs):
|
|
|
|
|
2012-08-13 18:59:03 +02:00
|
|
|
    def repotype(self):
        # Identifier for this backend, matching metadata 'Repo Type' values.
        return 'svn'
|
|
|
|
|
2011-08-07 17:14:54 +02:00
|
|
|
def userargs(self):
|
|
|
|
if self.username is None:
|
2012-01-02 12:51:14 +01:00
|
|
|
return ['--non-interactive']
|
2013-12-30 17:04:16 +01:00
|
|
|
return ['--username', self.username,
|
2011-08-07 17:14:54 +02:00
|
|
|
'--password', self.password,
|
|
|
|
'--non-interactive']
|
|
|
|
|
2012-08-13 18:59:03 +02:00
|
|
|
def gotorevisionx(self, rev):
|
2012-01-23 15:15:40 +01:00
|
|
|
if not os.path.exists(self.local):
|
2014-02-17 13:25:55 +01:00
|
|
|
p = SilentPopen(['svn', 'checkout', self.remote, self.local] + self.userargs())
|
|
|
|
if p.returncode != 0:
|
2012-01-23 15:15:40 +01:00
|
|
|
raise VCSException("Svn checkout failed")
|
2011-08-07 17:14:54 +02:00
|
|
|
else:
|
2012-01-23 15:15:40 +01:00
|
|
|
for svncommand in (
|
|
|
|
'svn revert -R .',
|
|
|
|
r"svn status | awk '/\?/ {print $2}' | xargs rm -rf"):
|
2014-02-17 14:59:55 +01:00
|
|
|
p = SilentPopen([svncommand], cwd=self.local, shell=True)
|
2014-02-17 13:25:55 +01:00
|
|
|
if p.returncode != 0:
|
2013-05-22 11:15:57 +02:00
|
|
|
raise VCSException("Svn reset ({0}) failed in {1}".format(svncommand, self.local))
|
2012-01-23 15:15:40 +01:00
|
|
|
if not self.refreshed:
|
2014-02-17 13:25:55 +01:00
|
|
|
p = SilentPopen(['svn', 'update'] + self.userargs(), cwd=self.local)
|
|
|
|
if p.returncode != 0:
|
2012-01-23 15:15:40 +01:00
|
|
|
raise VCSException("Svn update failed")
|
|
|
|
self.refreshed = True
|
2013-09-15 23:20:27 +02:00
|
|
|
|
|
|
|
revargs = list(['-r', rev] if rev else [])
|
2014-02-17 13:25:55 +01:00
|
|
|
p = SilentPopen(['svn', 'update', '--force'] + revargs + self.userargs(), cwd=self.local)
|
|
|
|
if p.returncode != 0:
|
2013-09-15 23:20:27 +02:00
|
|
|
raise VCSException("Svn update failed")
|
2011-08-07 17:14:54 +02:00
|
|
|
|
2013-10-17 23:27:55 +02:00
|
|
|
def getref(self):
|
2014-02-17 13:12:25 +01:00
|
|
|
p = SilentPopen(['svn', 'info'], cwd=self.local)
|
|
|
|
for line in p.stdout.splitlines():
|
2013-12-06 12:15:13 +01:00
|
|
|
if line and line.startswith('Last Changed Rev: '):
|
2013-10-17 23:27:55 +02:00
|
|
|
return line[18:]
|
2014-01-27 16:07:30 +01:00
|
|
|
return None
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2011-08-07 17:14:54 +02:00
|
|
|
class vcs_hg(vcs):
|
|
|
|
|
2012-08-13 18:59:03 +02:00
|
|
|
def repotype(self):
|
|
|
|
return 'hg'
|
|
|
|
|
|
|
|
def gotorevisionx(self, rev):
|
2012-01-23 15:15:40 +01:00
|
|
|
if not os.path.exists(self.local):
|
2014-02-17 13:25:55 +01:00
|
|
|
p = SilentPopen(['hg', 'clone', self.remote, self.local])
|
|
|
|
if p.returncode != 0:
|
2012-01-23 15:15:40 +01:00
|
|
|
raise VCSException("Hg clone failed")
|
2011-08-07 17:14:54 +02:00
|
|
|
else:
|
2014-02-17 14:59:55 +01:00
|
|
|
p = SilentPopen(['hg status -uS | xargs rm -rf'], cwd=self.local, shell=True)
|
2014-02-17 13:25:55 +01:00
|
|
|
if p.returncode != 0:
|
2012-01-23 15:15:40 +01:00
|
|
|
raise VCSException("Hg clean failed")
|
|
|
|
if not self.refreshed:
|
2014-02-17 13:25:55 +01:00
|
|
|
p = SilentPopen(['hg', 'pull'], cwd=self.local)
|
|
|
|
if p.returncode != 0:
|
2012-01-23 15:15:40 +01:00
|
|
|
raise VCSException("Hg pull failed")
|
|
|
|
self.refreshed = True
|
2013-09-15 23:20:27 +02:00
|
|
|
|
|
|
|
rev = str(rev if rev else 'default')
|
2013-11-01 19:12:22 +01:00
|
|
|
if not rev:
|
|
|
|
return
|
2014-02-17 13:25:55 +01:00
|
|
|
p = SilentPopen(['hg', 'update', '-C', rev], cwd=self.local)
|
|
|
|
if p.returncode != 0:
|
2013-11-01 19:12:22 +01:00
|
|
|
raise VCSException("Hg checkout failed")
|
2014-02-17 13:12:25 +01:00
|
|
|
p = SilentPopen(['hg', 'purge', '--all'], cwd=self.local)
|
2014-01-15 16:40:27 +01:00
|
|
|
# Also delete untracked files, we have to enable purge extension for that:
|
2014-01-27 16:22:25 +01:00
|
|
|
if "'purge' is provided by the following extension" in p.stdout:
|
2014-05-06 19:56:44 +02:00
|
|
|
with open(self.local + "/.hg/hgrc", "a") as myfile:
|
2014-01-20 00:33:31 +01:00
|
|
|
myfile.write("\n[extensions]\nhgext.purge=\n")
|
2014-02-17 13:25:55 +01:00
|
|
|
p = SilentPopen(['hg', 'purge', '--all'], cwd=self.local)
|
|
|
|
if p.returncode != 0:
|
2014-01-15 16:40:27 +01:00
|
|
|
raise VCSException("HG purge failed")
|
2014-01-20 16:14:00 +01:00
|
|
|
elif p.returncode != 0:
|
2014-01-15 15:08:55 +01:00
|
|
|
raise VCSException("HG purge failed")
|
2011-08-07 17:14:54 +02:00
|
|
|
|
2013-09-06 20:33:47 +02:00
|
|
|
def gettags(self):
|
2014-02-17 13:12:25 +01:00
|
|
|
p = SilentPopen(['hg', 'tags', '-q'], cwd=self.local)
|
2014-01-27 16:22:25 +01:00
|
|
|
return p.stdout.splitlines()[1:]
|
2013-09-06 20:33:47 +02:00
|
|
|
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2011-08-07 17:14:54 +02:00
|
|
|
class vcs_bzr(vcs):
|
|
|
|
|
2012-08-13 18:59:03 +02:00
|
|
|
def repotype(self):
|
|
|
|
return 'bzr'
|
|
|
|
|
|
|
|
def gotorevisionx(self, rev):
|
2012-01-23 15:15:40 +01:00
|
|
|
if not os.path.exists(self.local):
|
2014-02-17 13:25:55 +01:00
|
|
|
p = SilentPopen(['bzr', 'branch', self.remote, self.local])
|
|
|
|
if p.returncode != 0:
|
2012-01-23 15:15:40 +01:00
|
|
|
raise VCSException("Bzr branch failed")
|
2011-08-07 17:14:54 +02:00
|
|
|
else:
|
2014-02-17 13:25:55 +01:00
|
|
|
p = SilentPopen(['bzr', 'clean-tree', '--force', '--unknown', '--ignored'], cwd=self.local)
|
|
|
|
if p.returncode != 0:
|
2012-01-23 15:15:40 +01:00
|
|
|
raise VCSException("Bzr revert failed")
|
|
|
|
if not self.refreshed:
|
2014-02-17 13:25:55 +01:00
|
|
|
p = SilentPopen(['bzr', 'pull'], cwd=self.local)
|
|
|
|
if p.returncode != 0:
|
2012-01-23 15:15:40 +01:00
|
|
|
raise VCSException("Bzr update failed")
|
|
|
|
self.refreshed = True
|
2013-09-15 23:20:27 +02:00
|
|
|
|
|
|
|
revargs = list(['-r', rev] if rev else [])
|
2014-02-17 13:25:55 +01:00
|
|
|
p = SilentPopen(['bzr', 'revert'] + revargs, cwd=self.local)
|
|
|
|
if p.returncode != 0:
|
2013-09-15 23:20:27 +02:00
|
|
|
raise VCSException("Bzr revert failed")
|
2011-08-07 17:14:54 +02:00
|
|
|
|
2013-10-30 21:54:09 +01:00
|
|
|
def gettags(self):
|
2014-02-17 13:12:25 +01:00
|
|
|
p = SilentPopen(['bzr', 'tags'], cwd=self.local)
|
2013-10-30 21:54:09 +01:00
|
|
|
return [tag.split(' ')[0].strip() for tag in
|
2014-01-27 16:22:25 +01:00
|
|
|
p.stdout.splitlines()]
|
2013-10-30 21:54:09 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2014-02-23 20:09:42 +01:00
|
|
|
def retrieve_string(app_dir, string, xmlfiles=None):
|
2014-03-16 09:43:54 +01:00
|
|
|
|
2014-03-18 15:44:39 +01:00
|
|
|
res_dirs = [
|
2014-05-06 19:50:52 +02:00
|
|
|
os.path.join(app_dir, 'res'),
|
|
|
|
os.path.join(app_dir, 'src/main'),
|
|
|
|
]
|
2014-03-16 09:43:54 +01:00
|
|
|
|
2014-02-23 20:09:42 +01:00
|
|
|
if xmlfiles is None:
|
|
|
|
xmlfiles = []
|
2014-03-18 15:44:39 +01:00
|
|
|
for res_dir in res_dirs:
|
2014-05-02 04:16:32 +02:00
|
|
|
for r, d, f in os.walk(res_dir):
|
2014-03-18 15:44:39 +01:00
|
|
|
if r.endswith('/values'):
|
2014-05-02 04:16:32 +02:00
|
|
|
xmlfiles += [os.path.join(r, x) for x in f if x.endswith('.xml')]
|
2014-02-23 20:09:42 +01:00
|
|
|
|
|
|
|
string_search = None
|
2013-12-02 15:09:59 +01:00
|
|
|
if string.startswith('@string/'):
|
2014-05-27 10:10:15 +02:00
|
|
|
string_search = re.compile(r'.*name="' + string[8:] + '".*?>([^<]+?)<.*').search
|
2013-12-02 15:09:59 +01:00
|
|
|
elif string.startswith('&') and string.endswith(';'):
|
2014-05-06 19:56:44 +02:00
|
|
|
string_search = re.compile(r'.*<!ENTITY.*' + string[1:-1] + '.*?"([^"]+?)".*>').search
|
2014-02-23 20:09:42 +01:00
|
|
|
|
|
|
|
if string_search is not None:
|
|
|
|
for xmlfile in xmlfiles:
|
2013-12-02 15:09:59 +01:00
|
|
|
for line in file(xmlfile):
|
|
|
|
matches = string_search(line)
|
|
|
|
if matches:
|
2014-02-23 20:09:42 +01:00
|
|
|
return retrieve_string(app_dir, matches.group(1), xmlfiles)
|
2014-03-16 23:12:37 +01:00
|
|
|
return None
|
2013-12-02 15:09:59 +01:00
|
|
|
|
2014-05-02 04:16:32 +02:00
|
|
|
return string.replace("\\'", "'")
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-08-13 15:58:43 +02:00
|
|
|
# Return list of existing files that will be used to find the highest vercode
|
2013-08-13 12:02:48 +02:00
|
|
|
def manifest_paths(app_dir, flavour):
|
2013-08-03 16:44:14 +02:00
|
|
|
|
2014-05-06 19:50:52 +02:00
|
|
|
possible_manifests = \
|
|
|
|
[os.path.join(app_dir, 'AndroidManifest.xml'),
|
|
|
|
os.path.join(app_dir, 'src', 'main', 'AndroidManifest.xml'),
|
|
|
|
os.path.join(app_dir, 'src', 'AndroidManifest.xml'),
|
|
|
|
os.path.join(app_dir, 'build.gradle')]
|
2013-08-09 17:15:27 +02:00
|
|
|
|
2013-12-06 12:15:13 +01:00
|
|
|
if flavour:
|
2013-08-13 12:02:48 +02:00
|
|
|
possible_manifests.append(
|
2014-05-06 19:50:52 +02:00
|
|
|
os.path.join(app_dir, 'src', flavour, 'AndroidManifest.xml'))
|
2013-12-30 17:04:16 +01:00
|
|
|
|
2013-08-13 12:02:48 +02:00
|
|
|
return [path for path in possible_manifests if os.path.isfile(path)]
|
2013-08-03 16:44:14 +02:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2014-03-16 23:12:37 +01:00
|
|
|
# Retrieve the package name. Returns the name, or None if not found.
|
2013-08-13 15:58:43 +02:00
|
|
|
def fetch_real_name(app_dir, flavour):
|
2013-06-14 10:06:22 +02:00
|
|
|
app_search = re.compile(r'.*<application.*').search
|
2013-06-11 00:30:30 +02:00
|
|
|
name_search = re.compile(r'.*android:label="([^"]+)".*').search
|
2013-06-14 10:06:22 +02:00
|
|
|
app_found = False
|
2013-08-13 15:58:43 +02:00
|
|
|
for f in manifest_paths(app_dir, flavour):
|
2013-12-30 11:33:37 +01:00
|
|
|
if not has_extension(f, 'xml'):
|
2013-08-13 15:58:43 +02:00
|
|
|
continue
|
2014-03-16 09:43:24 +01:00
|
|
|
logging.debug("fetch_real_name: Checking manifest at " + f)
|
2013-08-13 15:58:43 +02:00
|
|
|
for line in file(f):
|
|
|
|
if not app_found:
|
|
|
|
if app_search(line):
|
|
|
|
app_found = True
|
|
|
|
if app_found:
|
|
|
|
matches = name_search(line)
|
|
|
|
if matches:
|
2014-03-16 09:43:24 +01:00
|
|
|
stringname = matches.group(1)
|
|
|
|
logging.debug("fetch_real_name: using string " + stringname)
|
2014-03-16 23:34:13 +01:00
|
|
|
result = retrieve_string(app_dir, stringname)
|
|
|
|
if result:
|
|
|
|
result = result.strip()
|
|
|
|
return result
|
2014-03-16 23:12:37 +01:00
|
|
|
return None
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-08-15 12:15:44 +02:00
|
|
|
# Retrieve the version name
|
|
|
|
def version_name(original, app_dir, flavour):
|
|
|
|
for f in manifest_paths(app_dir, flavour):
|
2013-12-30 11:33:37 +01:00
|
|
|
if not has_extension(f, 'xml'):
|
2013-08-15 12:15:44 +02:00
|
|
|
continue
|
2014-02-23 20:09:42 +01:00
|
|
|
string = retrieve_string(app_dir, original)
|
2013-11-05 19:42:29 +01:00
|
|
|
if string:
|
2013-08-15 12:15:44 +02:00
|
|
|
return string
|
|
|
|
return original
|
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2014-02-10 23:03:02 +01:00
|
|
|
def get_library_references(root_dir):
|
|
|
|
libraries = []
|
2013-11-04 17:03:43 +01:00
|
|
|
proppath = os.path.join(root_dir, 'project.properties')
|
|
|
|
if not os.path.isfile(proppath):
|
2014-02-10 23:03:02 +01:00
|
|
|
return libraries
|
2013-11-04 17:03:43 +01:00
|
|
|
with open(proppath) as f:
|
|
|
|
for line in f.readlines():
|
|
|
|
if not line.startswith('android.library.reference.'):
|
|
|
|
continue
|
|
|
|
path = line.split('=')[1].strip()
|
|
|
|
relpath = os.path.join(root_dir, path)
|
|
|
|
if not os.path.isdir(relpath):
|
|
|
|
continue
|
2014-01-27 15:59:40 +01:00
|
|
|
logging.info("Found subproject at %s" % path)
|
2014-02-10 23:03:02 +01:00
|
|
|
libraries.append(path)
|
|
|
|
return libraries
|
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2014-02-10 23:03:02 +01:00
|
|
|
def ant_subprojects(root_dir):
|
|
|
|
subprojects = get_library_references(root_dir)
|
|
|
|
for subpath in subprojects:
|
|
|
|
subrelpath = os.path.join(root_dir, subpath)
|
|
|
|
for p in get_library_references(subrelpath):
|
2014-05-02 04:16:32 +02:00
|
|
|
relp = os.path.normpath(os.path.join(subpath, p))
|
2014-02-10 23:03:02 +01:00
|
|
|
if relp not in subprojects:
|
|
|
|
subprojects.insert(0, relp)
|
2013-11-04 17:03:43 +01:00
|
|
|
return subprojects
|
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2014-02-11 17:56:36 +01:00
|
|
|
def remove_debuggable_flags(root_dir):
|
|
|
|
# Remove forced debuggable flags
|
|
|
|
logging.info("Removing debuggable flags")
|
|
|
|
for root, dirs, files in os.walk(root_dir):
|
|
|
|
if 'AndroidManifest.xml' in files:
|
|
|
|
path = os.path.join(root, 'AndroidManifest.xml')
|
2014-05-02 04:16:32 +02:00
|
|
|
p = FDroidPopen(['sed', '-i', 's/android:debuggable="[^"]*"//g', path])
|
2014-02-11 17:56:36 +01:00
|
|
|
if p.returncode != 0:
|
|
|
|
raise BuildException("Failed to remove debuggable flags of %s" % path)
|
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2012-03-10 13:50:34 +01:00
|
|
|
# Extract some information from the AndroidManifest.xml at the given path.
|
|
|
|
# Returns (version, vercode, package), any or all of which might be None.
|
2012-08-23 15:25:39 +02:00
|
|
|
# All values returned are strings.
|
2014-05-13 21:04:22 +02:00
|
|
|
def parse_androidmanifests(paths, ignoreversions=None):
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2013-08-14 15:07:26 +02:00
|
|
|
if not paths:
|
|
|
|
return (None, None, None)
|
|
|
|
|
2014-01-23 10:52:09 +01:00
|
|
|
vcsearch = re.compile(r'.*:versionCode="([0-9]+?)".*').search
|
|
|
|
vnsearch = re.compile(r'.*:versionName="([^"]+?)".*').search
|
2012-03-10 13:50:34 +01:00
|
|
|
psearch = re.compile(r'.*package="([^"]+)".*').search
|
2013-08-13 12:02:48 +02:00
|
|
|
|
2014-04-12 01:00:59 +02:00
|
|
|
vcsearch_g = re.compile(r'.*versionCode *=* *["\']*([0-9]+)["\']*').search
|
|
|
|
vnsearch_g = re.compile(r'.*versionName *=* *(["\'])((?:(?=(\\?))\3.)*?)\1.*').search
|
|
|
|
psearch_g = re.compile(r'.*packageName *=* *["\']([^"]+)["\'].*').search
|
2013-08-13 12:02:48 +02:00
|
|
|
|
2014-05-13 21:04:22 +02:00
|
|
|
ignoresearch = re.compile(ignoreversions).search if ignoreversions else None
|
|
|
|
|
2013-08-13 12:02:48 +02:00
|
|
|
max_version = None
|
|
|
|
max_vercode = None
|
|
|
|
max_package = None
|
|
|
|
|
|
|
|
for path in paths:
|
|
|
|
|
2013-12-30 11:33:37 +01:00
|
|
|
gradle = has_extension(path, 'gradle')
|
2014-06-20 11:10:52 +02:00
|
|
|
version = None
|
|
|
|
vercode = None
|
2013-08-13 15:25:47 +02:00
|
|
|
# Remember package name, may be defined separately from version+vercode
|
|
|
|
package = max_package
|
2013-08-13 12:02:48 +02:00
|
|
|
|
|
|
|
for line in file(path):
|
|
|
|
if not package:
|
|
|
|
if gradle:
|
|
|
|
matches = psearch_g(line)
|
|
|
|
else:
|
|
|
|
matches = psearch(line)
|
|
|
|
if matches:
|
|
|
|
package = matches.group(1)
|
|
|
|
if not version:
|
|
|
|
if gradle:
|
|
|
|
matches = vnsearch_g(line)
|
|
|
|
else:
|
|
|
|
matches = vnsearch(line)
|
|
|
|
if matches:
|
2014-01-07 10:04:11 +01:00
|
|
|
version = matches.group(2 if gradle else 1)
|
2013-08-13 12:02:48 +02:00
|
|
|
if not vercode:
|
|
|
|
if gradle:
|
|
|
|
matches = vcsearch_g(line)
|
|
|
|
else:
|
|
|
|
matches = vcsearch(line)
|
|
|
|
if matches:
|
|
|
|
vercode = matches.group(1)
|
|
|
|
|
2014-06-22 17:36:00 +02:00
|
|
|
# Always grab the package name and version name in case they are not
|
|
|
|
# together with the highest version code
|
|
|
|
if max_package is None and package is not None:
|
2013-08-13 15:25:47 +02:00
|
|
|
max_package = package
|
2014-06-22 17:36:00 +02:00
|
|
|
if max_version is None and version is not None:
|
|
|
|
max_version = version
|
2013-08-13 15:25:47 +02:00
|
|
|
|
2013-08-13 12:02:48 +02:00
|
|
|
if max_vercode is None or (vercode is not None and vercode > max_vercode):
|
2014-05-13 21:04:22 +02:00
|
|
|
if not ignoresearch or not ignoresearch(version):
|
2014-06-22 17:36:00 +02:00
|
|
|
if version is not None:
|
|
|
|
max_version = version
|
|
|
|
if vercode is not None:
|
|
|
|
max_vercode = vercode
|
|
|
|
if package is not None:
|
|
|
|
max_package = package
|
2014-05-13 21:04:22 +02:00
|
|
|
else:
|
|
|
|
max_version = "Ignore"
|
2013-08-13 12:02:48 +02:00
|
|
|
|
2013-08-15 16:01:33 +02:00
|
|
|
if max_version is None:
|
2014-06-20 11:10:52 +02:00
|
|
|
max_version = "Unknown"
|
2013-08-15 16:01:33 +02:00
|
|
|
|
2013-08-13 12:02:48 +02:00
|
|
|
return (max_version, max_vercode, max_package)
|
2012-03-10 13:50:34 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2012-01-02 12:51:14 +01:00
|
|
|
class BuildException(Exception):
|
2014-05-02 04:27:58 +02:00
|
|
|
def __init__(self, value, detail=None):
|
2012-01-02 12:51:14 +01:00
|
|
|
self.value = value
|
2014-01-16 11:17:22 +01:00
|
|
|
self.detail = detail
|
2012-01-02 12:51:14 +01:00
|
|
|
|
2013-05-20 22:19:53 +02:00
|
|
|
def get_wikitext(self):
|
|
|
|
ret = repr(self.value) + "\n"
|
2014-01-16 11:17:22 +01:00
|
|
|
if self.detail:
|
|
|
|
ret += "=detail=\n"
|
2013-05-20 22:19:53 +02:00
|
|
|
ret += "<pre>\n"
|
2014-03-17 14:52:01 +01:00
|
|
|
txt = self.detail[-8192:] if len(self.detail) > 8192 else self.detail
|
|
|
|
ret += str(txt)
|
2013-05-20 22:19:53 +02:00
|
|
|
ret += "</pre>\n"
|
|
|
|
return ret
|
|
|
|
|
2012-01-02 12:51:14 +01:00
|
|
|
def __str__(self):
|
2014-05-20 23:14:19 +02:00
|
|
|
ret = self.value
|
2014-01-16 11:17:22 +01:00
|
|
|
if self.detail:
|
|
|
|
ret += "\n==== detail begin ====\n%s\n==== detail end ====" % self.detail.strip()
|
2012-01-08 19:13:15 +01:00
|
|
|
return ret
|
2012-01-02 12:51:14 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2012-01-02 12:51:14 +01:00
|
|
|
class VCSException(Exception):
|
|
|
|
def __init__(self, value):
|
|
|
|
self.value = value
|
|
|
|
|
|
|
|
def __str__(self):
|
2014-05-20 23:14:19 +02:00
|
|
|
return self.value
|
2012-01-02 12:51:14 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-05-20 13:16:06 +02:00
|
|
|
# Get the specified source library.
|
|
|
|
# Returns the path to it. Normally this is the path to be used when referencing
|
|
|
|
# it, which may be a subdirectory of the actual project. If you want the base
|
|
|
|
# directory of the project, pass 'basepath=True'.
|
2014-02-11 16:29:39 +01:00
|
|
|
def getsrclib(spec, srclib_dir, srclibpaths=[], subdir=None,
|
2014-05-06 19:50:52 +02:00
|
|
|
basepath=False, raw=False, prepare=True, preponly=False):
|
2013-03-01 18:59:01 +01:00
|
|
|
|
2013-11-18 22:31:52 +01:00
|
|
|
number = None
|
|
|
|
subdir = None
|
2013-05-24 23:35:56 +02:00
|
|
|
if raw:
|
|
|
|
name = spec
|
|
|
|
ref = None
|
|
|
|
else:
|
|
|
|
name, ref = spec.split('@')
|
2013-11-18 22:31:52 +01:00
|
|
|
if ':' in name:
|
|
|
|
number, name = name.split(':', 1)
|
|
|
|
if '/' in name:
|
2014-05-02 04:16:32 +02:00
|
|
|
name, subdir = name.split('/', 1)
|
2013-03-01 20:39:30 +01:00
|
|
|
|
2014-05-28 09:33:14 +02:00
|
|
|
if name not in metadata.srclibs:
|
2013-05-20 13:16:06 +02:00
|
|
|
raise BuildException('srclib ' + name + ' not found.')
|
2013-03-15 16:29:29 +01:00
|
|
|
|
2014-05-20 23:44:47 +02:00
|
|
|
srclib = metadata.srclibs[name]
|
2013-04-07 20:39:53 +02:00
|
|
|
|
2013-05-20 13:34:03 +02:00
|
|
|
sdir = os.path.join(srclib_dir, name)
|
2013-04-23 21:11:10 +02:00
|
|
|
|
2013-06-04 23:42:18 +02:00
|
|
|
if not preponly:
|
2013-11-08 20:44:27 +01:00
|
|
|
vcs = getvcs(srclib["Repo Type"], srclib["Repo"], sdir)
|
2013-11-18 22:31:52 +01:00
|
|
|
vcs.srclib = (name, number, sdir)
|
2013-10-23 16:57:02 +02:00
|
|
|
if ref:
|
|
|
|
vcs.gotorevision(ref)
|
2013-06-04 23:42:18 +02:00
|
|
|
|
|
|
|
if raw:
|
|
|
|
return vcs
|
2013-05-24 23:35:56 +02:00
|
|
|
|
2013-05-20 13:16:06 +02:00
|
|
|
libdir = None
|
2013-12-06 12:15:13 +01:00
|
|
|
if subdir:
|
2013-11-16 12:54:23 +01:00
|
|
|
libdir = os.path.join(sdir, subdir)
|
2013-12-06 12:15:13 +01:00
|
|
|
elif srclib["Subdir"]:
|
2013-05-20 13:16:06 +02:00
|
|
|
for subdir in srclib["Subdir"]:
|
|
|
|
libdir_candidate = os.path.join(sdir, subdir)
|
|
|
|
if os.path.exists(libdir_candidate):
|
|
|
|
libdir = libdir_candidate
|
|
|
|
break
|
2013-04-28 19:52:27 +02:00
|
|
|
|
2013-05-20 13:16:06 +02:00
|
|
|
if libdir is None:
|
|
|
|
libdir = sdir
|
2013-05-03 16:53:37 +02:00
|
|
|
|
2013-11-15 20:42:17 +01:00
|
|
|
if srclib["Srclibs"]:
|
2014-03-31 19:23:48 +02:00
|
|
|
n = 1
|
2014-05-02 04:16:32 +02:00
|
|
|
for lib in srclib["Srclibs"].replace(';', ',').split(','):
|
2013-11-15 20:42:17 +01:00
|
|
|
s_tuple = None
|
|
|
|
for t in srclibpaths:
|
|
|
|
if t[0] == lib:
|
|
|
|
s_tuple = t
|
|
|
|
break
|
|
|
|
if s_tuple is None:
|
|
|
|
raise BuildException('Missing recursive srclib %s for %s' % (
|
|
|
|
lib, name))
|
|
|
|
place_srclib(libdir, n, s_tuple[2])
|
2014-03-31 19:23:48 +02:00
|
|
|
n += 1
|
2013-11-15 20:42:17 +01:00
|
|
|
|
2014-02-11 17:56:36 +01:00
|
|
|
remove_signing_keys(sdir)
|
|
|
|
remove_debuggable_flags(sdir)
|
|
|
|
|
2013-06-04 23:42:18 +02:00
|
|
|
if prepare:
|
|
|
|
|
2013-12-06 12:15:13 +01:00
|
|
|
if srclib["Prepare"]:
|
2013-11-08 20:44:27 +01:00
|
|
|
cmd = replace_config_vars(srclib["Prepare"])
|
2013-06-09 23:15:46 +02:00
|
|
|
|
2013-10-16 23:31:02 +02:00
|
|
|
p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=libdir)
|
2013-06-04 23:42:18 +02:00
|
|
|
if p.returncode != 0:
|
2013-10-16 23:17:51 +02:00
|
|
|
raise BuildException("Error running prepare command for srclib %s"
|
2014-05-06 19:50:52 +02:00
|
|
|
% name, p.stdout)
|
2013-12-30 17:04:16 +01:00
|
|
|
|
2013-05-20 13:16:06 +02:00
|
|
|
if basepath:
|
2013-11-20 19:00:22 +01:00
|
|
|
libdir = sdir
|
|
|
|
|
|
|
|
return (name, number, libdir)
|
2012-01-28 01:05:30 +01:00
|
|
|
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2012-01-03 22:39:30 +01:00
|
|
|
# Prepare the source code for a particular build
|
2012-01-04 22:37:11 +01:00
|
|
|
# 'vcs' - the appropriate vcs object for the application
|
|
|
|
# 'app' - the application details from the metadata
|
|
|
|
# 'build' - the build details from the metadata
|
2012-01-27 23:10:08 +01:00
|
|
|
# 'build_dir' - the path to the build directory, usually
|
|
|
|
# 'build/app.id'
|
2013-05-20 13:34:03 +02:00
|
|
|
# 'srclib_dir' - the path to the source libraries directory, usually
|
|
|
|
# 'build/srclib'
|
2012-01-27 23:10:08 +01:00
|
|
|
# 'extlib_dir' - the path to the external libraries directory, usually
|
|
|
|
# 'build/extlib'
|
2013-03-20 10:30:56 +01:00
|
|
|
# Returns the (root, srclibpaths) where:
|
|
|
|
# 'root' is the root directory, which may be the same as 'build_dir' or may
|
|
|
|
# be a subdirectory of it.
|
|
|
|
# 'srclibpaths' is information on the srclibs being used
|
2013-11-08 20:44:27 +01:00
|
|
|
def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=False):
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2014-01-27 15:59:40 +01:00
|
|
|
# Optionally, the actual app source can be in a subdirectory
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['subdir']:
|
2012-01-03 22:39:30 +01:00
|
|
|
root_dir = os.path.join(build_dir, build['subdir'])
|
|
|
|
else:
|
|
|
|
root_dir = build_dir
|
|
|
|
|
2014-01-27 15:59:40 +01:00
|
|
|
# Get a working copy of the right revision
|
|
|
|
logging.info("Getting source for revision " + build['commit'])
|
2012-01-23 15:15:40 +01:00
|
|
|
vcs.gotorevision(build['commit'])
|
2012-01-03 22:39:30 +01:00
|
|
|
|
2014-01-27 15:59:40 +01:00
|
|
|
# Initialise submodules if requred
|
2013-11-09 12:21:43 +01:00
|
|
|
if build['submodules']:
|
2014-01-27 15:59:40 +01:00
|
|
|
logging.info("Initialising submodules")
|
2012-01-03 22:39:30 +01:00
|
|
|
vcs.initsubmodules()
|
|
|
|
|
2014-02-09 19:11:15 +01:00
|
|
|
# Check that a subdir (if we're using one) exists. This has to happen
|
|
|
|
# after the checkout, since it might not exist elsewhere
|
|
|
|
if not os.path.exists(root_dir):
|
|
|
|
raise BuildException('Missing subdir ' + root_dir)
|
|
|
|
|
2014-01-27 15:59:40 +01:00
|
|
|
# Run an init command if one is required
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['init']:
|
2013-11-08 20:44:27 +01:00
|
|
|
cmd = replace_config_vars(build['init'])
|
2014-01-27 15:59:40 +01:00
|
|
|
logging.info("Running 'init' commands in %s" % root_dir)
|
2013-10-09 23:36:24 +02:00
|
|
|
|
2013-11-01 12:10:57 +01:00
|
|
|
p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=root_dir)
|
2013-10-09 23:36:24 +02:00
|
|
|
if p.returncode != 0:
|
|
|
|
raise BuildException("Error running init command for %s:%s" %
|
2014-05-06 19:50:52 +02:00
|
|
|
(app['id'], build['version']), p.stdout)
|
2012-02-04 22:19:07 +01:00
|
|
|
|
2014-01-23 10:29:04 +01:00
|
|
|
# Apply patches if any
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['patch']:
|
|
|
|
logging.info("Applying patches")
|
2014-02-12 11:13:20 +01:00
|
|
|
for patch in build['patch']:
|
2014-01-23 10:29:04 +01:00
|
|
|
patch = patch.strip()
|
2014-01-27 15:59:40 +01:00
|
|
|
logging.info("Applying " + patch)
|
2014-01-23 10:29:04 +01:00
|
|
|
patch_path = os.path.join('metadata', app['id'], patch)
|
2014-02-17 13:25:55 +01:00
|
|
|
p = FDroidPopen(['patch', '-p1', '-i', os.path.abspath(patch_path)], cwd=build_dir)
|
|
|
|
if p.returncode != 0:
|
2014-01-23 10:29:04 +01:00
|
|
|
raise BuildException("Failed to apply patch %s" % patch_path)
|
|
|
|
|
2014-01-27 15:59:40 +01:00
|
|
|
# Get required source libraries
|
2014-01-23 10:29:04 +01:00
|
|
|
srclibpaths = []
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['srclibs']:
|
2014-01-27 15:59:40 +01:00
|
|
|
logging.info("Collecting source libraries")
|
2014-02-12 11:13:20 +01:00
|
|
|
for lib in build['srclibs']:
|
2014-01-23 10:29:04 +01:00
|
|
|
srclibpaths.append(getsrclib(lib, srclib_dir, srclibpaths,
|
2014-05-06 19:50:52 +02:00
|
|
|
preponly=onserver))
|
2014-01-23 10:29:04 +01:00
|
|
|
|
|
|
|
for name, number, libpath in srclibpaths:
|
|
|
|
place_srclib(root_dir, int(number) if number else None, libpath)
|
|
|
|
|
|
|
|
basesrclib = vcs.getsrclib()
|
|
|
|
# If one was used for the main source, add that too.
|
|
|
|
if basesrclib:
|
|
|
|
srclibpaths.append(basesrclib)
|
|
|
|
|
2014-01-27 15:59:40 +01:00
|
|
|
# Update the local.properties file
|
2014-05-02 04:24:48 +02:00
|
|
|
localprops = [os.path.join(build_dir, 'local.properties')]
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['subdir']:
|
2014-05-02 04:24:48 +02:00
|
|
|
localprops += [os.path.join(root_dir, 'local.properties')]
|
2014-01-21 10:14:37 +01:00
|
|
|
for path in localprops:
|
|
|
|
if not os.path.isfile(path):
|
|
|
|
continue
|
2014-01-27 15:59:40 +01:00
|
|
|
logging.info("Updating properties file at %s" % path)
|
2014-01-21 10:14:37 +01:00
|
|
|
f = open(path, 'r')
|
|
|
|
props = f.read()
|
|
|
|
f.close()
|
|
|
|
props += '\n'
|
|
|
|
# Fix old-fashioned 'sdk-location' by copying
|
2014-01-27 15:59:40 +01:00
|
|
|
# from sdk.dir, if necessary
|
2014-01-21 10:14:37 +01:00
|
|
|
if build['oldsdkloc']:
|
|
|
|
sdkloc = re.match(r".*^sdk.dir=(\S+)$.*", props,
|
2014-05-06 19:50:52 +02:00
|
|
|
re.S | re.M).group(1)
|
2014-01-21 10:14:37 +01:00
|
|
|
props += "sdk-location=%s\n" % sdkloc
|
|
|
|
else:
|
|
|
|
props += "sdk.dir=%s\n" % config['sdk_path']
|
2014-02-10 18:26:33 +01:00
|
|
|
props += "sdk-location=%s\n" % config['sdk_path']
|
2014-01-26 21:28:09 +01:00
|
|
|
if 'ndk_path' in config:
|
2014-01-27 15:59:40 +01:00
|
|
|
# Add ndk location
|
2014-01-26 21:28:09 +01:00
|
|
|
props += "ndk.dir=%s\n" % config['ndk_path']
|
|
|
|
props += "ndk-location=%s\n" % config['ndk_path']
|
2014-01-27 15:59:40 +01:00
|
|
|
# Add java.encoding if necessary
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['encoding']:
|
2014-01-21 10:14:37 +01:00
|
|
|
props += "java.encoding=%s\n" % build['encoding']
|
|
|
|
f = open(path, 'w')
|
|
|
|
f.write(props)
|
|
|
|
f.close()
|
|
|
|
|
2013-10-20 22:27:34 +02:00
|
|
|
flavour = None
|
2014-01-10 20:39:39 +01:00
|
|
|
if build['type'] == 'gradle':
|
2013-10-20 22:27:34 +02:00
|
|
|
flavour = build['gradle'].split('@')[0]
|
|
|
|
if flavour in ['main', 'yes', '']:
|
|
|
|
flavour = None
|
2013-10-30 17:17:44 +01:00
|
|
|
|
2014-06-12 10:00:46 +02:00
|
|
|
version_regex = re.compile(r".*'com\.android\.tools\.build:gradle:([^\.]+\.[^\.]+).*'.*")
|
|
|
|
gradlepluginver = None
|
|
|
|
|
2014-06-19 12:41:34 +02:00
|
|
|
gradle_files = [os.path.join(root_dir, 'build.gradle')]
|
|
|
|
|
|
|
|
# Parent dir build.gradle
|
|
|
|
parent_dir = os.path.normpath(os.path.join(root_dir, '..'))
|
|
|
|
if parent_dir.startswith(build_dir):
|
|
|
|
gradle_files.append(os.path.join(parent_dir, 'build.gradle'))
|
|
|
|
|
|
|
|
# Gradle execution dir build.gradle
|
|
|
|
if '@' in build['gradle']:
|
|
|
|
gradle_file = os.path.join(root_dir, build['gradle'].split('@', 1)[1], 'build.gradle')
|
|
|
|
gradle_file = os.path.normpath(gradle_file)
|
|
|
|
if gradle_file not in gradle_files:
|
|
|
|
gradle_files.append(gradle_file)
|
|
|
|
|
|
|
|
for path in gradle_files:
|
|
|
|
if gradlepluginver:
|
|
|
|
break
|
|
|
|
if not os.path.isfile(path):
|
|
|
|
continue
|
|
|
|
with open(path) as f:
|
2014-06-12 10:00:46 +02:00
|
|
|
for line in f:
|
|
|
|
match = version_regex.match(line)
|
|
|
|
if match:
|
|
|
|
gradlepluginver = match.group(1)
|
|
|
|
break
|
|
|
|
|
2014-06-19 12:41:34 +02:00
|
|
|
if gradlepluginver:
|
|
|
|
build['gradlepluginver'] = LooseVersion(gradlepluginver)
|
|
|
|
else:
|
|
|
|
logging.warn("Could not fetch the gradle plugin version, defaulting to 0.11")
|
|
|
|
build['gradlepluginver'] = LooseVersion('0.11')
|
2014-06-12 10:00:46 +02:00
|
|
|
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['target']:
|
2014-01-28 14:14:18 +01:00
|
|
|
n = build["target"].split('-')[1]
|
2014-02-17 13:25:55 +01:00
|
|
|
FDroidPopen(['sed', '-i',
|
2014-05-06 19:56:44 +02:00
|
|
|
's@compileSdkVersion *[0-9]*@compileSdkVersion ' + n + '@g',
|
2014-05-06 19:50:52 +02:00
|
|
|
'build.gradle'],
|
|
|
|
cwd=root_dir)
|
2014-01-28 14:14:18 +01:00
|
|
|
if '@' in build['gradle']:
|
2014-05-02 04:16:32 +02:00
|
|
|
gradle_dir = os.path.join(root_dir, build['gradle'].split('@', 1)[1])
|
2014-01-28 14:14:18 +01:00
|
|
|
gradle_dir = os.path.normpath(gradle_dir)
|
2014-02-17 13:25:55 +01:00
|
|
|
FDroidPopen(['sed', '-i',
|
2014-05-06 19:56:44 +02:00
|
|
|
's@compileSdkVersion *[0-9]*@compileSdkVersion ' + n + '@g',
|
2014-05-06 19:50:52 +02:00
|
|
|
'build.gradle'],
|
|
|
|
cwd=gradle_dir)
|
2014-01-28 14:01:32 +01:00
|
|
|
|
2013-10-30 17:17:44 +01:00
|
|
|
# Remove forced debuggable flags
|
2014-02-11 17:56:36 +01:00
|
|
|
remove_debuggable_flags(root_dir)
|
2013-10-30 17:17:44 +01:00
|
|
|
|
2014-01-27 15:59:40 +01:00
|
|
|
# Insert version code and number into the manifest if necessary
|
2013-11-09 12:21:43 +01:00
|
|
|
if build['forceversion']:
|
2014-01-27 15:59:40 +01:00
|
|
|
logging.info("Changing the version name")
|
2013-10-20 22:27:34 +02:00
|
|
|
for path in manifest_paths(root_dir, flavour):
|
2013-10-20 13:43:15 +02:00
|
|
|
if not os.path.isfile(path):
|
|
|
|
continue
|
2013-12-30 11:33:37 +01:00
|
|
|
if has_extension(path, 'xml'):
|
2014-02-17 13:25:55 +01:00
|
|
|
p = SilentPopen(['sed', '-i',
|
2014-05-06 19:50:52 +02:00
|
|
|
's/android:versionName="[^"]*"/android:versionName="'
|
|
|
|
+ build['version'] + '"/g',
|
|
|
|
path])
|
2014-02-17 13:25:55 +01:00
|
|
|
if p.returncode != 0:
|
2013-11-04 16:09:23 +01:00
|
|
|
raise BuildException("Failed to amend manifest")
|
2013-12-30 11:33:37 +01:00
|
|
|
elif has_extension(path, 'gradle'):
|
2014-02-17 13:25:55 +01:00
|
|
|
p = SilentPopen(['sed', '-i',
|
2014-05-06 19:50:52 +02:00
|
|
|
's/versionName *=* *"[^"]*"/versionName = "'
|
|
|
|
+ build['version'] + '"/g',
|
|
|
|
path])
|
2014-02-17 13:25:55 +01:00
|
|
|
if p.returncode != 0:
|
2013-11-04 16:09:23 +01:00
|
|
|
raise BuildException("Failed to amend build.gradle")
|
2013-11-09 12:21:43 +01:00
|
|
|
if build['forcevercode']:
|
2014-01-27 15:59:40 +01:00
|
|
|
logging.info("Changing the version code")
|
2013-10-20 22:27:34 +02:00
|
|
|
for path in manifest_paths(root_dir, flavour):
|
2013-10-20 13:43:15 +02:00
|
|
|
if not os.path.isfile(path):
|
|
|
|
continue
|
2013-12-30 11:33:37 +01:00
|
|
|
if has_extension(path, 'xml'):
|
2014-02-17 13:25:55 +01:00
|
|
|
p = SilentPopen(['sed', '-i',
|
2014-05-06 19:50:52 +02:00
|
|
|
's/android:versionCode="[^"]*"/android:versionCode="'
|
|
|
|
+ build['vercode'] + '"/g',
|
|
|
|
path])
|
2014-02-17 13:25:55 +01:00
|
|
|
if p.returncode != 0:
|
2013-11-04 16:09:23 +01:00
|
|
|
raise BuildException("Failed to amend manifest")
|
2013-12-30 11:33:37 +01:00
|
|
|
elif has_extension(path, 'gradle'):
|
2014-02-17 13:25:55 +01:00
|
|
|
p = SilentPopen(['sed', '-i',
|
2014-05-06 19:50:52 +02:00
|
|
|
's/versionCode *=* *[0-9]*/versionCode = '
|
|
|
|
+ build['vercode'] + '/g',
|
|
|
|
path])
|
2014-02-17 13:25:55 +01:00
|
|
|
if p.returncode != 0:
|
2013-11-04 16:09:23 +01:00
|
|
|
raise BuildException("Failed to amend build.gradle")
|
2012-01-03 22:39:30 +01:00
|
|
|
|
2014-01-27 15:59:40 +01:00
|
|
|
# Delete unwanted files
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['rm']:
|
|
|
|
logging.info("Removing specified files")
|
2014-04-15 23:58:12 +02:00
|
|
|
for part in getpaths(build_dir, build, 'rm'):
|
2014-02-17 14:59:55 +01:00
|
|
|
dest = os.path.join(build_dir, part)
|
|
|
|
logging.info("Removing {0}".format(part))
|
|
|
|
if os.path.lexists(dest):
|
|
|
|
if os.path.islink(dest):
|
|
|
|
SilentPopen(['unlink ' + dest], shell=True)
|
2013-11-24 11:29:28 +01:00
|
|
|
else:
|
2014-02-17 14:59:55 +01:00
|
|
|
SilentPopen(['rm -rf ' + dest], shell=True)
|
2013-11-24 11:29:28 +01:00
|
|
|
else:
|
2014-01-27 15:59:40 +01:00
|
|
|
logging.info("...but it didn't exist")
|
2012-01-03 22:39:30 +01:00
|
|
|
|
2013-11-24 10:39:12 +01:00
|
|
|
remove_signing_keys(build_dir)
|
|
|
|
|
2014-01-27 15:59:40 +01:00
|
|
|
# Add required external libraries
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['extlibs']:
|
2014-01-27 15:59:40 +01:00
|
|
|
logging.info("Collecting prebuilt libraries")
|
2012-01-27 23:10:08 +01:00
|
|
|
libsdir = os.path.join(root_dir, 'libs')
|
|
|
|
if not os.path.exists(libsdir):
|
|
|
|
os.mkdir(libsdir)
|
2014-02-12 11:13:20 +01:00
|
|
|
for lib in build['extlibs']:
|
2013-09-11 13:45:02 +02:00
|
|
|
lib = lib.strip()
|
2014-01-27 15:59:40 +01:00
|
|
|
logging.info("...installing extlib {0}".format(lib))
|
2012-01-27 23:10:08 +01:00
|
|
|
libf = os.path.basename(lib)
|
2013-11-12 21:14:16 +01:00
|
|
|
libsrc = os.path.join(extlib_dir, lib)
|
|
|
|
if not os.path.exists(libsrc):
|
|
|
|
raise BuildException("Missing extlib file {0}".format(libsrc))
|
|
|
|
shutil.copyfile(libsrc, os.path.join(libsdir, libf))
|
2012-01-27 23:10:08 +01:00
|
|
|
|
2014-01-27 15:59:40 +01:00
|
|
|
# Run a pre-build command if one is required
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['prebuild']:
|
|
|
|
logging.info("Running 'prebuild' commands in %s" % root_dir)
|
|
|
|
|
2013-11-08 20:44:27 +01:00
|
|
|
cmd = replace_config_vars(build['prebuild'])
|
2013-08-26 23:52:04 +02:00
|
|
|
|
2014-01-27 15:59:40 +01:00
|
|
|
# Substitute source library paths into prebuild commands
|
2013-11-15 20:42:17 +01:00
|
|
|
for name, number, libpath in srclibpaths:
|
2012-01-28 01:05:30 +01:00
|
|
|
libpath = os.path.relpath(libpath, root_dir)
|
2013-10-09 23:36:24 +02:00
|
|
|
cmd = cmd.replace('$$' + name + '$$', libpath)
|
2013-11-08 20:44:27 +01:00
|
|
|
|
2013-11-01 12:10:57 +01:00
|
|
|
p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=root_dir)
|
2012-09-24 15:06:15 +02:00
|
|
|
if p.returncode != 0:
|
2013-10-09 23:36:24 +02:00
|
|
|
raise BuildException("Error running prebuild command for %s:%s" %
|
2014-05-06 19:50:52 +02:00
|
|
|
(app['id'], build['version']), p.stdout)
|
2012-01-03 22:39:30 +01:00
|
|
|
|
2014-02-11 16:30:49 +01:00
|
|
|
# Generate (or update) the ant build file, build.xml...
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['update'] and build['update'] != ['no'] and build['type'] == 'ant':
|
2014-02-11 16:30:49 +01:00
|
|
|
parms = [os.path.join(config['sdk_path'], 'tools', 'android'), 'update']
|
|
|
|
lparms = parms + ['lib-project']
|
|
|
|
parms = parms + ['project']
|
|
|
|
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['target']:
|
2014-02-11 16:30:49 +01:00
|
|
|
parms += ['-t', build['target']]
|
|
|
|
lparms += ['-t', build['target']]
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['update'] == ['auto']:
|
2014-02-11 16:30:49 +01:00
|
|
|
update_dirs = ant_subprojects(root_dir) + ['.']
|
|
|
|
else:
|
2014-05-31 23:10:16 +02:00
|
|
|
update_dirs = build['update']
|
2014-02-11 16:30:49 +01:00
|
|
|
|
|
|
|
for d in update_dirs:
|
|
|
|
subdir = os.path.join(root_dir, d)
|
|
|
|
if d == '.':
|
2014-02-16 00:27:19 +01:00
|
|
|
print("Updating main project")
|
2014-02-11 16:30:49 +01:00
|
|
|
cmd = parms + ['-p', d]
|
|
|
|
else:
|
2014-02-16 00:27:19 +01:00
|
|
|
print("Updating subproject %s" % d)
|
2014-02-11 16:30:49 +01:00
|
|
|
cmd = lparms + ['-p', d]
|
|
|
|
p = FDroidPopen(cmd, cwd=root_dir)
|
|
|
|
# Check to see whether an error was returned without a proper exit
|
|
|
|
# code (this is the case for the 'no target set or target invalid'
|
|
|
|
# error)
|
|
|
|
if p.returncode != 0 or p.stdout.startswith("Error: "):
|
|
|
|
raise BuildException("Failed to update project at %s" % d, p.stdout)
|
|
|
|
# Clean update dirs via ant
|
2014-02-13 09:19:26 +01:00
|
|
|
if d != '.':
|
2014-02-11 16:30:49 +01:00
|
|
|
logging.info("Cleaning subproject %s" % d)
|
2014-02-13 09:19:26 +01:00
|
|
|
p = FDroidPopen(['ant', 'clean'], cwd=subdir)
|
2014-02-11 16:30:49 +01:00
|
|
|
|
2013-03-20 10:30:56 +01:00
|
|
|
return (root_dir, srclibpaths)
|
2012-01-03 22:39:30 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2014-04-15 23:53:44 +02:00
|
|
|
# Split and extend via globbing the paths from a field
def getpaths(build_dir, build, field):
    """Expand the glob patterns in build[field] into paths relative to build_dir."""
    prefix_len = len(build_dir) + 1
    matched = []
    for pattern in build[field]:
        # Anchor the (stripped) pattern under build_dir before globbing
        target = os.path.normpath(os.path.join(build_dir, pattern.strip()))
        matched.extend(hit[prefix_len:] for hit in glob.glob(target))
    return matched
|
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2012-02-02 23:13:31 +01:00
|
|
|
# Scan the source code in the given directory (and all subdirectories)
# and return the number of fatal problems encountered
def scan_source(build_dir, root_dir, thisbuild):
    """Scan a source tree for known non-free blobs and binary files.

    :param build_dir: the checked-out source tree to scan
    :param root_dir: the project root within build_dir (used for the jni check)
    :param thisbuild: build metadata dict; 'scanignore' and 'scandelete' list
                      path prefixes to skip or auto-delete, and 'buildjni'
                      silences the jni-directory warning
    :returns: the number of fatal problems found (0 means a clean scan)
    """

    count = 0

    # Common known non-free blobs (always lower case):
    usual_suspects = [
        re.compile(r'flurryagent', re.IGNORECASE),
        re.compile(r'paypal.*mpl', re.IGNORECASE),
        re.compile(r'libgoogleanalytics', re.IGNORECASE),
        re.compile(r'admob.*sdk.*android', re.IGNORECASE),
        re.compile(r'googleadview', re.IGNORECASE),
        re.compile(r'googleadmobadssdk', re.IGNORECASE),
        re.compile(r'google.*play.*services', re.IGNORECASE),
        re.compile(r'crittercism', re.IGNORECASE),
        re.compile(r'heyzap', re.IGNORECASE),
        re.compile(r'jpct.*ae', re.IGNORECASE),
        re.compile(r'youtubeandroidplayerapi', re.IGNORECASE),
        re.compile(r'bugsense', re.IGNORECASE),
        re.compile(r'crashlytics', re.IGNORECASE),
        re.compile(r'ouya.*sdk', re.IGNORECASE),
        re.compile(r'libspen23', re.IGNORECASE),
        ]

    scanignore = getpaths(build_dir, thisbuild, 'scanignore')
    scandelete = getpaths(build_dir, thisbuild, 'scandelete')

    # Use the libmagic binding when available; some python-magic variants
    # lack magic.open, in which case fall back to magic.from_file below
    try:
        ms = magic.open(magic.MIME_TYPE)
        ms.load()
    except AttributeError:
        ms = None

    def toignore(fd):
        # True if the relative path starts with a scanignore prefix
        for i in scanignore:
            if fd.startswith(i):
                return True
        return False

    def todelete(fd):
        # True if the relative path starts with a scandelete prefix
        for i in scandelete:
            if fd.startswith(i):
                return True
        return False

    def removeproblem(what, fd, fp):
        logging.info('Removing %s at %s' % (what, fd))
        os.remove(fp)

    def warnproblem(what, fd):
        # Non-fatal: logged but does not increment the problem count
        logging.warn('Found %s at %s' % (what, fd))

    def handleproblem(what, fd, fp):
        # Delete the file if it is covered by scandelete; otherwise it is
        # fatal. Returns True (counted as 1) when fatal, False otherwise.
        if todelete(fd):
            removeproblem(what, fd, fp)
        else:
            logging.error('Found %s at %s' % (what, fd))
            return True
        return False

    def insidedir(path, dirname):
        return path.endswith('/%s' % dirname) or '/%s/' % dirname in path

    # Iterate through all files in the source code
    for r, d, f in os.walk(build_dir):

        # Skip VCS metadata directories entirely
        if any(insidedir(r, d) for d in ('.hg', '.git', '.svn', '.bzr')):
            continue

        for curfile in f:

            # Path (relative) to the file
            fp = os.path.join(r, curfile)
            fd = fp[len(build_dir) + 1:]

            # Check if this file has been explicitly excluded from scanning
            if toignore(fd):
                continue

            mime = magic.from_file(fp, mime=True) if ms is None else ms.file(fp)

            if mime == 'application/x-sharedlib':
                count += handleproblem('shared library', fd, fp)

            elif mime == 'application/x-archive':
                count += handleproblem('static library', fd, fp)

            elif mime == 'application/x-executable':
                count += handleproblem('binary executable', fd, fp)

            elif mime == 'application/x-java-applet':
                count += handleproblem('Java compiled class', fd, fp)

            elif mime in (
                    'application/jar',
                    'application/zip',
                    'application/java-archive',
                    'application/octet-stream',
                    'binary',
                    ):

                if has_extension(fp, 'apk'):
                    removeproblem('APK file', fd, fp)

                elif has_extension(fp, 'jar'):

                    if any(suspect.match(curfile) for suspect in usual_suspects):
                        # Fixed typo: was 'usual supect'
                        count += handleproblem('usual suspect', fd, fp)
                    else:
                        warnproblem('JAR file', fd)

                elif has_extension(fp, 'zip'):
                    warnproblem('ZIP file', fd)

                else:
                    warnproblem('unknown compressed or binary file', fd)

            elif has_extension(fp, 'java'):
                # open() instead of the py2-only file(), and 'with' so the
                # handle is closed promptly instead of leaking until GC
                with open(fp) as javafile:
                    for line in javafile:
                        if 'DexClassLoader' in line:
                            count += handleproblem('DexClassLoader', fd, fp)
                            break

    if ms is not None:
        ms.close()

    # Presence of a jni directory without buildjni=yes might
    # indicate a problem (if it's not a problem, explicitly use
    # buildjni=no to bypass this check)
    if (os.path.exists(os.path.join(root_dir, 'jni')) and
            not thisbuild['buildjni']):
        logging.error('Found jni directory, but buildjni is not enabled. Set it to \'no\' to ignore.')
        count += 1

    return count
|
2012-02-02 23:13:31 +01:00
|
|
|
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2012-01-17 18:25:28 +01:00
|
|
|
class KnownApks:
    """Tracks the apks seen in the repo and the date each was first added.

    Backed by stats/known_apks.txt, where each line is
    'apkname appid [yyyy-mm-dd]' — the date is absent for entries recorded
    before dates were tracked.
    """

    def __init__(self):
        self.path = os.path.join('stats', 'known_apks.txt')
        self.apks = {}
        if os.path.exists(self.path):
            # 'with open' instead of bare file(): closes the handle
            # deterministically and also works on python3
            with open(self.path) as f:
                for line in f:
                    t = line.rstrip().split(' ')
                    if len(t) == 2:
                        self.apks[t[0]] = (t[1], None)
                    else:
                        self.apks[t[0]] = (t[1], time.strptime(t[2], '%Y-%m-%d'))
        self.changed = False

    def writeifchanged(self):
        # Persist the (sorted) list, but only when something changed
        if not self.changed:
            return
        if not os.path.exists('stats'):
            os.mkdir('stats')
        lst = []
        for apk, app in self.apks.items():
            appid, added = app
            line = apk + ' ' + appid
            if added:
                line += ' ' + time.strftime('%Y-%m-%d', added)
            lst.append(line)
        # 'with' ensures the file is closed even if a write fails
        with open(self.path, 'w') as f:
            for line in sorted(lst):
                f.write(line + '\n')

    # Record an apk (if it's new, otherwise does nothing)
    # Returns the date it was added.
    def recordapk(self, apk, app):
        if apk not in self.apks:
            self.apks[apk] = (app, time.gmtime(time.time()))
            self.changed = True
        _, added = self.apks[apk]
        return added

    # Look up information - given the 'apkname', returns (app id, date added/None).
    # Or returns None for an unknown apk.
    def getapp(self, apkname):
        return self.apks.get(apkname)

    # Get the most recent 'num' apps added to the repo, as a list of package ids
    # with the most recent first.
    def getlatest(self, num):
        apps = {}
        for apk, app in self.apks.items():
            appid, added = app
            if added:
                # Keep the earliest date each app was seen
                if appid not in apps or apps[appid] > added:
                    apps[appid] = added
        sortedapps = sorted(apps.items(), key=operator.itemgetter(1))[-num:]
        lst = [app for app, _ in sortedapps]
        lst.reverse()
        return lst
|
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-10-31 16:37:39 +01:00
|
|
|
def isApkDebuggable(apkfile, config):
    """Returns True if the given apk file is debuggable

    :param apkfile: full path to the apk to check"""

    aapt = os.path.join(config['sdk_path'], 'build-tools',
                        config['build_tools'], 'aapt')
    p = SilentPopen([aapt, 'dump', 'xmltree', apkfile, 'AndroidManifest.xml'])
    if p.returncode != 0:
        logging.critical("Failed to get apk manifest information")
        sys.exit(1)
    # aapt prints the attribute with its raw value; anything other than a
    # literal 0x0 at the end of the line means debugging is enabled
    return any('android:debuggable' in line and not line.endswith('0x0')
               for line in p.stdout.splitlines())
|
|
|
|
|
|
|
|
|
2013-10-16 23:17:51 +02:00
|
|
|
class AsynchronousFileReader(threading.Thread):
    """Reads a file object line by line on a background thread, handing each
    line over through a Queue so another thread can consume them without
    blocking on the file."""

    def __init__(self, fd, queue):
        # Fail fast on misuse: we need a real Queue and a readable fd
        assert isinstance(queue, Queue.Queue)
        assert callable(fd.readline)
        threading.Thread.__init__(self)
        self._source = fd
        self._lines = queue

    def run(self):
        """Thread body: push every line read from the fd onto the queue."""
        while True:
            line = self._source.readline()
            if line == '':
                # readline() returns the empty string only at end of file
                break
            self._lines.put(line)

    def eof(self):
        """True once the reader thread has finished and the queue is drained."""
        return not self.is_alive() and self._lines.empty()
|
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-10-16 23:17:51 +02:00
|
|
|
class PopenResult:
    """Holds the outcome of an FDroidPopen call."""
    # Exit status of the child process (None until it has finished)
    returncode = None
    # Accumulated output text of the child (stderr is merged into stdout
    # by FDroidPopen, so this is the combined stream)
    stdout = ''
|
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2014-02-17 13:25:55 +01:00
|
|
|
def SilentPopen(commands, cwd=None, shell=False):
    """Convenience wrapper: run FDroidPopen with console echo suppressed."""
    return FDroidPopen(commands, output=False, cwd=cwd, shell=shell)
|
2014-02-17 13:12:25 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2014-06-22 21:29:07 +02:00
|
|
|
def FDroidPopen(commands, cwd=None, shell=False, output=True):
    """
    Run a command and capture the possibly huge output.

    stderr is merged into stdout, and the combined text accumulates in the
    result (and, when output is True, is echoed live to the console).

    :param commands: command and argument list like in subprocess.Popen
    :param cwd: optionally specifies a working directory
    :param shell: passed through to subprocess.Popen (run via the shell)
    :param output: when True, echo the child's output to stdout as it arrives
    :returns: A PopenResult.
    """

    if cwd:
        cwd = os.path.normpath(cwd)
        logging.debug("Directory: %s" % cwd)
    logging.debug("> %s" % ' '.join(commands))

    result = PopenResult()
    # Merge stderr into stdout so there is a single stream to consume
    p = subprocess.Popen(commands, cwd=cwd, shell=shell,
                         stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    # A background thread drains the pipe so the child never blocks on a
    # full pipe buffer, however much it writes
    stdout_queue = Queue.Queue()
    stdout_reader = AsynchronousFileReader(p.stdout, stdout_queue)
    stdout_reader.start()

    # Check the queue for output (until there is no more to get)
    while not stdout_reader.eof():
        while not stdout_queue.empty():
            line = stdout_queue.get()
            if output:
                # Output directly to console
                sys.stdout.write(line)
                sys.stdout.flush()
            result.stdout += line

        # Poll roughly ten times a second rather than busy-waiting
        time.sleep(0.1)

    # Reap the child and collect its exit status
    p.communicate()
    result.returncode = p.returncode
    return result
|
2013-10-27 23:43:38 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-11-14 14:09:37 +01:00
|
|
|
def remove_signing_keys(build_dir):
    """Strip signing configuration out of a source tree before building.

    Rewrites every build.gradle under build_dir to drop signingConfigs
    blocks and related lines, and removes key.store*/key.alias* entries
    from ant/eclipse property files, so builds never reference the
    upstream developer's signing keys.
    """
    comment = re.compile(r'[ ]*//')
    signing_configs = re.compile(r'^[\t ]*signingConfigs[ \t]*{[ \t]*$')
    # Any single gradle line matching one of these is dropped outright
    line_matches = [
        re.compile(r'^[\t ]*signingConfig [^ ]*$'),
        re.compile(r'.*android\.signingConfigs\.[^{]*$'),
        re.compile(r'.*variant\.outputFile = .*'),
        re.compile(r'.*\.readLine\(.*'),
        ]
    for root, dirs, files in os.walk(build_dir):
        if 'build.gradle' in files:
            path = os.path.join(root, 'build.gradle')

            with open(path, "r") as o:
                lines = o.readlines()

            changed = False

            opened = 0
            with open(path, "w") as o:
                for line in lines:
                    # NOTE: //-comment lines are dropped from the output
                    # entirely, not preserved
                    if comment.match(line):
                        continue

                    # Inside a signingConfigs { ... } block: track brace
                    # nesting and drop every line until the block closes
                    if opened > 0:
                        opened += line.count('{')
                        opened -= line.count('}')
                        continue

                    if signing_configs.match(line):
                        changed = True
                        opened += 1
                        continue

                    if any(s.match(line) for s in line_matches):
                        changed = True
                        continue

                    if opened == 0:
                        o.write(line)

            if changed:
                logging.info("Cleaned build.gradle of keysigning configs at %s" % path)

        for propfile in [
                'project.properties',
                'build.properties',
                'default.properties',
                'ant.properties',
                ]:
            if propfile in files:
                path = os.path.join(root, propfile)

                with open(path, "r") as o:
                    lines = o.readlines()

                changed = False

                with open(path, "w") as o:
                    for line in lines:
                        # Drop key.store* and key.alias* signing properties
                        if any(line.startswith(s) for s in ('key.store', 'key.alias')):
                            changed = True
                            continue

                        o.write(line)

                if changed:
                    logging.info("Cleaned %s of keysigning configs at %s" % (propfile, path))
|
2013-11-15 12:42:39 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-11-08 20:44:27 +01:00
|
|
|
def replace_config_vars(cmd):
    """Expand the $$SDK$$, $$NDK$$ and $$MVN3$$ placeholders in a build
    command with the corresponding paths from the loaded config."""
    for placeholder, key in (('$$SDK$$', 'sdk_path'),
                             ('$$NDK$$', 'ndk_path'),
                             ('$$MVN3$$', 'mvn3')):
        cmd = cmd.replace(placeholder, config[key])
    return cmd
|
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-11-15 20:42:17 +01:00
|
|
|
def place_srclib(root_dir, number, libpath):
    """Register srclib number 'number' as an android library reference in
    root_dir's project.properties, pointing at libpath (made relative)."""
    # A falsy number means this srclib is not referenced via
    # project.properties at all
    if not number:
        return
    relpath = os.path.relpath(libpath, root_dir)
    proppath = os.path.join(root_dir, 'project.properties')

    lines = []
    if os.path.isfile(proppath):
        with open(proppath, "r") as o:
            lines = o.readlines()

    with open(proppath, "w") as o:
        placed = False
        for line in lines:
            # Overwrite an existing reference with the same number in place
            if line.startswith('android.library.reference.%d=' % number):
                o.write('android.library.reference.%d=%s\n' % (number, relpath))
                placed = True
            else:
                o.write(line)
        if not placed:
            # No existing entry for this number: append one at the end
            o.write('android.library.reference.%d=%s\n' % (number, relpath))