# -*- coding: utf-8 -*-
#
# common.py - part of the FDroid server tools
# Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com
# Copyright (C) 2013-2014 Daniel Martí <mvdan@mvdan.cc>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import os
import sys
import re
import shutil
import glob
import requests
import stat
import subprocess
import time
import operator
import Queue
import threading
import logging
import hashlib
import socket
import xml.etree.ElementTree as XMLElementTree

from distutils.version import LooseVersion
from zipfile import ZipFile

import metadata

XMLElementTree.register_namespace('android', 'http://schemas.android.com/apk/res/android')

config = None
options = None
env = None
orig_path = None


default_config = {
    'sdk_path': "$ANDROID_HOME",
    'ndk_paths': {
        'r9b': None,
        'r10e': "$ANDROID_NDK"
    },
    'build_tools': "22.0.1",
    'ant': "ant",
    'mvn3': "mvn",
    'gradle': 'gradle',
    'sync_from_local_copy_dir': False,
    # Generate a constant-name symlink to the current release of each app
    # (e.g. /F-Droid.apk or /org.fdroid.fdroid.apk, depending on
    # 'current_version_name_source'), giving every app a static download URL.
    'make_current_version_link': True,
    'current_version_name_source': 'Name',
    'update_stats': False,
    'stats_ignore': [],
    'stats_server': None,
    'stats_user': None,
    'stats_to_carbon': False,
    'repo_maxage': 0,
    'build_server_always': False,
    'keystore': 'keystore.jks',
    'smartcardoptions': [],
    'char_limits': {
        'Summary': 80,
        'Description': 4000
    },
    'keyaliases': {},
    'repo_url': "https://MyFirstFDroidRepo.org/fdroid/repo",
    'repo_name': "My First FDroid Repo Demo",
    'repo_icon': "fdroid-icon.png",
    'repo_description': '''
        This is a repository of apps to be used with FDroid. Applications in this
        repository are either official binaries built by the original application
        developers, or are binaries built from source by the admin of f-droid.org
        using the tools on https://gitlab.com/u/fdroid.
        ''',
    'archive_older': 0,
}
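
# A minimal config.py overriding these defaults might look like this
# (illustrative values; every setting is optional and falls back to the
# defaults above):
#
#     sdk_path = "/opt/android-sdk"
#     repo_url = "https://example.org/fdroid/repo"
#     repo_name = "Example Repo"
#     make_current_version_link = False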


def fill_config_defaults(thisconfig):
    for k, v in default_config.items():
        if k not in thisconfig:
            thisconfig[k] = v

    # Expand paths (~users and $vars)
    def expand_path(path):
        if path is None:
            return None
        orig = path
        path = os.path.expanduser(path)
        path = os.path.expandvars(path)
        if orig == path:
            return None
        return path

    for k in ['sdk_path', 'ant', 'mvn3', 'gradle', 'keystore', 'repo_icon']:
        v = thisconfig[k]
        exp = expand_path(v)
        if exp is not None:
            thisconfig[k] = exp
            thisconfig[k + '_orig'] = v

    for k in ['ndk_paths']:
        d = thisconfig[k]
        for k2 in d.copy():
            v = d[k2]
            exp = expand_path(v)
            if exp is not None:
                thisconfig[k][k2] = exp
                thisconfig[k][k2 + '_orig'] = v
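
# For illustration (assuming ANDROID_HOME=/opt/android-sdk in the
# environment): expand_path("$ANDROID_HOME") returns "/opt/android-sdk",
# while expand_path("/usr/bin") returns None since nothing was expanded -
# which is how the loops above decide whether to store an '_orig' copy.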


def regsub_file(pattern, repl, path):
    with open(path, 'r') as f:
        text = f.read()
    text = re.sub(pattern, repl, text)
    with open(path, 'w') as f:
        f.write(text)
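
# Example usage (hypothetical file path), rewriting a file in place:
#
#     regsub_file(r'android:debuggable="[^"]*"', '',
#                 'build/some.app/AndroidManifest.xml')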


def read_config(opts, config_file='config.py'):
    """Read the repository config

    The config is read from config_file, which is in the current directory when
    any of the repo management commands are used.
    """
    global config, options, env, orig_path

    if config is not None:
        return config
    if not os.path.isfile(config_file):
        logging.critical("Missing config file - is this a repo directory?")
        sys.exit(2)

    options = opts

    config = {}

    logging.debug("Reading %s" % config_file)
    execfile(config_file, config)

    # smartcardoptions must be a list since it holds command line args for Popen
    if 'smartcardoptions' in config:
        config['smartcardoptions'] = config['smartcardoptions'].split(' ')
    elif 'keystore' in config and config['keystore'] == 'NONE':
        # keystore='NONE' means use smartcard, these are required defaults
        config['smartcardoptions'] = ['-storetype', 'PKCS11', '-providerName',
                                      'SunPKCS11-OpenSC', '-providerClass',
                                      'sun.security.pkcs11.SunPKCS11',
                                      '-providerArg', 'opensc-fdroid.cfg']

    if any(k in config for k in ["keystore", "keystorepass", "keypass"]):
        st = os.stat(config_file)
        if st.st_mode & stat.S_IRWXG or st.st_mode & stat.S_IRWXO:
            logging.warning("unsafe permissions on {0} (should be 0600)!".format(config_file))

    fill_config_defaults(config)

    # There is no standard, so just set up the most common environment
    # variables
    env = os.environ
    orig_path = env['PATH']
    for n in ['ANDROID_HOME', 'ANDROID_SDK']:
        env[n] = config['sdk_path']

    for k in ["keystorepass", "keypass"]:
        if k in config:
            write_password_file(k)

    for k in ["repo_description", "archive_description"]:
        if k in config:
            config[k] = clean_description(config[k])

    if 'serverwebroot' in config:
        if isinstance(config['serverwebroot'], basestring):
            roots = [config['serverwebroot']]
        elif all(isinstance(item, basestring) for item in config['serverwebroot']):
            roots = config['serverwebroot']
        else:
            raise TypeError('only accepts strings, lists, and tuples')
        rootlist = []
        for rootstr in roots:
            # since this is used with rsync, where trailing slashes have
            # meaning, ensure there is always a trailing slash
            if rootstr[-1] != '/':
                rootstr += '/'
            rootlist.append(rootstr.replace('//', '/'))
        config['serverwebroot'] = rootlist

    return config
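
# serverwebroot normalization example (illustrative value): a config.py line
#     serverwebroot = 'user@host:/var/www/fdroid'
# becomes ['user@host:/var/www/fdroid/'] - a one-element list with a
# guaranteed trailing slash, ready to use as an rsync destination.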


def get_ndk_path(version):
    if version is None:
        version = 'r10e'  # falls back to latest
    paths = config['ndk_paths']
    if version not in paths:
        return ''
    return paths[version] or ''


def find_sdk_tools_cmd(cmd):
    '''find a working path to a tool from the Android SDK'''

    tooldirs = []
    if config is not None and 'sdk_path' in config and os.path.exists(config['sdk_path']):
        # try to find a working path to this command, in all the recent possible paths
        if 'build_tools' in config:
            build_tools = os.path.join(config['sdk_path'], 'build-tools')
            # if 'build_tools' was manually set and exists, check only that one
            configed_build_tools = os.path.join(build_tools, config['build_tools'])
            if os.path.exists(configed_build_tools):
                tooldirs.append(configed_build_tools)
            else:
                # no configed version, so hunt known paths for it
                for f in sorted(os.listdir(build_tools), reverse=True):
                    if os.path.isdir(os.path.join(build_tools, f)):
                        tooldirs.append(os.path.join(build_tools, f))
                tooldirs.append(build_tools)
        sdk_tools = os.path.join(config['sdk_path'], 'tools')
        if os.path.exists(sdk_tools):
            tooldirs.append(sdk_tools)
        sdk_platform_tools = os.path.join(config['sdk_path'], 'platform-tools')
        if os.path.exists(sdk_platform_tools):
            tooldirs.append(sdk_platform_tools)
    tooldirs.append('/usr/bin')
    for d in tooldirs:
        if os.path.isfile(os.path.join(d, cmd)):
            return os.path.join(d, cmd)
    # did not find the command, exit with error message
    ensure_build_tools_exists(config)
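
# For example (assuming sdk_path=/opt/android-sdk and build_tools=22.0.1 in
# config), find_sdk_tools_cmd('aapt') would typically return
# '/opt/android-sdk/build-tools/22.0.1/aapt', falling back to /usr/bin/aapt.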


def test_sdk_exists(thisconfig):
    if 'sdk_path' not in thisconfig:
        if 'aapt' in thisconfig and os.path.isfile(thisconfig['aapt']):
            return True
        else:
            logging.error("'sdk_path' not set in config.py!")
            return False
    if thisconfig['sdk_path'] == default_config['sdk_path']:
        logging.error('No Android SDK found!')
        logging.error('You can use ANDROID_HOME to set the path to your SDK, i.e.:')
        logging.error('\texport ANDROID_HOME=/opt/android-sdk')
        return False
    if not os.path.exists(thisconfig['sdk_path']):
        logging.critical('Android SDK path "' + thisconfig['sdk_path'] + '" does not exist!')
        return False
    if not os.path.isdir(thisconfig['sdk_path']):
        logging.critical('Android SDK path "' + thisconfig['sdk_path'] + '" is not a directory!')
        return False
    for d in ['build-tools', 'platform-tools', 'tools']:
        if not os.path.isdir(os.path.join(thisconfig['sdk_path'], d)):
            logging.critical('Android SDK path "%s" does not contain "%s/"!' % (
                thisconfig['sdk_path'], d))
            return False
    return True


def ensure_build_tools_exists(thisconfig):
    if not test_sdk_exists(thisconfig):
        sys.exit(3)
    build_tools = os.path.join(thisconfig['sdk_path'], 'build-tools')
    versioned_build_tools = os.path.join(build_tools, thisconfig['build_tools'])
    if not os.path.isdir(versioned_build_tools):
        logging.critical('Android Build Tools path "'
                         + versioned_build_tools + '" does not exist!')
        sys.exit(3)


def write_password_file(pwtype, password=None):
    '''
    writes out passwords to a protected file instead of passing passwords as
    command line arguments
    '''
    filename = '.fdroid.' + pwtype + '.txt'
    fd = os.open(filename, os.O_CREAT | os.O_TRUNC | os.O_WRONLY, 0o600)
    if password is None:
        os.write(fd, config[pwtype])
    else:
        os.write(fd, password)
    os.close(fd)
    config[pwtype + 'file'] = filename


# Given the arguments in the form of multiple appid:[vc] strings, this returns
# a dictionary with the set of vercodes specified for each package.
def read_pkg_args(args, allow_vercodes=False):

    vercodes = {}
    if not args:
        return vercodes

    for p in args:
        if allow_vercodes and ':' in p:
            package, vercode = p.split(':')
        else:
            package, vercode = p, None
        if package not in vercodes:
            vercodes[package] = [vercode] if vercode else []
            continue
        elif vercode and vercode not in vercodes[package]:
            vercodes[package] += [vercode] if vercode else []

    return vercodes
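
# Example (illustrative app ids):
#     read_pkg_args(['org.example.app', 'org.example.game:10',
#                    'org.example.game:11'], allow_vercodes=True)
# returns {'org.example.app': [], 'org.example.game': ['10', '11']}.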


# On top of what read_pkg_args does, this returns the whole app metadata, but
# limiting the builds list to the builds matching the vercodes specified.
def read_app_args(args, allapps, allow_vercodes=False):

    vercodes = read_pkg_args(args, allow_vercodes)

    if not vercodes:
        return allapps

    apps = {}
    for appid, app in allapps.iteritems():
        if appid in vercodes:
            apps[appid] = app

    if len(apps) != len(vercodes):
        for p in vercodes:
            if p not in allapps:
                logging.critical("No such package: %s" % p)
        raise FDroidException("Found invalid app ids in arguments")
    if not apps:
        raise FDroidException("No packages specified")

    error = False
    for appid, app in apps.iteritems():
        vc = vercodes[appid]
        if not vc:
            continue
        app['builds'] = [b for b in app['builds'] if b['vercode'] in vc]
        if len(app['builds']) != len(vercodes[appid]):
            error = True
            allvcs = [b['vercode'] for b in app['builds']]
            for v in vercodes[appid]:
                if v not in allvcs:
                    logging.critical("No such vercode %s for app %s" % (v, appid))

    if error:
        raise FDroidException("Found invalid vercodes for some apps")

    return apps


def has_extension(filename, extension):
    name, ext = os.path.splitext(filename)
    ext = ext.lower()[1:]
    return ext == extension


apk_regex = None


def clean_description(description):
    'Remove unneeded newlines and spaces from a block of description text'
    returnstring = ''
    # this is split up by paragraph to make removing the newlines easier
    for paragraph in re.split(r'\n\n', description):
        paragraph = re.sub('\r', '', paragraph)
        paragraph = re.sub('\n', ' ', paragraph)
        paragraph = re.sub(' {2,}', ' ', paragraph)
        paragraph = re.sub(r'^\s*(\w)', r'\1', paragraph)
        returnstring += paragraph + '\n\n'
    return returnstring.rstrip('\n')
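
# Example: clean_description("An app\nthat  does\n\nthings.") returns
# "An app that does\n\nthings." - newlines within a paragraph fold into
# single spaces while blank-line paragraph breaks are preserved.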


def apknameinfo(filename):
    global apk_regex
    filename = os.path.basename(filename)
    if apk_regex is None:
        apk_regex = re.compile(r"^(.+)_([0-9]+)\.apk$")
    m = apk_regex.match(filename)
    try:
        result = (m.group(1), m.group(2))
    except AttributeError:
        raise FDroidException("Invalid apk name: %s" % filename)
    return result
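
# Example: apknameinfo('repo/org.fdroid.fdroid_780.apk') returns
# ('org.fdroid.fdroid', '780'); any name not matching <appid>_<vercode>.apk
# raises FDroidException.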


def getapkname(app, build):
    return "%s_%s.apk" % (app['id'], build['vercode'])


def getsrcname(app, build):
    return "%s_%s_src.tar.gz" % (app['id'], build['vercode'])


def getappname(app):
    if app['Name']:
        return app['Name']
    if app['Auto Name']:
        return app['Auto Name']
    return app['id']


def getcvname(app):
    return '%s (%s)' % (app['Current Version'], app['Current Version Code'])


def getvcs(vcstype, remote, local):
    if vcstype == 'git':
        return vcs_git(remote, local)
    if vcstype == 'git-svn':
        return vcs_gitsvn(remote, local)
    if vcstype == 'hg':
        return vcs_hg(remote, local)
    if vcstype == 'bzr':
        return vcs_bzr(remote, local)
    if vcstype == 'srclib':
        if local != os.path.join('build', 'srclib', remote):
            raise VCSException("Error: srclib paths are hard-coded!")
        return getsrclib(remote, os.path.join('build', 'srclib'), raw=True)
    if vcstype == 'svn':
        raise VCSException("Deprecated vcs type 'svn' - please use 'git-svn' instead")
    raise VCSException("Invalid vcs type " + vcstype)
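
# Typical usage (illustrative remote and tag): get a handler, then check out
# a clean copy of a revision:
#
#     vcs = getvcs('git', 'https://gitlab.com/fdroid/fdroidclient.git',
#                  'build/org.fdroid.fdroid')
#     vcs.gotorevision('0.92')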


def getsrclibvcs(name):
    if name not in metadata.srclibs:
        raise VCSException("Missing srclib " + name)
    return metadata.srclibs[name]['Repo Type']


class vcs:

    def __init__(self, remote, local):

        # svn, git-svn and bzr may require auth
        self.username = None
        if self.repotype() in ('git-svn', 'bzr'):
            if '@' in remote:
                if self.repotype() == 'git-svn':
                    raise VCSException("Authentication is not supported for git-svn")
                self.username, remote = remote.split('@')
                if ':' not in self.username:
                    raise VCSException("Password required with username")
                self.username, self.password = self.username.split(':')

        self.remote = remote
        self.local = local
        self.clone_failed = False
        self.refreshed = False
        self.srclib = None

    def repotype(self):
        return None

    # Take the local repository to a clean version of the given revision, which
    # is specified in the VCS's native format. Beforehand, the repository can
    # be dirty, or even non-existent. If the repository does already exist
    # locally, it will be updated from the origin, but only once in the
    # lifetime of the vcs object.
    # None is acceptable for 'rev' if you know you are cloning a clean copy of
    # the repo - otherwise it must specify a valid revision.
    def gotorevision(self, rev, refresh=True):

        if self.clone_failed:
            raise VCSException("Downloading the repository already failed once, not trying again.")

        # The .fdroidvcs-id file for a repo tells us what VCS type
        # and remote that directory was created from, allowing us to drop it
        # automatically if either of those things changes.
        fdpath = os.path.join(self.local, '..',
                              '.fdroidvcs-' + os.path.basename(self.local))
        cdata = self.repotype() + ' ' + self.remote
        writeback = True
        deleterepo = False
        if os.path.exists(self.local):
            if os.path.exists(fdpath):
                with open(fdpath, 'r') as f:
                    fsdata = f.read().strip()
                if fsdata == cdata:
                    writeback = False
                else:
                    deleterepo = True
                    logging.info("Repository details for %s changed - deleting" % (
                        self.local))
            else:
                deleterepo = True
                logging.info("Repository details for %s missing - deleting" % (
                    self.local))
        if deleterepo:
            shutil.rmtree(self.local)

        exc = None
        if not refresh:
            self.refreshed = True

        try:
            self.gotorevisionx(rev)
        except FDroidException as e:
            exc = e

        # If necessary, write the .fdroidvcs file.
        if writeback and not self.clone_failed:
            with open(fdpath, 'w') as f:
                f.write(cdata)

        if exc is not None:
            raise exc

    # Derived classes need to implement this. It's called once basic checking
    # has been performed.
    def gotorevisionx(self, rev):
        raise VCSException("This VCS type doesn't define gotorevisionx")

    # Initialise and update submodules
    def initsubmodules(self):
        raise VCSException('Submodules not supported for this vcs type')

    # Get a list of all known tags
    def gettags(self):
        # subclasses provide _gettags; raise a VCSException rather than an
        # AttributeError when a subclass does not support tags
        if not hasattr(self, '_gettags'):
            raise VCSException('gettags not supported for this vcs type')
        rtags = []
        for tag in self._gettags():
            if re.match('[-A-Za-z0-9_. ]+$', tag):
                rtags.append(tag)
        return rtags

    def latesttags(self, tags, number):
        """Get the most recent tags in a given list.

        :param tags: a list of tags
        :param number: the number to return
        :returns: A list containing the most recent tags in the provided
                  list, up to the maximum number given.
        """
        raise VCSException('latesttags not supported for this vcs type')

    # Get current commit reference (hash, revision, etc)
    def getref(self):
        raise VCSException('getref not supported for this vcs type')

    # Returns the srclib (name, path) used in setting up the current
    # revision, or None.
    def getsrclib(self):
        return self.srclib


class vcs_git(vcs):

    def repotype(self):
        return 'git'

    # If the local directory exists, but is somehow not a git repository, git
    # will traverse up the directory tree until it finds one that is (i.e.
    # fdroidserver) and then we'll proceed to destroy it! This is called as
    # a safety check.
    def checkrepo(self):
        p = FDroidPopen(['git', 'rev-parse', '--show-toplevel'], cwd=self.local, output=False)
        result = p.output.rstrip()
        if not result.endswith(self.local):
            raise VCSException('Repository mismatch')

    def gotorevisionx(self, rev):
        if not os.path.exists(self.local):
            # Brand new checkout
            p = FDroidPopen(['git', 'clone', self.remote, self.local])
            if p.returncode != 0:
                self.clone_failed = True
                raise VCSException("Git clone failed", p.output)
            self.checkrepo()
        else:
            self.checkrepo()
            # Discard any working tree changes
            p = FDroidPopen(['git', 'submodule', 'foreach', '--recursive',
                             'git', 'reset', '--hard'], cwd=self.local, output=False)
            if p.returncode != 0:
                raise VCSException("Git reset failed", p.output)
            # Remove untracked files now, in case they're tracked in the target
            # revision (it happens!)
            p = FDroidPopen(['git', 'submodule', 'foreach', '--recursive',
                             'git', 'clean', '-dffx'], cwd=self.local, output=False)
            if p.returncode != 0:
                raise VCSException("Git clean failed", p.output)
            if not self.refreshed:
                # Get latest commits and tags from remote
                p = FDroidPopen(['git', 'fetch', 'origin'], cwd=self.local)
                if p.returncode != 0:
                    raise VCSException("Git fetch failed", p.output)
                p = FDroidPopen(['git', 'fetch', '--prune', '--tags', 'origin'], cwd=self.local, output=False)
                if p.returncode != 0:
                    raise VCSException("Git fetch failed", p.output)
                # Recreate origin/HEAD as git clone would do it, in case it disappeared
                p = FDroidPopen(['git', 'remote', 'set-head', 'origin', '--auto'], cwd=self.local, output=False)
                if p.returncode != 0:
                    lines = p.output.splitlines()
                    if 'Multiple remote HEAD branches' not in lines[0]:
                        raise VCSException("Git remote set-head failed", p.output)
                    branch = lines[1].split(' ')[-1]
                    p2 = FDroidPopen(['git', 'remote', 'set-head', 'origin', branch], cwd=self.local, output=False)
                    if p2.returncode != 0:
                        raise VCSException("Git remote set-head failed", p.output + '\n' + p2.output)
                self.refreshed = True
        # origin/HEAD is the HEAD of the remote, e.g. the "default branch" on
        # a github repo. Most of the time this is the same as origin/master.
        rev = rev or 'origin/HEAD'
        p = FDroidPopen(['git', 'checkout', '-f', rev], cwd=self.local, output=False)
        if p.returncode != 0:
            raise VCSException("Git checkout of '%s' failed" % rev, p.output)
        # Get rid of any uncontrolled files left behind
        p = FDroidPopen(['git', 'clean', '-dffx'], cwd=self.local, output=False)
        if p.returncode != 0:
            raise VCSException("Git clean failed", p.output)

    def initsubmodules(self):
        self.checkrepo()
        submfile = os.path.join(self.local, '.gitmodules')
        if not os.path.isfile(submfile):
            raise VCSException("No git submodules available")

        # fix submodules not accessible without an account and public key auth
        with open(submfile, 'r') as f:
            lines = f.readlines()
        with open(submfile, 'w') as f:
            for line in lines:
                if 'git@github.com' in line:
                    line = line.replace('git@github.com:', 'https://github.com/')
                f.write(line)

        p = FDroidPopen(['git', 'submodule', 'sync'], cwd=self.local, output=False)
        if p.returncode != 0:
            raise VCSException("Git submodule sync failed", p.output)
        p = FDroidPopen(['git', 'submodule', 'update', '--init', '--force', '--recursive'], cwd=self.local)
        if p.returncode != 0:
            raise VCSException("Git submodule update failed", p.output)

    def _gettags(self):
        self.checkrepo()
        p = FDroidPopen(['git', 'tag'], cwd=self.local, output=False)
        return p.output.splitlines()

    def latesttags(self, tags, number):
        self.checkrepo()
        tl = []
        for tag in tags:
            p = FDroidPopen(
                ['git', 'show', '--format=format:%ct', '-s', tag],
                cwd=self.local, output=False)
            # Timestamp is on the last line. For a normal tag, it's the only
            # line, but for annotated tags, the rest of the info precedes it.
            ts = int(p.output.splitlines()[-1])
            tl.append((ts, tag))
        latest = []
        for _, t in sorted(tl)[-number:]:
            latest.append(t)
        return latest
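
# Example (hypothetical tags): latesttags(['0.1', '0.2', '1.0'], 2) returns
# the two most recently committed tags, oldest first - ordering follows the
# commit timestamps, not the tag names.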


class vcs_gitsvn(vcs):

    def repotype(self):
        return 'git-svn'

    # If the local directory exists, but is somehow not a git repository, git
    # will traverse up the directory tree until it finds one that is (i.e.
    # fdroidserver) and then we'll proceed to destroy it! This is called as
    # a safety check.
    def checkrepo(self):
        p = FDroidPopen(['git', 'rev-parse', '--show-toplevel'], cwd=self.local, output=False)
        result = p.output.rstrip()
        if not result.endswith(self.local):
            raise VCSException('Repository mismatch')

    def gotorevisionx(self, rev):
        if not os.path.exists(self.local):
            # Brand new checkout
            gitsvn_args = ['git', 'svn', 'clone']
            if ';' in self.remote:
                remote_split = self.remote.split(';')
                for i in remote_split[1:]:
                    if i.startswith('trunk='):
                        gitsvn_args.extend(['-T', i[6:]])
                    elif i.startswith('tags='):
                        gitsvn_args.extend(['-t', i[5:]])
                    elif i.startswith('branches='):
                        gitsvn_args.extend(['-b', i[9:]])
                gitsvn_args.extend([remote_split[0], self.local])
                p = FDroidPopen(gitsvn_args, output=False)
                if p.returncode != 0:
                    self.clone_failed = True
                    raise VCSException("Git svn clone failed", p.output)
            else:
                gitsvn_args.extend([self.remote, self.local])
                p = FDroidPopen(gitsvn_args, output=False)
                if p.returncode != 0:
                    self.clone_failed = True
                    raise VCSException("Git svn clone failed", p.output)
            self.checkrepo()
        else:
            self.checkrepo()
            # Discard any working tree changes
            p = FDroidPopen(['git', 'reset', '--hard'], cwd=self.local, output=False)
            if p.returncode != 0:
                raise VCSException("Git reset failed", p.output)
            # Remove untracked files now, in case they're tracked in the target
            # revision (it happens!)
            p = FDroidPopen(['git', 'clean', '-dffx'], cwd=self.local, output=False)
            if p.returncode != 0:
                raise VCSException("Git clean failed", p.output)
            if not self.refreshed:
                # Get new commits, branches and tags from repo
                p = FDroidPopen(['git', 'svn', 'fetch'], cwd=self.local, output=False)
                if p.returncode != 0:
                    raise VCSException("Git svn fetch failed")
                p = FDroidPopen(['git', 'svn', 'rebase'], cwd=self.local, output=False)
                if p.returncode != 0:
                    raise VCSException("Git svn rebase failed", p.output)
                self.refreshed = True

        rev = rev or 'master'
        if rev:
            nospaces_rev = rev.replace(' ', '%20')
            # Try finding a svn tag
            for treeish in ['origin/', '']:
                p = FDroidPopen(['git', 'checkout', treeish + 'tags/' + nospaces_rev], cwd=self.local, output=False)
                if p.returncode == 0:
                    break
            if p.returncode != 0:
                # No tag found, normal svn rev translation
                # Translate svn rev into git format
                rev_split = rev.split('/')

                p = None
                for treeish in ['origin/', '']:
                    if len(rev_split) > 1:
                        treeish += rev_split[0]
                        svn_rev = rev_split[1]

                    else:
                        # if no branch is specified, then assume trunk (i.e. 'master' branch):
                        treeish += 'master'
                        svn_rev = rev

                    svn_rev = svn_rev if svn_rev[0] == 'r' else 'r' + svn_rev

                    p = FDroidPopen(['git', 'svn', 'find-rev', '--before', svn_rev, treeish], cwd=self.local, output=False)
                    git_rev = p.output.rstrip()

                    if p.returncode == 0 and git_rev:
                        break

                if p.returncode != 0 or not git_rev:
                    # Try a plain git checkout as a last resort
                    p = FDroidPopen(['git', 'checkout', rev], cwd=self.local, output=False)
                    if p.returncode != 0:
                        raise VCSException("No git treeish found and direct git checkout of '%s' failed" % rev, p.output)
                else:
                    # Check out the git rev equivalent to the svn rev
                    p = FDroidPopen(['git', 'checkout', git_rev], cwd=self.local, output=False)
                    if p.returncode != 0:
                        raise VCSException("Git checkout of '%s' failed" % rev, p.output)

        # Get rid of any uncontrolled files left behind
        p = FDroidPopen(['git', 'clean', '-dffx'], cwd=self.local, output=False)
        if p.returncode != 0:
            raise VCSException("Git clean failed", p.output)

    def _gettags(self):
        self.checkrepo()
        for treeish in ['origin/', '']:
            d = os.path.join(self.local, '.git', 'svn', 'refs', 'remotes', treeish, 'tags')
            if os.path.isdir(d):
                return os.listdir(d)

    def getref(self):
        self.checkrepo()
        p = FDroidPopen(['git', 'svn', 'find-rev', 'HEAD'], cwd=self.local, output=False)
        if p.returncode != 0:
            return None
        return p.output.strip()
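
# The extended git-svn remote syntax parsed above looks like (illustrative
# URL):
#     http://svn.example.org/repo;trunk=trunk;tags=tags;branches=branches
# which turns into: git svn clone -T trunk -t tags -b branches <url> <local>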


class vcs_hg(vcs):

    def repotype(self):
        return 'hg'

    def gotorevisionx(self, rev):
        if not os.path.exists(self.local):
            p = FDroidPopen(['hg', 'clone', self.remote, self.local], output=False)
            if p.returncode != 0:
                self.clone_failed = True
                raise VCSException("Hg clone failed", p.output)
        else:
            p = FDroidPopen(['hg', 'status', '-uS'], cwd=self.local, output=False)
            if p.returncode != 0:
                raise VCSException("Hg status failed", p.output)
            for line in p.output.splitlines():
                if not line.startswith('? '):
                    raise VCSException("Unexpected output from hg status -uS: " + line)
                FDroidPopen(['rm', '-rf', line[2:]], cwd=self.local, output=False)
            if not self.refreshed:
                p = FDroidPopen(['hg', 'pull'], cwd=self.local, output=False)
                if p.returncode != 0:
                    raise VCSException("Hg pull failed", p.output)
                self.refreshed = True

        rev = rev or 'default'
        p = FDroidPopen(['hg', 'update', '-C', rev], cwd=self.local, output=False)
        if p.returncode != 0:
            raise VCSException("Hg checkout of '%s' failed" % rev, p.output)
        p = FDroidPopen(['hg', 'purge', '--all'], cwd=self.local, output=False)
        # Also delete untracked files, we have to enable purge extension for that:
        if "'purge' is provided by the following extension" in p.output:
            with open(os.path.join(self.local, '.hg', 'hgrc'), "a") as myfile:
                myfile.write("\n[extensions]\nhgext.purge=\n")
            p = FDroidPopen(['hg', 'purge', '--all'], cwd=self.local, output=False)
            if p.returncode != 0:
                raise VCSException("HG purge failed", p.output)
        elif p.returncode != 0:
            raise VCSException("HG purge failed", p.output)

    def _gettags(self):
        p = FDroidPopen(['hg', 'tags', '-q'], cwd=self.local, output=False)
        return p.output.splitlines()[1:]


class vcs_bzr(vcs):

    def repotype(self):
        return 'bzr'

    def gotorevisionx(self, rev):
        if not os.path.exists(self.local):
            p = FDroidPopen(['bzr', 'branch', self.remote, self.local], output=False)
            if p.returncode != 0:
                self.clone_failed = True
                raise VCSException("Bzr branch failed", p.output)
        else:
            p = FDroidPopen(['bzr', 'clean-tree', '--force', '--unknown', '--ignored'], cwd=self.local, output=False)
            if p.returncode != 0:
                raise VCSException("Bzr clean-tree failed", p.output)
            if not self.refreshed:
                p = FDroidPopen(['bzr', 'pull'], cwd=self.local, output=False)
                if p.returncode != 0:
                    raise VCSException("Bzr pull failed", p.output)
                self.refreshed = True

        revargs = list(['-r', rev] if rev else [])
        p = FDroidPopen(['bzr', 'revert'] + revargs, cwd=self.local, output=False)
        if p.returncode != 0:
            raise VCSException("Bzr revert of '%s' failed" % rev, p.output)

    def _gettags(self):
        p = FDroidPopen(['bzr', 'tags'], cwd=self.local, output=False)
        return [tag.split(' ')[0].strip() for tag in
                p.output.splitlines()]


def unescape_string(string):
    if string[0] == '"' and string[-1] == '"':
        return string[1:-1]

    return string.replace("\\'", "'")


def retrieve_string(app_dir, string, xmlfiles=None):

    if xmlfiles is None:
        xmlfiles = []
        for res_dir in [
            os.path.join(app_dir, 'res'),
            os.path.join(app_dir, 'src', 'main', 'res'),
        ]:
            for r, d, f in os.walk(res_dir):
                if os.path.basename(r) == 'values':
                    xmlfiles += [os.path.join(r, x) for x in f if x.endswith('.xml')]

    if not string.startswith('@string/'):
        return unescape_string(string)

    name = string[len('@string/'):]

    for path in xmlfiles:
        if not os.path.isfile(path):
            continue
        xml = parse_xml(path)
        element = xml.find('string[@name="' + name + '"]')
        if element is not None and element.text is not None:
            return retrieve_string(app_dir, element.text.encode('utf-8'), xmlfiles)

    return ''


def retrieve_string_singleline(app_dir, string, xmlfiles=None):
    return retrieve_string(app_dir, string, xmlfiles).replace('\n', ' ').strip()
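
# Example (hypothetical project): given res/values/strings.xml containing
#     <string name="app_name">My App</string>
# retrieve_string(app_dir, '@string/app_name') resolves to 'My App',
# following chained @string/ references; a plain literal like '"My App"'
# is simply unquoted by unescape_string.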


# Return list of existing files that will be used to find the highest vercode
def manifest_paths(app_dir, flavours):

    possible_manifests = \
        [os.path.join(app_dir, 'AndroidManifest.xml'),
         os.path.join(app_dir, 'src', 'main', 'AndroidManifest.xml'),
         os.path.join(app_dir, 'src', 'AndroidManifest.xml'),
         os.path.join(app_dir, 'build.gradle')]

    for flavour in flavours:
        if flavour == 'yes':
            continue
        possible_manifests.append(
            os.path.join(app_dir, 'src', flavour, 'AndroidManifest.xml'))

    return [path for path in possible_manifests if os.path.isfile(path)]


# Retrieve the package name. Returns the name, or None if not found.
def fetch_real_name(app_dir, flavours):
    for path in manifest_paths(app_dir, flavours):
        if not has_extension(path, 'xml') or not os.path.isfile(path):
            continue
        logging.debug("fetch_real_name: Checking manifest at " + path)
        xml = parse_xml(path)
        app = xml.find('application')
        if "{http://schemas.android.com/apk/res/android}label" not in app.attrib:
            continue
        label = app.attrib["{http://schemas.android.com/apk/res/android}label"].encode('utf-8')
        result = retrieve_string_singleline(app_dir, label)
        if result:
            result = result.strip()
            return result
    return None


def get_library_references(root_dir):
    libraries = []
    proppath = os.path.join(root_dir, 'project.properties')
    if not os.path.isfile(proppath):
        return libraries
    for line in open(proppath):
        if not line.startswith('android.library.reference.'):
            continue
        path = line.split('=')[1].strip()
        relpath = os.path.join(root_dir, path)
        if not os.path.isdir(relpath):
            continue
        logging.debug("Found subproject at %s" % path)
        libraries.append(path)
    return libraries


def ant_subprojects(root_dir):
    subprojects = get_library_references(root_dir)
    for subpath in subprojects:
        subrelpath = os.path.join(root_dir, subpath)
        for p in get_library_references(subrelpath):
            relp = os.path.normpath(os.path.join(subpath, p))
            if relp not in subprojects:
                subprojects.insert(0, relp)
    return subprojects


def remove_debuggable_flags(root_dir):
    # Remove forced debuggable flags
    logging.debug("Removing debuggable flags from %s" % root_dir)
    for root, dirs, files in os.walk(root_dir):
        if 'AndroidManifest.xml' in files:
            regsub_file(r'android:debuggable="[^"]*"',
                        '',
                        os.path.join(root, 'AndroidManifest.xml'))
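
# Example (hypothetical project.properties): with get_library_references
# above, the line
#     android.library.reference.1=../some-lib
# yields ['../some-lib'], provided that directory exists relative to
# root_dir.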
2012-03-10 13:50:34 +01:00
|
|
|
# Extract some information from the AndroidManifest.xml at the given path.
|
|
|
|
# Returns (version, vercode, package), any or all of which might be None.
|
2012-08-23 15:25:39 +02:00
|
|
|
# All values returned are strings.
|
2014-05-13 21:04:22 +02:00
|
|
|
def parse_androidmanifests(paths, ignoreversions=None):
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2013-08-14 15:07:26 +02:00
|
|
|
if not paths:
|
|
|
|
return (None, None, None)
|
|
|
|
|
2014-04-12 01:00:59 +02:00
|
|
|
vcsearch_g = re.compile(r'.*versionCode *=* *["\']*([0-9]+)["\']*').search
|
|
|
|
vnsearch_g = re.compile(r'.*versionName *=* *(["\'])((?:(?=(\\?))\3.)*?)\1.*').search
|
|
|
|
psearch_g = re.compile(r'.*packageName *=* *["\']([^"]+)["\'].*').search
|
2013-08-13 12:02:48 +02:00
|
|
|
|
2014-05-13 21:04:22 +02:00
|
|
|
ignoresearch = re.compile(ignoreversions).search if ignoreversions else None
|
|
|
|
|
2013-08-13 12:02:48 +02:00
|
|
|
max_version = None
|
|
|
|
max_vercode = None
|
|
|
|
max_package = None
|
|
|
|
|
|
|
|
for path in paths:
|
|
|
|
|
2015-04-03 00:05:22 +02:00
|
|
|
if not os.path.isfile(path):
|
|
|
|
continue
|
|
|
|
|
2015-01-10 16:15:23 +01:00
|
|
|
logging.debug("Parsing manifest at {0}".format(path))
|
2013-12-30 11:33:37 +01:00
|
|
|
gradle = has_extension(path, 'gradle')
|
2014-06-20 11:10:52 +02:00
|
|
|
version = None
|
|
|
|
vercode = None
|
2013-08-13 15:25:47 +02:00
|
|
|
# Remember package name, may be defined separately from version+vercode
|
|
|
|
package = max_package
|
2013-08-13 12:02:48 +02:00
|
|
|
|
2015-06-03 15:23:18 +02:00
|
|
|
if gradle:
|
|
|
|
for line in file(path):
|
|
|
|
if not package:
|
2013-08-13 12:02:48 +02:00
|
|
|
matches = psearch_g(line)
|
2015-06-03 15:23:18 +02:00
|
|
|
if matches:
|
|
|
|
package = matches.group(1)
|
|
|
|
if not version:
|
2013-08-13 12:02:48 +02:00
|
|
|
matches = vnsearch_g(line)
|
2015-06-03 15:23:18 +02:00
|
|
|
if matches:
|
|
|
|
version = matches.group(2)
|
|
|
|
if not vercode:
|
2013-08-13 12:02:48 +02:00
|
|
|
matches = vcsearch_g(line)
|
2015-06-03 15:23:18 +02:00
|
|
|
if matches:
|
|
|
|
vercode = matches.group(1)
|
|
|
|
else:
|
2015-06-03 15:42:45 +02:00
|
|
|
xml = parse_xml(path)
|
2015-06-03 15:23:18 +02:00
|
|
|
if "package" in xml.attrib:
|
2015-06-03 18:42:24 +02:00
|
|
|
package = xml.attrib["package"].encode('utf-8')
|
2015-06-03 15:23:18 +02:00
|
|
|
if "{http://schemas.android.com/apk/res/android}versionName" in xml.attrib:
|
2015-06-03 18:42:24 +02:00
|
|
|
version = xml.attrib["{http://schemas.android.com/apk/res/android}versionName"].encode('utf-8')
|
2015-06-18 17:54:56 +02:00
|
|
|
base_dir = os.path.dirname(path)
|
|
|
|
version = retrieve_string_singleline(base_dir, version)
|
2015-06-03 15:23:18 +02:00
|
|
|
if "{http://schemas.android.com/apk/res/android}versionCode" in xml.attrib:
|
2015-06-03 19:40:43 +02:00
|
|
|
a = xml.attrib["{http://schemas.android.com/apk/res/android}versionCode"].encode('utf-8')
|
|
|
|
if string_is_integer(a):
|
|
|
|
vercode = a
|
2013-08-13 12:02:48 +02:00
|
|
|
|
2015-01-10 16:15:23 +01:00
|
|
|
logging.debug("..got package={0}, version={1}, vercode={2}"
|
|
|
|
.format(package, version, vercode))
|
|
|
|
|
2014-06-22 17:36:00 +02:00
|
|
|
# Always grab the package name and version name in case they are not
|
|
|
|
# together with the highest version code
|
|
|
|
if max_package is None and package is not None:
|
2013-08-13 15:25:47 +02:00
|
|
|
max_package = package
|
2014-06-22 17:36:00 +02:00
|
|
|
if max_version is None and version is not None:
|
|
|
|
max_version = version
|
2013-08-13 15:25:47 +02:00
|
|
|
|
2013-08-13 12:02:48 +02:00
|
|
|
        # vercodes are kept as strings, so compare them numerically
        if max_vercode is None or (vercode is not None and int(vercode) > int(max_vercode)):
|
2014-05-13 21:04:22 +02:00
|
|
|
if not ignoresearch or not ignoresearch(version):
|
2014-06-22 17:36:00 +02:00
|
|
|
if version is not None:
|
|
|
|
max_version = version
|
|
|
|
if vercode is not None:
|
|
|
|
max_vercode = vercode
|
|
|
|
if package is not None:
|
|
|
|
max_package = package
|
2014-05-13 21:04:22 +02:00
|
|
|
else:
|
|
|
|
max_version = "Ignore"
|
2013-08-13 12:02:48 +02:00
|
|
|
|
2013-08-15 16:01:33 +02:00
|
|
|
if max_version is None:
|
2014-06-20 11:10:52 +02:00
|
|
|
max_version = "Unknown"
|
2013-08-15 16:01:33 +02:00
|
|
|
|
2015-01-27 08:13:21 +01:00
|
|
|
if max_package and not is_valid_package_name(max_package):
|
2015-01-26 19:29:39 +01:00
|
|
|
raise FDroidException("Invalid package name {0}".format(max_package))
|
|
|
|
|
2013-08-13 12:02:48 +02:00
|
|
|
return (max_version, max_vercode, max_package)
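# A minimal usage sketch (hypothetical paths): callers hand over every
# candidate manifest/gradle file at once and get back the highest version
# code found, plus the matching version name and package.
def _example_parse_androidmanifests():
    paths = [os.path.join('build', 'org.example.app', 'AndroidManifest.xml'),
             os.path.join('build', 'org.example.app', 'build.gradle')]
    version, vercode, package = parse_androidmanifests(paths, ignoreversions=r'(beta|rc)')
    # any element may be None; version may also be "Unknown" or "Ignore"
    logging.debug("%s is at %s (%s)" % (package, version, vercode))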
|
2012-03-10 13:50:34 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2015-01-26 19:29:39 +01:00
|
|
|
def is_valid_package_name(name):
|
|
|
|
return re.match("[A-Za-z_][A-Za-z_0-9.]+$", name)
|
|
|
|
|
|
|
|
|
2014-07-03 13:59:36 +02:00
|
|
|
class FDroidException(Exception):
|
2014-12-31 16:42:26 +01:00
|
|
|
|
2014-05-02 04:27:58 +02:00
|
|
|
def __init__(self, value, detail=None):
|
2012-01-02 12:51:14 +01:00
|
|
|
self.value = value
|
2014-01-16 11:17:22 +01:00
|
|
|
self.detail = detail
|
2012-01-02 12:51:14 +01:00
|
|
|
|
2013-05-20 22:19:53 +02:00
|
|
|
def get_wikitext(self):
|
|
|
|
ret = repr(self.value) + "\n"
|
2014-01-16 11:17:22 +01:00
|
|
|
if self.detail:
|
|
|
|
ret += "=detail=\n"
|
2013-05-20 22:19:53 +02:00
|
|
|
ret += "<pre>\n"
|
2014-03-17 14:52:01 +01:00
|
|
|
txt = self.detail[-8192:] if len(self.detail) > 8192 else self.detail
|
|
|
|
ret += str(txt)
|
2013-05-20 22:19:53 +02:00
|
|
|
ret += "</pre>\n"
|
|
|
|
return ret
|
|
|
|
|
2012-01-02 12:51:14 +01:00
|
|
|
def __str__(self):
|
2014-05-20 23:14:19 +02:00
|
|
|
ret = self.value
|
2014-01-16 11:17:22 +01:00
|
|
|
if self.detail:
|
|
|
|
ret += "\n==== detail begin ====\n%s\n==== detail end ====" % self.detail.strip()
|
2012-01-08 19:13:15 +01:00
|
|
|
return ret
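# Sketch of the intended use: 'value' is a one-line summary, 'detail' carries
# long command output (get_wikitext() keeps only the last 8192 chars of it).
def _example_fdroid_exception():
    try:
        raise FDroidException("Failed to do the thing", detail="...full log...")
    except FDroidException as e:
        logging.error(str(e))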
|
2012-01-02 12:51:14 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2014-07-03 13:59:36 +02:00
|
|
|
class VCSException(FDroidException):
|
2014-07-02 15:30:05 +02:00
|
|
|
pass
|
2012-01-02 12:51:14 +01:00
|
|
|
|
2014-07-02 15:30:05 +02:00
|
|
|
|
2014-07-03 13:59:36 +02:00
|
|
|
class BuildException(FDroidException):
|
2014-07-02 15:30:05 +02:00
|
|
|
pass
|
2012-01-02 12:51:14 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-05-20 13:16:06 +02:00
|
|
|
# Get the specified source library.
|
|
|
|
# Returns the path to it. Normally this is the path to be used when referencing
|
|
|
|
# it, which may be a subdirectory of the actual project. If you want the base
|
|
|
|
# directory of the project, pass 'basepath=True'.
|
2015-06-03 13:51:41 +02:00
|
|
|
def getsrclib(spec, srclib_dir, basepath=False,
|
2015-07-14 12:32:39 +02:00
|
|
|
raw=False, prepare=True, preponly=False, refresh=True):
|
2013-03-01 18:59:01 +01:00
|
|
|
|
2013-11-18 22:31:52 +01:00
|
|
|
number = None
|
|
|
|
subdir = None
|
2013-05-24 23:35:56 +02:00
|
|
|
if raw:
|
|
|
|
name = spec
|
|
|
|
ref = None
|
|
|
|
else:
|
|
|
|
name, ref = spec.split('@')
|
2013-11-18 22:31:52 +01:00
|
|
|
if ':' in name:
|
|
|
|
number, name = name.split(':', 1)
|
|
|
|
if '/' in name:
|
2014-05-02 04:16:32 +02:00
|
|
|
name, subdir = name.split('/', 1)
|
2013-03-01 20:39:30 +01:00
|
|
|
|
2014-05-28 09:33:14 +02:00
|
|
|
if name not in metadata.srclibs:
|
2014-07-03 13:53:54 +02:00
|
|
|
raise VCSException('srclib ' + name + ' not found.')
|
2013-03-15 16:29:29 +01:00
|
|
|
|
2014-05-20 23:44:47 +02:00
|
|
|
srclib = metadata.srclibs[name]
|
2013-04-07 20:39:53 +02:00
|
|
|
|
2013-05-20 13:34:03 +02:00
|
|
|
sdir = os.path.join(srclib_dir, name)
|
2013-04-23 21:11:10 +02:00
|
|
|
|
2013-06-04 23:42:18 +02:00
|
|
|
if not preponly:
|
2015-01-05 00:29:27 +01:00
|
|
|
vcs = getvcs(srclib["Repo Type"], srclib["Repo"], sdir)
|
2013-11-18 22:31:52 +01:00
|
|
|
vcs.srclib = (name, number, sdir)
|
2013-10-23 16:57:02 +02:00
|
|
|
if ref:
|
2015-07-14 12:32:39 +02:00
|
|
|
vcs.gotorevision(ref, refresh)
|
2013-06-04 23:42:18 +02:00
|
|
|
|
|
|
|
if raw:
|
|
|
|
return vcs
|
2013-05-24 23:35:56 +02:00
|
|
|
|
2013-05-20 13:16:06 +02:00
|
|
|
libdir = None
|
2013-12-06 12:15:13 +01:00
|
|
|
if subdir:
|
2013-11-16 12:54:23 +01:00
|
|
|
libdir = os.path.join(sdir, subdir)
|
2013-12-06 12:15:13 +01:00
|
|
|
elif srclib["Subdir"]:
|
2013-05-20 13:16:06 +02:00
|
|
|
for subdir in srclib["Subdir"]:
|
|
|
|
libdir_candidate = os.path.join(sdir, subdir)
|
|
|
|
if os.path.exists(libdir_candidate):
|
|
|
|
libdir = libdir_candidate
|
|
|
|
break
|
2013-04-28 19:52:27 +02:00
|
|
|
|
2013-05-20 13:16:06 +02:00
|
|
|
if libdir is None:
|
|
|
|
libdir = sdir
|
2013-05-03 16:53:37 +02:00
|
|
|
|
2014-02-11 17:56:36 +01:00
|
|
|
remove_signing_keys(sdir)
|
|
|
|
remove_debuggable_flags(sdir)
|
|
|
|
|
2013-06-04 23:42:18 +02:00
|
|
|
if prepare:
|
|
|
|
|
2013-12-06 12:15:13 +01:00
|
|
|
if srclib["Prepare"]:
|
2015-05-10 13:53:06 +02:00
|
|
|
cmd = replace_config_vars(srclib["Prepare"], None)
|
2013-06-09 23:15:46 +02:00
|
|
|
|
2013-10-16 23:31:02 +02:00
|
|
|
p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=libdir)
|
2013-06-04 23:42:18 +02:00
|
|
|
if p.returncode != 0:
|
2013-10-16 23:17:51 +02:00
|
|
|
raise BuildException("Error running prepare command for srclib %s"
|
2014-07-01 18:04:41 +02:00
|
|
|
% name, p.output)
|
2013-12-30 17:04:16 +01:00
|
|
|
|
2013-05-20 13:16:06 +02:00
|
|
|
if basepath:
|
2013-11-20 19:00:22 +01:00
|
|
|
libdir = sdir
|
|
|
|
|
|
|
|
return (name, number, libdir)
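# The srclib spec grammar handled above, as a worked (hypothetical) example,
# assuming 'MySrclib' exists in metadata.srclibs: "2:MySrclib/library@v1.0"
# means library reference number 2, srclib "MySrclib", subdir "library",
# checked out at VCS ref "v1.0".
def _example_getsrclib():
    name, number, libdir = getsrclib('2:MySrclib/library@v1.0',
                                     os.path.join('build', 'srclib'))
    place_srclib(os.path.join('build', 'org.example.app'), int(number), libdir)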
|
2012-01-28 01:05:30 +01:00
|
|
|
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2012-01-03 22:39:30 +01:00
|
|
|
# Prepare the source code for a particular build
|
2012-01-04 22:37:11 +01:00
|
|
|
# 'vcs' - the appropriate vcs object for the application
|
|
|
|
# 'app' - the application details from the metadata
|
|
|
|
# 'build' - the build details from the metadata
|
2012-01-27 23:10:08 +01:00
|
|
|
# 'build_dir' - the path to the build directory, usually
|
|
|
|
# 'build/app.id'
|
2013-05-20 13:34:03 +02:00
|
|
|
# 'srclib_dir' - the path to the source libraries directory, usually
|
|
|
|
# 'build/srclib'
|
2012-01-27 23:10:08 +01:00
|
|
|
# 'extlib_dir' - the path to the external libraries directory, usually
|
|
|
|
# 'build/extlib'
|
2013-03-20 10:30:56 +01:00
|
|
|
# Returns (root, srclibpaths), where:
|
|
|
|
# 'root' is the root directory, which may be the same as 'build_dir' or may
|
|
|
|
# be a subdirectory of it.
|
|
|
|
# 'srclibpaths' is information on the srclibs being used
|
2015-07-14 12:32:39 +02:00
|
|
|
def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=False, refresh=True):
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2014-01-27 15:59:40 +01:00
|
|
|
# Optionally, the actual app source can be in a subdirectory
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['subdir']:
|
2012-01-03 22:39:30 +01:00
|
|
|
root_dir = os.path.join(build_dir, build['subdir'])
|
|
|
|
else:
|
|
|
|
root_dir = build_dir
|
|
|
|
|
2014-01-27 15:59:40 +01:00
|
|
|
# Get a working copy of the right revision
|
|
|
|
logging.info("Getting source for revision " + build['commit'])
|
2015-07-14 12:32:39 +02:00
|
|
|
vcs.gotorevision(build['commit'], refresh)
|
2012-01-03 22:39:30 +01:00
|
|
|
|
2015-06-29 04:37:28 +02:00
|
|
|
# Initialise submodules if required
|
2013-11-09 12:21:43 +01:00
|
|
|
if build['submodules']:
|
2014-01-27 15:59:40 +01:00
|
|
|
logging.info("Initialising submodules")
|
2012-01-03 22:39:30 +01:00
|
|
|
vcs.initsubmodules()
|
|
|
|
|
2014-02-09 19:11:15 +01:00
|
|
|
# Check that a subdir (if we're using one) exists. This has to happen
|
|
|
|
# after the checkout, since it might not exist elsewhere
|
|
|
|
if not os.path.exists(root_dir):
|
|
|
|
raise BuildException('Missing subdir ' + root_dir)
|
|
|
|
|
2014-01-27 15:59:40 +01:00
|
|
|
# Run an init command if one is required
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['init']:
|
2015-05-10 13:53:06 +02:00
|
|
|
cmd = replace_config_vars(build['init'], build)
|
2014-01-27 15:59:40 +01:00
|
|
|
logging.info("Running 'init' commands in %s" % root_dir)
|
2013-10-09 23:36:24 +02:00
|
|
|
|
2013-11-01 12:10:57 +01:00
|
|
|
p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=root_dir)
|
2013-10-09 23:36:24 +02:00
|
|
|
if p.returncode != 0:
|
|
|
|
raise BuildException("Error running init command for %s:%s" %
|
2014-07-01 18:04:41 +02:00
|
|
|
(app['id'], build['version']), p.output)
|
2012-02-04 22:19:07 +01:00
|
|
|
|
2014-01-23 10:29:04 +01:00
|
|
|
# Apply patches if any
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['patch']:
|
|
|
|
logging.info("Applying patches")
|
2014-02-12 11:13:20 +01:00
|
|
|
for patch in build['patch']:
|
2014-01-23 10:29:04 +01:00
|
|
|
patch = patch.strip()
|
2014-01-27 15:59:40 +01:00
|
|
|
logging.info("Applying " + patch)
|
2014-01-23 10:29:04 +01:00
|
|
|
patch_path = os.path.join('metadata', app['id'], patch)
|
2014-02-17 13:25:55 +01:00
|
|
|
p = FDroidPopen(['patch', '-p1', '-i', os.path.abspath(patch_path)], cwd=build_dir)
|
|
|
|
if p.returncode != 0:
|
2014-01-23 10:29:04 +01:00
|
|
|
raise BuildException("Failed to apply patch %s" % patch_path)
|
|
|
|
|
2014-01-27 15:59:40 +01:00
|
|
|
# Get required source libraries
|
2014-01-23 10:29:04 +01:00
|
|
|
srclibpaths = []
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['srclibs']:
|
2014-01-27 15:59:40 +01:00
|
|
|
logging.info("Collecting source libraries")
|
2014-02-12 11:13:20 +01:00
|
|
|
for lib in build['srclibs']:
|
2015-07-14 12:32:39 +02:00
|
|
|
            srclibpaths.append(getsrclib(lib, srclib_dir, preponly=onserver, refresh=refresh))
|
2014-01-23 10:29:04 +01:00
|
|
|
|
|
|
|
for name, number, libpath in srclibpaths:
|
|
|
|
place_srclib(root_dir, int(number) if number else None, libpath)
|
|
|
|
|
|
|
|
basesrclib = vcs.getsrclib()
|
|
|
|
# If one was used for the main source, add that too.
|
|
|
|
if basesrclib:
|
|
|
|
srclibpaths.append(basesrclib)
|
|
|
|
|
2014-01-27 15:59:40 +01:00
|
|
|
# Update the local.properties file
|
2014-05-02 04:24:48 +02:00
|
|
|
localprops = [os.path.join(build_dir, 'local.properties')]
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['subdir']:
|
2014-05-02 04:24:48 +02:00
|
|
|
localprops += [os.path.join(root_dir, 'local.properties')]
|
2014-01-21 10:14:37 +01:00
|
|
|
for path in localprops:
|
2014-09-25 18:11:56 +02:00
|
|
|
props = ""
|
|
|
|
if os.path.isfile(path):
|
|
|
|
logging.info("Updating local.properties file at %s" % path)
|
|
|
|
f = open(path, 'r')
|
|
|
|
props += f.read()
|
|
|
|
f.close()
|
|
|
|
props += '\n'
|
|
|
|
else:
|
|
|
|
logging.info("Creating local.properties file at %s" % path)
|
2014-01-21 10:14:37 +01:00
|
|
|
# Fix old-fashioned 'sdk-location' by copying
|
2014-01-27 15:59:40 +01:00
|
|
|
# from sdk.dir, if necessary
|
2014-01-21 10:14:37 +01:00
|
|
|
if build['oldsdkloc']:
|
|
|
|
sdkloc = re.match(r".*^sdk.dir=(\S+)$.*", props,
|
2014-05-06 19:50:52 +02:00
|
|
|
re.S | re.M).group(1)
|
2014-01-21 10:14:37 +01:00
|
|
|
props += "sdk-location=%s\n" % sdkloc
|
|
|
|
else:
|
|
|
|
props += "sdk.dir=%s\n" % config['sdk_path']
|
2014-02-10 18:26:33 +01:00
|
|
|
props += "sdk-location=%s\n" % config['sdk_path']
|
2015-01-03 00:02:54 +01:00
|
|
|
if build['ndk_path']:
|
2014-01-27 15:59:40 +01:00
|
|
|
# Add ndk location
|
2015-01-03 00:02:54 +01:00
|
|
|
props += "ndk.dir=%s\n" % build['ndk_path']
|
|
|
|
props += "ndk-location=%s\n" % build['ndk_path']
|
2014-01-27 15:59:40 +01:00
|
|
|
# Add java.encoding if necessary
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['encoding']:
|
2014-01-21 10:14:37 +01:00
|
|
|
props += "java.encoding=%s\n" % build['encoding']
|
|
|
|
f = open(path, 'w')
|
|
|
|
f.write(props)
|
|
|
|
f.close()
|
|
|
|
|
2014-09-13 13:04:24 +02:00
|
|
|
flavours = []
|
2014-01-10 20:39:39 +01:00
|
|
|
if build['type'] == 'gradle':
|
2014-09-13 13:01:08 +02:00
|
|
|
flavours = build['gradle']
|
2013-10-30 17:17:44 +01:00
|
|
|
|
2015-05-08 00:58:52 +02:00
|
|
|
version_regex = re.compile(r"[^/]*'com\.android\.tools\.build:gradle:([^\.]+\.[^\.]+).*'.*")
|
2014-06-12 10:00:46 +02:00
|
|
|
gradlepluginver = None
|
|
|
|
|
2015-04-06 17:42:26 +02:00
|
|
|
gradle_dirs = [root_dir]
|
2014-06-19 12:41:34 +02:00
|
|
|
|
|
|
|
# Parent dir build.gradle
|
|
|
|
parent_dir = os.path.normpath(os.path.join(root_dir, '..'))
|
|
|
|
if parent_dir.startswith(build_dir):
|
2015-04-06 17:42:26 +02:00
|
|
|
gradle_dirs.append(parent_dir)
|
2014-06-19 12:41:34 +02:00
|
|
|
|
2015-04-06 17:42:26 +02:00
|
|
|
for dir_path in gradle_dirs:
|
2014-06-19 12:41:34 +02:00
|
|
|
if gradlepluginver:
|
|
|
|
break
|
2015-04-06 17:42:26 +02:00
|
|
|
if not os.path.isdir(dir_path):
|
2014-06-19 12:41:34 +02:00
|
|
|
continue
|
2015-04-06 17:42:26 +02:00
|
|
|
for filename in os.listdir(dir_path):
|
|
|
|
if not filename.endswith('.gradle'):
|
|
|
|
continue
|
|
|
|
path = os.path.join(dir_path, filename)
|
2015-04-17 13:02:47 +02:00
|
|
|
if not os.path.isfile(path):
|
|
|
|
continue
|
2015-04-06 17:42:26 +02:00
|
|
|
for line in file(path):
|
2014-06-12 10:00:46 +02:00
|
|
|
match = version_regex.match(line)
|
|
|
|
if match:
|
|
|
|
gradlepluginver = match.group(1)
|
|
|
|
break
|
|
|
|
|
2014-06-19 12:41:34 +02:00
|
|
|
if gradlepluginver:
|
|
|
|
build['gradlepluginver'] = LooseVersion(gradlepluginver)
|
|
|
|
else:
|
|
|
|
logging.warn("Could not fetch the gradle plugin version, defaulting to 0.11")
|
|
|
|
build['gradlepluginver'] = LooseVersion('0.11')
|
2014-06-12 10:00:46 +02:00
|
|
|
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['target']:
|
2014-01-28 14:14:18 +01:00
|
|
|
n = build["target"].split('-')[1]
|
2015-07-30 22:13:12 +02:00
|
|
|
regsub_file(r'compileSdkVersion[ =]+[0-9]+',
|
|
|
|
r'compileSdkVersion %s' % n,
|
|
|
|
os.path.join(root_dir, 'build.gradle'))
|
2014-01-28 14:01:32 +01:00
|
|
|
|
2013-10-30 17:17:44 +01:00
|
|
|
# Remove forced debuggable flags
|
2014-02-11 17:56:36 +01:00
|
|
|
remove_debuggable_flags(root_dir)
|
2013-10-30 17:17:44 +01:00
|
|
|
|
2014-01-27 15:59:40 +01:00
|
|
|
# Insert version code and number into the manifest if necessary
|
2013-11-09 12:21:43 +01:00
|
|
|
if build['forceversion']:
|
2014-01-27 15:59:40 +01:00
|
|
|
logging.info("Changing the version name")
|
2014-09-13 13:01:08 +02:00
|
|
|
for path in manifest_paths(root_dir, flavours):
|
2013-10-20 13:43:15 +02:00
|
|
|
if not os.path.isfile(path):
|
|
|
|
continue
|
2013-12-30 11:33:37 +01:00
|
|
|
if has_extension(path, 'xml'):
|
2015-07-30 22:13:12 +02:00
|
|
|
regsub_file(r'android:versionName="[^"]*"',
|
|
|
|
r'android:versionName="%s"' % build['version'],
|
|
|
|
path)
|
2013-12-30 11:33:37 +01:00
|
|
|
elif has_extension(path, 'gradle'):
|
2015-07-30 22:13:12 +02:00
|
|
|
regsub_file(r"""(\s*)versionName[\s'"=]+.*""",
|
|
|
|
r"""\1versionName '%s'""" % build['version'],
|
|
|
|
path)
|
|
|
|
|
2013-11-09 12:21:43 +01:00
|
|
|
if build['forcevercode']:
|
2014-01-27 15:59:40 +01:00
|
|
|
logging.info("Changing the version code")
|
2014-09-13 13:01:08 +02:00
|
|
|
for path in manifest_paths(root_dir, flavours):
|
2013-10-20 13:43:15 +02:00
|
|
|
if not os.path.isfile(path):
|
|
|
|
continue
|
2013-12-30 11:33:37 +01:00
|
|
|
if has_extension(path, 'xml'):
|
2015-07-30 22:13:12 +02:00
|
|
|
regsub_file(r'android:versionCode="[^"]*"',
|
|
|
|
r'android:versionCode="%s"' % build['vercode'],
|
|
|
|
path)
|
2013-12-30 11:33:37 +01:00
|
|
|
elif has_extension(path, 'gradle'):
|
2015-07-30 22:13:12 +02:00
|
|
|
regsub_file(r'versionCode[ =]+[0-9]+',
|
|
|
|
r'versionCode %s' % build['vercode'],
|
|
|
|
path)
|
2012-01-03 22:39:30 +01:00
|
|
|
|
2014-01-27 15:59:40 +01:00
|
|
|
# Delete unwanted files
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['rm']:
|
|
|
|
logging.info("Removing specified files")
|
2014-04-15 23:58:12 +02:00
|
|
|
for part in getpaths(build_dir, build, 'rm'):
|
2014-02-17 14:59:55 +01:00
|
|
|
dest = os.path.join(build_dir, part)
|
|
|
|
logging.info("Removing {0}".format(part))
|
|
|
|
if os.path.lexists(dest):
|
|
|
|
if os.path.islink(dest):
|
2015-01-26 19:14:29 +01:00
|
|
|
FDroidPopen(['unlink', dest], output=False)
|
2013-11-24 11:29:28 +01:00
|
|
|
else:
|
2015-01-26 19:14:29 +01:00
|
|
|
FDroidPopen(['rm', '-rf', dest], output=False)
|
2013-11-24 11:29:28 +01:00
|
|
|
else:
|
2014-01-27 15:59:40 +01:00
|
|
|
logging.info("...but it didn't exist")
|
2012-01-03 22:39:30 +01:00
|
|
|
|
2013-11-24 10:39:12 +01:00
|
|
|
remove_signing_keys(build_dir)
|
|
|
|
|
2014-01-27 15:59:40 +01:00
|
|
|
# Add required external libraries
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['extlibs']:
|
2014-01-27 15:59:40 +01:00
|
|
|
logging.info("Collecting prebuilt libraries")
|
2012-01-27 23:10:08 +01:00
|
|
|
libsdir = os.path.join(root_dir, 'libs')
|
|
|
|
if not os.path.exists(libsdir):
|
|
|
|
os.mkdir(libsdir)
|
2014-02-12 11:13:20 +01:00
|
|
|
for lib in build['extlibs']:
|
2013-09-11 13:45:02 +02:00
|
|
|
lib = lib.strip()
|
2014-01-27 15:59:40 +01:00
|
|
|
logging.info("...installing extlib {0}".format(lib))
|
2012-01-27 23:10:08 +01:00
|
|
|
libf = os.path.basename(lib)
|
2013-11-12 21:14:16 +01:00
|
|
|
libsrc = os.path.join(extlib_dir, lib)
|
|
|
|
if not os.path.exists(libsrc):
|
|
|
|
raise BuildException("Missing extlib file {0}".format(libsrc))
|
|
|
|
shutil.copyfile(libsrc, os.path.join(libsdir, libf))
|
2012-01-27 23:10:08 +01:00
|
|
|
|
2014-01-27 15:59:40 +01:00
|
|
|
# Run a pre-build command if one is required
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['prebuild']:
|
|
|
|
logging.info("Running 'prebuild' commands in %s" % root_dir)
|
|
|
|
|
2015-05-10 13:53:06 +02:00
|
|
|
cmd = replace_config_vars(build['prebuild'], build)
|
2013-08-26 23:52:04 +02:00
|
|
|
|
2014-01-27 15:59:40 +01:00
|
|
|
# Substitute source library paths into prebuild commands
|
2013-11-15 20:42:17 +01:00
|
|
|
for name, number, libpath in srclibpaths:
|
2012-01-28 01:05:30 +01:00
|
|
|
libpath = os.path.relpath(libpath, root_dir)
|
2013-10-09 23:36:24 +02:00
|
|
|
cmd = cmd.replace('$$' + name + '$$', libpath)
|
2013-11-08 20:44:27 +01:00
|
|
|
|
2013-11-01 12:10:57 +01:00
|
|
|
p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=root_dir)
|
2012-09-24 15:06:15 +02:00
|
|
|
if p.returncode != 0:
|
2013-10-09 23:36:24 +02:00
|
|
|
raise BuildException("Error running prebuild command for %s:%s" %
|
2014-07-01 18:04:41 +02:00
|
|
|
(app['id'], build['version']), p.output)
|
2012-01-03 22:39:30 +01:00
|
|
|
|
2014-02-11 16:30:49 +01:00
|
|
|
# Generate (or update) the ant build file, build.xml...
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['update'] and build['update'] != ['no'] and build['type'] == 'ant':
|
2014-12-09 15:15:36 +01:00
|
|
|
parms = ['android', 'update', 'lib-project']
|
|
|
|
lparms = ['android', 'update', 'project']
|
2014-02-11 16:30:49 +01:00
|
|
|
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['target']:
|
2014-02-11 16:30:49 +01:00
|
|
|
parms += ['-t', build['target']]
|
|
|
|
lparms += ['-t', build['target']]
|
2014-05-31 23:10:16 +02:00
|
|
|
if build['update'] == ['auto']:
|
2014-02-11 16:30:49 +01:00
|
|
|
update_dirs = ant_subprojects(root_dir) + ['.']
|
|
|
|
else:
|
2014-05-31 23:10:16 +02:00
|
|
|
update_dirs = build['update']
|
2014-02-11 16:30:49 +01:00
|
|
|
|
|
|
|
for d in update_dirs:
|
|
|
|
subdir = os.path.join(root_dir, d)
|
|
|
|
if d == '.':
|
2014-07-05 15:25:39 +02:00
|
|
|
logging.debug("Updating main project")
|
2014-02-11 16:30:49 +01:00
|
|
|
cmd = parms + ['-p', d]
|
|
|
|
else:
|
2014-07-05 15:25:39 +02:00
|
|
|
logging.debug("Updating subproject %s" % d)
|
2014-02-11 16:30:49 +01:00
|
|
|
cmd = lparms + ['-p', d]
|
2014-12-09 15:15:36 +01:00
|
|
|
p = SdkToolsPopen(cmd, cwd=root_dir)
|
2014-02-11 16:30:49 +01:00
|
|
|
# Check to see whether an error was returned without a proper exit
|
|
|
|
# code (this is the case for the 'no target set or target invalid'
|
|
|
|
# error)
|
2014-07-01 18:04:41 +02:00
|
|
|
if p.returncode != 0 or p.output.startswith("Error: "):
|
|
|
|
raise BuildException("Failed to update project at %s" % d, p.output)
|
2014-02-11 16:30:49 +01:00
|
|
|
# Clean update dirs via ant
|
2014-02-13 09:19:26 +01:00
|
|
|
if d != '.':
|
2014-02-11 16:30:49 +01:00
|
|
|
logging.info("Cleaning subproject %s" % d)
|
2014-02-13 09:19:26 +01:00
|
|
|
p = FDroidPopen(['ant', 'clean'], cwd=subdir)
|
2014-02-11 16:30:49 +01:00
|
|
|
|
2013-03-20 10:30:56 +01:00
|
|
|
return (root_dir, srclibpaths)
|
2012-01-03 22:39:30 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2014-04-15 23:53:44 +02:00
|
|
|
# Split the paths from a field and expand each one via globbing, relative to build_dir
|
|
|
|
def getpaths(build_dir, build, field):
|
|
|
|
paths = []
|
|
|
|
for p in build[field]:
|
|
|
|
p = p.strip()
|
|
|
|
full_path = os.path.join(build_dir, p)
|
|
|
|
full_path = os.path.normpath(full_path)
|
2014-05-06 19:56:44 +02:00
|
|
|
paths += [r[len(build_dir) + 1:] for r in glob.glob(full_path)]
|
2014-04-15 23:53:44 +02:00
|
|
|
return paths
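# For example (hypothetical metadata), with build['rm'] = ['libs/*.so', 'bin']
# each pattern is resolved relative to build_dir and expanded by glob:
def _example_getpaths():
    build = {'rm': ['libs/*.so', 'bin']}
    for path in getpaths(os.path.join('build', 'org.example.app'), build, 'rm'):
        logging.debug("would remove %s" % path)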
|
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2015-07-31 15:50:20 +02:00
|
|
|
def get_mime_type(path):
|
|
|
|
'''
|
|
|
|
There are two incompatible versions of the 'magic' module, one
|
|
|
|
that comes as part of libmagic, which is what Debian includes as
|
|
|
|
python-magic, then another called python-magic that is a separate
|
|
|
|
project that wraps libmagic. The second is 'magic' on pypi, so
|
|
|
|
both need to be supported. Then on platforms where libmagic is
|
|
|
|
not easily included, e.g. OSX and Windows, fallback to the
|
|
|
|
built-in 'mimetypes' module so this will work without
|
|
|
|
libmagic. Hence this function with the following hacks:
|
|
|
|
'''
|
|
|
|
|
2015-08-13 19:16:28 +02:00
|
|
|
ms = None
|
2015-07-31 15:50:20 +02:00
|
|
|
try:
|
|
|
|
import magic
|
|
|
|
try:
|
|
|
|
            # the pypi 'python-magic' module provides from_file()
            result = magic.from_file(path, mime=True)
        except AttributeError:
            # the libmagic bindings shipped with file (e.g. Debian's
            # python-magic) only provide open()/load()/file()
            ms = magic.open(magic.MIME_TYPE)
            ms.load()
            result = ms.file(path)
|
2015-07-31 15:50:20 +02:00
|
|
|
except UnicodeError:
|
|
|
|
logging.warn('Found malformed magic number at %s' % path)
|
2015-08-13 19:16:28 +02:00
|
|
|
result = None
|
2015-07-31 15:50:20 +02:00
|
|
|
except ImportError:
|
|
|
|
import mimetypes
|
|
|
|
mimetypes.init()
|
2015-08-13 19:16:28 +02:00
|
|
|
        # guess_type() returns a (type, encoding) tuple; keep only the type
        result = mimetypes.guess_type(path, strict=False)[0]
|
|
|
|
if ms is not None:
|
|
|
|
ms.close()
|
|
|
|
return result
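# Usage sketch (hypothetical path): whichever backend answers, callers only
# ever compare the returned string, e.g. in scan_source() below.
def _example_get_mime_type():
    mime = get_mime_type(os.path.join('build', 'org.example.app', 'foo.jar'))
    if mime in ('application/zip', 'application/java-archive'):
        logging.debug("looks like a jar/zip")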
|
2015-07-31 15:50:20 +02:00
|
|
|
|
|
|
|
|
2012-02-02 23:13:31 +01:00
|
|
|
# Scan the source code in the given directory (and all subdirectories)
|
2014-04-03 16:04:06 +02:00
|
|
|
# and return the number of fatal problems encountered
|
2012-02-03 17:01:35 +01:00
|
|
|
def scan_source(build_dir, root_dir, thisbuild):
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2014-04-03 16:04:06 +02:00
|
|
|
count = 0
|
2012-02-02 23:13:31 +01:00
|
|
|
|
2013-10-09 10:43:02 +02:00
|
|
|
# Common known non-free blobs (always lower case):
|
2014-04-15 23:33:54 +02:00
|
|
|
usual_suspects = [
|
2015-07-05 23:26:41 +02:00
|
|
|
re.compile(r'.*flurryagent', re.IGNORECASE),
|
|
|
|
re.compile(r'.*paypal.*mpl', re.IGNORECASE),
|
|
|
|
re.compile(r'.*google.*analytics', re.IGNORECASE),
|
|
|
|
re.compile(r'.*admob.*sdk.*android', re.IGNORECASE),
|
|
|
|
re.compile(r'.*google.*ad.*view', re.IGNORECASE),
|
|
|
|
re.compile(r'.*google.*admob', re.IGNORECASE),
|
|
|
|
re.compile(r'.*google.*play.*services', re.IGNORECASE),
|
|
|
|
re.compile(r'.*crittercism', re.IGNORECASE),
|
|
|
|
re.compile(r'.*heyzap', re.IGNORECASE),
|
|
|
|
re.compile(r'.*jpct.*ae', re.IGNORECASE),
|
|
|
|
re.compile(r'.*youtube.*android.*player.*api', re.IGNORECASE),
|
|
|
|
re.compile(r'.*bugsense', re.IGNORECASE),
|
|
|
|
re.compile(r'.*crashlytics', re.IGNORECASE),
|
|
|
|
re.compile(r'.*ouya.*sdk', re.IGNORECASE),
|
|
|
|
re.compile(r'.*libspen23', re.IGNORECASE),
|
2014-12-31 16:42:26 +01:00
|
|
|
]
|
2012-03-06 20:50:19 +01:00
|
|
|
|
2014-04-15 23:53:44 +02:00
|
|
|
scanignore = getpaths(build_dir, thisbuild, 'scanignore')
|
|
|
|
scandelete = getpaths(build_dir, thisbuild, 'scandelete')
|
2013-11-01 13:46:19 +01:00
|
|
|
|
2015-01-06 14:37:13 +01:00
|
|
|
scanignore_worked = set()
|
|
|
|
scandelete_worked = set()
|
|
|
|
|
2013-11-01 13:46:19 +01:00
|
|
|
def toignore(fd):
|
2015-01-06 14:37:13 +01:00
|
|
|
for p in scanignore:
|
|
|
|
if fd.startswith(p):
|
|
|
|
scanignore_worked.add(p)
|
2013-11-01 13:46:19 +01:00
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
|
|
|
def todelete(fd):
|
2015-01-06 14:37:13 +01:00
|
|
|
for p in scandelete:
|
|
|
|
if fd.startswith(p):
|
|
|
|
scandelete_worked.add(p)
|
2013-11-01 13:46:19 +01:00
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
2015-01-06 14:37:13 +01:00
|
|
|
def ignoreproblem(what, fd, fp):
|
|
|
|
logging.info('Ignoring %s at %s' % (what, fd))
|
|
|
|
return 0
|
|
|
|
|
2013-11-01 13:46:19 +01:00
|
|
|
def removeproblem(what, fd, fp):
|
2014-01-27 15:59:40 +01:00
|
|
|
logging.info('Removing %s at %s' % (what, fd))
|
2013-11-01 13:46:19 +01:00
|
|
|
os.remove(fp)
|
2015-01-06 14:37:13 +01:00
|
|
|
return 0
|
2013-12-30 17:04:16 +01:00
|
|
|
|
2014-04-03 16:04:06 +02:00
|
|
|
def warnproblem(what, fd):
|
|
|
|
logging.warn('Found %s at %s' % (what, fd))
|
|
|
|
|
2013-11-01 13:46:19 +01:00
|
|
|
def handleproblem(what, fd, fp):
|
2014-08-21 20:02:52 +02:00
|
|
|
if toignore(fd):
|
2015-01-06 14:37:13 +01:00
|
|
|
return ignoreproblem(what, fd, fp)
|
|
|
|
if todelete(fd):
|
|
|
|
return removeproblem(what, fd, fp)
|
|
|
|
logging.error('Found %s at %s' % (what, fd))
|
|
|
|
return 1
|
2013-12-30 11:25:15 +01:00
|
|
|
|
2014-01-27 15:59:40 +01:00
|
|
|
# Iterate through all files in the source code
|
2014-07-09 19:11:13 +02:00
|
|
|
for r, d, f in os.walk(build_dir, topdown=True):
|
2013-10-16 22:50:07 +02:00
|
|
|
|
2014-07-09 19:11:13 +02:00
|
|
|
# It's topdown, so checking the basename is enough
|
|
|
|
for ignoredir in ('.hg', '.git', '.svn', '.bzr'):
|
|
|
|
if ignoredir in d:
|
|
|
|
d.remove(ignoredir)
|
2014-01-30 00:16:03 +01:00
|
|
|
|
|
|
|
for curfile in f:
|
2013-10-16 22:50:07 +02:00
|
|
|
|
2014-01-27 15:59:40 +01:00
|
|
|
# Path (relative) to the file
|
2013-10-16 22:50:07 +02:00
|
|
|
fp = os.path.join(r, curfile)
|
2014-05-06 19:56:44 +02:00
|
|
|
fd = fp[len(build_dir) + 1:]
|
2013-10-16 22:50:07 +02:00
|
|
|
|
2015-07-31 15:50:20 +02:00
|
|
|
mime = get_mime_type(fp)
|
2014-04-15 23:33:54 +02:00
|
|
|
|
2013-10-16 22:50:07 +02:00
|
|
|
if mime == 'application/x-sharedlib':
|
2014-04-03 16:04:06 +02:00
|
|
|
count += handleproblem('shared library', fd, fp)
|
2014-04-15 23:33:54 +02:00
|
|
|
|
2013-10-16 22:50:07 +02:00
|
|
|
elif mime == 'application/x-archive':
|
2014-04-03 16:04:06 +02:00
|
|
|
count += handleproblem('static library', fd, fp)
|
2014-04-15 23:33:54 +02:00
|
|
|
|
2015-07-31 15:50:20 +02:00
|
|
|
elif mime == 'application/x-executable' or mime == 'application/x-mach-binary':
|
2014-04-12 00:14:05 +02:00
|
|
|
count += handleproblem('binary executable', fd, fp)
|
2014-04-15 23:33:54 +02:00
|
|
|
|
2014-02-18 08:32:16 +01:00
|
|
|
elif mime == 'application/x-java-applet':
|
2014-04-12 00:14:05 +02:00
|
|
|
count += handleproblem('Java compiled class', fd, fp)
|
2014-04-15 23:33:54 +02:00
|
|
|
|
|
|
|
elif mime in (
|
2014-04-15 17:50:08 +02:00
|
|
|
'application/jar',
|
2014-03-16 23:38:00 +01:00
|
|
|
'application/zip',
|
|
|
|
'application/java-archive',
|
2014-04-15 23:33:54 +02:00
|
|
|
'application/octet-stream',
|
2015-01-20 18:01:29 +01:00
|
|
|
'binary', ):
|
2014-04-15 23:33:54 +02:00
|
|
|
|
|
|
|
if has_extension(fp, 'apk'):
|
|
|
|
removeproblem('APK file', fd, fp)
|
|
|
|
|
|
|
|
elif has_extension(fp, 'jar'):
|
|
|
|
|
|
|
|
if any(suspect.match(curfile) for suspect in usual_suspects):
|
|
|
|
                        count += handleproblem('usual suspect', fd, fp)
|
|
|
|
else:
|
|
|
|
warnproblem('JAR file', fd)
|
|
|
|
|
|
|
|
elif has_extension(fp, 'zip'):
|
|
|
|
warnproblem('ZIP file', fd)
|
|
|
|
|
|
|
|
else:
|
|
|
|
warnproblem('unknown compressed or binary file', fd)
|
2013-10-16 22:50:07 +02:00
|
|
|
|
2015-07-05 23:26:41 +02:00
|
|
|
elif has_extension(fp, 'java'):
|
2015-04-17 13:02:47 +02:00
|
|
|
if not os.path.isfile(fp):
|
|
|
|
continue
|
2013-10-16 22:50:07 +02:00
|
|
|
for line in file(fp):
|
|
|
|
if 'DexClassLoader' in line:
|
2014-04-03 16:04:06 +02:00
|
|
|
count += handleproblem('DexClassLoader', fd, fp)
|
2013-10-15 23:24:20 +02:00
|
|
|
break
|
2015-07-05 23:26:41 +02:00
|
|
|
|
|
|
|
elif has_extension(fp, 'gradle'):
|
|
|
|
if not os.path.isfile(fp):
|
|
|
|
continue
|
|
|
|
for i, line in enumerate(file(fp)):
|
|
|
|
if any(suspect.match(line) for suspect in usual_suspects):
|
|
|
|
count += handleproblem('usual suspect at line %d' % i, fd, fp)
|
|
|
|
break
|
2012-03-06 20:54:37 +01:00
|
|
|
|
2015-01-06 14:37:13 +01:00
|
|
|
for p in scanignore:
|
|
|
|
if p not in scanignore_worked:
|
|
|
|
logging.error('Unused scanignore path: %s' % p)
|
|
|
|
count += 1
|
|
|
|
|
|
|
|
for p in scandelete:
|
|
|
|
if p not in scandelete_worked:
|
|
|
|
logging.error('Unused scandelete path: %s' % p)
|
|
|
|
count += 1
|
|
|
|
|
2012-02-02 23:13:31 +01:00
|
|
|
# Presence of a jni directory without buildjni=yes might
|
2014-01-27 15:59:40 +01:00
|
|
|
# indicate a problem (if it's not a problem, explicitly use
|
2012-08-13 18:59:03 +02:00
|
|
|
# buildjni=no to bypass this check)
|
2013-12-30 17:04:16 +01:00
|
|
|
if (os.path.exists(os.path.join(root_dir, 'jni')) and
|
2014-05-31 23:10:16 +02:00
|
|
|
not thisbuild['buildjni']):
|
2014-06-24 22:31:39 +02:00
|
|
|
logging.error('Found jni directory, but buildjni is not enabled. Set it to \'no\' to ignore.')
|
2014-04-03 16:04:06 +02:00
|
|
|
count += 1
|
2012-02-02 23:13:31 +01:00
|
|
|
|
2014-04-03 16:04:06 +02:00
|
|
|
return count
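# Hedged usage sketch (hypothetical build metadata): callers treat a
# non-zero count of fatal problems as grounds to abort the build.
def _example_scan_source():
    build_dir = os.path.join('build', 'org.example.app')
    thisbuild = {'scanignore': [], 'scandelete': [], 'buildjni': ['yes']}
    count = scan_source(build_dir, build_dir, thisbuild)
    if count > 0:
        raise BuildException("Found %d fatal problems in the source" % count)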
|
2012-02-02 23:13:31 +01:00
|
|
|
|
2013-03-27 00:25:41 +01:00
|
|
|
|
2012-01-17 18:25:28 +01:00
|
|
|
class KnownApks:
|
|
|
|
|
|
|
|
def __init__(self):
|
|
|
|
self.path = os.path.join('stats', 'known_apks.txt')
|
|
|
|
self.apks = {}
|
2015-04-03 00:05:22 +02:00
|
|
|
if os.path.isfile(self.path):
|
2014-05-02 04:21:47 +02:00
|
|
|
for line in file(self.path):
|
2012-01-17 18:25:28 +01:00
|
|
|
t = line.rstrip().split(' ')
|
2012-01-20 00:03:35 +01:00
|
|
|
if len(t) == 2:
|
|
|
|
self.apks[t[0]] = (t[1], None)
|
|
|
|
else:
|
|
|
|
self.apks[t[0]] = (t[1], time.strptime(t[2], '%Y-%m-%d'))
|
2012-01-17 18:25:28 +01:00
|
|
|
self.changed = False
|
|
|
|
|
|
|
|
def writeifchanged(self):
|
|
|
|
if self.changed:
|
|
|
|
if not os.path.exists('stats'):
|
|
|
|
os.mkdir('stats')
|
|
|
|
f = open(self.path, 'w')
|
2012-01-19 15:14:14 +01:00
|
|
|
lst = []
|
2012-01-17 18:25:28 +01:00
|
|
|
for apk, app in self.apks.iteritems():
|
2012-01-20 00:03:35 +01:00
|
|
|
appid, added = app
|
|
|
|
line = apk + ' ' + appid
|
|
|
|
if added:
|
|
|
|
line += ' ' + time.strftime('%Y-%m-%d', added)
|
|
|
|
lst.append(line)
|
2012-01-19 15:14:14 +01:00
|
|
|
for line in sorted(lst):
|
|
|
|
f.write(line + '\n')
|
2012-01-17 18:25:28 +01:00
|
|
|
f.close()
|
|
|
|
|
2012-07-12 22:48:59 +02:00
|
|
|
# Record an apk (if it's new, otherwise does nothing)
|
|
|
|
# Returns the date it was added.
|
2012-01-17 18:25:28 +01:00
|
|
|
def recordapk(self, apk, app):
|
2014-05-28 09:33:14 +02:00
|
|
|
if apk not in self.apks:
|
2012-01-20 00:03:35 +01:00
|
|
|
self.apks[apk] = (app, time.gmtime(time.time()))
|
2012-01-17 18:25:28 +01:00
|
|
|
self.changed = True
|
2012-07-12 22:48:59 +02:00
|
|
|
_, added = self.apks[apk]
|
|
|
|
return added
|
2012-01-17 18:25:28 +01:00
|
|
|
|
2012-07-12 22:48:59 +02:00
|
|
|
# Look up information - given the 'apkname', returns (app id, date added/None).
|
|
|
|
# Or returns None for an unknown apk.
|
2012-01-17 18:25:28 +01:00
|
|
|
def getapp(self, apkname):
|
|
|
|
if apkname in self.apks:
|
|
|
|
return self.apks[apkname]
|
|
|
|
return None
|
2012-01-22 15:03:56 +01:00
|
|
|
|
2012-07-12 22:48:59 +02:00
|
|
|
# Get the most recent 'num' apps added to the repo, as a list of package ids
|
|
|
|
# with the most recent first.
|
2012-01-22 15:03:56 +01:00
|
|
|
def getlatest(self, num):
|
|
|
|
apps = {}
|
|
|
|
for apk, app in self.apks.iteritems():
|
|
|
|
appid, added = app
|
|
|
|
if added:
|
|
|
|
if appid in apps:
|
|
|
|
if apps[appid] > added:
|
|
|
|
apps[appid] = added
|
|
|
|
else:
|
|
|
|
apps[appid] = added
|
|
|
|
sortedapps = sorted(apps.iteritems(), key=operator.itemgetter(1))[-num:]
|
2014-05-02 04:16:32 +02:00
|
|
|
lst = [app for app, _ in sortedapps]
|
2012-01-26 22:36:23 +01:00
|
|
|
lst.reverse()
|
2012-01-22 15:03:56 +01:00
|
|
|
return lst
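# Typical lifecycle sketch (hypothetical apk): record each apk as it is
# indexed, then persist any additions in one go.
def _example_known_apks():
    knownapks = KnownApks()
    added = knownapks.recordapk('org.example.app_42.apk', 'org.example.app')
    if added:
        logging.debug("added on %s" % time.strftime('%Y-%m-%d', added))
    logging.debug("latest apps: %s" % knownapks.getlatest(10))
    knownapks.writeifchanged()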
|
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-10-31 16:37:39 +01:00
|
|
|
def isApkDebuggable(apkfile, config):
|
2013-04-15 16:07:23 +02:00
|
|
|
"""Returns True if the given apk file is debuggable
|
|
|
|
|
2013-07-31 19:35:57 +02:00
|
|
|
    :param apkfile: full path to the apk to check
    :param config: the loaded fdroid config dict"""
|
2013-04-15 14:04:13 +02:00
|
|
|
|
2014-12-31 16:34:11 +01:00
|
|
|
p = SdkToolsPopen(['aapt', 'dump', 'xmltree', apkfile, 'AndroidManifest.xml'],
|
|
|
|
output=False)
|
2013-04-15 14:04:13 +02:00
|
|
|
if p.returncode != 0:
|
2014-01-27 15:59:40 +01:00
|
|
|
logging.critical("Failed to get apk manifest information")
|
2013-04-15 14:04:13 +02:00
|
|
|
sys.exit(1)
|
2014-07-01 18:04:41 +02:00
|
|
|
for line in p.output.splitlines():
|
2013-12-20 09:34:03 +01:00
|
|
|
if 'android:debuggable' in line and not line.endswith('0x0'):
|
2013-04-15 14:04:13 +02:00
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
|
|
|
|
2013-10-16 23:17:51 +02:00
|
|
|
class AsynchronousFileReader(threading.Thread):
|
2014-12-31 16:42:26 +01:00
|
|
|
|
2013-10-16 23:17:51 +02:00
|
|
|
'''
|
|
|
|
Helper class to implement asynchronous reading of a file
|
|
|
|
in a separate thread. Pushes read lines on a queue to
|
|
|
|
be consumed in another thread.
|
|
|
|
'''
|
2013-12-30 17:04:16 +01:00
|
|
|
|
2013-10-16 23:17:51 +02:00
|
|
|
def __init__(self, fd, queue):
|
|
|
|
assert isinstance(queue, Queue.Queue)
|
|
|
|
assert callable(fd.readline)
|
|
|
|
threading.Thread.__init__(self)
|
|
|
|
self._fd = fd
|
|
|
|
self._queue = queue
|
2013-12-30 17:04:16 +01:00
|
|
|
|
2013-10-16 23:17:51 +02:00
|
|
|
def run(self):
|
|
|
|
        '''The body of the thread: read lines and put them on the queue.'''
|
|
|
|
for line in iter(self._fd.readline, ''):
|
|
|
|
self._queue.put(line)
|
2013-12-30 17:04:16 +01:00
|
|
|
|
2013-10-16 23:17:51 +02:00
|
|
|
def eof(self):
|
|
|
|
'''Check whether there is no more content to expect.'''
|
|
|
|
return not self.is_alive() and self._queue.empty()
|
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-10-16 23:17:51 +02:00
|
|
|
class PopenResult:
|
|
|
|
returncode = None
|
2014-07-01 18:04:41 +02:00
|
|
|
output = ''
|
2013-10-16 23:17:51 +02:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2015-01-26 19:14:29 +01:00
|
|
|
def SdkToolsPopen(commands, cwd=None, output=True):
|
2014-12-09 14:12:41 +01:00
|
|
|
cmd = commands[0]
|
|
|
|
if cmd not in config:
|
|
|
|
config[cmd] = find_sdk_tools_cmd(commands[0])
|
|
|
|
return FDroidPopen([config[cmd]] + commands[1:],
|
2015-01-26 19:14:29 +01:00
|
|
|
cwd=cwd, output=output)
|
2014-02-17 13:12:25 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2015-01-26 19:14:29 +01:00
|
|
|
def FDroidPopen(commands, cwd=None, output=True):
|
2014-02-17 13:12:25 +01:00
|
|
|
"""
|
|
|
|
Run a command and capture the possibly huge output.
|
|
|
|
|
|
|
|
:param commands: command and argument list like in subprocess.Popen
|
|
|
|
    :param cwd: optionally specifies a working directory
    :param output: whether to mirror the command's output to the console
                   (only has an effect together with --verbose)
|
|
|
|
:returns: A PopenResult.
|
|
|
|
"""
|
|
|
|
|
2014-07-01 21:03:50 +02:00
|
|
|
global env
|
|
|
|
|
2014-06-25 10:25:47 +02:00
|
|
|
if cwd:
|
|
|
|
cwd = os.path.normpath(cwd)
|
|
|
|
logging.debug("Directory: %s" % cwd)
|
|
|
|
logging.debug("> %s" % ' '.join(commands))
|
2014-02-16 00:27:19 +01:00
|
|
|
|
2013-10-16 23:17:51 +02:00
|
|
|
result = PopenResult()
|
2014-09-11 23:08:51 +02:00
|
|
|
p = None
|
|
|
|
try:
|
2015-01-26 19:14:29 +01:00
|
|
|
p = subprocess.Popen(commands, cwd=cwd, shell=False, env=env,
|
2014-09-11 23:08:51 +02:00
|
|
|
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
|
|
|
except OSError, e:
|
2015-01-26 19:14:29 +01:00
|
|
|
raise BuildException("OSError while trying to execute " +
|
|
|
|
' '.join(commands) + ': ' + str(e))
|
2013-12-30 17:04:16 +01:00
|
|
|
|
2013-10-16 23:17:51 +02:00
|
|
|
stdout_queue = Queue.Queue()
|
|
|
|
stdout_reader = AsynchronousFileReader(p.stdout, stdout_queue)
|
|
|
|
stdout_reader.start()
|
2013-12-30 17:04:16 +01:00
|
|
|
|
2014-01-16 11:17:22 +01:00
|
|
|
# Check the queue for output (until there is no more to get)
|
|
|
|
while not stdout_reader.eof():
|
2013-10-16 23:17:51 +02:00
|
|
|
while not stdout_queue.empty():
|
|
|
|
line = stdout_queue.get()
|
2014-07-09 11:11:41 +02:00
|
|
|
if output and options.verbose:
|
2013-10-16 23:17:51 +02:00
|
|
|
# Output directly to console
|
2014-07-05 14:10:26 +02:00
|
|
|
sys.stderr.write(line)
|
|
|
|
sys.stderr.flush()
|
2014-07-01 18:04:41 +02:00
|
|
|
result.output += line
|
2013-10-16 23:17:51 +02:00
|
|
|
|
2013-12-19 17:58:10 +01:00
|
|
|
time.sleep(0.1)
|
2013-10-16 23:17:51 +02:00
|
|
|
|
2014-08-10 12:28:19 +02:00
|
|
|
result.returncode = p.wait()
|
2013-10-16 23:17:51 +02:00
|
|
|
return result
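# Minimal usage sketch (hypothetical command; assumes the module-level
# 'env' and 'options' globals have been initialised). Note that FDroidPopen
# only raises if the executable cannot be started; callers check returncode.
def _example_fdroidpopen():
    p = FDroidPopen(['git', 'rev-parse', 'HEAD'],
                    cwd=os.path.join('build', 'org.example.app'))
    if p.returncode != 0:
        raise FDroidException("git failed", p.output)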
|
2013-10-27 23:43:38 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-11-14 14:09:37 +01:00
|
|
|
def remove_signing_keys(build_dir):
|
2014-01-15 18:36:43 +01:00
|
|
|
comment = re.compile(r'[ ]*//')
|
2014-02-23 11:54:30 +01:00
|
|
|
signing_configs = re.compile(r'^[\t ]*signingConfigs[ \t]*{[ \t]*$')
|
|
|
|
line_matches = [
|
2014-05-06 19:50:52 +02:00
|
|
|
re.compile(r'^[\t ]*signingConfig [^ ]*$'),
|
2014-06-05 10:02:30 +02:00
|
|
|
re.compile(r'.*android\.signingConfigs\.[^{]*$'),
|
2014-05-06 19:50:52 +02:00
|
|
|
re.compile(r'.*variant\.outputFile = .*'),
|
2014-12-23 12:24:07 +01:00
|
|
|
re.compile(r'.*output\.outputFile = .*'),
|
2014-05-06 19:50:52 +02:00
|
|
|
re.compile(r'.*\.readLine\(.*'),
|
2014-12-31 16:42:26 +01:00
|
|
|
]
|
2013-11-14 14:09:37 +01:00
|
|
|
for root, dirs, files in os.walk(build_dir):
|
|
|
|
if 'build.gradle' in files:
|
|
|
|
path = os.path.join(root, 'build.gradle')
|
2013-10-27 23:43:38 +01:00
|
|
|
|
2013-11-14 14:09:37 +01:00
|
|
|
with open(path, "r") as o:
|
|
|
|
lines = o.readlines()
|
2013-12-30 17:04:16 +01:00
|
|
|
|
2014-06-22 21:34:14 +02:00
|
|
|
changed = False
|
|
|
|
|
2013-11-14 14:09:37 +01:00
|
|
|
opened = 0
|
2015-01-05 12:59:33 +01:00
|
|
|
i = 0
|
2013-11-16 12:54:35 +01:00
|
|
|
with open(path, "w") as o:
|
2015-01-05 12:59:33 +01:00
|
|
|
while i < len(lines):
|
|
|
|
line = lines[i]
|
|
|
|
i += 1
|
|
|
|
while line.endswith('\\\n'):
|
|
|
|
line = line.rstrip('\\\n') + lines[i]
|
|
|
|
i += 1
|
|
|
|
|
2014-01-15 18:36:43 +01:00
|
|
|
if comment.match(line):
|
2014-02-28 10:54:14 +01:00
|
|
|
continue
|
|
|
|
|
|
|
|
if opened > 0:
|
|
|
|
opened += line.count('{')
|
|
|
|
opened -= line.count('}')
|
|
|
|
continue
|
|
|
|
|
|
|
|
if signing_configs.match(line):
|
2014-06-22 21:34:14 +02:00
|
|
|
changed = True
|
2014-02-28 10:54:14 +01:00
|
|
|
opened += 1
|
|
|
|
continue
|
|
|
|
|
|
|
|
if any(s.match(line) for s in line_matches):
|
2014-06-22 21:34:14 +02:00
|
|
|
changed = True
|
2014-02-28 10:54:14 +01:00
|
|
|
continue
|
|
|
|
|
|
|
|
if opened == 0:
|
2013-11-14 14:09:37 +01:00
|
|
|
o.write(line)
|
|
|
|
|
2014-06-22 21:34:14 +02:00
|
|
|
if changed:
|
|
|
|
logging.info("Cleaned build.gradle of keysigning configs at %s" % path)
|
2013-11-20 19:08:59 +01:00
|
|
|
|
2014-03-13 10:31:22 +01:00
|
|
|
for propfile in [
|
|
|
|
'project.properties',
|
|
|
|
'build.properties',
|
|
|
|
'default.properties',
|
2015-01-20 18:01:29 +01:00
|
|
|
'ant.properties', ]:
|
2013-11-14 14:09:37 +01:00
|
|
|
if propfile in files:
|
|
|
|
path = os.path.join(root, propfile)
|
2013-11-15 12:42:39 +01:00
|
|
|
|
2013-11-16 12:54:35 +01:00
|
|
|
with open(path, "r") as o:
|
|
|
|
lines = o.readlines()
|
|
|
|
|
2014-06-22 21:34:14 +02:00
|
|
|
changed = False
|
|
|
|
|
2013-11-16 12:54:35 +01:00
|
|
|
with open(path, "w") as o:
|
|
|
|
for line in lines:
|
2014-06-22 21:34:14 +02:00
|
|
|
if any(line.startswith(s) for s in ('key.store', 'key.alias')):
|
|
|
|
changed = True
|
2014-03-13 10:31:22 +01:00
|
|
|
continue
|
2014-06-22 21:34:14 +02:00
|
|
|
|
2014-03-13 10:31:22 +01:00
|
|
|
o.write(line)
|
2013-10-27 23:43:38 +01:00
|
|
|
|
2014-06-22 21:34:14 +02:00
|
|
|
if changed:
|
|
|
|
logging.info("Cleaned %s of keysigning configs at %s" % (propfile, path))
|
2013-11-15 12:42:39 +01:00
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2015-01-06 19:41:55 +01:00
|
|
|
def reset_env_path():
|
|
|
|
global env, orig_path
|
|
|
|
env['PATH'] = orig_path
|
|
|
|
|
|
|
|
|
|
|
|
def add_to_env_path(path):
|
|
|
|
global env
|
|
|
|
paths = env['PATH'].split(os.pathsep)
|
|
|
|
if path in paths:
|
|
|
|
return
|
2015-01-13 16:37:10 +01:00
|
|
|
paths.append(path)
|
2015-01-06 19:41:55 +01:00
|
|
|
env['PATH'] = os.pathsep.join(paths)
|
|
|
|
|
|
|
|
|
2015-05-10 13:53:06 +02:00
|
|
|
def replace_config_vars(cmd, build):
|
2015-01-05 00:29:27 +01:00
|
|
|
global env
|
2013-11-08 20:44:27 +01:00
|
|
|
cmd = cmd.replace('$$SDK$$', config['sdk_path'])
|
2015-01-05 00:29:27 +01:00
|
|
|
# env['ANDROID_NDK'] is set in build_local right before prepare_source
|
|
|
|
cmd = cmd.replace('$$NDK$$', env['ANDROID_NDK'])
|
2013-11-08 20:44:27 +01:00
|
|
|
cmd = cmd.replace('$$MVN3$$', config['mvn3'])
|
2015-05-10 13:53:06 +02:00
|
|
|
if build is not None:
|
|
|
|
cmd = cmd.replace('$$COMMIT$$', build['commit'])
|
|
|
|
cmd = cmd.replace('$$VERSION$$', build['version'])
|
|
|
|
cmd = cmd.replace('$$VERCODE$$', build['vercode'])
|
2013-11-08 20:44:27 +01:00
|
|
|
return cmd
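# For instance (hypothetical build dict; assumes config and env are set up),
# a Prepare/Init/Prebuild command is expanded like this:
def _example_replace_config_vars():
    build = {'commit': 'v1.0', 'version': '1.0', 'vercode': '100'}
    cmd = replace_config_vars("$$MVN3$$ package -Drev=$$COMMIT$$", build)
    # with the default config this yields "mvn package -Drev=v1.0"
    logging.debug(cmd)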
|
|
|
|
|
2014-05-02 05:39:33 +02:00
|
|
|
|
2013-11-15 20:42:17 +01:00
|
|
|
def place_srclib(root_dir, number, libpath):
|
|
|
|
if not number:
|
|
|
|
return
|
|
|
|
relpath = os.path.relpath(libpath, root_dir)
|
2013-11-17 23:20:58 +01:00
|
|
|
proppath = os.path.join(root_dir, 'project.properties')
|
|
|
|
|
2014-01-27 22:34:34 +01:00
|
|
|
lines = []
|
2015-04-17 00:58:20 +02:00
|
|
|
if os.path.isfile(proppath):
|
|
|
|
with open(proppath, "r") as o:
|
|
|
|
lines = o.readlines()
|
2013-11-17 23:20:58 +01:00
|
|
|
|
|
|
|
with open(proppath, "w") as o:
|
|
|
|
placed = False
|
|
|
|
for line in lines:
|
|
|
|
if line.startswith('android.library.reference.%d=' % number):
|
2014-05-02 04:16:32 +02:00
|
|
|
o.write('android.library.reference.%d=%s\n' % (number, relpath))
|
2013-11-17 23:20:58 +01:00
|
|
|
placed = True
|
|
|
|
else:
|
|
|
|
o.write(line)
|
|
|
|
if not placed:
|
2014-05-02 04:16:32 +02:00
|
|
|
o.write('android.library.reference.%d=%s\n' % (number, relpath))
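# Illustration (hypothetical paths): after this call, project.properties in
# root_dir contains a line like "android.library.reference.2=../srclib/...".
def _example_place_srclib():
    place_srclib(os.path.join('build', 'org.example.app'), 2,
                 os.path.join('build', 'srclib', 'MySrclib', 'library'))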
|
2014-10-24 22:04:15 +02:00
|
|
|
|
|
|
|
|
2015-01-31 16:55:18 +01:00
|
|
|
def verify_apks(signed_apk, unsigned_apk, tmp_dir):
|
|
|
|
"""Verify that two apks are the same
|
|
|
|
|
|
|
|
One of the inputs is signed, the other is unsigned. The signature metadata
|
|
|
|
is transferred from the signed to the unsigned apk, and then jarsigner is
|
|
|
|
    used to verify that the signature from the signed apk is also valid for
|
|
|
|
the unsigned one.
|
2015-01-31 16:58:08 +01:00
|
|
|
:param signed_apk: Path to a signed apk file
|
|
|
|
:param unsigned_apk: Path to an unsigned apk file expected to match it
|
|
|
|
:param tmp_dir: Path to directory for temporary files
|
|
|
|
:returns: None if the verification is successful, otherwise a string
|
|
|
|
describing what went wrong.
|
2015-01-31 16:55:18 +01:00
|
|
|
"""
|
2015-01-31 17:04:39 +01:00
|
|
|
sigfile = re.compile(r'META-INF/[0-9A-Za-z]+\.(SF|RSA)')
|
2015-01-31 16:55:18 +01:00
|
|
|
with ZipFile(signed_apk) as signed_apk_as_zip:
|
2015-01-31 17:04:39 +01:00
|
|
|
meta_inf_files = ['META-INF/MANIFEST.MF']
|
|
|
|
for f in signed_apk_as_zip.namelist():
|
|
|
|
if sigfile.match(f):
|
|
|
|
meta_inf_files.append(f)
|
|
|
|
if len(meta_inf_files) < 3:
|
|
|
|
return "Signature files missing from {0}".format(signed_apk)
|
2015-01-31 16:55:18 +01:00
|
|
|
signed_apk_as_zip.extractall(tmp_dir, meta_inf_files)
|
|
|
|
with ZipFile(unsigned_apk, mode='a') as unsigned_apk_as_zip:
|
|
|
|
for meta_inf_file in meta_inf_files:
|
|
|
|
unsigned_apk_as_zip.write(os.path.join(tmp_dir, meta_inf_file), arcname=meta_inf_file)
|
|
|
|
|
|
|
|
if subprocess.call(['jarsigner', '-verify', unsigned_apk]) != 0:
|
|
|
|
logging.info("...NOT verified - {0}".format(signed_apk))
|
2015-01-31 16:58:08 +01:00
|
|
|
return compare_apks(signed_apk, unsigned_apk, tmp_dir)
|
2015-01-31 16:55:18 +01:00
|
|
|
logging.info("...successfully verified")
|
2015-01-31 16:58:08 +01:00
|
|
|
return None
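# Sketch of the verification round-trip (hypothetical file names): a None
# result means the signature carried over cleanly to the unsigned apk.
def _example_verify_apks():
    result = verify_apks('repo/org.example.app_42.apk',
                         'unsigned/org.example.app_42.apk', 'tmp')
    if result is not None:
        logging.error("verification failed: %s" % result)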
|
2015-01-31 16:55:18 +01:00
|
|
|
|
|
|
|
|
2014-10-24 22:04:15 +02:00
|
|
|
def compare_apks(apk1, apk2, tmp_dir):
|
|
|
|
"""Compare two apks
|
|
|
|
|
|
|
|
Returns None if the apk content is the same (apart from the signing key),
|
|
|
|
otherwise a string describing what's different, or what went wrong when
|
|
|
|
trying to do the comparison.
|
|
|
|
"""
|
|
|
|
|
2015-01-07 19:55:26 +01:00
|
|
|
badchars = re.compile('''[/ :;'"]''')
|
|
|
|
apk1dir = os.path.join(tmp_dir, badchars.sub('_', apk1[0:-4])) # trim .apk
|
|
|
|
apk2dir = os.path.join(tmp_dir, badchars.sub('_', apk2[0:-4])) # trim .apk
|
|
|
|
for d in [apk1dir, apk2dir]:
|
2014-10-24 22:04:15 +02:00
|
|
|
if os.path.exists(d):
|
|
|
|
shutil.rmtree(d)
|
|
|
|
os.mkdir(d)
|
2015-01-07 19:56:55 +01:00
|
|
|
os.mkdir(os.path.join(d, 'jar-xf'))
|
2014-10-24 22:04:15 +02:00
|
|
|
|
|
|
|
if subprocess.call(['jar', 'xf',
|
|
|
|
os.path.abspath(apk1)],
|
2015-01-07 19:56:55 +01:00
|
|
|
cwd=os.path.join(apk1dir, 'jar-xf')) != 0:
|
2014-10-24 22:04:15 +02:00
|
|
|
return("Failed to unpack " + apk1)
|
|
|
|
if subprocess.call(['jar', 'xf',
|
|
|
|
os.path.abspath(apk2)],
|
2015-01-07 19:56:55 +01:00
|
|
|
cwd=os.path.join(apk2dir, 'jar-xf')) != 0:
|
2014-10-24 22:04:15 +02:00
|
|
|
return("Failed to unpack " + apk2)
|
|
|
|
|
2015-01-07 20:08:15 +01:00
|
|
|
    # try to find apktool in the PATH, if it hasn't been manually configured
|
2015-01-13 09:26:30 +01:00
|
|
|
if 'apktool' not in config:
|
2015-01-07 20:08:15 +01:00
|
|
|
tmp = find_command('apktool')
|
2015-01-13 09:26:30 +01:00
|
|
|
if tmp is not None:
|
2015-01-07 20:08:15 +01:00
|
|
|
config['apktool'] = tmp
|
|
|
|
if 'apktool' in config:
|
|
|
|
if subprocess.call([config['apktool'], 'd', os.path.abspath(apk1), '--output', 'apktool'],
|
|
|
|
cwd=apk1dir) != 0:
|
|
|
|
return("Failed to unpack " + apk1)
|
|
|
|
if subprocess.call([config['apktool'], 'd', os.path.abspath(apk2), '--output', 'apktool'],
|
|
|
|
cwd=apk2dir) != 0:
|
|
|
|
return("Failed to unpack " + apk2)
|
|
|
|
|
2015-01-07 19:55:26 +01:00
|
|
|
p = FDroidPopen(['diff', '-r', apk1dir, apk2dir], output=False)
|
2014-10-24 22:04:15 +02:00
|
|
|
lines = p.output.splitlines()
|
|
|
|
if len(lines) != 1 or 'META-INF' not in lines[0]:
|
2015-01-07 20:09:03 +01:00
|
|
|
meld = find_command('meld')
|
2015-01-13 09:26:30 +01:00
|
|
|
if meld is not None:
|
2015-01-07 20:09:03 +01:00
|
|
|
            # launch meld for manual inspection, keeping p (the diff output) intact
            FDroidPopen([meld, apk1dir, apk2dir], output=False)
|
2014-10-24 22:04:15 +02:00
|
|
|
return("Unexpected diff output - " + p.output)
|
|
|
|
|
2015-01-12 10:51:54 +01:00
|
|
|
# since everything verifies, delete the comparison to keep cruft down
|
|
|
|
shutil.rmtree(apk1dir)
|
|
|
|
shutil.rmtree(apk2dir)
|
|
|
|
|
2014-10-24 22:04:15 +02:00
|
|
|
# If we get here, it seems like they're the same!
|
|
|
|
return None
|
2015-01-07 20:08:15 +01:00
|
|
|
|
|
|
|
|
|
|
|
def find_command(command):
|
|
|
|
'''find the full path of a command, or None if it can't be found in the PATH'''
|
|
|
|
|
|
|
|
def is_exe(fpath):
|
|
|
|
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
|
|
|
|
|
|
|
|
fpath, fname = os.path.split(command)
|
|
|
|
if fpath:
|
|
|
|
if is_exe(command):
|
|
|
|
return command
|
|
|
|
else:
|
|
|
|
for path in os.environ["PATH"].split(os.pathsep):
|
|
|
|
path = path.strip('"')
|
|
|
|
exe_file = os.path.join(path, command)
|
|
|
|
if is_exe(exe_file):
|
|
|
|
return exe_file
|
|
|
|
|
|
|
|
return None
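# Usage sketch, mirroring how compare_apks() locates optional helpers:
def _example_find_command():
    apktool = find_command('apktool')
    if apktool is None:
        logging.warn("apktool not found in PATH")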
|
2015-04-21 01:14:58 +02:00
|
|
|
|
|
|
|
|
|
|
|
def genpassword():
|
|
|
|
'''generate a random password for when generating keys'''
|
|
|
|
h = hashlib.sha256()
|
|
|
|
h.update(os.urandom(16)) # salt
|
|
|
|
h.update(bytes(socket.getfqdn()))
|
|
|
|
return h.digest().encode('base64').strip()
|
|
|
|
|
|
|
|
|
2015-04-21 02:27:38 +02:00
|
|
|
def genkeystore(localconfig):
|
|
|
|
'''Generate a new key with random passwords and add it to new keystore'''
|
|
|
|
logging.info('Generating a new key in "' + localconfig['keystore'] + '"...')
|
|
|
|
keystoredir = os.path.dirname(localconfig['keystore'])
|
|
|
|
if keystoredir is None or keystoredir == '':
|
|
|
|
keystoredir = os.path.join(os.getcwd(), keystoredir)
|
|
|
|
if not os.path.exists(keystoredir):
|
|
|
|
os.makedirs(keystoredir, mode=0o700)
|
|
|
|
|
|
|
|
write_password_file("keystorepass", localconfig['keystorepass'])
|
|
|
|
write_password_file("keypass", localconfig['keypass'])
|
2015-04-21 01:14:58 +02:00
|
|
|
p = FDroidPopen(['keytool', '-genkey',
|
2015-04-21 02:27:38 +02:00
|
|
|
'-keystore', localconfig['keystore'],
|
|
|
|
'-alias', localconfig['repo_keyalias'],
|
2015-04-21 01:14:58 +02:00
|
|
|
'-keyalg', 'RSA', '-keysize', '4096',
|
|
|
|
'-sigalg', 'SHA256withRSA',
|
|
|
|
'-validity', '10000',
|
|
|
|
'-storepass:file', config['keystorepassfile'],
|
|
|
|
'-keypass:file', config['keypassfile'],
|
2015-04-21 02:27:38 +02:00
|
|
|
'-dname', localconfig['keydname']])
|
2015-04-21 01:14:58 +02:00
|
|
|
# TODO keypass should be sent via stdin
|
|
|
|
if p.returncode != 0:
|
|
|
|
raise BuildException("Failed to generate key", p.output)
|
2015-07-31 15:54:50 +02:00
|
|
|
os.chmod(localconfig['keystore'], 0o0600)
|
2015-04-21 01:14:58 +02:00
|
|
|
# now show the lovely key that was just generated
|
|
|
|
p = FDroidPopen(['keytool', '-list', '-v',
|
2015-04-21 02:27:38 +02:00
|
|
|
'-keystore', localconfig['keystore'],
|
|
|
|
'-alias', localconfig['repo_keyalias'],
|
2015-04-21 01:14:58 +02:00
|
|
|
'-storepass:file', config['keystorepassfile']])
|
|
|
|
logging.info(p.output.strip() + '\n\n')
|
2015-04-21 02:27:38 +02:00
|
|
|
|
|
|
|
|
|
|
|
def write_to_config(thisconfig, key, value=None):
|
|
|
|
'''write a key/value to the local config.py'''
|
|
|
|
if value is None:
|
|
|
|
origkey = key + '_orig'
|
|
|
|
value = thisconfig[origkey] if origkey in thisconfig else thisconfig[key]
|
|
|
|
with open('config.py', 'r') as f:
|
|
|
|
data = f.read()
|
|
|
|
    pattern = r'\n[\s#]*' + key + r'\s*=\s*"[^"]*"'
    repl = '\n' + key + ' = "' + value + '"'
    data = re.sub(pattern, repl, data)
    # if this key is not in the file, append it
    if not re.search(r'^[\s#]*' + key + r'\s*=\s*"', data, re.MULTILINE):
        data += repl
    # make sure the file ends with a newline
    if not data.endswith('\n'):
        data += '\n'
|
2015-04-21 02:27:38 +02:00
|
|
|
with open('config.py', 'w') as f:
|
|
|
|
f.writelines(data)
|
2015-06-03 14:35:50 +02:00
|
|
|
|
|
|
|
|
2015-06-03 15:42:45 +02:00
|
|
|
def parse_xml(path):
|
2015-06-03 14:35:50 +02:00
|
|
|
return XMLElementTree.parse(path).getroot()
|
2015-06-03 19:40:43 +02:00
|
|
|
|
|
|
|
|
|
|
|
def string_is_integer(string):
|
|
|
|
try:
|
|
|
|
int(string)
|
|
|
|
return True
|
|
|
|
except ValueError:
|
|
|
|
return False
|
2015-07-24 06:42:21 +02:00
|
|
|
|
|
|
|
|
|
|
|
def download_file(url, local_filename=None, dldir='tmp'):
|
|
|
|
filename = url.split('/')[-1]
|
|
|
|
if local_filename is None:
|
|
|
|
local_filename = os.path.join(dldir, filename)
|
|
|
|
# the stream=True parameter keeps memory usage low
|
|
|
|
r = requests.get(url, stream=True)
|
|
|
|
with open(local_filename, 'wb') as f:
|
|
|
|
for chunk in r.iter_content(chunk_size=1024):
|
|
|
|
if chunk: # filter out keep-alive new chunks
|
|
|
|
f.write(chunk)
|
|
|
|
f.flush()
|
|
|
|
return local_filename
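# Usage sketch (hypothetical URL); the return value is the local path written:
def _example_download_file():
    path = download_file('https://example.com/tools/archive.tar.gz', dldir='tmp')
    logging.info("downloaded to %s" % path)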
|