#!/usr/bin/env python3
#
# common.py - part of the FDroid server tools
# Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com
# Copyright (C) 2013-2014 Daniel Martí <mvdan@mvdan.cc>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

# common.py is imported by all modules, so do not import third-party
# libraries here as they will become a requirement for all commands.

import io
import os
import sys
import re
import shutil
import glob
import stat
import subprocess
import time
import operator
import logging
import hashlib
import socket
import base64
import zipfile
import tempfile
import json
import xml.etree.ElementTree as XMLElementTree

from binascii import hexlify
from datetime import datetime, timedelta
from distutils.version import LooseVersion
from queue import Queue
from zipfile import ZipFile

from pyasn1.codec.der import decoder, encoder
from pyasn1_modules import rfc2315
from pyasn1.error import PyAsn1Error

from distutils.util import strtobool

import fdroidserver.metadata
from fdroidserver import _
from fdroidserver.exception import FDroidException, VCSException, NoSubmodulesException, \
    BuildException, VerificationException

from .asynchronousfilereader import AsynchronousFileReader


# this is the build-tools version, aapt has a separate version that
# has to be manually set in test_aapt_version()
MINIMUM_AAPT_VERSION = '26.0.0'


# A signature block file with a .DSA, .RSA, or .EC extension
CERT_PATH_REGEX = re.compile(r'^META-INF/.*\.(DSA|EC|RSA)$')

APK_NAME_REGEX = re.compile(r'^([a-zA-Z][\w.]*)_(-?[0-9]+)_?([0-9a-f]{7})?\.apk')
STANDARD_FILE_NAME_REGEX = re.compile(r'^(\w[\w.]*)_(-?[0-9]+)\.\w+')
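# For illustration (assumption, not in the original source): APK_NAME_REGEX matches
# names like 'org.fdroid.fdroid_1007.apk' or 'org.fdroid.fdroid_1007_badcafe.apk',
# capturing the appid, the versionCode, and the optional 7-hex-digit signer prefix.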


XMLElementTree.register_namespace('android', 'http://schemas.android.com/apk/res/android')

config = None
options = None
env = None
orig_path = None


default_config = {
    'sdk_path': "$ANDROID_HOME",
    'ndk_paths': {
        'r9b': None,
        'r10e': None,
        'r11c': None,
        'r12b': "$ANDROID_NDK",
        'r13b': None,
        'r14b': None,
        'r15c': None,
        'r16': None,
    },
    'build_tools': MINIMUM_AAPT_VERSION,
    'force_build_tools': False,
    'java_paths': None,
    'ant': "ant",
    'mvn3': "mvn",
    'gradle': 'gradle',
    'accepted_formats': ['txt', 'yml'],
    'sync_from_local_copy_dir': False,
    'allow_disabled_algorithms': False,
    'per_app_repos': False,
    # generate a symlink with a constant name (e.g. F-Droid.apk) that points to
    # the current release version of each app; the symlink name comes from
    # app['Name'] by default, or app['id'] (e.g. org.fdroid.fdroid.apk)
    'make_current_version_link': True,
    'current_version_name_source': 'Name',
    'update_stats': False,
    'stats_ignore': [],
    'stats_server': None,
    'stats_user': None,
    'stats_to_carbon': False,
    'repo_maxage': 0,
    'build_server_always': False,
    'keystore': 'keystore.jks',
    'smartcardoptions': [],
    'char_limits': {
        'author': 256,
        'name': 30,
        'summary': 80,
        'description': 4000,
        'video': 256,
        'whatsNew': 500,
    },
    'keyaliases': {},
    'repo_url': "https://MyFirstFDroidRepo.org/fdroid/repo",
    'repo_name': "My First FDroid Repo Demo",
    'repo_icon': "fdroid-icon.png",
    'repo_description': '''
        This is a repository of apps to be used with FDroid. Applications in this
        repository are either official binaries built by the original application
        developers, or are binaries built from source by the admin of f-droid.org
        using the tools on https://gitlab.com/u/fdroid.
        ''',
    'archive_older': 0,
}
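
# A config.py in the repo root overrides these defaults; illustrative snippet
# (assumption, values are examples only), exec()d by read_config() below:
#
#     sdk_path = "/opt/android-sdk"
#     repo_url = "https://example.org/fdroid/repo"
#     serverwebroot = ["user@host:/var/www/fdroid/"]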


def setup_global_opts(parser):
    try:  # the buildserver VM might not have PIL installed
        from PIL import PngImagePlugin
        logger = logging.getLogger(PngImagePlugin.__name__)
        logger.setLevel(logging.INFO)  # tame the "STREAM" debug messages
    except ImportError:
        pass
    parser.add_argument("-v", "--verbose", action="store_true", default=False,
                        help=_("Spew out even more information than normal"))
    parser.add_argument("-q", "--quiet", action="store_true", default=False,
                        help=_("Restrict output to warnings and errors"))


def _add_java_paths_to_config(pathlist, thisconfig):
    def path_version_key(s):
        versionlist = []
        for u in re.split('[^0-9]+', s):
            try:
                versionlist.append(int(u))
            except ValueError:
                pass
        return versionlist

    for d in sorted(pathlist, key=path_version_key):
        if os.path.islink(d):
            continue
        j = os.path.basename(d)
        # the last one found will be the canonical one, so order appropriately
        for regex in [
                r'^1\.([6-9])\.0\.jdk$',  # OSX
                r'^jdk1\.([6-9])\.0_[0-9]+.jdk$',  # OSX and Oracle tarball
                r'^jdk1\.([6-9])\.0_[0-9]+$',  # Oracle Windows
                r'^jdk([6-9])-openjdk$',  # Arch
                r'^java-([6-9])-openjdk$',  # Arch
                r'^java-([6-9])-jdk$',  # Arch (oracle)
                r'^java-1\.([6-9])\.0-.*$',  # RedHat
                r'^java-([6-9])-oracle$',  # Debian WebUpd8
                r'^jdk-([6-9])-oracle-.*$',  # Debian make-jpkg
                r'^java-([6-9])-openjdk-[^c][^o][^m].*$',  # Debian
                ]:
            m = re.match(regex, j)
            if not m:
                continue
            for p in [d, os.path.join(d, 'Contents', 'Home')]:
                if os.path.exists(os.path.join(p, 'bin', 'javac')):
                    thisconfig['java_paths'][m.group(1)] = p


def fill_config_defaults(thisconfig):
    for k, v in default_config.items():
        if k not in thisconfig:
            thisconfig[k] = v

    # Expand paths (~users and $vars)
    def expand_path(path):
        if path is None:
            return None
        orig = path
        path = os.path.expanduser(path)
        path = os.path.expandvars(path)
        if orig == path:
            return None
        return path

    for k in ['sdk_path', 'ant', 'mvn3', 'gradle', 'keystore', 'repo_icon']:
        v = thisconfig[k]
        exp = expand_path(v)
        if exp is not None:
            thisconfig[k] = exp
            thisconfig[k + '_orig'] = v

    # find all installed JDKs for keytool, jarsigner, and JAVA[6-9]_HOME env vars
    if thisconfig['java_paths'] is None:
        thisconfig['java_paths'] = dict()
        pathlist = []
        pathlist += glob.glob('/usr/lib/jvm/j*[6-9]*')
        pathlist += glob.glob('/usr/java/jdk1.[6-9]*')
        pathlist += glob.glob('/System/Library/Java/JavaVirtualMachines/1.[6-9].0.jdk')
        pathlist += glob.glob('/Library/Java/JavaVirtualMachines/*jdk*[6-9]*')
        if os.getenv('JAVA_HOME') is not None:
            pathlist.append(os.getenv('JAVA_HOME'))
        if os.getenv('PROGRAMFILES') is not None:
            pathlist += glob.glob(os.path.join(os.getenv('PROGRAMFILES'), 'Java', 'jdk1.[6-9].*'))
        _add_java_paths_to_config(pathlist, thisconfig)

    for java_version in ('7', '8', '9'):
        if java_version not in thisconfig['java_paths']:
            continue
        java_home = thisconfig['java_paths'][java_version]
        jarsigner = os.path.join(java_home, 'bin', 'jarsigner')
        if os.path.exists(jarsigner):
            thisconfig['jarsigner'] = jarsigner
            thisconfig['keytool'] = os.path.join(java_home, 'bin', 'keytool')
            break  # Java7 is preferred, so quit if found

    for k in ['ndk_paths', 'java_paths']:
        d = thisconfig[k]
        for k2 in d.copy():
            v = d[k2]
            exp = expand_path(v)
            if exp is not None:
                thisconfig[k][k2] = exp
                thisconfig[k][k2 + '_orig'] = v


def regsub_file(pattern, repl, path):
    # The file is read and written as bytes so that its encoding does not
    # matter; the pattern and replacement are encoded as UTF-8, by far the
    # most common encoding for .java, .gradle and pom.xml files.
    with open(path, 'rb') as f:
        text = f.read()
    text = re.sub(bytes(pattern, 'utf8'), bytes(repl, 'utf8'), text)
    with open(path, 'wb') as f:
        f.write(text)
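
# Illustrative call (assumption, not from the original source):
#     regsub_file(r'versionCode *=? *[0-9]+', 'versionCode 1007', 'build.gradle')
# rewrites every match of the pattern in the file in place.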


def read_config(opts, config_file='config.py'):
    """Read the repository config

    The config is read from config_file, which is in the current
    directory when any of the repo management commands are used.  If
    there is a local metadata file in the git repo, then config.py is
    not required, just use defaults.
    """
    global config, options

    if config is not None:
        return config

    options = opts

    config = {}

    if os.path.isfile(config_file):
        logging.debug(_("Reading '{config_file}'").format(config_file=config_file))
        with io.open(config_file, "rb") as f:
            code = compile(f.read(), config_file, 'exec')
            exec(code, None, config)
    else:
        logging.warning(_("No 'config.py' found, using defaults."))

    for k in ('mirrors', 'install_list', 'uninstall_list', 'serverwebroot', 'servergitroot'):
        if k in config:
            if not type(config[k]) in (str, list, tuple):
                logging.warning(
                    _("'{field}' will be in random order! Use () or [] brackets if order is important!")
                    .format(field=k))

    # smartcardoptions must be a list since it's command line args for Popen
    if 'smartcardoptions' in config:
        config['smartcardoptions'] = config['smartcardoptions'].split(' ')
    elif 'keystore' in config and config['keystore'] == 'NONE':
        # keystore='NONE' means use smartcard, these are required defaults
        config['smartcardoptions'] = ['-storetype', 'PKCS11', '-providerName',
                                      'SunPKCS11-OpenSC', '-providerClass',
                                      'sun.security.pkcs11.SunPKCS11',
                                      '-providerArg', 'opensc-fdroid.cfg']

    if any(k in config for k in ["keystore", "keystorepass", "keypass"]):
        st = os.stat(config_file)
        if st.st_mode & stat.S_IRWXG or st.st_mode & stat.S_IRWXO:
            logging.warning(_("unsafe permissions on '{config_file}' (should be 0600)!")
                            .format(config_file=config_file))

    fill_config_defaults(config)

    for k in ["repo_description", "archive_description"]:
        if k in config:
            config[k] = clean_description(config[k])

    if 'serverwebroot' in config:
        if isinstance(config['serverwebroot'], str):
            roots = [config['serverwebroot']]
        elif all(isinstance(item, str) for item in config['serverwebroot']):
            roots = config['serverwebroot']
        else:
            raise TypeError(_('only accepts strings, lists, and tuples'))
        rootlist = []
        for rootstr in roots:
            # since this is used with rsync, where trailing slashes have
            # meaning, ensure there is always a trailing slash
            if rootstr[-1] != '/':
                rootstr += '/'
            rootlist.append(rootstr.replace('//', '/'))
        config['serverwebroot'] = rootlist

    if 'servergitmirrors' in config:
        if isinstance(config['servergitmirrors'], str):
            roots = [config['servergitmirrors']]
        elif all(isinstance(item, str) for item in config['servergitmirrors']):
            roots = config['servergitmirrors']
        else:
            raise TypeError(_('only accepts strings, lists, and tuples'))
        config['servergitmirrors'] = roots

    return config


def assert_config_keystore(config):
    """Check whether the keystore is configured correctly and raise exception if not."""
    nosigningkey = False
    if 'repo_keyalias' not in config:
        nosigningkey = True
        logging.critical(_("'repo_keyalias' not found in config.py!"))
    if 'keystore' not in config:
        nosigningkey = True
        logging.critical(_("'keystore' not found in config.py!"))
    elif not os.path.exists(config['keystore']):
        nosigningkey = True
        logging.critical("'" + config['keystore'] + "' does not exist!")
    if 'keystorepass' not in config:
        nosigningkey = True
        logging.critical(_("'keystorepass' not found in config.py!"))
    if 'keypass' not in config:
        nosigningkey = True
        logging.critical(_("'keypass' not found in config.py!"))
    if nosigningkey:
        raise FDroidException("This command requires a signing key, "
                              + "you can create one using: fdroid update --create-key")


def find_sdk_tools_cmd(cmd):
    '''find a working path to a tool from the Android SDK'''

    tooldirs = []
    if config is not None and 'sdk_path' in config and os.path.exists(config['sdk_path']):
        # try to find a working path to this command, in all the recent possible paths
        if 'build_tools' in config:
            build_tools = os.path.join(config['sdk_path'], 'build-tools')
            # if 'build_tools' was manually set and exists, check only that one
            configed_build_tools = os.path.join(build_tools, config['build_tools'])
            if os.path.exists(configed_build_tools):
                tooldirs.append(configed_build_tools)
            else:
                # no configed version, so hunt known paths for it
                for f in sorted(os.listdir(build_tools), reverse=True):
                    if os.path.isdir(os.path.join(build_tools, f)):
                        tooldirs.append(os.path.join(build_tools, f))
                tooldirs.append(build_tools)
        sdk_tools = os.path.join(config['sdk_path'], 'tools')
        if os.path.exists(sdk_tools):
            tooldirs.append(sdk_tools)
        sdk_platform_tools = os.path.join(config['sdk_path'], 'platform-tools')
        if os.path.exists(sdk_platform_tools):
            tooldirs.append(sdk_platform_tools)
    tooldirs.append('/usr/bin')
    for d in tooldirs:
        path = os.path.join(d, cmd)
        if os.path.isfile(path):
            if cmd == 'aapt':
                test_aapt_version(path)
            return path
    # did not find the command, exit with error message
    ensure_build_tools_exists(config)


def test_aapt_version(aapt):
    '''Check whether the version of aapt is new enough'''
    output = subprocess.check_output([aapt, 'version'], universal_newlines=True)
    if output is None or output == '':
        logging.error(_("'{path}' failed to execute!").format(path=aapt))
    else:
        m = re.match(r'.*v([0-9]+)\.([0-9]+)[.-]?([0-9.-]*)', output)
        if m:
            major = m.group(1)
            minor = m.group(2)
            bugfix = m.group(3)
            # the Debian package has the version string like "v0.2-23.0.2"
            too_old = False
            if '.' in bugfix:
                if LooseVersion(bugfix) < LooseVersion(MINIMUM_AAPT_VERSION):
                    too_old = True
            elif LooseVersion('.'.join((major, minor, bugfix))) < LooseVersion('0.2.4062713'):
                too_old = True
            if too_old:
                logging.warning(_("'{aapt}' is too old, fdroid requires build-tools-{version} or newer!")
                                .format(aapt=aapt, version=MINIMUM_AAPT_VERSION))
        else:
            logging.warning(_('Unknown version of aapt, might cause problems: ') + output)


def test_sdk_exists(thisconfig):
    if 'sdk_path' not in thisconfig:
        if 'aapt' in thisconfig and os.path.isfile(thisconfig['aapt']):
            test_aapt_version(thisconfig['aapt'])
            return True
        else:
            logging.error(_("'sdk_path' not set in 'config.py'!"))
            return False
    if thisconfig['sdk_path'] == default_config['sdk_path']:
        logging.error(_('No Android SDK found!'))
        logging.error(_('You can use ANDROID_HOME to set the path to your SDK, i.e.:'))
        logging.error('\texport ANDROID_HOME=/opt/android-sdk')
        return False
    if not os.path.exists(thisconfig['sdk_path']):
        logging.critical(_("Android SDK path '{path}' does not exist!")
                         .format(path=thisconfig['sdk_path']))
        return False
    if not os.path.isdir(thisconfig['sdk_path']):
        logging.critical(_("Android SDK path '{path}' is not a directory!")
                         .format(path=thisconfig['sdk_path']))
        return False
    for d in ['build-tools', 'platform-tools', 'tools']:
        if not os.path.isdir(os.path.join(thisconfig['sdk_path'], d)):
            logging.critical(_("Android SDK '{path}' does not have '{dirname}' installed!")
                             .format(path=thisconfig['sdk_path'], dirname=d))
            return False
    return True


def ensure_build_tools_exists(thisconfig):
    if not test_sdk_exists(thisconfig):
        raise FDroidException(_("Android SDK not found!"))
    build_tools = os.path.join(thisconfig['sdk_path'], 'build-tools')
    versioned_build_tools = os.path.join(build_tools, thisconfig['build_tools'])
    if not os.path.isdir(versioned_build_tools):
        raise FDroidException(
            _("Android build-tools path '{path}' does not exist!")
            .format(path=versioned_build_tools))


def get_local_metadata_files():
    '''get any metadata files local to an app's source repo

    This tries to ignore anything that does not count as app metadata,
    including emacs cruft ending in ~ and the .fdroid.key*pass.txt files.
    '''
    return glob.glob('.fdroid.[a-jl-z]*[a-rt-z]')
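
# For illustration (assumption): the glob above matches files like '.fdroid.yml'
# and '.fdroid.txt', but skips '.fdroid.keypass.txt', '.fdroid.keystorepass.txt'
# and editor backups ending in '~'.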


def read_pkg_args(appid_versionCode_pairs, allow_vercodes=False):
    """
    :param appid_versionCode_pairs: arguments in the form of multiple appid:[vc] strings
    :returns: a dictionary with the set of vercodes specified for each package
    """
    vercodes = {}
    if not appid_versionCode_pairs:
        return vercodes

    for p in appid_versionCode_pairs:
        if allow_vercodes and ':' in p:
            package, vercode = p.split(':')
        else:
            package, vercode = p, None
        if package not in vercodes:
            vercodes[package] = [vercode] if vercode else []
            continue
        elif vercode and vercode not in vercodes[package]:
            vercodes[package] += [vercode] if vercode else []

    return vercodes
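
# For illustration (assumption, not from the original source), with allow_vercodes=True:
#     read_pkg_args(['org.adaway:55', 'org.adaway:56', 'com.example.app'], True)
# returns {'org.adaway': ['55', '56'], 'com.example.app': []}.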


def read_app_args(appid_versionCode_pairs, allapps, allow_vercodes=False):
    """Build a list of App instances for processing

    On top of what read_pkg_args does, this returns the whole app
    metadata, but limiting the builds list to the builds matching the
    appid_versionCode_pairs and vercodes specified.  If no
    appid_versionCode_pairs are specified, then all App and Build
    instances are returned.
    """

    vercodes = read_pkg_args(appid_versionCode_pairs, allow_vercodes)

    if not vercodes:
        return allapps

    apps = {}
    for appid, app in allapps.items():
        if appid in vercodes:
            apps[appid] = app

    if len(apps) != len(vercodes):
        for p in vercodes:
            if p not in allapps:
                logging.critical(_("No such package: %s") % p)
        raise FDroidException(_("Found invalid appids in arguments"))
    if not apps:
        raise FDroidException(_("No packages specified"))

    error = False
    for appid, app in apps.items():
        vc = vercodes[appid]
        if not vc:
            continue
        app.builds = [b for b in app.builds if b.versionCode in vc]
        if len(app.builds) != len(vercodes[appid]):
            error = True
            allvcs = [b.versionCode for b in app.builds]
            for v in vercodes[appid]:
                if v not in allvcs:
                    logging.critical(_("No such versionCode {versionCode} for app {appid}")
                                     .format(versionCode=v, appid=appid))

    if error:
        raise FDroidException(_("Found invalid versionCodes for some apps"))

    return apps


def get_extension(filename):
    base, ext = os.path.splitext(filename)
    if not ext:
        return base, ''
    return base, ext.lower()[1:]


def has_extension(filename, ext):
    _ignored, f_ext = get_extension(filename)
    return ext == f_ext


publish_name_regex = re.compile(r"^(.+)_([0-9]+)\.(apk|zip)$")


def clean_description(description):
    'Remove unneeded newlines and spaces from a block of description text'
    returnstring = ''
    # this is split up by paragraph to make removing the newlines easier
    for paragraph in re.split(r'\n\n', description):
        paragraph = re.sub('\r', '', paragraph)
        paragraph = re.sub('\n', ' ', paragraph)
        paragraph = re.sub(' {2,}', ' ', paragraph)
        paragraph = re.sub(r'^\s*(\w)', r'\1', paragraph)
        returnstring += paragraph + '\n\n'
    return returnstring.rstrip('\n')


def publishednameinfo(filename):
    filename = os.path.basename(filename)
    m = publish_name_regex.match(filename)
    try:
        result = (m.group(1), m.group(2))
    except AttributeError:
        raise FDroidException(_("Invalid name for published file: %s") % filename)
    return result
apk_release_filename = re.compile(r'(?P<appid>[a-zA-Z0-9_\.]+)_(?P<vercode>[0-9]+)\.apk')
apk_release_filename_with_sigfp = re.compile(r'(?P<appid>[a-zA-Z0-9_\.]+)_(?P<vercode>[0-9]+)_(?P<sigfp>[0-9a-f]{7})\.apk')


def apk_parse_release_filename(apkname):
    """Parses the name of an APK file according to the F-Droid APK naming
    scheme and returns the tokens.

    WARNING: Returned values don't necessarily represent the APK's actual
    properties, they are just parsed from the file name.

    :returns: A triplet containing (appid, versionCode, signer), where appid
        should be the package name, versionCode should be the integer
        representation of the APK's version and signer should be the first 7 hex
        digits of the sha256 signing key fingerprint which was used to sign this
        APK.
    """
    m = apk_release_filename_with_sigfp.match(apkname)
    if m:
        return m.group('appid'), m.group('vercode'), m.group('sigfp')
    m = apk_release_filename.match(apkname)
    if m:
        return m.group('appid'), m.group('vercode'), None
    return None, None, None
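
# For illustration (assumption, not from the original source):
#     apk_parse_release_filename('com.example.app_123_0123abc.apk')
# returns ('com.example.app', '123', '0123abc'); a name without the signer
# suffix returns ('com.example.app', '123', None).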


def get_release_filename(app, build):
    if build.output:
        return "%s_%s.%s" % (app.id, build.versionCode, get_file_extension(build.output))
    else:
        return "%s_%s.apk" % (app.id, build.versionCode)


def get_toolsversion_logname(app, build):
    return "%s_%s_toolsversion.log" % (app.id, build.versionCode)


def getsrcname(app, build):
    return "%s_%s_src.tar.gz" % (app.id, build.versionCode)


def getappname(app):
    if app.Name:
        return app.Name
    if app.AutoName:
        return app.AutoName
    return app.id


def getcvname(app):
    return '%s (%s)' % (app.CurrentVersion, app.CurrentVersionCode)


def get_build_dir(app):
    '''get the dir that this app will be built in'''

    if app.RepoType == 'srclib':
        return os.path.join('build', 'srclib', app.Repo)

    return os.path.join('build', app.id)


def setup_vcs(app):
    '''checkout code from VCS and return instance of vcs and the build dir'''
    build_dir = get_build_dir(app)

    # Set up vcs interface and make sure we have the latest code...
    logging.debug("Getting {0} vcs interface for {1}"
                  .format(app.RepoType, app.Repo))
    if app.RepoType == 'git' and os.path.exists('.fdroid.yml'):
        remote = os.getcwd()
    else:
        remote = app.Repo
    vcs = getvcs(app.RepoType, remote, build_dir)

    return vcs, build_dir


def getvcs(vcstype, remote, local):
    if vcstype == 'git':
        return vcs_git(remote, local)
    if vcstype == 'git-svn':
        return vcs_gitsvn(remote, local)
    if vcstype == 'hg':
        return vcs_hg(remote, local)
    if vcstype == 'bzr':
        return vcs_bzr(remote, local)
    if vcstype == 'srclib':
        if local != os.path.join('build', 'srclib', remote):
            raise VCSException("Error: srclib paths are hard-coded!")
        return getsrclib(remote, os.path.join('build', 'srclib'), raw=True)
    if vcstype == 'svn':
        raise VCSException("Deprecated vcs type 'svn' - please use 'git-svn' instead")
    raise VCSException("Invalid vcs type " + vcstype)


def getsrclibvcs(name):
    if name not in fdroidserver.metadata.srclibs:
        raise VCSException("Missing srclib " + name)
    return fdroidserver.metadata.srclibs[name]['Repo Type']


class vcs:

    def __init__(self, remote, local):

        # svn, git-svn and bzr may require auth
        self.username = None
        if self.repotype() in ('git-svn', 'bzr'):
            if '@' in remote:
                if self.repotype() == 'git-svn':
                    raise VCSException("Authentication is not supported for git-svn")
                self.username, remote = remote.split('@')
                if ':' not in self.username:
                    raise VCSException(_("Password required with username"))
                self.username, self.password = self.username.split(':')

        self.remote = remote
        self.local = local
        self.clone_failed = False
        self.refreshed = False
        self.srclib = None

    def repotype(self):
        return None

    def clientversion(self):
        versionstr = FDroidPopen(self.clientversioncmd()).output
        return versionstr[0:versionstr.find('\n')]

    def clientversioncmd(self):
        return None

    def gotorevision(self, rev, refresh=True):
        """Take the local repository to a clean version of the given
        revision, which is specified in the VCS's native
        format.  Beforehand, the repository can be dirty, or even
        non-existent.  If the repository does already exist locally, it
        will be updated from the origin, but only once in the lifetime
        of the vcs object.  None is acceptable for 'rev' if you know
        you are cloning a clean copy of the repo - otherwise it must
        specify a valid revision.
        """

        if self.clone_failed:
            raise VCSException(_("Downloading the repository already failed once, not trying again."))

        # The .fdroidvcs-id file for a repo tells us what VCS type
        # and remote that directory was created from, allowing us to drop it
        # automatically if either of those things changes.
        fdpath = os.path.join(self.local, '..',
                              '.fdroidvcs-' + os.path.basename(self.local))
        fdpath = os.path.normpath(fdpath)
        cdata = self.repotype() + ' ' + self.remote
        writeback = True
        deleterepo = False
        if os.path.exists(self.local):
            if os.path.exists(fdpath):
                with open(fdpath, 'r') as f:
                    fsdata = f.read().strip()
                if fsdata == cdata:
                    writeback = False
                else:
                    deleterepo = True
                    logging.info("Repository details for %s changed - deleting" % (
                        self.local))
            else:
                deleterepo = True
                logging.info("Repository details for %s missing - deleting" % (
                    self.local))
        if deleterepo:
            shutil.rmtree(self.local)

        exc = None
        if not refresh:
            self.refreshed = True

        try:
            self.gotorevisionx(rev)
        except FDroidException as e:
            exc = e

        # If necessary, write the .fdroidvcs file.
        if writeback and not self.clone_failed:
            os.makedirs(os.path.dirname(fdpath), exist_ok=True)
            with open(fdpath, 'w+') as f:
                f.write(cdata)

        if exc is not None:
            raise exc

    def gotorevisionx(self, rev):  # pylint: disable=unused-argument
        """Derived classes need to implement this.

        It's called once basic checking has been performed.
        """
        raise VCSException("This VCS type doesn't define gotorevisionx")

    # Initialise and update submodules
    def initsubmodules(self):
        raise VCSException('Submodules not supported for this vcs type')

    # Get a list of all known tags
    def gettags(self):
        if not self._gettags:
            raise VCSException('gettags not supported for this vcs type')
        rtags = []
        for tag in self._gettags():
            if re.match('[-A-Za-z0-9_. /]+$', tag):
                rtags.append(tag)
        return rtags

    def latesttags(self):
        """Get a list of all the known tags, sorted from newest to oldest"""
        raise VCSException('latesttags not supported for this vcs type')

    def getref(self):
        """Get current commit reference (hash, revision, etc)"""
        raise VCSException('getref not supported for this vcs type')

    def getsrclib(self):
        """Returns the srclib (name, path) used in setting up the current revision, or None."""
        return self.srclib


class vcs_git(vcs):

    def repotype(self):
        return 'git'

    def clientversioncmd(self):
        return ['git', '--version']

    def git(self, args, envs=dict(), cwd=None, output=True):
        '''Prevent git fetch/clone/submodule from hanging at the username/password prompt

        While fetch/pull/clone respect the command line option flags,
        it seems that submodule commands do not.  They do seem to
        follow whatever is in env vars, if the version of git is new
        enough.  So we just throw the kitchen sink at it to see what
        sticks.

        Also, because of CVE-2017-1000117, block all SSH URLs.
        '''
        # supported in git >= 2.3
        git_config = [
            '-c', 'core.sshCommand=false',
            '-c', 'url.https://.insteadOf=ssh://',
        ]
        for domain in ('bitbucket.org', 'github.com', 'gitlab.com'):
            git_config.append('-c')
            git_config.append('url.https://u:p@' + domain + '/.insteadOf=git@' + domain + ':')
            git_config.append('-c')
            git_config.append('url.https://u:p@' + domain + '.insteadOf=git://' + domain)
            git_config.append('-c')
            git_config.append('url.https://u:p@' + domain + '.insteadOf=https://' + domain)
        envs.update({
            'GIT_TERMINAL_PROMPT': '0',
            'GIT_SSH': 'false',  # for git < 2.3
        })
        return FDroidPopen(['git', ] + git_config + args,
                           envs=envs, cwd=cwd, output=output)
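
    # For illustration (assumption, not from the original source): with the
    # insteadOf rules above, a remote like 'git@github.com:user/repo.git' or
    # 'git://github.com/user/repo' is fetched via 'https://u:p@github.com/...',
    # so a stray SSH or git:// URL cannot hang waiting for credentials.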

    def checkrepo(self):
        """If the local directory exists, but is somehow not a git repository,
        git will traverse up the directory tree until it finds one
        that is (i.e. fdroidserver) and then we'll proceed to destroy
        it!  This is called as a safety check.
        """

        p = FDroidPopen(['git', 'rev-parse', '--show-toplevel'], cwd=self.local, output=False)
        result = p.output.rstrip()
        if not result.endswith(self.local):
            raise VCSException('Repository mismatch')

    def gotorevisionx(self, rev):
        if not os.path.exists(self.local):
            # Brand new checkout
            p = self.git(['clone', '--', self.remote, self.local])
            if p.returncode != 0:
                self.clone_failed = True
                raise VCSException("Git clone failed", p.output)
            self.checkrepo()
        else:
            self.checkrepo()
            # Discard any working tree changes
            p = FDroidPopen(['git', 'submodule', 'foreach', '--recursive',
                             'git', 'reset', '--hard'], cwd=self.local, output=False)
            if p.returncode != 0:
                raise VCSException(_("Git reset failed"), p.output)
            # Remove untracked files now, in case they're tracked in the target
            # revision (it happens!)
            p = FDroidPopen(['git', 'submodule', 'foreach', '--recursive',
                             'git', 'clean', '-dffx'], cwd=self.local, output=False)
            if p.returncode != 0:
                raise VCSException(_("Git clean failed"), p.output)
            if not self.refreshed:
                # Get latest commits and tags from remote
                p = self.git(['fetch', 'origin'], cwd=self.local)
                if p.returncode != 0:
                    raise VCSException(_("Git fetch failed"), p.output)
                p = self.git(['fetch', '--prune', '--tags', 'origin'], output=False, cwd=self.local)
                if p.returncode != 0:
                    raise VCSException(_("Git fetch failed"), p.output)
                # Recreate origin/HEAD as git clone would do it, in case it disappeared
                p = FDroidPopen(['git', 'remote', 'set-head', 'origin', '--auto'], cwd=self.local, output=False)
                if p.returncode != 0:
                    lines = p.output.splitlines()
                    if 'Multiple remote HEAD branches' not in lines[0]:
                        raise VCSException(_("Git remote set-head failed"), p.output)
                    branch = lines[1].split(' ')[-1]
                    p2 = FDroidPopen(['git', 'remote', 'set-head', 'origin', '--', branch],
                                     cwd=self.local, output=False)
                    if p2.returncode != 0:
                        raise VCSException(_("Git remote set-head failed"), p.output + '\n' + p2.output)
                self.refreshed = True
        # origin/HEAD is the HEAD of the remote, e.g. the "default branch" on
        # a github repo. Most of the time this is the same as origin/master.
        rev = rev or 'origin/HEAD'
        p = FDroidPopen(['git', 'checkout', '-f', rev], cwd=self.local, output=False)
        if p.returncode != 0:
            raise VCSException(_("Git checkout of '%s' failed") % rev, p.output)
        # Get rid of any uncontrolled files left behind
        p = FDroidPopen(['git', 'clean', '-dffx'], cwd=self.local, output=False)
        if p.returncode != 0:
            raise VCSException(_("Git clean failed"), p.output)

    def initsubmodules(self):
        self.checkrepo()
        submfile = os.path.join(self.local, '.gitmodules')
        if not os.path.isfile(submfile):
            raise NoSubmodulesException(_("No git submodules available"))

        # fix submodules not accessible without an account and public key auth
        with open(submfile, 'r') as f:
            lines = f.readlines()
        with open(submfile, 'w') as f:
            for line in lines:
                for domain in ('bitbucket.org', 'github.com', 'gitlab.com'):
                    line = re.sub('git@' + domain + ':', 'https://u:p@' + domain + '/', line)
                f.write(line)

        p = FDroidPopen(['git', 'submodule', 'sync'], cwd=self.local, output=False)
        if p.returncode != 0:
            raise VCSException(_("Git submodule sync failed"), p.output)
        p = self.git(['submodule', 'update', '--init', '--force', '--recursive'], cwd=self.local)
        if p.returncode != 0:
            raise VCSException(_("Git submodule update failed"), p.output)

    def _gettags(self):
        self.checkrepo()
        p = FDroidPopen(['git', 'tag'], cwd=self.local, output=False)
        return p.output.splitlines()

    tag_format = re.compile(r'tag: ([^),]*)')

    def latesttags(self):
        self.checkrepo()
        p = FDroidPopen(['git', 'log', '--tags',
                         '--simplify-by-decoration', '--pretty=format:%d'],
                        cwd=self.local, output=False)
        tags = []
        for line in p.output.splitlines():
            for tag in self.tag_format.findall(line):
                tags.append(tag)
        return tags


class vcs_gitsvn(vcs):

    def repotype(self):
        return 'git-svn'

    def clientversioncmd(self):
        return ['git', 'svn', '--version']

    def checkrepo(self):
        """If the local directory exists, but is somehow not a git repository,
        git will traverse up the directory tree until it finds one that
        is (i.e. fdroidserver) and then we'll proceed to destroy it!
        This is called as a safety check.
        """

        p = FDroidPopen(['git', 'rev-parse', '--show-toplevel'], cwd=self.local, output=False)
        result = p.output.rstrip()
        if not result.endswith(self.local):
            raise VCSException('Repository mismatch')

    def git(self, args, envs=dict(), cwd=None, output=True):
        '''Prevent git fetch/clone/submodule from hanging at the username/password prompt
        '''
        # CVE-2017-1000117 block all SSH URLs (supported in git >= 2.3)
        config = ['-c', 'core.sshCommand=false']
        envs.update({
            'GIT_TERMINAL_PROMPT': '0',
            'GIT_SSH': 'false',  # for git < 2.3
            'SVN_SSH': 'false',
        })
        return FDroidPopen(['git', ] + config + args,
                           envs=envs, cwd=cwd, output=output)

    def gotorevisionx(self, rev):
        if not os.path.exists(self.local):
            # Brand new checkout
            gitsvn_args = ['svn', 'clone']
            if ';' in self.remote:
                remote_split = self.remote.split(';')
                for i in remote_split[1:]:
                    if i.startswith('trunk='):
                        gitsvn_args.extend(['-T', i[6:]])
                    elif i.startswith('tags='):
                        gitsvn_args.extend(['-t', i[5:]])
                    elif i.startswith('branches='):
                        gitsvn_args.extend(['-b', i[9:]])
                gitsvn_args.extend([remote_split[0], self.local])
                p = self.git(gitsvn_args, output=False)
                if p.returncode != 0:
                    self.clone_failed = True
                    raise VCSException("Git svn clone failed", p.output)
            else:
                gitsvn_args.extend([self.remote, self.local])
                p = self.git(gitsvn_args, output=False)
                if p.returncode != 0:
                    self.clone_failed = True
                    raise VCSException("Git svn clone failed", p.output)
            self.checkrepo()
        else:
            self.checkrepo()
            # Discard any working tree changes
            p = self.git(['reset', '--hard'], cwd=self.local, output=False)
            if p.returncode != 0:
                raise VCSException("Git reset failed", p.output)
            # Remove untracked files now, in case they're tracked in the target
            # revision (it happens!)
            p = self.git(['clean', '-dffx'], cwd=self.local, output=False)
            if p.returncode != 0:
                raise VCSException("Git clean failed", p.output)
            if not self.refreshed:
                # Get new commits, branches and tags from repo
                p = self.git(['svn', 'fetch'], cwd=self.local, output=False)
                if p.returncode != 0:
                    raise VCSException("Git svn fetch failed")
                p = self.git(['svn', 'rebase'], cwd=self.local, output=False)
                if p.returncode != 0:
                    raise VCSException("Git svn rebase failed", p.output)
                self.refreshed = True

        rev = rev or 'master'
        if rev:
            nospaces_rev = rev.replace(' ', '%20')
            # Try finding a svn tag
            for treeish in ['origin/', '']:
                p = self.git(['checkout', treeish + 'tags/' + nospaces_rev], cwd=self.local, output=False)
                if p.returncode == 0:
                    break
            if p.returncode != 0:
                # No tag found, normal svn rev translation
                # Translate svn rev into git format
                rev_split = rev.split('/')

                p = None
                for treeish in ['origin/', '']:
                    if len(rev_split) > 1:
                        treeish += rev_split[0]
                        svn_rev = rev_split[1]
                    else:
                        # if no branch is specified, then assume trunk (i.e. 'master' branch):
                        treeish += 'master'
                        svn_rev = rev
                    svn_rev = svn_rev if svn_rev[0] == 'r' else 'r' + svn_rev

                    p = self.git(['svn', 'find-rev', '--before', svn_rev, treeish], cwd=self.local, output=False)
                    git_rev = p.output.rstrip()

                    if p.returncode == 0 and git_rev:
                        break

                if p.returncode != 0 or not git_rev:
                    # Try a plain git checkout as a last resort
                    p = self.git(['checkout', rev], cwd=self.local, output=False)
                    if p.returncode != 0:
                        raise VCSException("No git treeish found and direct git checkout of '%s' failed" % rev, p.output)
                else:
                    # Check out the git rev equivalent to the svn rev
                    p = self.git(['checkout', git_rev], cwd=self.local, output=False)
                    if p.returncode != 0:
                        raise VCSException(_("Git checkout of '%s' failed") % rev, p.output)

        # Get rid of any uncontrolled files left behind
        p = self.git(['clean', '-dffx'], cwd=self.local, output=False)
        if p.returncode != 0:
            raise VCSException(_("Git clean failed"), p.output)

    def _gettags(self):
        self.checkrepo()
        for treeish in ['origin/', '']:
            d = os.path.join(self.local, '.git', 'svn', 'refs', 'remotes', treeish, 'tags')
            if os.path.isdir(d):
                return os.listdir(d)

    def getref(self):
        self.checkrepo()
        p = FDroidPopen(['git', 'svn', 'find-rev', 'HEAD'], cwd=self.local, output=False)
        if p.returncode != 0:
            return None
        return p.output.strip()
class vcs_hg ( vcs ) :
2012-08-13 18:59:03 +02:00
def repotype ( self ) :
return ' hg '
2017-10-06 08:37:47 +02:00
def clientversioncmd ( self ) :
return [ ' hg ' , ' --version ' ]
2012-08-13 18:59:03 +02:00
def gotorevisionx ( self , rev ) :
2012-01-23 15:15:40 +01:00
if not os . path . exists ( self . local ) :
2018-01-23 22:42:32 +01:00
p = FDroidPopen ( [ ' hg ' , ' clone ' , ' --ssh ' , ' false ' , ' -- ' , self . remote , self . local ] ,
output = False )
2014-02-17 13:25:55 +01:00
if p . returncode != 0 :
2014-07-02 17:21:45 +02:00
self . clone_failed = True
2014-07-02 15:30:05 +02:00
raise VCSException ( " Hg clone failed " , p . output )
2011-08-07 17:14:54 +02:00
else :
2015-01-26 19:14:29 +01:00
p = FDroidPopen ( [ ' hg ' , ' status ' , ' -uS ' ] , cwd = self . local , output = False )
2014-02-17 13:25:55 +01:00
if p . returncode != 0 :
2015-01-26 19:14:29 +01:00
raise VCSException ( " Hg status failed " , p . output )
for line in p . output . splitlines ( ) :
if not line . startswith ( ' ? ' ) :
raise VCSException ( " Unexpected output from hg status -uS: " + line )
2018-01-23 22:42:32 +01:00
FDroidPopen ( [ ' rm ' , ' -rf ' , ' -- ' , line [ 2 : ] ] , cwd = self . local , output = False )
2012-01-23 15:15:40 +01:00
if not self . refreshed :
2017-12-05 16:49:28 +01:00
p = FDroidPopen ( [ ' hg ' , ' pull ' , ' --ssh ' , ' false ' ] , cwd = self . local , output = False )
2014-02-17 13:25:55 +01:00
if p . returncode != 0 :
2014-07-02 15:30:05 +02:00
raise VCSException ( " Hg pull failed " , p . output )
2012-01-23 15:15:40 +01:00
self . refreshed = True
2013-09-15 23:20:27 +02:00
2014-07-03 17:35:28 +02:00
rev = rev or ' default '
2013-11-01 19:12:22 +01:00
if not rev :
return
2018-01-23 22:42:32 +01:00
p = FDroidPopen ( [ ' hg ' , ' update ' , ' -C ' , ' -- ' , rev ] , cwd = self . local , output = False )
2014-02-17 13:25:55 +01:00
if p . returncode != 0 :
2014-07-02 15:30:05 +02:00
raise VCSException ( " Hg checkout of ' %s ' failed " % rev , p . output )
2014-12-14 15:32:20 +01:00
p = FDroidPopen ( [ ' hg ' , ' purge ' , ' --all ' ] , cwd = self . local , output = False )
2014-01-15 16:40:27 +01:00
# Also delete untracked files, we have to enable purge extension for that:
2014-07-01 18:04:41 +02:00
if " ' purge ' is provided by the following extension " in p . output :
2014-07-18 12:21:52 +02:00
with open ( os . path . join ( self . local , ' .hg ' , ' hgrc ' ) , " a " ) as myfile :
2014-01-20 00:33:31 +01:00
myfile . write ( " \n [extensions] \n hgext.purge= \n " )
2014-12-14 15:32:20 +01:00
p = FDroidPopen ( [ ' hg ' , ' purge ' , ' --all ' ] , cwd = self . local , output = False )
2014-02-17 13:25:55 +01:00
if p . returncode != 0 :
2014-07-02 15:30:05 +02:00
raise VCSException ( " HG purge failed " , p . output )
2014-01-20 16:14:00 +01:00
elif p . returncode != 0 :
2014-07-02 15:30:05 +02:00
raise VCSException ( " HG purge failed " , p . output )
2011-08-07 17:14:54 +02:00
2015-01-10 16:15:01 +01:00
def _gettags ( self ) :
2014-12-14 15:32:20 +01:00
p = FDroidPopen ( [ ' hg ' , ' tags ' , ' -q ' ] , cwd = self . local , output = False )
2014-07-01 18:04:41 +02:00
return p . output . splitlines ( ) [ 1 : ]
2013-09-06 20:33:47 +02:00
2013-03-27 00:25:41 +01:00
2011-08-07 17:14:54 +02:00
class vcs_bzr ( vcs ) :
2012-08-13 18:59:03 +02:00
def repotype ( self ) :
return ' bzr '
2017-10-06 08:37:47 +02:00
def clientversioncmd ( self ) :
return [ ' bzr ' , ' --version ' ]
2017-12-04 17:49:59 +01:00
def bzr ( self , args , envs = dict ( ) , cwd = None , output = True ) :
''' Prevent bzr from ever using SSH to avoid security vulns '''
envs . update ( {
' BZR_SSH ' : ' false ' ,
} )
return FDroidPopen ( [ ' bzr ' , ] + args , envs = envs , cwd = cwd , output = output )
2012-08-13 18:59:03 +02:00
def gotorevisionx ( self , rev ) :
2012-01-23 15:15:40 +01:00
if not os . path . exists ( self . local ) :
2017-12-04 17:49:59 +01:00
p = self . bzr ( [ ' branch ' , self . remote , self . local ] , output = False )
2014-02-17 13:25:55 +01:00
if p . returncode != 0 :
2014-07-02 17:21:45 +02:00
self . clone_failed = True
2014-07-02 15:30:05 +02:00
raise VCSException ( " Bzr branch failed " , p . output )
2011-08-07 17:14:54 +02:00
else :
2017-12-04 17:49:59 +01:00
p = self . bzr ( [ ' clean-tree ' , ' --force ' , ' --unknown ' , ' --ignored ' ] , cwd = self . local , output = False )
2014-02-17 13:25:55 +01:00
if p . returncode != 0 :
2014-07-02 15:30:05 +02:00
raise VCSException ( " Bzr revert failed " , p . output )
2012-01-23 15:15:40 +01:00
if not self . refreshed :
2017-12-04 17:49:59 +01:00
p = self . bzr ( [ ' pull ' ] , cwd = self . local , output = False )
2014-02-17 13:25:55 +01:00
if p . returncode != 0 :
2014-07-02 15:30:05 +02:00
raise VCSException ( " Bzr update failed " , p . output )
2012-01-23 15:15:40 +01:00
self . refreshed = True
2013-09-15 23:20:27 +02:00
revargs = list ( [ ' -r ' , rev ] if rev else [ ] )
2017-12-04 17:49:59 +01:00
p = self . bzr ( [ ' revert ' ] + revargs , cwd = self . local , output = False )
2014-02-17 13:25:55 +01:00
if p . returncode != 0 :
2014-07-02 15:30:05 +02:00
raise VCSException ( " Bzr revert of ' %s ' failed " % rev , p . output )
2011-08-07 17:14:54 +02:00
2015-01-10 16:15:01 +01:00
def _gettags ( self ) :
2017-12-04 17:49:59 +01:00
p = self . bzr ( [ ' tags ' ] , cwd = self . local , output = False )
2013-10-30 21:54:09 +01:00
return [ tag . split ( ' ' ) [ 0 ] . strip ( ) for tag in
2014-07-01 18:04:41 +02:00
p . output . splitlines ( ) ]
2013-10-30 21:54:09 +01:00
2014-05-02 05:39:33 +02:00
2015-06-04 15:56:20 +02:00
def unescape_string ( string ) :
2015-10-01 01:12:45 +02:00
if len ( string ) < 2 :
return string
2015-06-04 15:56:20 +02:00
if string [ 0 ] == ' " ' and string [ - 1 ] == ' " ' :
return string [ 1 : - 1 ]
return string . replace ( " \\ ' " , " ' " )
2014-02-23 20:09:42 +01:00
def retrieve_string ( app_dir , string , xmlfiles = None ) :
2014-03-16 09:43:54 +01:00
2015-10-01 01:12:45 +02:00
if not string . startswith ( ' @string/ ' ) :
return unescape_string ( string )
2014-02-23 20:09:42 +01:00
if xmlfiles is None :
xmlfiles = [ ]
2015-06-03 15:42:45 +02:00
for res_dir in [
os . path . join ( app_dir , ' res ' ) ,
2015-06-03 15:43:55 +02:00
os . path . join ( app_dir , ' src ' , ' main ' , ' res ' ) ,
2015-06-03 15:42:45 +02:00
] :
2017-09-14 08:44:43 +02:00
for root , dirs , files in os . walk ( res_dir ) :
if os . path . basename ( root ) == ' values ' :
xmlfiles + = [ os . path . join ( root , x ) for x in files if x . endswith ( ' .xml ' ) ]
2014-02-23 20:09:42 +01:00
2015-06-03 15:42:45 +02:00
name = string [ len ( ' @string/ ' ) : ]
2015-10-01 01:12:45 +02:00
def element_content ( element ) :
if element . text is None :
return " "
2015-10-26 00:28:29 +01:00
s = XMLElementTree . tostring ( element , encoding = ' utf-8 ' , method = ' text ' )
2016-01-04 18:24:58 +01:00
return s . decode ( ' utf-8 ' ) . strip ( )
2015-10-01 01:12:45 +02:00
2015-06-03 15:42:45 +02:00
for path in xmlfiles :
if not os . path . isfile ( path ) :
continue
xml = parse_xml ( path )
2015-06-03 18:05:17 +02:00
element = xml . find ( ' string[@name= " ' + name + ' " ] ' )
2015-10-01 01:12:45 +02:00
if element is not None :
content = element_content ( element )
return retrieve_string ( app_dir , content , xmlfiles )
2013-12-02 15:09:59 +01:00
2015-06-03 15:42:45 +02:00
return ' '
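# Illustrative sketch of how this resolves string resources (the app_dir and
# resource names below are hypothetical, not taken from any real repo):
#     retrieve_string('build/com.example.app', '@string/app_name')
# walks res/values/*.xml and src/main/res/values/*.xml looking for
# <string name="app_name">...</string>, recursing if that value is itself an
# @string/ reference; a plain literal such as '"My App"' is simply unescaped
# and returned unchanged.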
2013-03-27 00:25:41 +01:00
2014-05-02 05:39:33 +02:00
2015-06-18 17:54:56 +02:00
def retrieve_string_singleline ( app_dir , string , xmlfiles = None ) :
return retrieve_string ( app_dir , string , xmlfiles ) . replace ( ' \n ' , ' ' ) . strip ( )
2014-09-13 13:01:08 +02:00
def manifest_paths ( app_dir , flavours ) :
2016-11-07 21:27:21 +01:00
''' Return list of existing files that will be used to find the highest vercode '''
2013-08-03 16:44:14 +02:00
2014-05-06 19:50:52 +02:00
possible_manifests = \
[ os . path . join ( app_dir , ' AndroidManifest.xml ' ) ,
os . path . join ( app_dir , ' src ' , ' main ' , ' AndroidManifest.xml ' ) ,
os . path . join ( app_dir , ' src ' , ' AndroidManifest.xml ' ) ,
os . path . join ( app_dir , ' build.gradle ' ) ]
2013-08-09 17:15:27 +02:00
2014-09-13 13:04:24 +02:00
for flavour in flavours :
2014-09-15 17:03:45 +02:00
if flavour == ' yes ' :
continue
2014-09-13 13:04:24 +02:00
possible_manifests . append (
os . path . join ( app_dir , ' src ' , flavour , ' AndroidManifest.xml ' ) )
2013-12-30 17:04:16 +01:00
2013-08-13 12:02:48 +02:00
return [ path for path in possible_manifests if os . path . isfile ( path ) ]
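# For example (hypothetical paths, purely illustrative): with
# app_dir='build/com.example.app' and flavours=['free'], the candidates are
#     build/com.example.app/AndroidManifest.xml
#     build/com.example.app/src/main/AndroidManifest.xml
#     build/com.example.app/src/AndroidManifest.xml
#     build/com.example.app/build.gradle
#     build/com.example.app/src/free/AndroidManifest.xml
# and only the ones that actually exist on disk are returned.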
2013-08-03 16:44:14 +02:00
2014-05-02 05:39:33 +02:00
2014-09-13 13:01:08 +02:00
def fetch_real_name ( app_dir , flavours ) :
2016-11-07 21:27:21 +01:00
''' Retrieve the application label (the user-visible app name). Returns the name, or None if not found. '''
2015-06-03 14:35:50 +02:00
for path in manifest_paths ( app_dir , flavours ) :
if not has_extension ( path , ' xml ' ) or not os . path . isfile ( path ) :
2013-08-13 15:58:43 +02:00
continue
2015-06-03 14:35:50 +02:00
logging . debug ( " fetch_real_name: Checking manifest at " + path )
2015-06-03 15:42:45 +02:00
xml = parse_xml ( path )
2015-06-03 14:35:50 +02:00
app = xml . find ( ' application ' )
2015-11-16 16:21:19 +01:00
if app is None :
continue
2015-06-03 18:30:31 +02:00
if " { http://schemas.android.com/apk/res/android}label " not in app . attrib :
continue
2016-01-04 18:24:58 +01:00
label = app . attrib [ " { http://schemas.android.com/apk/res/android}label " ]
2015-06-18 17:54:56 +02:00
result = retrieve_string_singleline ( app_dir , label )
2015-06-03 14:35:50 +02:00
if result :
result = result . strip ( )
return result
2014-03-16 23:12:37 +01:00
return None
2013-03-27 00:25:41 +01:00
2014-05-02 05:39:33 +02:00
2014-02-10 23:03:02 +01:00
def get_library_references ( root_dir ) :
libraries = [ ]
2013-11-04 17:03:43 +01:00
proppath = os . path . join ( root_dir , ' project.properties ' )
if not os . path . isfile ( proppath ) :
2014-02-10 23:03:02 +01:00
return libraries
2016-06-07 13:26:40 +02:00
with open ( proppath , ' r ' , encoding = ' iso-8859-1 ' ) as f :
2016-01-04 17:59:47 +01:00
for line in f :
if not line . startswith ( ' android.library.reference. ' ) :
continue
path = line . split ( ' = ' ) [ 1 ] . strip ( )
relpath = os . path . join ( root_dir , path )
if not os . path . isdir ( relpath ) :
continue
logging . debug ( " Found subproject at %s " % path )
libraries . append ( path )
2014-02-10 23:03:02 +01:00
return libraries
2014-05-02 05:39:33 +02:00
2014-02-10 23:03:02 +01:00
def ant_subprojects ( root_dir ) :
subprojects = get_library_references ( root_dir )
for subpath in subprojects :
subrelpath = os . path . join ( root_dir , subpath )
for p in get_library_references ( subrelpath ) :
2014-05-02 04:16:32 +02:00
relp = os . path . normpath ( os . path . join ( subpath , p ) )
2014-02-10 23:03:02 +01:00
if relp not in subprojects :
subprojects . insert ( 0 , relp )
2013-11-04 17:03:43 +01:00
return subprojects
2014-05-02 05:39:33 +02:00
2014-02-11 17:56:36 +01:00
def remove_debuggable_flags ( root_dir ) :
# Remove forced debuggable flags
2014-07-05 15:25:39 +02:00
logging . debug ( " Removing debuggable flags from %s " % root_dir )
2014-02-11 17:56:36 +01:00
for root , dirs , files in os . walk ( root_dir ) :
2017-10-24 15:19:38 +02:00
if ' AndroidManifest.xml ' in files and os . path . isfile ( os . path . join ( root , ' AndroidManifest.xml ' ) ) :
2015-07-30 22:13:12 +02:00
regsub_file ( r ' android:debuggable= " [^ " ]* " ' ,
' ' ,
os . path . join ( root , ' AndroidManifest.xml ' ) )
2014-02-11 17:56:36 +01:00
2014-05-02 05:39:33 +02:00
2017-02-27 14:00:42 +01:00
vcsearch_g = re . compile ( r ''' .*[Vv]ersionCode[ =]+[ " ' ]*([0-9]+)[ " ' ]* ''' ) . search
vnsearch_g = re . compile ( r ' .*[Vv]ersionName *=* *([ " \' ])((?:(?=( \\ ?)) \ 3.)*?) \ 1.* ' ) . search
2015-10-25 19:10:49 +01:00
psearch_g = re . compile ( r ' .*(packageName|applicationId) *=* *[ " \' ]([^ " ]+)[ " \' ].* ' ) . search
2015-10-30 19:03:53 +01:00
def app_matches_packagename ( app , package ) :
if not package :
return False
2015-11-28 13:09:47 +01:00
appid = app . UpdateCheckName or app . id
2015-11-19 00:33:30 +01:00
if appid is None or appid == " Ignore " :
2015-10-30 19:03:53 +01:00
return True
return appid == package
def parse_androidmanifests ( paths , app ) :
2016-11-07 21:27:21 +01:00
"""
Extract some information from the AndroidManifest . xml at the given path .
Returns ( version , vercode , package ) , any or all of which might be None .
All values returned are strings .
"""
2013-03-27 00:25:41 +01:00
2015-11-28 13:09:47 +01:00
ignoreversions = app . UpdateCheckIgnore
2015-10-25 19:10:49 +01:00
ignoresearch = re . compile ( ignoreversions ) . search if ignoreversions else None
2013-08-14 15:07:26 +02:00
if not paths :
return ( None , None , None )
2013-08-13 12:02:48 +02:00
max_version = None
max_vercode = None
max_package = None
for path in paths :
2015-04-03 00:05:22 +02:00
if not os . path . isfile ( path ) :
continue
2017-09-15 23:20:29 +02:00
logging . debug ( _ ( " Parsing manifest at ' {path} ' " ) . format ( path = path ) )
2014-06-20 11:10:52 +02:00
version = None
vercode = None
2015-09-08 22:12:28 +02:00
package = None
2013-08-13 12:02:48 +02:00
2017-12-23 14:36:38 +01:00
flavour = None
if app . builds and ' gradle ' in app . builds [ - 1 ] and app . builds [ - 1 ] . gradle :
flavour = app . builds [ - 1 ] . gradle [ - 1 ]
2017-02-27 13:38:59 +01:00
if has_extension ( path , ' gradle ' ) :
2016-01-04 17:59:47 +01:00
with open ( path , ' r ' ) as f :
2017-12-23 14:36:38 +01:00
inside_flavour_group = 0
inside_required_flavour = 0
2017-12-23 12:43:16 +01:00
for line in f :
if gradle_comment . match ( line ) :
continue
2017-12-23 14:36:38 +01:00
if inside_flavour_group > 0 :
if inside_required_flavour > 0 :
matches = psearch_g ( line )
if matches :
s = matches . group ( 2 )
if app_matches_packagename ( app , s ) :
package = s
matches = vnsearch_g ( line )
if matches :
version = matches . group ( 2 )
matches = vcsearch_g ( line )
if matches :
vercode = matches . group ( 1 )
if ' { ' in line :
inside_required_flavour + = 1
if ' } ' in line :
inside_required_flavour - = 1
else :
if flavour and ( flavour in line ) :
inside_required_flavour = 1
if ' { ' in line :
inside_flavour_group + = 1
if ' } ' in line :
inside_flavour_group - = 1
else :
if " productFlavors " in line :
inside_flavour_group = 1
if not package :
matches = psearch_g ( line )
if matches :
s = matches . group ( 2 )
if app_matches_packagename ( app , s ) :
package = s
if not version :
matches = vnsearch_g ( line )
if matches :
version = matches . group ( 2 )
if not vercode :
matches = vcsearch_g ( line )
if matches :
vercode = matches . group ( 1 )
2015-06-03 15:23:18 +02:00
else :
2016-01-26 23:00:00 +01:00
try :
xml = parse_xml ( path )
if " package " in xml . attrib :
2016-01-04 18:24:58 +01:00
s = xml . attrib [ " package " ]
2016-01-26 23:00:00 +01:00
if app_matches_packagename ( app , s ) :
package = s
if " { http://schemas.android.com/apk/res/android}versionName " in xml . attrib :
2016-01-04 18:24:58 +01:00
version = xml . attrib [ " { http://schemas.android.com/apk/res/android}versionName " ]
2016-01-26 23:00:00 +01:00
base_dir = os . path . dirname ( path )
version = retrieve_string_singleline ( base_dir , version )
if " { http://schemas.android.com/apk/res/android}versionCode " in xml . attrib :
2016-01-04 18:24:58 +01:00
a = xml . attrib [ " { http://schemas.android.com/apk/res/android}versionCode " ]
2016-01-26 23:00:00 +01:00
if string_is_integer ( a ) :
vercode = a
except Exception :
2017-09-15 23:20:29 +02:00
logging . warning ( _ ( " Problem with xml at ' {path} ' " ) . format ( path = path ) )
2013-08-13 12:02:48 +02:00
2015-09-08 22:12:28 +02:00
# Remember package name, may be defined separately from version+vercode
if package is None :
package = max_package
2015-01-10 16:15:23 +01:00
logging . debug ( " ..got package= {0} , version= {1} , vercode= {2} "
. format ( package , version , vercode ) )
2014-06-22 17:36:00 +02:00
# Always grab the package name and version name in case they are not
# together with the highest version code
if max_package is None and package is not None :
2013-08-13 15:25:47 +02:00
max_package = package
2014-06-22 17:36:00 +02:00
if max_version is None and version is not None :
max_version = version
2013-08-13 15:25:47 +02:00
2017-02-27 13:38:59 +01:00
if vercode is not None \
and ( max_vercode is None or vercode > max_vercode ) :
2014-05-13 21:04:22 +02:00
if not ignoresearch or not ignoresearch ( version ) :
2014-06-22 17:36:00 +02:00
if version is not None :
max_version = version
if vercode is not None :
max_vercode = vercode
if package is not None :
max_package = package
2014-05-13 21:04:22 +02:00
else :
max_version = " Ignore "
2013-08-13 12:02:48 +02:00
2013-08-15 16:01:33 +02:00
if max_version is None :
2014-06-20 11:10:52 +02:00
max_version = " Unknown "
2013-08-15 16:01:33 +02:00
2015-01-27 08:13:21 +01:00
if max_package and not is_valid_package_name ( max_package ) :
2017-09-15 23:20:29 +02:00
raise FDroidException ( _ ( " Invalid package name {0} " ) . format ( max_package ) )
2015-01-26 19:29:39 +01:00
2013-08-13 12:02:48 +02:00
return ( max_version , max_vercode , max_package )
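# Rough usage sketch (values are hypothetical and assume the app id matches):
# given a build.gradle containing
#     applicationId "com.example.app"
#     versionCode 42
#     versionName "1.4.2"
# a call like parse_androidmanifests([path_to_that_gradle_file], app) would
# return ('1.4.2', '42', 'com.example.app'), all as strings, with the entry
# carrying the highest versionCode winning when several manifests are passed.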
2012-03-10 13:50:34 +01:00
2014-05-02 05:39:33 +02:00
2015-01-26 19:29:39 +01:00
def is_valid_package_name ( name ) :
return re . match ( " [A-Za-z_][A-Za-z_0-9.]+$ " , name )
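# e.g. is_valid_package_name('org.fdroid.fdroid') matches, while a name that
# starts with a digit such as '0example.app' does not (re.match returns None).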
2015-06-03 13:51:41 +02:00
def getsrclib ( spec , srclib_dir , subdir = None , basepath = False ,
2016-04-02 14:43:49 +02:00
raw = False , prepare = True , preponly = False , refresh = True ,
build = None ) :
2017-09-20 17:22:56 +02:00
""" Get the specified source library.
2013-03-01 18:59:01 +01:00
2017-09-20 17:22:56 +02:00
Returns the path to it . Normally this is the path to be used when
referencing it , which may be a subdirectory of the actual project . If
you want the base directory of the project , pass ' basepath=True ' .
"""
2013-11-18 22:31:52 +01:00
number = None
subdir = None
2013-05-24 23:35:56 +02:00
if raw :
name = spec
ref = None
else :
name , ref = spec . split ( ' @ ' )
2013-11-18 22:31:52 +01:00
if ' : ' in name :
number , name = name . split ( ' : ' , 1 )
if ' / ' in name :
2014-05-02 04:16:32 +02:00
name , subdir = name . split ( ' / ' , 1 )
2013-03-01 20:39:30 +01:00
2016-01-04 16:57:57 +01:00
if name not in fdroidserver . metadata . srclibs :
2014-07-03 13:53:54 +02:00
raise VCSException ( ' srclib ' + name + ' not found. ' )
2013-03-15 16:29:29 +01:00
2016-01-04 16:57:57 +01:00
srclib = fdroidserver . metadata . srclibs [ name ]
2013-04-07 20:39:53 +02:00
2013-05-20 13:34:03 +02:00
sdir = os . path . join ( srclib_dir , name )
2013-04-23 21:11:10 +02:00
2013-06-04 23:42:18 +02:00
if not preponly :
2015-01-05 00:29:27 +01:00
vcs = getvcs ( srclib [ " Repo Type " ] , srclib [ " Repo " ] , sdir )
2013-11-18 22:31:52 +01:00
vcs . srclib = ( name , number , sdir )
2013-10-23 16:57:02 +02:00
if ref :
2015-07-14 12:32:39 +02:00
vcs . gotorevision ( ref , refresh )
2013-06-04 23:42:18 +02:00
if raw :
return vcs
2013-05-24 23:35:56 +02:00
2013-05-20 13:16:06 +02:00
libdir = None
2013-12-06 12:15:13 +01:00
if subdir :
2013-11-16 12:54:23 +01:00
libdir = os . path . join ( sdir , subdir )
2013-12-06 12:15:13 +01:00
elif srclib [ " Subdir " ] :
2013-05-20 13:16:06 +02:00
for subdir in srclib [ " Subdir " ] :
libdir_candidate = os . path . join ( sdir , subdir )
if os . path . exists ( libdir_candidate ) :
libdir = libdir_candidate
break
2013-04-28 19:52:27 +02:00
2013-05-20 13:16:06 +02:00
if libdir is None :
libdir = sdir
2013-05-03 16:53:37 +02:00
2014-02-11 17:56:36 +01:00
remove_signing_keys ( sdir )
remove_debuggable_flags ( sdir )
2013-06-04 23:42:18 +02:00
if prepare :
2013-12-06 12:15:13 +01:00
if srclib [ " Prepare " ] :
2016-04-02 14:43:49 +02:00
cmd = replace_config_vars ( srclib [ " Prepare " ] , build )
2013-06-09 23:15:46 +02:00
2018-01-23 22:42:32 +01:00
p = FDroidPopen ( [ ' bash ' , ' -x ' , ' -c ' , ' -- ' , cmd ] , cwd = libdir )
2013-06-04 23:42:18 +02:00
if p . returncode != 0 :
2013-10-16 23:17:51 +02:00
raise BuildException ( " Error running prepare command for srclib %s "
2014-07-01 18:04:41 +02:00
% name , p . output )
2013-12-30 17:04:16 +01:00
2013-05-20 13:16:06 +02:00
if basepath :
2013-11-20 19:00:22 +01:00
libdir = sdir
return ( name , number , libdir )
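# Sketch of the spec format (the names below are hypothetical): a spec such as
#     '1:MySupportLib/library@v1.2.3'
# is split into number='1', name='MySupportLib', subdir='library' and
# ref='v1.2.3'; the srclib is checked out under srclib_dir/MySupportLib and
# the returned libdir points at its 'library' subdirectory (falling back to
# the srclib's configured Subdir, or the checkout root).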
2012-01-28 01:05:30 +01:00
2016-11-15 21:55:06 +01:00
2015-10-25 19:10:49 +01:00
gradle_version_regex = re . compile ( r " [^/]* ' com \ .android \ .tools \ .build:gradle:([^ \ .]+ \ .[^ \ .]+).* ' .* " )
2013-03-27 00:25:41 +01:00
2015-07-14 12:32:39 +02:00
def prepare_source ( vcs , app , build , build_dir , srclib_dir , extlib_dir , onserver = False , refresh = True ) :
2017-06-28 23:01:45 +02:00
""" Prepare the source code for a particular build
: param vcs : the appropriate vcs object for the application
: param app : the application details from the metadata
: param build : the build details from the metadata
: param build_dir : the path to the build directory , usually ' build/app.id '
: param srclib_dir : the path to the source libraries directory , usually ' build/srclib '
: param extlib_dir : the path to the external libraries directory , usually ' build/extlib '
Returns the ( root , srclibpaths ) where :
: param root : is the root directory , which may be the same as ' build_dir ' or may
be a subdirectory of it .
: param srclibpaths : is information on the srclibs being used
"""
2013-03-27 00:25:41 +01:00
2014-01-27 15:59:40 +01:00
# Optionally, the actual app source can be in a subdirectory
2015-11-28 17:55:27 +01:00
if build . subdir :
root_dir = os . path . join ( build_dir , build . subdir )
2012-01-03 22:39:30 +01:00
else :
root_dir = build_dir
2014-01-27 15:59:40 +01:00
# Get a working copy of the right revision
2015-11-28 17:55:27 +01:00
logging . info ( " Getting source for revision " + build . commit )
vcs . gotorevision ( build . commit , refresh )
2012-01-03 22:39:30 +01:00
2015-06-29 04:37:28 +02:00
# Initialise submodules if required
2015-11-28 17:55:27 +01:00
if build . submodules :
2017-09-15 23:20:29 +02:00
logging . info ( _ ( " Initialising submodules " ) )
2012-01-03 22:39:30 +01:00
vcs . initsubmodules ( )
2014-02-09 19:11:15 +01:00
# Check that a subdir (if we're using one) exists. This has to happen
# after the checkout, since it might not exist elsewhere
if not os . path . exists ( root_dir ) :
raise BuildException ( ' Missing subdir ' + root_dir )
2014-01-27 15:59:40 +01:00
# Run an init command if one is required
2015-11-28 17:55:27 +01:00
if build . init :
cmd = replace_config_vars ( build . init , build )
2014-01-27 15:59:40 +01:00
logging . info ( " Running ' init ' commands in %s " % root_dir )
2013-10-09 23:36:24 +02:00
2018-01-23 22:42:32 +01:00
p = FDroidPopen ( [ ' bash ' , ' -x ' , ' -c ' , ' -- ' , cmd ] , cwd = root_dir )
2013-10-09 23:36:24 +02:00
if p . returncode != 0 :
raise BuildException ( " Error running init command for %s : %s " %
2016-11-23 17:52:04 +01:00
( app . id , build . versionName ) , p . output )
2012-02-04 22:19:07 +01:00
2014-01-23 10:29:04 +01:00
# Apply patches if any
2015-11-28 17:55:27 +01:00
if build . patch :
2014-05-31 23:10:16 +02:00
logging . info ( " Applying patches " )
2015-11-28 17:55:27 +01:00
for patch in build . patch :
2014-01-23 10:29:04 +01:00
patch = patch . strip ( )
2014-01-27 15:59:40 +01:00
logging . info ( " Applying " + patch )
2015-11-28 13:09:47 +01:00
patch_path = os . path . join ( ' metadata ' , app . id , patch )
2014-02-17 13:25:55 +01:00
p = FDroidPopen ( [ ' patch ' , ' -p1 ' , ' -i ' , os . path . abspath ( patch_path ) ] , cwd = build_dir )
if p . returncode != 0 :
2014-01-23 10:29:04 +01:00
raise BuildException ( " Failed to apply patch %s " % patch_path )
2014-01-27 15:59:40 +01:00
# Get required source libraries
2014-01-23 10:29:04 +01:00
srclibpaths = [ ]
2015-11-28 17:55:27 +01:00
if build . srclibs :
2014-01-27 15:59:40 +01:00
logging . info ( " Collecting source libraries " )
2015-11-28 17:55:27 +01:00
for lib in build . srclibs :
2016-04-02 14:43:49 +02:00
srclibpaths . append ( getsrclib ( lib , srclib_dir , preponly = onserver ,
refresh = refresh , build = build ) )
2014-01-23 10:29:04 +01:00
for name , number , libpath in srclibpaths :
place_srclib ( root_dir , int ( number ) if number else None , libpath )
basesrclib = vcs . getsrclib ( )
# If one was used for the main source, add that too.
if basesrclib :
srclibpaths . append ( basesrclib )
2014-01-27 15:59:40 +01:00
# Update the local.properties file
2014-05-02 04:24:48 +02:00
localprops = [ os . path . join ( build_dir , ' local.properties ' ) ]
2015-11-28 17:55:27 +01:00
if build . subdir :
parts = build . subdir . split ( os . sep )
2015-10-26 16:49:11 +01:00
cur = build_dir
for d in parts :
cur = os . path . join ( cur , d )
localprops + = [ os . path . join ( cur , ' local.properties ' ) ]
2014-01-21 10:14:37 +01:00
for path in localprops :
2014-09-25 18:11:56 +02:00
props = " "
if os . path . isfile ( path ) :
logging . info ( " Updating local.properties file at %s " % path )
2016-06-07 13:26:40 +02:00
with open ( path , ' r ' , encoding = ' iso-8859-1 ' ) as f :
2015-08-29 03:37:23 +02:00
props + = f . read ( )
2014-09-25 18:11:56 +02:00
props + = ' \n '
else :
logging . info ( " Creating local.properties file at %s " % path )
2014-01-21 10:14:37 +01:00
# Fix old-fashioned 'sdk-location' by copying
2014-01-27 15:59:40 +01:00
# from sdk.dir, if necessary
2015-11-28 17:55:27 +01:00
if build . oldsdkloc :
2014-01-21 10:14:37 +01:00
sdkloc = re . match ( r " .*^sdk.dir=( \ S+)$.* " , props ,
2014-05-06 19:50:52 +02:00
re . S | re . M ) . group ( 1 )
2014-01-21 10:14:37 +01:00
props + = " sdk-location= %s \n " % sdkloc
else :
props + = " sdk.dir= %s \n " % config [ ' sdk_path ' ]
2014-02-10 18:26:33 +01:00
props + = " sdk-location= %s \n " % config [ ' sdk_path ' ]
2015-11-28 17:55:27 +01:00
ndk_path = build . ndk_path ( )
2016-07-28 08:26:51 +02:00
# if for any reason the path isn't valid or the directory
# doesn't exist, some versions of Gradle will error with a
# cryptic message (even if the NDK is not even necessary).
2016-07-25 13:07:46 +02:00
# https://gitlab.com/fdroid/fdroidserver/issues/171
2016-07-28 08:26:51 +02:00
if ndk_path and os . path . exists ( ndk_path ) :
2014-01-27 15:59:40 +01:00
# Add ndk location
2015-11-28 17:55:27 +01:00
props + = " ndk.dir= %s \n " % ndk_path
props + = " ndk-location= %s \n " % ndk_path
2014-01-27 15:59:40 +01:00
# Add java.encoding if necessary
2015-11-28 17:55:27 +01:00
if build . encoding :
props + = " java.encoding= %s \n " % build . encoding
2016-06-07 13:26:40 +02:00
with open ( path , ' w ' , encoding = ' iso-8859-1 ' ) as f :
2015-08-29 03:37:23 +02:00
f . write ( props )
2014-01-21 10:14:37 +01:00
2014-09-13 13:04:24 +02:00
flavours = [ ]
2016-02-15 13:01:38 +01:00
if build . build_method ( ) == ' gradle ' :
2015-11-28 17:55:27 +01:00
flavours = build . gradle
2013-10-30 17:17:44 +01:00
2015-11-28 17:55:27 +01:00
if build . target :
n = build . target . split ( ' - ' ) [ 1 ]
2015-07-30 22:13:12 +02:00
regsub_file ( r ' compileSdkVersion[ =]+[0-9]+ ' ,
r ' compileSdkVersion %s ' % n ,
os . path . join ( root_dir , ' build.gradle ' ) )
2014-01-28 14:01:32 +01:00
2013-10-30 17:17:44 +01:00
# Remove forced debuggable flags
2014-02-11 17:56:36 +01:00
remove_debuggable_flags ( root_dir )
2013-10-30 17:17:44 +01:00
2014-01-27 15:59:40 +01:00
# Insert version code and number into the manifest if necessary
2015-11-28 17:55:27 +01:00
if build . forceversion :
2014-01-27 15:59:40 +01:00
logging . info ( " Changing the version name " )
2014-09-13 13:01:08 +02:00
for path in manifest_paths ( root_dir , flavours ) :
2013-10-20 13:43:15 +02:00
if not os . path . isfile ( path ) :
continue
2013-12-30 11:33:37 +01:00
if has_extension ( path , ' xml ' ) :
2015-07-30 22:13:12 +02:00
regsub_file ( r ' android:versionName= " [^ " ]* " ' ,
2016-11-23 17:52:04 +01:00
r ' android:versionName= " %s " ' % build . versionName ,
2015-07-30 22:13:12 +02:00
path )
2013-12-30 11:33:37 +01:00
elif has_extension ( path , ' gradle ' ) :
2015-07-30 22:13:12 +02:00
regsub_file ( r """ ( \ s*)versionName[ \ s ' " =]+.* """ ,
2016-11-23 17:52:04 +01:00
r """ \ 1versionName ' %s ' """ % build . versionName ,
2015-07-30 22:13:12 +02:00
path )
2015-11-28 17:55:27 +01:00
if build . forcevercode :
2014-01-27 15:59:40 +01:00
logging . info ( " Changing the version code " )
2014-09-13 13:01:08 +02:00
for path in manifest_paths ( root_dir , flavours ) :
2013-10-20 13:43:15 +02:00
if not os . path . isfile ( path ) :
continue
2013-12-30 11:33:37 +01:00
if has_extension ( path , ' xml ' ) :
2015-07-30 22:13:12 +02:00
regsub_file ( r ' android:versionCode= " [^ " ]* " ' ,
2016-11-23 17:52:04 +01:00
r ' android:versionCode= " %s " ' % build . versionCode ,
2015-07-30 22:13:12 +02:00
path )
2013-12-30 11:33:37 +01:00
elif has_extension ( path , ' gradle ' ) :
2015-07-30 22:13:12 +02:00
regsub_file ( r ' versionCode[ =]+[0-9]+ ' ,
2016-11-23 17:52:04 +01:00
r ' versionCode %s ' % build . versionCode ,
2015-07-30 22:13:12 +02:00
path )
2012-01-03 22:39:30 +01:00
2014-01-27 15:59:40 +01:00
# Delete unwanted files
2015-11-28 17:55:27 +01:00
if build . rm :
2017-09-15 23:20:29 +02:00
logging . info ( _ ( " Removing specified files " ) )
2015-11-28 17:55:27 +01:00
for part in getpaths ( build_dir , build . rm ) :
2014-02-17 14:59:55 +01:00
dest = os . path . join ( build_dir , part )
logging . info ( " Removing {0} " . format ( part ) )
if os . path . lexists ( dest ) :
2017-11-25 03:05:59 +01:00
# rmtree can only handle directories that are not symlinks, so catch anything else
if not os . path . isdir ( dest ) or os . path . islink ( dest ) :
os . remove ( dest )
2013-11-24 11:29:28 +01:00
else :
2017-11-25 03:05:59 +01:00
shutil . rmtree ( dest )
2013-11-24 11:29:28 +01:00
else :
2014-01-27 15:59:40 +01:00
logging . info ( " ...but it didn ' t exist " )
2012-01-03 22:39:30 +01:00
2013-11-24 10:39:12 +01:00
remove_signing_keys ( build_dir )
2014-01-27 15:59:40 +01:00
# Add required external libraries
2015-11-28 17:55:27 +01:00
if build . extlibs :
2014-01-27 15:59:40 +01:00
logging . info ( " Collecting prebuilt libraries " )
2012-01-27 23:10:08 +01:00
libsdir = os . path . join ( root_dir , ' libs ' )
if not os . path . exists ( libsdir ) :
os . mkdir ( libsdir )
2015-11-28 17:55:27 +01:00
for lib in build . extlibs :
2013-09-11 13:45:02 +02:00
lib = lib . strip ( )
2014-01-27 15:59:40 +01:00
logging . info ( " ...installing extlib {0} " . format ( lib ) )
2012-01-27 23:10:08 +01:00
libf = os . path . basename ( lib )
2013-11-12 21:14:16 +01:00
libsrc = os . path . join ( extlib_dir , lib )
if not os . path . exists ( libsrc ) :
raise BuildException ( " Missing extlib file {0} " . format ( libsrc ) )
shutil . copyfile ( libsrc , os . path . join ( libsdir , libf ) )
2012-01-27 23:10:08 +01:00
2014-01-27 15:59:40 +01:00
# Run a pre-build command if one is required
2015-11-28 17:55:27 +01:00
if build . prebuild :
2014-05-31 23:10:16 +02:00
logging . info ( " Running ' prebuild ' commands in %s " % root_dir )
2015-11-28 17:55:27 +01:00
cmd = replace_config_vars ( build . prebuild , build )
2013-08-26 23:52:04 +02:00
2014-01-27 15:59:40 +01:00
# Substitute source library paths into prebuild commands
2013-11-15 20:42:17 +01:00
for name , number , libpath in srclibpaths :
2012-01-28 01:05:30 +01:00
libpath = os . path . relpath ( libpath , root_dir )
2013-10-09 23:36:24 +02:00
cmd = cmd . replace ( ' $$ ' + name + ' $$ ' , libpath )
2013-11-08 20:44:27 +01:00
2018-01-23 22:42:32 +01:00
p = FDroidPopen ( [ ' bash ' , ' -x ' , ' -c ' , ' -- ' , cmd ] , cwd = root_dir )
2012-09-24 15:06:15 +02:00
if p . returncode != 0 :
2013-10-09 23:36:24 +02:00
raise BuildException ( " Error running prebuild command for %s : %s " %
2016-11-23 17:52:04 +01:00
( app . id , build . versionName ) , p . output )
2012-01-03 22:39:30 +01:00
2014-02-11 16:30:49 +01:00
# Generate (or update) the ant build file, build.xml...
2016-11-29 13:26:32 +01:00
if build . build_method ( ) == ' ant ' and build . androidupdate != [ ' no ' ] :
2014-12-09 15:15:36 +01:00
parms = [ ' android ' , ' update ' , ' lib-project ' ]
lparms = [ ' android ' , ' update ' , ' project ' ]
2014-02-11 16:30:49 +01:00
2015-11-28 17:55:27 +01:00
if build . target :
parms + = [ ' -t ' , build . target ]
lparms + = [ ' -t ' , build . target ]
2016-11-29 13:26:32 +01:00
if build . androidupdate :
update_dirs = build . androidupdate
2014-02-11 16:30:49 +01:00
else :
2015-11-28 17:55:27 +01:00
update_dirs = ant_subprojects ( root_dir ) + [ ' . ' ]
2014-02-11 16:30:49 +01:00
for d in update_dirs :
subdir = os . path . join ( root_dir , d )
if d == ' . ' :
2014-07-05 15:25:39 +02:00
logging . debug ( " Updating main project " )
2014-02-11 16:30:49 +01:00
cmd = parms + [ ' -p ' , d ]
else :
2014-07-05 15:25:39 +02:00
logging . debug ( " Updating subproject %s " % d )
2014-02-11 16:30:49 +01:00
cmd = lparms + [ ' -p ' , d ]
2014-12-09 15:15:36 +01:00
p = SdkToolsPopen ( cmd , cwd = root_dir )
2014-02-11 16:30:49 +01:00
# Check to see whether an error was returned without a proper exit
# code (this is the case for the 'no target set or target invalid'
# error)
2014-07-01 18:04:41 +02:00
if p . returncode != 0 or p . output . startswith ( " Error: " ) :
raise BuildException ( " Failed to update project at %s " % d , p . output )
2014-02-11 16:30:49 +01:00
# Clean update dirs via ant
2014-02-13 09:19:26 +01:00
if d != ' . ' :
2014-02-11 16:30:49 +01:00
logging . info ( " Cleaning subproject %s " % d )
2014-02-13 09:19:26 +01:00
p = FDroidPopen ( [ ' ant ' , ' clean ' ] , cwd = subdir )
2014-02-11 16:30:49 +01:00
2013-03-20 10:30:56 +01:00
return ( root_dir , srclibpaths )
2012-01-03 22:39:30 +01:00
2014-05-02 05:39:33 +02:00
2015-10-04 01:52:23 +02:00
def getpaths_map ( build_dir , globpaths ) :
2017-09-20 17:22:56 +02:00
""" Extend via globbing the paths from a field and return them as a map from original path to resulting paths """
2015-10-04 01:52:23 +02:00
paths = dict ( )
for p in globpaths :
2014-04-15 23:53:44 +02:00
p = p . strip ( )
full_path = os . path . join ( build_dir , p )
full_path = os . path . normpath ( full_path )
2015-10-04 01:52:23 +02:00
paths [ p ] = [ r [ len ( build_dir ) + 1 : ] for r in glob . glob ( full_path ) ]
2015-11-14 13:05:16 +01:00
if not paths [ p ] :
raise FDroidException ( " glob path ' %s ' did not match any files/dirs " % p )
2015-10-04 01:52:23 +02:00
return paths
def getpaths ( build_dir , globpaths ) :
2017-09-20 17:22:56 +02:00
""" Extend via globbing the paths from a field and return them as a set """
2015-10-04 01:52:23 +02:00
paths_map = getpaths_map ( build_dir , globpaths )
paths = set ( )
2016-01-04 17:02:28 +01:00
for k , v in paths_map . items ( ) :
2015-10-04 01:52:23 +02:00
for p in v :
paths . add ( p )
2014-04-15 23:53:44 +02:00
return paths
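# Sketch (hypothetical globs): with build_dir='build/com.example.app' and
# globpaths=['libs/*.jar', 'docs'], getpaths_map() could return
#     {'libs/*.jar': ['libs/a.jar', 'libs/b.jar'], 'docs': ['docs']}
# with all paths relative to build_dir, while getpaths() flattens that into
# the set {'libs/a.jar', 'libs/b.jar', 'docs'}; a glob matching nothing
# raises an FDroidException.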
2014-05-02 05:39:33 +02:00
2015-08-29 03:28:39 +02:00
def natural_key ( s ) :
return [ int ( sp ) if sp . isdigit ( ) else sp for sp in re . split ( r ' ( \ d+) ' , s ) ]
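# e.g. natural_key('apk_10.apk') == ['apk_', 10, '.apk'], so that when used as
# a sort key, 'apk_2.apk' sorts before 'apk_10.apk' (digit runs compare as ints).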
2017-11-29 11:21:34 +01:00
def check_system_clock ( dt_obj , path ) :
""" Check if system clock is updated based on provided date
If an APK has files newer than the system time , suggest updating
the system clock . This is useful for offline systems , used for
signing , which do not have another source of clock sync info . It
has to be more than 24 hours newer because ZIP / APK files do not
store timezone info
"""
checkdt = dt_obj - timedelta ( 1 )
if datetime . today ( ) < checkdt :
logging . warning ( _ ( ' System clock is older than date in {path} ! ' ) . format ( path = path )
+ ' \n ' + _ ( ' Set clock to that time using: ' ) + ' \n '
+ ' sudo date -s " ' + str ( dt_obj ) + ' " ' )
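# For instance (dates hypothetical): if an APK entry is dated 2018-03-01 but
# the host clock still reads 2018-02-20, the entry is more than 24 hours in
# the future, so a warning with a suggested `sudo date -s ...` command is logged.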
2012-01-17 18:25:28 +01:00
class KnownApks :
2017-06-01 10:27:35 +02:00
""" permanent store of existing APKs with the date they were added
This is currently the only way to permanently store the " updated "
date of APKs .
"""
2012-01-17 18:25:28 +01:00
def __init__ ( self ) :
2017-09-19 10:57:29 +02:00
''' Load filename/date info about previously seen APKs.
Since neither the appid nor the date string will ever contain
spaces, each line is parsed as a list from the end, which allows
the filename itself to contain spaces.
'''
2012-01-17 18:25:28 +01:00
self . path = os . path . join ( ' stats ' , ' known_apks.txt ' )
self . apks = { }
2015-04-03 00:05:22 +02:00
if os . path . isfile ( self . path ) :
2016-06-07 13:35:13 +02:00
with open ( self . path , ' r ' , encoding = ' utf8 ' ) as f :
2016-01-04 17:59:47 +01:00
for line in f :
t = line . rstrip ( ) . split ( ' ' )
if len ( t ) == 2 :
self . apks [ t [ 0 ] ] = ( t [ 1 ] , None )
else :
2017-09-19 10:57:29 +02:00
appid = t [ - 2 ]
date = datetime . strptime ( t [ - 1 ] , ' % Y- % m- %d ' )
filename = line [ 0 : line . rfind ( appid ) - 1 ]
self . apks [ filename ] = ( appid , date )
2017-11-29 11:21:34 +01:00
check_system_clock ( date , self . path )
2012-01-17 18:25:28 +01:00
self . changed = False
def writeifchanged ( self ) :
2015-08-29 03:26:23 +02:00
if not self . changed :
return
if not os . path . exists ( ' stats ' ) :
os . mkdir ( ' stats ' )
lst = [ ]
2016-01-04 17:02:28 +01:00
for apk , app in self . apks . items ( ) :
2015-08-29 03:26:23 +02:00
appid , added = app
line = apk + ' ' + appid
if added :
2016-11-28 21:10:58 +01:00
line + = ' ' + added . strftime ( ' % Y- % m- %d ' )
2015-08-29 03:26:23 +02:00
lst . append ( line )
2016-06-07 13:35:13 +02:00
with open ( self . path , ' w ' , encoding = ' utf8 ' ) as f :
2015-08-29 03:28:39 +02:00
for line in sorted ( lst , key = natural_key ) :
2012-01-19 15:14:14 +01:00
f . write ( line + ' \n ' )
2012-01-17 18:25:28 +01:00
2017-06-01 10:27:35 +02:00
def recordapk ( self , apkName , app , default_date = None ) :
2016-11-28 21:10:58 +01:00
'''
Record an apk (if it's new, otherwise this does nothing).
Returns the date it was added as a datetime instance.
'''
2017-06-01 10:27:35 +02:00
if apkName not in self . apks :
2016-06-26 17:18:50 +02:00
if default_date is None :
2016-11-28 21:10:58 +01:00
default_date = datetime . utcnow ( )
2017-06-01 10:27:35 +02:00
self . apks [ apkName ] = ( app , default_date )
2012-01-17 18:25:28 +01:00
self . changed = True
2017-10-24 16:48:42 +02:00
_ignored , added = self . apks [ apkName ]
2012-07-12 22:48:59 +02:00
return added
2012-01-17 18:25:28 +01:00
def getapp ( self , apkname ) :
2017-09-20 17:22:56 +02:00
""" Look up information - given the ' apkname ' , returns (app id, date added/None).
Or returns None for an unknown apk .
"""
2012-01-17 18:25:28 +01:00
if apkname in self . apks :
return self . apks [ apkname ]
return None
2012-01-22 15:03:56 +01:00
def getlatest ( self , num ) :
2017-09-20 17:22:56 +02:00
""" Get the most recent ' num ' apps added to the repo, as a list of package ids with the most recent first """
2012-01-22 15:03:56 +01:00
apps = { }
2016-01-04 17:02:28 +01:00
for apk , app in self . apks . items ( ) :
2012-01-22 15:03:56 +01:00
appid , added = app
if added :
if appid in apps :
if apps [ appid ] > added :
apps [ appid ] = added
else :
apps [ appid ] = added
2016-01-04 17:02:28 +01:00
sortedapps = sorted ( apps . items ( ) , key = operator . itemgetter ( 1 ) ) [ - num : ]
2017-09-15 21:48:45 +02:00
lst = [ app for app , _ignored in sortedapps ]
2012-01-26 22:36:23 +01:00
lst . reverse ( )
2012-01-22 15:03:56 +01:00
return lst
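# Sketch of the stats/known_apks.txt line format this class reads and writes
# (filename and values are hypothetical):
#     My App_1234.apk com.example.app 2017-06-01
# Parsing from the end gives appid='com.example.app' and date=2017-06-01,
# and everything before the appid, spaces included, is the APK filename.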
2014-05-02 05:39:33 +02:00
2016-10-13 18:24:58 +02:00
def get_file_extension ( filename ) :
""" get the normalized file extension, can be blank string but never None """
2017-04-03 20:24:00 +02:00
if isinstance ( filename , bytes ) :
filename = filename . decode ( ' utf-8 ' )
2016-10-13 18:24:58 +02:00
return os . path . splitext ( filename ) [ 1 ] . lower ( ) [ 1 : ]
2017-04-13 14:18:48 +02:00
def get_apk_debuggable_aapt ( apkfile ) :
2014-12-31 16:34:11 +01:00
p = SdkToolsPopen ( [ ' aapt ' , ' dump ' , ' xmltree ' , apkfile , ' AndroidManifest.xml ' ] ,
output = False )
2013-04-15 14:04:13 +02:00
if p . returncode != 0 :
2017-09-15 23:20:29 +02:00
raise FDroidException ( _ ( " Failed to get APK manifest information " ) )
2014-07-01 18:04:41 +02:00
for line in p . output . splitlines ( ) :
2013-12-20 09:34:03 +01:00
if ' android:debuggable ' in line and not line . endswith ( ' 0x0 ' ) :
2013-04-15 14:04:13 +02:00
return True
return False
2017-04-13 14:18:48 +02:00
def get_apk_debuggable_androguard ( apkfile ) :
try :
from androguard . core . bytecodes . apk import APK
except ImportError :
2017-05-22 21:33:52 +02:00
raise FDroidException ( " androguard library is not installed and aapt not present " )
2017-04-13 14:18:48 +02:00
apkobject = APK ( apkfile )
if apkobject . is_valid_APK ( ) :
debuggable = apkobject . get_element ( " application " , " debuggable " )
if debuggable is not None :
return bool ( strtobool ( debuggable ) )
return False
2017-04-13 12:30:04 +02:00
def isApkAndDebuggable ( apkfile ) :
2017-04-13 14:18:48 +02:00
""" Returns True if the given file is an APK and is debuggable
: param apkfile : full path to the apk to check """
if get_file_extension ( apkfile ) != ' apk ' :
return False
2017-05-18 17:20:24 +02:00
if SdkToolsPopen ( [ ' aapt ' , ' version ' ] , output = False ) :
2017-04-13 14:18:48 +02:00
return get_apk_debuggable_aapt ( apkfile )
else :
return get_apk_debuggable_androguard ( apkfile )
2017-09-06 15:54:16 +02:00
def get_apk_id_aapt ( apkfile ) :
""" Extrat identification information from APK using aapt.
: param apkfile : path to an APK file .
: returns : triplet ( appid , version code , version name )
"""
r = re . compile ( " package: name= ' (?P<appid>.*) ' versionCode= ' (?P<vercode>.*) ' versionName= ' (?P<vername>.*) ' platformBuildVersionName= ' .* ' " )
p = SdkToolsPopen ( [ ' aapt ' , ' dump ' , ' badging ' , apkfile ] , output = False )
for line in p . output . splitlines ( ) :
m = r . match ( line )
if m :
return m . group ( ' appid ' ) , m . group ( ' vercode ' ) , m . group ( ' vername ' )
2017-09-15 23:20:29 +02:00
raise FDroidException ( _ ( " Reading packageName/versionCode/versionName failed, APK invalid: ' {apkfilename} ' " )
. format ( apkfilename = apkfile ) )
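# The `aapt dump badging` line this parses looks roughly like (values are
# hypothetical):
#     package: name='com.example.app' versionCode='42' versionName='1.4.2' platformBuildVersionName='7.1.1'
# from which ('com.example.app', '42', '1.4.2') would be returned.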
2017-09-06 15:54:16 +02:00
2017-11-30 21:10:41 +01:00
def get_minSdkVersion_aapt ( apkfile ) :
""" Extract the minimum supported Android SDK from an APK using aapt
: param apkfile : path to an APK file .
: returns : the integer representing the SDK version
"""
r = re . compile ( r " ^sdkVersion: ' ([0-9]+) ' " )
p = SdkToolsPopen ( [ ' aapt ' , ' dump ' , ' badging ' , apkfile ] , output = False )
for line in p . output . splitlines ( ) :
m = r . match ( line )
if m :
return int ( m . group ( 1 ) )
raise FDroidException ( _ ( ' Reading minSdkVersion failed: " {apkfilename} " ' )
. format ( apkfilename = apkfile ) )
2013-10-16 23:17:51 +02:00
class PopenResult :
2016-01-04 18:59:19 +01:00
def __init__ ( self ) :
self . returncode = None
self . output = None
2013-10-16 23:17:51 +02:00
2014-05-02 05:39:33 +02:00
2015-01-26 19:14:29 +01:00
def SdkToolsPopen ( commands , cwd = None , output = True ) :
2014-12-09 14:12:41 +01:00
cmd = commands [ 0 ]
if cmd not in config :
config [ cmd ] = find_sdk_tools_cmd ( commands [ 0 ] )
2015-10-24 17:02:53 +02:00
abscmd = config [ cmd ]
if abscmd is None :
2017-09-15 23:20:29 +02:00
raise FDroidException ( _ ( " Could not find ' {command} ' on your system " ) . format ( command = cmd ) )
2016-11-15 14:56:11 +01:00
if cmd == ' aapt ' :
test_aapt_version ( config [ ' aapt ' ] )
2015-10-24 18:37:57 +02:00
return FDroidPopen ( [ abscmd ] + commands [ 1 : ] ,
2015-01-26 19:14:29 +01:00
cwd = cwd , output = output )
2014-02-17 13:12:25 +01:00
2014-05-02 05:39:33 +02:00
2017-04-11 21:34:49 +02:00
def FDroidPopenBytes ( commands , cwd = None , envs = None , output = True , stderr_to_stdout = True ) :
2014-02-17 13:12:25 +01:00
"""
2016-01-04 18:59:19 +01:00
Run a command and capture the possibly huge output as bytes .
2014-02-17 13:12:25 +01:00
: param commands : command and argument list like in subprocess . Popen
: param cwd : optionally specifies a working directory
2017-04-11 21:34:49 +02:00
: param envs : an optional dictionary of environment variables and their values
2014-02-17 13:12:25 +01:00
: returns : A PopenResult .
"""
2014-07-01 21:03:50 +02:00
global env
2015-08-05 14:39:58 +02:00
if env is None :
set_FDroidPopen_env ( )
2014-07-01 21:03:50 +02:00
2017-04-11 21:34:49 +02:00
process_env = env . copy ( )
if envs is not None and len ( envs ) > 0 :
process_env . update ( envs )
2014-06-25 10:25:47 +02:00
if cwd :
cwd = os . path . normpath ( cwd )
logging . debug ( " Directory: %s " % cwd )
logging . debug ( " > %s " % ' ' . join ( commands ) )
2014-02-16 00:27:19 +01:00
2016-02-17 19:22:57 +01:00
stderr_param = subprocess . STDOUT if stderr_to_stdout else subprocess . PIPE
2013-10-16 23:17:51 +02:00
result = PopenResult ( )
2014-09-11 23:08:51 +02:00
p = None
try :
2017-04-11 21:34:49 +02:00
p = subprocess . Popen ( commands , cwd = cwd , shell = False , env = process_env ,
2018-02-05 13:34:42 +01:00
stdin = subprocess . DEVNULL , stdout = subprocess . PIPE ,
stderr = stderr_param )
2015-09-17 13:25:08 +02:00
except OSError as e :
2015-01-26 19:14:29 +01:00
raise BuildException ( " OSError while trying to execute " +
' ' . join ( commands ) + ' : ' + str ( e ) )
2013-12-30 17:04:16 +01:00
2017-12-02 13:24:13 +01:00
# TODO are these AsynchronousFileReader threads always exiting?
2016-02-17 19:22:57 +01:00
if not stderr_to_stdout and options . verbose :
stderr_queue = Queue ( )
stderr_reader = AsynchronousFileReader ( p . stderr , stderr_queue )
while not stderr_reader . eof ( ) :
while not stderr_queue . empty ( ) :
line = stderr_queue . get ( )
2016-01-04 18:59:19 +01:00
sys . stderr . buffer . write ( line )
2016-02-17 19:22:57 +01:00
sys . stderr . flush ( )
time . sleep ( 0.1 )
2015-09-17 13:33:19 +02:00
stdout_queue = Queue ( )
2013-10-16 23:17:51 +02:00
stdout_reader = AsynchronousFileReader ( p . stdout , stdout_queue )
2016-01-04 18:59:19 +01:00
buf = io . BytesIO ( )
2013-12-30 17:04:16 +01:00
2014-01-16 11:17:22 +01:00
# Check the queue for output (until there is no more to get)
while not stdout_reader . eof ( ) :
2013-10-16 23:17:51 +02:00
while not stdout_queue . empty ( ) :
line = stdout_queue . get ( )
2014-07-09 11:11:41 +02:00
if output and options . verbose :
2013-10-16 23:17:51 +02:00
# Output directly to console
2016-01-04 17:55:37 +01:00
sys . stderr . buffer . write ( line )
2014-07-05 14:10:26 +02:00
sys . stderr . flush ( )
2016-01-04 18:59:19 +01:00
buf . write ( line )
2013-10-16 23:17:51 +02:00
2013-12-19 17:58:10 +01:00
time . sleep ( 0.1 )
2013-10-16 23:17:51 +02:00
2014-08-10 12:28:19 +02:00
result . returncode = p . wait ( )
2016-01-04 18:59:19 +01:00
result . output = buf . getvalue ( )
buf . close ( )
2017-09-07 02:36:58 +02:00
# make sure all filestreams of the subprocess are closed
for streamvar in [ ' stdin ' , ' stdout ' , ' stderr ' ] :
if hasattr ( p , streamvar ) :
stream = getattr ( p , streamvar )
if stream :
stream . close ( )
2016-01-04 18:59:19 +01:00
return result
2017-04-11 21:34:49 +02:00
def FDroidPopen ( commands , cwd = None , envs = None , output = True , stderr_to_stdout = True ) :
2016-01-04 18:59:19 +01:00
"""
Run a command and capture the possibly huge output as a str .
: param commands : command and argument list like in subprocess . Popen
: param cwd : optionally specifies a working directory
2017-04-11 21:34:49 +02:00
: param envs : an optional dictionary of environment variables and their values
2016-01-04 18:59:19 +01:00
: returns : A PopenResult .
"""
2017-04-11 21:34:49 +02:00
result = FDroidPopenBytes ( commands , cwd , envs , output , stderr_to_stdout )
2017-03-12 20:36:44 +01:00
result . output = result . output . decode ( ' utf-8 ' , ' ignore ' )
2013-10-16 23:17:51 +02:00
return result
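# Typical use (illustrative only; the command and cwd are arbitrary examples):
#     p = FDroidPopen(['git', 'describe', '--always'], cwd='build/com.example.app')
#     if p.returncode != 0:
#         raise FDroidException('git describe failed', p.output)
# p.output is a str decoded as UTF-8 with undecodable bytes ignored; use
# FDroidPopenBytes() when the raw bytes are needed.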
2013-10-27 23:43:38 +01:00
2014-05-02 05:39:33 +02:00
2015-09-24 03:52:59 +02:00
gradle_comment = re . compile ( r ' [ ]*// ' )
2015-10-25 19:10:49 +01:00
gradle_signing_configs = re . compile ( r ' ^[ \ t ]*signingConfigs[ \ t]* { [ \ t]*$ ' )
gradle_line_matches = [
re . compile ( r ' ^[ \ t ]*signingConfig [^ ]*$ ' ) ,
re . compile ( r ' .*android \ .signingConfigs \ .[^ { ]*$ ' ) ,
re . compile ( r ' .* \ .readLine \ (.* ' ) ,
]
2015-09-24 03:52:59 +02:00
2013-11-14 14:09:37 +01:00
def remove_signing_keys ( build_dir ) :
for root , dirs , files in os . walk ( build_dir ) :
if ' build.gradle ' in files :
path = os . path . join ( root , ' build.gradle ' )
2013-10-27 23:43:38 +01:00
2016-06-07 20:13:54 +02:00
with open ( path , " r " , encoding = ' utf8 ' ) as o :
2013-11-14 14:09:37 +01:00
lines = o . readlines ( )
2013-12-30 17:04:16 +01:00
2014-06-22 21:34:14 +02:00
changed = False
2013-11-14 14:09:37 +01:00
opened = 0
2015-01-05 12:59:33 +01:00
i = 0
2016-06-07 20:13:54 +02:00
with open ( path , " w " , encoding = ' utf8 ' ) as o :
2015-01-05 12:59:33 +01:00
while i < len ( lines ) :
line = lines [ i ]
i + = 1
while line . endswith ( ' \\ \n ' ) :
line = line . rstrip ( ' \\ \n ' ) + lines [ i ]
i + = 1
2015-09-24 03:52:59 +02:00
if gradle_comment . match ( line ) :
2015-09-22 20:52:16 +02:00
o . write ( line )
2014-02-28 10:54:14 +01:00
continue
if opened > 0 :
opened + = line . count ( ' { ' )
opened - = line . count ( ' } ' )
continue
2015-10-25 19:10:49 +01:00
if gradle_signing_configs . match ( line ) :
2014-06-22 21:34:14 +02:00
changed = True
2014-02-28 10:54:14 +01:00
opened + = 1
continue
2015-10-25 19:10:49 +01:00
if any ( s . match ( line ) for s in gradle_line_matches ) :
2014-06-22 21:34:14 +02:00
changed = True
2014-02-28 10:54:14 +01:00
continue
if opened == 0 :
2013-11-14 14:09:37 +01:00
o . write ( line )
2014-06-22 21:34:14 +02:00
if changed :
logging . info ( " Cleaned build.gradle of keysigning configs at %s " % path )
2013-11-20 19:08:59 +01:00
2014-03-13 10:31:22 +01:00
for propfile in [
' project.properties ' ,
' build.properties ' ,
' default.properties ' ,
2015-01-20 18:01:29 +01:00
' ant.properties ' , ] :
2013-11-14 14:09:37 +01:00
if propfile in files :
path = os . path . join ( root , propfile )
2013-11-15 12:42:39 +01:00
2016-06-07 13:26:40 +02:00
with open ( path , " r " , encoding = ' iso-8859-1 ' ) as o :
2013-11-16 12:54:35 +01:00
lines = o . readlines ( )
2014-06-22 21:34:14 +02:00
changed = False
2016-06-07 13:26:40 +02:00
with open ( path , " w " , encoding = ' iso-8859-1 ' ) as o :
2013-11-16 12:54:35 +01:00
for line in lines :
2014-06-22 21:34:14 +02:00
if any ( line . startswith ( s ) for s in ( ' key.store ' , ' key.alias ' ) ) :
changed = True
2014-03-13 10:31:22 +01:00
continue
2014-06-22 21:34:14 +02:00
2014-03-13 10:31:22 +01:00
o . write ( line )
2013-10-27 23:43:38 +01:00
2014-06-22 21:34:14 +02:00
if changed :
logging . info ( " Cleaned %s of keysigning configs at %s " % ( propfile , path ) )
2013-11-15 12:42:39 +01:00
2014-05-02 05:39:33 +02:00
2015-08-05 14:39:58 +02:00
def set_FDroidPopen_env ( build = None ) :
2015-08-05 14:42:41 +02:00
'''
set up the environment variables for the build environment
There is only a weak standard , the variables used by gradle , so also set
2016-06-23 17:11:49 +02:00
up the most commonly used environment variables for SDK and NDK . Also , if
there is no locale set , this will set the locale ( e . g . LANG ) to en_US . UTF - 8.
2015-08-05 14:42:41 +02:00
'''
2015-01-06 19:41:55 +01:00
global env , orig_path
2015-08-05 14:42:41 +02:00
2015-08-05 14:39:58 +02:00
if env is None :
env = os . environ
orig_path = env [ ' PATH ' ]
for n in [ ' ANDROID_HOME ' , ' ANDROID_SDK ' ] :
env [ n ] = config [ ' sdk_path ' ]
2015-08-05 14:42:41 +02:00
for k , v in config [ ' java_paths ' ] . items ( ) :
env [ ' JAVA %s _HOME ' % k ] = v
2015-01-06 19:41:55 +01:00
2016-06-23 17:11:49 +02:00
missinglocale = True
for k , v in env . items ( ) :
if k == ' LANG ' and v != ' C ' :
missinglocale = False
elif k == ' LC_ALL ' :
missinglocale = False
if missinglocale :
env [ ' LANG ' ] = ' en_US.UTF-8 '
2015-08-05 14:39:58 +02:00
if build is not None :
path = build . ndk_path ( )
paths = orig_path . split ( os . pathsep )
2016-06-16 22:36:31 +02:00
if path not in paths :
paths = [ path ] + paths
env [ ' PATH ' ] = os . pathsep . join ( paths )
2015-08-05 14:39:58 +02:00
for n in [ ' ANDROID_NDK ' , ' NDK ' , ' ANDROID_NDK_HOME ' ] :
env [ n ] = build . ndk_path ( )
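# Usage sketch: call once without arguments for SDK-only work, or pass a
# build so its NDK is exported and prepended to PATH (build is hypothetical):
#   set_FDroidPopen_env()       # SDK paths, JAVA*_HOME and locale only
#   set_FDroidPopen_env(build)  # additionally ANDROID_NDK, NDK, ANDROID_NDK_HOME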
2015-01-06 19:41:55 +01:00
2017-03-16 09:23:28 +01:00
def replace_build_vars ( cmd , build ) :
cmd = cmd . replace ( ' $$COMMIT$$ ' , build . commit )
cmd = cmd . replace ( ' $$VERSION$$ ' , build . versionName )
cmd = cmd . replace ( ' $$VERCODE$$ ' , build . versionCode )
return cmd
2015-05-10 13:53:06 +02:00
def replace_config_vars ( cmd , build ) :
2013-11-08 20:44:27 +01:00
cmd = cmd . replace ( ' $$SDK$$ ' , config [ ' sdk_path ' ] )
2016-03-30 23:54:37 +02:00
cmd = cmd . replace ( ' $$NDK$$ ' , build . ndk_path ( ) )
2013-11-08 20:44:27 +01:00
cmd = cmd . replace ( ' $$MVN3$$ ' , config [ ' mvn3 ' ] )
2015-05-10 13:53:06 +02:00
if build is not None :
2017-03-16 09:23:28 +01:00
cmd = replace_build_vars ( cmd , build )
2013-11-08 20:44:27 +01:00
return cmd
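# Usage sketch (hypothetical command string): expands $$SDK$$, $$NDK$$ and
# $$MVN3$$ from config, plus $$COMMIT$$/$$VERSION$$/$$VERCODE$$ when a build
# is given:
#   cmd = replace_config_vars('$$SDK$$/tools/android update project -p .', build)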
2014-05-02 05:39:33 +02:00
2013-11-15 20:42:17 +01:00
def place_srclib ( root_dir , number , libpath ) :
if not number :
return
relpath = os . path . relpath ( libpath , root_dir )
2013-11-17 23:20:58 +01:00
proppath = os . path . join ( root_dir , ' project.properties ' )
2014-01-27 22:34:34 +01:00
lines = [ ]
2015-04-17 00:58:20 +02:00
if os . path . isfile ( proppath ) :
2016-06-07 13:26:40 +02:00
with open ( proppath , " r " , encoding = ' iso-8859-1 ' ) as o :
2015-04-17 00:58:20 +02:00
lines = o . readlines ( )
2013-11-17 23:20:58 +01:00
2016-06-07 13:26:40 +02:00
with open ( proppath , " w " , encoding = ' iso-8859-1 ' ) as o :
2013-11-17 23:20:58 +01:00
placed = False
for line in lines :
if line . startswith ( ' android.library.reference. %d = ' % number ) :
2014-05-02 04:16:32 +02:00
o . write ( ' android.library.reference. %d = %s \n ' % ( number , relpath ) )
2013-11-17 23:20:58 +01:00
placed = True
else :
o . write ( line )
if not placed :
2014-05-02 04:16:32 +02:00
o . write ( ' android.library.reference. %d = %s \n ' % ( number , relpath ) )
2014-10-24 22:04:15 +02:00
2016-11-15 21:55:06 +01:00
2016-06-17 12:58:52 +02:00
apk_sigfile = re . compile ( r ' META-INF/[0-9A-Za-z]+ \ .(SF|RSA|DSA|EC) ' )
2015-10-25 19:10:49 +01:00
2014-10-24 22:04:15 +02:00
2017-09-23 09:36:22 +02:00
def signer_fingerprint_short ( sig ) :
""" Obtain shortened sha256 signing-key fingerprint for pkcs7 signature.
Extracts the first 7 hexadecimal digits of sha256 signing - key fingerprint
for a given pkcs7 signature .
2017-09-20 00:58:19 +02:00
: param sig : Contents of an APK signing certificate .
2017-09-23 09:36:22 +02:00
: returns : shortened signing - key fingerprint .
"""
return signer_fingerprint ( sig ) [ : 7 ]
2017-09-23 09:02:50 +02:00
def signer_fingerprint ( sig ) :
""" Obtain sha256 signing-key fingerprint for pkcs7 signature.
Extracts hexadecimal sha256 signing - key fingerprint string
for a given pkcs7 signature .
: param sig : Contents of an APK signing certificate .
: returns : sha256 signing - key fingerprint .
"""
cert_encoded = get_certificate ( sig )
return hashlib . sha256 ( cert_encoded ) . hexdigest ( )
def apk_signer_fingerprint ( apk_path ) :
""" Obtain sha256 signing-key fingerprint for APK.
Extracts hexadecimal sha256 signing - key fingerprint string
for a given APK .
: param apk_path : path to APK
: returns : signature fingerprint
"""
with zipfile . ZipFile ( apk_path , ' r ' ) as apk :
certs = [ n for n in apk . namelist ( ) if CERT_PATH_REGEX . match ( n ) ]
if len ( certs ) < 1 :
logging . error ( " Found no signing certificates on %s " % apk_path )
return None
if len ( certs ) > 1 :
logging . error ( " Found multiple signing certificates on %s " % apk_path )
return None
cert = apk . read ( certs [ 0 ] )
return signer_fingerprint ( cert )
2017-09-23 09:36:22 +02:00
def apk_signer_fingerprint_short ( apk_path ) :
""" Obtain shortened sha256 signing-key fingerprint for APK.
Extracts the first 7 hexadecimal digits of sha256 signing - key fingerprint
for a given APK .
: param apk_path : path to APK
: returns : shortened signing - key fingerprint
"""
return apk_signer_fingerprint ( apk_path ) [ : 7 ]
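# Usage sketch (hypothetical APK path):
#   apk_signer_fingerprint('repo/org.example.app_42.apk')        # 64 hex chars
#   apk_signer_fingerprint_short('repo/org.example.app_42.apk')  # first 7 of them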
2017-09-06 15:54:16 +02:00
def metadata_get_sigdir ( appid , vercode = None ) :
""" Get signature directory for app """
if vercode :
return os . path . join ( ' metadata ' , appid , ' signatures ' , vercode )
else :
return os . path . join ( ' metadata ' , appid , ' signatures ' )
2017-09-14 16:46:43 +02:00
def metadata_find_developer_signature ( appid , vercode = None ) :
""" Tires to find the developer signature for given appid.
This picks the first signature file found in metadata an returns its
signature .
: returns : sha256 signing key fingerprint of the developer signing key .
None in case no signature can not be found . """
# fetch list of dirs for all versions of signatures
appversigdirs = [ ]
if vercode :
appversigdirs . append ( metadata_get_sigdir ( appid , vercode ) )
else :
appsigdir = metadata_get_sigdir ( appid )
if os . path . isdir ( appsigdir ) :
numre = re . compile ( ' [0-9]+ ' )
for ver in os . listdir ( appsigdir ) :
if numre . match ( ver ) :
appversigdir = os . path . join ( appsigdir , ver )
appversigdirs . append ( appversigdir )
for sigdir in appversigdirs :
sigs = glob . glob ( os . path . join ( sigdir , ' *.DSA ' ) ) + \
glob . glob ( os . path . join ( sigdir , ' *.EC ' ) ) + \
glob . glob ( os . path . join ( sigdir , ' *.RSA ' ) )
if len ( sigs ) > 1 :
raise FDroidException ( ' ambiguous signatures, please make sure there is only one signature in \' {} \' . (The signature has to be from the app maintainer for this version of the APK.) ' . format ( sigdir ) )
for sig in sigs :
with open ( sig , ' rb ' ) as f :
return signer_fingerprint ( f . read ( ) )
return None
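# Usage sketch (hypothetical appid/vercode): signatures are looked up under
# metadata/<appid>/signatures/<vercode>/:
#   metadata_get_sigdir('org.example.app', '42')
#   metadata_find_developer_signature('org.example.app', '42')  # fingerprint or None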
2017-09-23 09:36:22 +02:00
def metadata_find_signing_files ( appid , vercode ) :
""" Gets a list of singed manifests and signatures.
2017-06-12 01:48:29 +02:00
: param appid : app id string
: param vercode : app version code
2017-09-23 09:36:22 +02:00
: returns : a list of triplets for each signing key with the following paths :
( signature_file , signed_file , manifest_file )
"""
ret = [ ]
sigdir = metadata_get_sigdir ( appid , vercode )
sigs = glob . glob ( os . path . join ( sigdir , ' *.DSA ' ) ) + \
glob . glob ( os . path . join ( sigdir , ' *.EC ' ) ) + \
glob . glob ( os . path . join ( sigdir , ' *.RSA ' ) )
extre = re . compile ( ' ( \ .DSA| \ .EC| \ .RSA)$ ' )
for sig in sigs :
sf = extre . sub ( ' .SF ' , sig )
if os . path . isfile ( sf ) :
mf = os . path . join ( sigdir , ' MANIFEST.MF ' )
if os . path . isfile ( mf ) :
ret . append ( ( sig , sf , mf ) )
return ret
def metadata_find_developer_signing_files ( appid , vercode ) :
""" Get developer signature files for specified app from metadata.
: returns : A triplet of paths for signing files from metadata :
( signature_file , signed_file , manifest_file )
"""
allsigningfiles = metadata_find_signing_files ( appid , vercode )
if allsigningfiles and len ( allsigningfiles ) == 1 :
return allsigningfiles [ 0 ]
else :
return None
def apk_strip_signatures ( signed_apk , strip_manifest = False ) :
""" Removes signatures from APK.
: param signed_apk : path to apk file .
: param strip_manifest : when set to True , the manifest file will also
be removed from the APK .
"""
with tempfile . TemporaryDirectory ( ) as tmpdir :
tmp_apk = os . path . join ( tmpdir , ' tmp.apk ' )
2017-12-01 14:08:42 +01:00
shutil . move ( signed_apk , tmp_apk )
2017-09-23 09:36:22 +02:00
with ZipFile ( tmp_apk , ' r ' ) as in_apk :
with ZipFile ( signed_apk , ' w ' ) as out_apk :
2017-10-20 22:07:28 +02:00
for info in in_apk . infolist ( ) :
if not apk_sigfile . match ( info . filename ) :
2017-09-23 09:36:22 +02:00
if strip_manifest :
2017-10-20 22:07:28 +02:00
if info . filename != ' META-INF/MANIFEST.MF ' :
buf = in_apk . read ( info . filename )
out_apk . writestr ( info , buf )
2017-09-23 09:36:22 +02:00
else :
2017-10-20 22:07:28 +02:00
buf = in_apk . read ( info . filename )
out_apk . writestr ( info , buf )
2017-09-23 09:36:22 +02:00
def apk_implant_signatures ( apkpath , signaturefile , signedfile , manifest ) :
2017-09-20 00:58:19 +02:00
""" Implats a signature from metadata into an APK.
2017-09-23 09:36:22 +02:00
Note : this changes the supplied APK in place . So copy it first if you
need the original to be preserved .
: param apkpath : location of the apk
"""
# get list of available signature files in metadata
with tempfile . TemporaryDirectory ( ) as tmpdir :
apkwithnewsig = os . path . join ( tmpdir , ' newsig.apk ' )
with ZipFile ( apkpath , ' r ' ) as in_apk :
with ZipFile ( apkwithnewsig , ' w ' ) as out_apk :
for sig_file in [ signaturefile , signedfile , manifest ] :
2017-10-20 22:07:28 +02:00
with open ( sig_file , ' rb ' ) as fp :
buf = fp . read ( )
info = zipfile . ZipInfo ( ' META-INF/ ' + os . path . basename ( sig_file ) )
info . compress_type = zipfile . ZIP_DEFLATED
info . create_system = 0 # "Windows" aka "FAT", what Android SDK uses
out_apk . writestr ( info , buf )
for info in in_apk . infolist ( ) :
if not apk_sigfile . match ( info . filename ) :
if info . filename != ' META-INF/MANIFEST.MF ' :
buf = in_apk . read ( info . filename )
out_apk . writestr ( info , buf )
2017-09-23 09:36:22 +02:00
os . remove ( apkpath )
p = SdkToolsPopen ( [ ' zipalign ' , ' -v ' , ' 4 ' , apkwithnewsig , apkpath ] )
if p . returncode != 0 :
raise BuildException ( " Failed to align application " )
2017-09-06 15:54:16 +02:00
def apk_extract_signatures ( apkpath , outdir , manifest = True ) :
""" Extracts a signature files from APK and puts them into target directory.
: param apkpath : location of the apk
: param outdir : folder where the extracted signature files will be stored
: param manifest : set to False to skip extracting the manifest file
"""
with ZipFile ( apkpath , ' r ' ) as in_apk :
for f in in_apk . infolist ( ) :
if apk_sigfile . match ( f . filename ) or \
( manifest and f . filename == ' META-INF/MANIFEST.MF ' ) :
newpath = os . path . join ( outdir , os . path . basename ( f . filename ) )
with open ( newpath , ' wb ' ) as out_file :
out_file . write ( in_apk . read ( f . filename ) )
2017-11-30 21:10:41 +01:00
def sign_apk ( unsigned_path , signed_path , keyalias ) :
""" Sign and zipalign an unsigned APK, then save to a new file, deleting the unsigned
android - 18 ( 4.3 ) finally added support for reasonable hash
algorithms , like SHA - 256 , before then , the only options were MD5
and SHA1 : - / This aims to use SHA - 256 when the APK does not target
older Android versions , and is therefore safe to do so .
https://issuetracker.google.com/issues/36956587
https://android-review.googlesource.com/c/platform/libcore/+/44491
"""
if get_minSdkVersion_aapt ( unsigned_path ) < 18 :
signature_algorithm = [ ' -sigalg ' , ' SHA1withRSA ' , ' -digestalg ' , ' SHA1 ' ]
else :
2017-12-22 17:49:36 +01:00
signature_algorithm = [ ' -sigalg ' , ' SHA256withRSA ' , ' -digestalg ' , ' SHA-256 ' ]
2017-11-30 21:10:41 +01:00
p = FDroidPopen ( [ config [ ' jarsigner ' ] , ' -keystore ' , config [ ' keystore ' ] ,
' -storepass:env ' , ' FDROID_KEY_STORE_PASS ' ,
' -keypass:env ' , ' FDROID_KEY_PASS ' ]
+ signature_algorithm + [ unsigned_path , keyalias ] ,
envs = {
' FDROID_KEY_STORE_PASS ' : config [ ' keystorepass ' ] ,
' FDROID_KEY_PASS ' : config [ ' keypass ' ] , } )
if p . returncode != 0 :
raise BuildException ( _ ( " Failed to sign application " ) , p . output )
p = SdkToolsPopen ( [ ' zipalign ' , ' -v ' , ' 4 ' , unsigned_path , signed_path ] )
if p . returncode != 0 :
raise BuildException ( _ ( " Failed to zipalign application " ) )
os . remove ( unsigned_path )
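# Usage sketch (hypothetical paths and alias; keystore, keystorepass and
# keypass must be present in config):
#   sign_apk('unsigned/org.example.app_42.apk',
#            'repo/org.example.app_42.apk', 'repokey')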
2017-04-20 12:44:22 +02:00
def verify_apks ( signed_apk , unsigned_apk , tmp_dir ) :
2015-01-31 16:55:18 +01:00
""" Verify that two apks are the same
One of the inputs is signed , the other is unsigned . The signature metadata
is transferred from the signed to the unsigned apk , and then jarsigner is
used to verify that the signature from the signed apk is also valid for
2016-12-19 16:54:32 +01:00
the unsigned one . If the APK given as unsigned actually does have a
signature , it will be stripped out and ignored .
2017-01-09 15:10:54 +01:00
There are two SHA1 git commit IDs that fdroidserver includes in the builds
it makes : fdroidserverid and buildserverid . Originally , these were inserted
into AndroidManifest . xml , but that makes the build not reproducible . So
instead they are included as separate files in the APK ' s META-INF/ folder.
If those files exist in the signed APK , they will be part of the signature
and need to also be included in the unsigned APK for it to validate .
2015-01-31 16:58:08 +01:00
: param signed_apk : Path to a signed apk file
: param unsigned_apk : Path to an unsigned apk file expected to match it
: param tmp_dir : Path to directory for temporary files
: returns : None if the verification is successful , otherwise a string
describing what went wrong .
2015-01-31 16:55:18 +01:00
"""
2016-12-19 16:54:32 +01:00
2017-06-12 01:48:29 +02:00
if not os . path . isfile ( signed_apk ) :
return ' cannot verify: file does not exist: {} ' . format ( signed_apk )
if not os . path . isfile ( unsigned_apk ) :
return ' cannot verify: file does not exist: {} ' . format ( unsigned_apk )
with ZipFile ( signed_apk , ' r ' ) as signed :
meta_inf_files = [ ' META-INF/MANIFEST.MF ' ]
for f in signed . namelist ( ) :
if apk_sigfile . match ( f ) \
or f in [ ' META-INF/fdroidserverid ' , ' META-INF/buildserverid ' ] :
meta_inf_files . append ( f )
if len ( meta_inf_files ) < 3 :
return " Signature files missing from {0} " . format ( signed_apk )
tmp_apk = os . path . join ( tmp_dir , ' sigcp_ ' + os . path . basename ( unsigned_apk ) )
with ZipFile ( unsigned_apk , ' r ' ) as unsigned :
# only read the signature from the signed APK, everything else from unsigned
with ZipFile ( tmp_apk , ' w ' ) as tmp :
for filename in meta_inf_files :
tmp . writestr ( signed . getinfo ( filename ) , signed . read ( filename ) )
for info in unsigned . infolist ( ) :
if info . filename in meta_inf_files :
logging . warning ( ' Ignoring %s from %s ' ,
info . filename , unsigned_apk )
continue
if info . filename in tmp . namelist ( ) :
return " duplicate filename found: " + info . filename
tmp . writestr ( info , unsigned . read ( info . filename ) )
2016-12-19 16:54:32 +01:00
2017-01-09 17:35:58 +01:00
verified = verify_apk_signature ( tmp_apk )
if not verified :
logging . info ( " ...NOT verified - {0} " . format ( tmp_apk ) )
2017-04-20 12:38:50 +02:00
return compare_apks ( signed_apk , tmp_apk , tmp_dir ,
2017-04-20 12:44:22 +02:00
os . path . dirname ( unsigned_apk ) )
2016-12-19 16:54:32 +01:00
2015-01-31 16:55:18 +01:00
logging . info ( " ...successfully verified " )
2015-01-31 16:58:08 +01:00
return None
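# Usage sketch (hypothetical paths): returns None when the two APKs match,
# otherwise a string describing the problem:
#   error = verify_apks('signed/org.example.app_42.apk',
#                       'unsigned/org.example.app_42.apk', 'tmp')
#   if error:
#       logging.error(error)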
2015-01-31 16:55:18 +01:00
2016-11-15 21:55:06 +01:00
2017-09-19 15:07:19 +02:00
def verify_jar_signature ( jar ) :
""" Verifies the signature of a given JAR file.
jarsigner is very shitty : unsigned JARs pass as " verified " ! So
this has to turn on - strict then check for result 4 , since this
does not expect the signature to be from a CA - signed certificate .
: raises : VerificationException ( ) if the JAR ' s signature could not be verified
"""
2017-12-07 17:32:14 +01:00
error = _ ( ' JAR signature failed to verify: {path} ' ) . format ( path = jar )
try :
output = subprocess . check_output ( [ config [ ' jarsigner ' ] , ' -strict ' , ' -verify ' , jar ] ,
stderr = subprocess . STDOUT )
raise VerificationException ( error + ' \n ' + output . decode ( ' utf-8 ' ) )
except subprocess . CalledProcessError as e :
if e . returncode == 4 :
logging . debug ( _ ( ' JAR signature verified: {path} ' ) . format ( path = jar ) )
else :
raise VerificationException ( error + ' \n ' + e . output . decode ( ' utf-8 ' ) )
2017-09-19 15:07:19 +02:00
def verify_apk_signature ( apk , min_sdk_version = None ) :
2017-01-09 17:35:58 +01:00
""" verify the signature on an APK
Try to use apksigner whenever possible since jarsigner is very
2017-09-19 15:07:19 +02:00
shitty : unsigned APKs pass as " verified " ! Warning , this does
not work on JARs with apksigner > = 0.7 ( build - tools 26.0 .1 )
2017-09-20 17:22:56 +02:00
: returns : boolean whether the APK was verified
2017-01-09 17:35:58 +01:00
"""
if set_command_in_config ( ' apksigner ' ) :
2017-03-29 23:33:09 +02:00
args = [ config [ ' apksigner ' ] , ' verify ' ]
2017-09-19 15:07:19 +02:00
if min_sdk_version :
args + = [ ' --min-sdk-version= ' + min_sdk_version ]
2017-12-07 17:32:14 +01:00
if options . verbose :
args + = [ ' --verbose ' ]
try :
output = subprocess . check_output ( args + [ apk ] )
if options . verbose :
logging . debug ( apk + ' : ' + output . decode ( ' utf-8 ' ) )
return True
except subprocess . CalledProcessError as e :
logging . error ( ' \n ' + apk + ' : ' + e . output . decode ( ' utf-8 ' ) )
2017-01-09 17:35:58 +01:00
else :
2017-12-07 17:32:14 +01:00
if not config . get ( ' jarsigner_warning_displayed ' ) :
config [ ' jarsigner_warning_displayed ' ] = True
logging . warning ( _ ( " Using Java ' s jarsigner, not recommended for verifying APKs! Use apksigner " ) )
2017-09-19 15:07:19 +02:00
try :
verify_jar_signature ( apk )
return True
2017-12-07 17:32:14 +01:00
except Exception as e :
logging . error ( e )
2017-09-19 15:07:19 +02:00
return False
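# Usage sketch (hypothetical path):
#   if not verify_apk_signature('repo/org.example.app_42.apk'):
#       logging.error('signature verification failed')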
2017-01-09 17:35:58 +01:00
2017-06-27 09:54:35 +02:00
def verify_old_apk_signature ( apk ) :
""" verify the signature on an archived APK, supporting deprecated algorithms
F - Droid aims to keep every single binary that it ever published . Therefore ,
it needs to be able to verify APK signatures that include deprecated / removed
algorithms . For example , jarsigner treats an MD5 signature as unsigned .
jarsigner passes unsigned APKs as " verified " ! So this has to turn
on - strict then check for result 4.
2017-09-20 17:22:56 +02:00
: returns : boolean whether the APK was verified
2017-06-27 09:54:35 +02:00
"""
_java_security = os . path . join ( os . getcwd ( ) , ' .java.security ' )
with open ( _java_security , ' w ' ) as fp :
fp . write ( ' jdk.jar.disabledAlgorithms=MD2, RSA keySize < 1024 ' )
2017-12-07 17:32:14 +01:00
try :
cmd = [
config [ ' jarsigner ' ] ,
' -J-Djava.security.properties= ' + _java_security ,
' -strict ' , ' -verify ' , apk
]
output = subprocess . check_output ( cmd , stderr = subprocess . STDOUT )
except subprocess . CalledProcessError as e :
if e . returncode != 4 :
output = e . output
else :
logging . debug ( _ ( ' JAR signature verified: {path} ' ) . format ( path = apk ) )
return True
logging . error ( _ ( ' Old APK signature failed to verify: {path} ' ) . format ( path = apk )
+ ' \n ' + output . decode ( ' utf-8 ' ) )
return False
2017-06-27 09:54:35 +02:00
2015-10-25 19:10:49 +01:00
apk_badchars = re . compile ( ''' [/ :; ' " ] ''' )
2015-01-31 16:55:18 +01:00
2017-04-20 12:44:22 +02:00
def compare_apks ( apk1 , apk2 , tmp_dir , log_dir = None ) :
2014-10-24 22:04:15 +02:00
""" Compare two apks
Returns None if the apk content is the same ( apart from the signing key ) ,
otherwise a string describing what ' s different, or what went wrong when
trying to do the comparison .
"""
2017-01-09 17:35:58 +01:00
if not log_dir :
log_dir = tmp_dir
2016-12-14 15:54:01 +01:00
absapk1 = os . path . abspath ( apk1 )
absapk2 = os . path . abspath ( apk2 )
2017-01-09 15:21:05 +01:00
if set_command_in_config ( ' diffoscope ' ) :
2017-01-09 17:35:58 +01:00
logfilename = os . path . join ( log_dir , os . path . basename ( absapk1 ) )
htmlfile = logfilename + ' .diffoscope.html '
textfile = logfilename + ' .diffoscope.txt '
2016-12-14 15:54:01 +01:00
if subprocess . call ( [ config [ ' diffoscope ' ] ,
' --max-report-size ' , ' 12345678 ' , ' --max-diff-block-lines ' , ' 100 ' ,
' --html ' , htmlfile , ' --text ' , textfile ,
absapk1 , absapk2 ] ) != 0 :
return ( " Failed to unpack " + apk1 )
2015-10-25 19:10:49 +01:00
apk1dir = os . path . join ( tmp_dir , apk_badchars . sub ( ' _ ' , apk1 [ 0 : - 4 ] ) ) # trim .apk
apk2dir = os . path . join ( tmp_dir , apk_badchars . sub ( ' _ ' , apk2 [ 0 : - 4 ] ) ) # trim .apk
2015-01-07 19:55:26 +01:00
for d in [ apk1dir , apk2dir ] :
2014-10-24 22:04:15 +02:00
if os . path . exists ( d ) :
shutil . rmtree ( d )
os . mkdir ( d )
2015-01-07 19:56:55 +01:00
os . mkdir ( os . path . join ( d , ' jar-xf ' ) )
2014-10-24 22:04:15 +02:00
if subprocess . call ( [ ' jar ' , ' xf ' ,
os . path . abspath ( apk1 ) ] ,
2015-01-07 19:56:55 +01:00
cwd = os . path . join ( apk1dir , ' jar-xf ' ) ) != 0 :
2014-10-24 22:04:15 +02:00
return ( " Failed to unpack " + apk1 )
if subprocess . call ( [ ' jar ' , ' xf ' ,
os . path . abspath ( apk2 ) ] ,
2015-01-07 19:56:55 +01:00
cwd = os . path . join ( apk2dir , ' jar-xf ' ) ) != 0 :
2014-10-24 22:04:15 +02:00
return ( " Failed to unpack " + apk2 )
2017-01-09 15:21:05 +01:00
if set_command_in_config ( ' apktool ' ) :
2015-01-07 20:08:15 +01:00
if subprocess . call ( [ config [ ' apktool ' ] , ' d ' , os . path . abspath ( apk1 ) , ' --output ' , ' apktool ' ] ,
cwd = apk1dir ) != 0 :
return ( " Failed to unpack " + apk1 )
if subprocess . call ( [ config [ ' apktool ' ] , ' d ' , os . path . abspath ( apk2 ) , ' --output ' , ' apktool ' ] ,
cwd = apk2dir ) != 0 :
return ( " Failed to unpack " + apk2 )
2015-01-07 19:55:26 +01:00
p = FDroidPopen ( [ ' diff ' , ' -r ' , apk1dir , apk2dir ] , output = False )
2014-10-24 22:04:15 +02:00
lines = p . output . splitlines ( )
if len ( lines ) != 1 or ' META-INF ' not in lines [ 0 ] :
2017-04-20 12:44:22 +02:00
if set_command_in_config ( ' meld ' ) :
p = FDroidPopen ( [ config [ ' meld ' ] , apk1dir , apk2dir ] , output = False )
2014-10-24 22:04:15 +02:00
return ( " Unexpected diff output - " + p . output )
2015-01-12 10:51:54 +01:00
# since everything verifies, delete the comparison to keep cruft down
shutil . rmtree ( apk1dir )
shutil . rmtree ( apk2dir )
2014-10-24 22:04:15 +02:00
# If we get here, it seems like they're the same!
return None
2015-01-07 20:08:15 +01:00
2017-01-09 15:21:05 +01:00
def set_command_in_config ( command ) :
''' Try to find specified command in the path, if it hasn ' t been
manually set in config . py . If found , it is added to the config
dict . The return value says whether the command is available .
'''
if command in config :
return True
else :
tmp = find_command ( command )
if tmp is not None :
config [ command ] = tmp
return True
return False
2015-01-07 20:08:15 +01:00
def find_command ( command ) :
''' find the full path of a command, or None if it can ' t be found in the PATH '''
def is_exe ( fpath ) :
return os . path . isfile ( fpath ) and os . access ( fpath , os . X_OK )
fpath , fname = os . path . split ( command )
if fpath :
if is_exe ( command ) :
return command
else :
for path in os . environ [ " PATH " ] . split ( os . pathsep ) :
path = path . strip ( ' " ' )
exe_file = os . path . join ( path , command )
if is_exe ( exe_file ) :
return exe_file
return None
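# Usage sketch: find_command() probes PATH directly, while
# set_command_in_config() caches the result in the config dict
# (command name is just an example):
#   if set_command_in_config('apksigner'):
#       logging.debug('using ' + config['apksigner'])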
2015-04-21 01:14:58 +02:00
def genpassword ( ) :
''' generate a random password for when generating keys '''
h = hashlib . sha256 ( )
h . update ( os . urandom ( 16 ) ) # salt
2016-01-04 17:32:58 +01:00
h . update ( socket . getfqdn ( ) . encode ( ' utf-8 ' ) )
passwd = base64 . b64encode ( h . digest ( ) ) . strip ( )
return passwd . decode ( ' utf-8 ' )
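# Usage sketch: returns a random base64 string, e.g. for generating keystore
# and key passwords when setting up a new repo:
#   keystorepass = genpassword()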
2015-04-21 01:14:58 +02:00
2015-04-21 02:27:38 +02:00
def genkeystore ( localconfig ) :
2017-03-22 19:44:35 +01:00
"""
Generate a new key with the password provided in localconfig and add it to a new keystore
: return : hexed public key , public key fingerprint
"""
2015-04-21 02:27:38 +02:00
logging . info ( ' Generating a new key in " ' + localconfig [ ' keystore ' ] + ' " ... ' )
keystoredir = os . path . dirname ( localconfig [ ' keystore ' ] )
if keystoredir is None or keystoredir == ' ' :
keystoredir = os . path . join ( os . getcwd ( ) , keystoredir )
if not os . path . exists ( keystoredir ) :
os . makedirs ( keystoredir , mode = 0o700 )
2017-04-11 21:34:49 +02:00
env_vars = {
' FDROID_KEY_STORE_PASS ' : localconfig [ ' keystorepass ' ] ,
' FDROID_KEY_PASS ' : localconfig [ ' keypass ' ] ,
}
2016-02-11 20:43:55 +01:00
p = FDroidPopen ( [ config [ ' keytool ' ] , ' -genkey ' ,
2015-04-21 02:27:38 +02:00
' -keystore ' , localconfig [ ' keystore ' ] ,
' -alias ' , localconfig [ ' repo_keyalias ' ] ,
2015-04-21 01:14:58 +02:00
' -keyalg ' , ' RSA ' , ' -keysize ' , ' 4096 ' ,
' -sigalg ' , ' SHA256withRSA ' ,
' -validity ' , ' 10000 ' ,
2017-04-11 21:34:49 +02:00
' -storepass:env ' , ' FDROID_KEY_STORE_PASS ' ,
' -keypass:env ' , ' FDROID_KEY_PASS ' ,
' -dname ' , localconfig [ ' keydname ' ] ] , envs = env_vars )
2015-04-21 01:14:58 +02:00
if p . returncode != 0 :
raise BuildException ( " Failed to generate key " , p . output )
2015-07-31 15:54:50 +02:00
os . chmod ( localconfig [ ' keystore ' ] , 0o0600 )
2017-03-22 19:44:35 +01:00
if not options . quiet :
# now show the lovely key that was just generated
p = FDroidPopen ( [ config [ ' keytool ' ] , ' -list ' , ' -v ' ,
' -keystore ' , localconfig [ ' keystore ' ] ,
' -alias ' , localconfig [ ' repo_keyalias ' ] ,
2017-04-11 21:34:49 +02:00
' -storepass:env ' , ' FDROID_KEY_STORE_PASS ' ] , envs = env_vars )
2017-03-22 19:44:35 +01:00
logging . info ( p . output . strip ( ) + ' \n \n ' )
# get the public key
p = FDroidPopenBytes ( [ config [ ' keytool ' ] , ' -exportcert ' ,
' -keystore ' , localconfig [ ' keystore ' ] ,
' -alias ' , localconfig [ ' repo_keyalias ' ] ,
2017-04-11 21:34:49 +02:00
' -storepass:env ' , ' FDROID_KEY_STORE_PASS ' ]
2017-03-22 19:44:35 +01:00
+ config [ ' smartcardoptions ' ] ,
2017-04-11 21:34:49 +02:00
envs = env_vars , output = False , stderr_to_stdout = False )
2017-03-22 19:44:35 +01:00
if p . returncode != 0 or len ( p . output ) < 20 :
raise BuildException ( " Failed to get public key " , p . output )
pubkey = p . output
fingerprint = get_cert_fingerprint ( pubkey )
return hexlify ( pubkey ) , fingerprint
def get_cert_fingerprint ( pubkey ) :
"""
Generate a certificate fingerprint the same way keytool does it
( but with slightly different formatting )
"""
digest = hashlib . sha256 ( pubkey ) . digest ( )
ret = [ ' ' . join ( " %02X " % b for b in bytearray ( digest ) ) ]
return " " . join ( ret )
2015-04-21 02:27:38 +02:00
2017-04-03 14:23:06 +02:00
def get_certificate ( certificate_file ) :
"""
Extracts a certificate from the given file .
: param certificate_file : file bytes ( as string ) representing the certificate
: return : A binary representation of the certificate ' s public key, or None in case of error
"""
content = decoder . decode ( certificate_file , asn1Spec = rfc2315 . ContentInfo ( ) ) [ 0 ]
if content . getComponentByName ( ' contentType ' ) != rfc2315 . signedData :
return None
content = decoder . decode ( content . getComponentByName ( ' content ' ) ,
asn1Spec = rfc2315 . SignedData ( ) ) [ 0 ]
try :
certificates = content . getComponentByName ( ' certificates ' )
cert = certificates [ 0 ] . getComponentByName ( ' certificate ' )
except PyAsn1Error :
logging . error ( " Certificates not found. " )
return None
return encoder . encode ( cert )
2017-09-19 16:03:11 +02:00
def load_stats_fdroid_signing_key_fingerprints ( ) :
""" Load list of signing-key fingerprints stored by fdroid publish from file.
: returns : dict of dictionaries containing the signing - key fingerprints .
"""
jar_file = os . path . join ( ' stats ' , ' publishsigkeys.jar ' )
if not os . path . isfile ( jar_file ) :
return { }
cmd = [ config [ ' jarsigner ' ] , ' -strict ' , ' -verify ' , jar_file ]
p = FDroidPopen ( cmd , output = False )
if p . returncode != 4 :
raise FDroidException ( " Signature validation of ' {} ' failed! "
" Please run publish again to rebuild this file. " . format ( jar_file ) )
jar_sigkey = apk_signer_fingerprint ( jar_file )
repo_key_sig = config . get ( ' repo_key_sha256 ' )
if repo_key_sig :
if jar_sigkey != repo_key_sig :
raise FDroidException ( " Signature key fingerprint of file ' {} ' does not match repo_key_sha256 in config.py (found fingerprint: ' {} ' ) " . format ( jar_file , jar_sigkey ) )
else :
logging . warning ( " repo_key_sha256 not in config.py, setting it to the signature key fingerprint of ' {} ' " . format ( jar_file ) )
config [ ' repo_key_sha256 ' ] = jar_sigkey
write_to_config ( config , ' repo_key_sha256 ' )
with zipfile . ZipFile ( jar_file , ' r ' ) as f :
return json . loads ( str ( f . read ( ' publishsigkeys.json ' ) , ' utf-8 ' ) )
2017-04-02 12:08:01 +02:00
def write_to_config ( thisconfig , key , value = None , config_file = None ) :
''' write a key/value to the local config.py
NOTE : only supports writing string variables .
: param thisconfig : config dictionary
: param key : variable name in config . py to be overwritten / added
: param value : optional value to be written , instead of fetched
from ' thisconfig ' dictionary .
'''
2015-04-21 02:27:38 +02:00
if value is None :
origkey = key + ' _orig '
value = thisconfig [ origkey ] if origkey in thisconfig else thisconfig [ key ]
2017-04-02 12:08:01 +02:00
cfg = config_file if config_file else ' config.py '
2017-09-14 02:13:49 +02:00
# load config file, create one if it doesn't exist
if not os . path . exists ( cfg ) :
# create the blank config.py using a cross-platform technique, since
# os.mknod() fails with PermissionError on some platforms (e.g. OSX)
2017-09-17 21:54:21 +02:00
open ( cfg , ' a ' ) . close ( )
2017-09-14 02:13:49 +02:00
logging . info ( " Creating empty " + cfg )
2017-04-02 12:08:01 +02:00
with open ( cfg , ' r ' , encoding = " utf-8 " ) as f :
lines = f . readlines ( )
2015-04-21 04:27:31 +02:00
# make sure the file ends with a newline
2017-04-02 12:08:01 +02:00
if len ( lines ) > 0 :
if not lines [ - 1 ] . endswith ( ' \n ' ) :
lines [ - 1 ] + = ' \n '
# regex for finding and replacing python string variable
# definitions/initializations
pattern = re . compile ( ' ^[ \ s#]* ' + key + ' \ s*= \ s* " [^ " ]* " ' )
repl = key + ' = " ' + value + ' " '
pattern2 = re . compile ( ' ^[ \ s#]* ' + key + " \ s*= \ s* ' [^ ' ]* ' " )
repl2 = key + " = ' " + value + " ' "
# If we replaced this line once, we make sure won't be a
# second instance of this line for this key in the document.
didRepl = False
# edit config file
with open ( cfg , ' w ' , encoding = " utf-8 " ) as f :
for line in lines :
if pattern . match ( line ) or pattern2 . match ( line ) :
if not didRepl :
line = pattern . sub ( repl , line )
line = pattern2 . sub ( repl2 , line )
f . write ( line )
didRepl = True
else :
f . write ( line )
if not didRepl :
f . write ( ' \n ' )
f . write ( repl )
f . write ( ' \n ' )
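# Usage sketch (hypothetical key/value): rewrites an existing assignment in
# config.py or appends a new one, leaving everything else untouched:
#   write_to_config(config, 'repo_keyalias', 'repokey')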
2015-06-03 14:35:50 +02:00
2015-06-03 15:42:45 +02:00
def parse_xml ( path ) :
2015-06-03 14:35:50 +02:00
return XMLElementTree . parse ( path ) . getroot ( )
2015-06-03 19:40:43 +02:00
def string_is_integer ( string ) :
try :
int ( string )
return True
except ValueError :
return False
2015-07-24 06:42:21 +02:00
2017-11-20 15:54:00 +01:00
def local_rsync ( options , fromdir , todir ) :
''' Rsync method for local to local copying of things
This is an rsync wrapper with all the settings for safe use within
the various fdroidserver use cases . This uses stricter rsync
checking on all files since people using offline mode are already
prioritizing security above ease and speed .
'''
rsyncargs = [ ' rsync ' , ' --recursive ' , ' --safe-links ' , ' --times ' , ' --perms ' ,
' --one-file-system ' , ' --delete ' , ' --chmod=Da+rx,Fa-x,a+r,u+w ' ]
if not options . no_checksum :
rsyncargs . append ( ' --checksum ' )
if options . verbose :
rsyncargs + = [ ' --verbose ' ]
if options . quiet :
rsyncargs + = [ ' --quiet ' ]
logging . debug ( ' ' . join ( rsyncargs + [ fromdir , todir ] ) )
if subprocess . call ( rsyncargs + [ fromdir , todir ] ) != 0 :
raise FDroidException ( )
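# Usage sketch (hypothetical dirs; options must provide no_checksum, verbose
# and quiet attributes):
#   local_rsync(options, 'repo/', '/mnt/fdroid/repo/')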
2015-08-20 17:40:18 +02:00
def get_per_app_repos ( ) :
''' per-app repos are dirs named with the packageName of a single app '''
# Android packageNames are Java packages, they may contain uppercase or
# lowercase letters ('A' through 'Z'), numbers, and underscores
# ('_'). However, individual package name parts may only start with
# letters. https://developer.android.com/guide/topics/manifest/manifest-element.html#package
p = re . compile ( ' ^([a-zA-Z][a-zA-Z0-9_]*( \\ .[a-zA-Z][a-zA-Z0-9_]*)*)?$ ' )
repos = [ ]
for root , dirs , files in os . walk ( os . getcwd ( ) ) :
for d in dirs :
2015-09-16 22:44:41 +02:00
print ( ' checking ' , root , ' for ' , d )
2015-08-20 17:40:18 +02:00
if d in ( ' archive ' , ' metadata ' , ' repo ' , ' srclibs ' , ' tmp ' ) :
# standard parts of an fdroid repo, so never packageNames
continue
elif p . match ( d ) \
and os . path . exists ( os . path . join ( d , ' fdroid ' , ' repo ' , ' index.jar ' ) ) :
repos . append ( d )
break
return repos
2016-11-03 10:26:38 +01:00
def is_repo_file ( filename ) :
''' Whether the file in a repo is a build product to be delivered to users '''
2017-04-03 20:24:00 +02:00
if isinstance ( filename , str ) :
filename = filename . encode ( ' utf-8 ' , errors = " surrogateescape " )
2016-11-03 10:26:38 +01:00
return os . path . isfile ( filename ) \
2017-04-03 20:24:00 +02:00
and not filename . endswith ( b ' .asc ' ) \
and not filename . endswith ( b ' .sig ' ) \
2016-11-03 10:26:38 +01:00
and os . path . basename ( filename ) not in [
2017-04-03 20:24:00 +02:00
b ' index.jar ' ,
b ' index_unsigned.jar ' ,
b ' index.xml ' ,
b ' index.html ' ,
b ' index-v1.jar ' ,
b ' index-v1.json ' ,
b ' categories.txt ' ,
2016-11-03 10:26:38 +01:00
]
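# Usage sketch (hypothetical paths):
#   is_repo_file('repo/org.example.app_42.apk')  # True (if the file exists)
#   is_repo_file('repo/index-v1.jar')            # False, index files are excluded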
2017-11-17 15:48:45 +01:00
def get_examples_dir ( ) :
''' Return the dir where the fdroidserver example files are available '''
examplesdir = None
tmp = os . path . dirname ( sys . argv [ 0 ] )
if os . path . basename ( tmp ) == ' bin ' :
egg_links = glob . glob ( os . path . join ( tmp , ' .. ' ,
' local/lib/python3.*/site-packages/fdroidserver.egg-link ' ) )
if egg_links :
# installed from local git repo
examplesdir = os . path . join ( open ( egg_links [ 0 ] ) . readline ( ) . rstrip ( ) , ' examples ' )
else :
# try .egg layout
examplesdir = os . path . dirname ( os . path . dirname ( __file__ ) ) + ' /share/doc/fdroidserver/examples '
if not os . path . exists ( examplesdir ) : # use UNIX layout
examplesdir = os . path . dirname ( tmp ) + ' /share/doc/fdroidserver/examples '
else :
# we're running straight out of the git repo
prefix = os . path . normpath ( os . path . join ( os . path . dirname ( __file__ ) , ' .. ' ) )
examplesdir = prefix + ' /examples '
return examplesdir
2018-01-17 14:39:54 +01:00
2018-01-17 15:18:05 +01:00
def get_wiki_timestamp ( timestamp = None ) :
2018-01-17 14:39:54 +01:00
""" Return current time in the standard format for posting to the wiki """
2018-01-17 15:18:05 +01:00
if timestamp is None :
timestamp = time . gmtime ( )
return time . strftime ( " % Y- % m- %d % H: % M: % SZ " , timestamp )
2018-01-17 17:13:12 +01:00
def get_android_tools_versions ( ndk_path = None ) :
''' get a list of the versions of all installed Android SDK/NDK components '''
global config
sdk_path = config [ ' sdk_path ' ]
if sdk_path [ - 1 ] != ' / ' :
sdk_path + = ' / '
components = [ ]
if ndk_path :
ndk_release_txt = os . path . join ( ndk_path , ' RELEASE.TXT ' )
if os . path . isfile ( ndk_release_txt ) :
with open ( ndk_release_txt , ' r ' ) as fp :
components . append ( ( os . path . basename ( ndk_path ) , fp . read ( ) [ : - 1 ] ) )
pattern = re . compile ( ' ^Pkg.Revision=(.+) ' , re . MULTILINE )
for root , dirs , files in os . walk ( sdk_path ) :
if ' source.properties ' in files :
source_properties = os . path . join ( root , ' source.properties ' )
with open ( source_properties , ' r ' ) as fp :
m = pattern . search ( fp . read ( ) )
if m :
components . append ( ( root [ len ( sdk_path ) : ] , m . group ( 1 ) ) )
return components
def get_android_tools_version_log ( ndk_path = None ) :
''' get a list of the versions of all installed Android SDK/NDK components '''
log = ' == Installed Android Tools == \n \n '
components = get_android_tools_versions ( ndk_path )
for name , version in sorted ( components ) :
log + = ' * ' + name + ' ( ' + version + ' ) \n '
return log
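# Usage sketch (hypothetical NDK path): produces wiki-style text listing each
# installed SDK/NDK component and its Pkg.Revision, e.g. for build logs:
#   log = get_android_tools_version_log('/opt/android-ndk-r10e')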