import socket
import base64
import zipfile
+import tempfile
+import json
import xml.etree.ElementTree as XMLElementTree
from binascii import hexlify
from queue import Queue
from zipfile import ZipFile
+from pyasn1.codec.der import decoder, encoder
+from pyasn1_modules import rfc2315
+from pyasn1.error import PyAsn1Error
+
+from distutils.util import strtobool
+
import fdroidserver.metadata
+from fdroidserver import _
+from fdroidserver.exception import FDroidException, VCSException, BuildException, VerificationException
from .asynchronousfilereader import AsynchronousFileReader
+# A signature block file with a .DSA, .RSA, or .EC extension
+CERT_PATH_REGEX = re.compile(r'^META-INF/.*\.(DSA|EC|RSA)$')
+APK_NAME_REGEX = re.compile(r'^([a-zA-Z][\w.]*)_(-?[0-9]+)_?([0-9a-f]{7})?\.apk')
+STANDARD_FILE_NAME_REGEX = re.compile(r'^(\w[\w.]*)_(-?[0-9]+)\.\w+')
+
XMLElementTree.register_namespace('android', 'http://schemas.android.com/apk/res/android')
config = None
'r11c': None,
'r12b': "$ANDROID_NDK",
'r13b': None,
- 'r14': None,
+ 'r14b': None,
+ 'r15c': None,
},
'qt_sdk_path': None,
'build_tools': "25.0.2",
'gradle': 'gradle',
'accepted_formats': ['txt', 'yml'],
'sync_from_local_copy_dir': False,
+ 'allow_disabled_algorithms': False,
'per_app_repos': False,
'make_current_version_link': True,
'current_version_name_source': 'Name',
'keystore': 'keystore.jks',
'smartcardoptions': [],
'char_limits': {
- 'Summary': 80,
- 'Description': 4000,
+ 'author': 256,
+ 'name': 30,
+ 'summary': 80,
+ 'description': 4000,
+ 'video': 256,
+ 'whatsNew': 500,
},
'keyaliases': {},
'repo_url': "https://MyFirstFDroidRepo.org/fdroid/repo",
def setup_global_opts(parser):
    """Attach the verbosity flags shared by every fdroid subcommand.

    :param parser: an argparse.ArgumentParser to add the options to
    """
    verbosity_flags = (
        ("-v", "--verbose", _("Spew out even more information than normal")),
        ("-q", "--quiet", _("Restrict output to warnings and errors")),
    )
    for short_opt, long_opt, help_text in verbosity_flags:
        parser.add_argument(short_opt, long_opt, action="store_true",
                            default=False, help=help_text)
def fill_config_defaults(thisconfig):
config = {}
if os.path.isfile(config_file):
- logging.debug("Reading %s" % config_file)
+ logging.debug(_("Reading '{config_file}'").format(config_file=config_file))
with io.open(config_file, "rb") as f:
code = compile(f.read(), config_file, 'exec')
exec(code, None, config)
- elif len(get_local_metadata_files()) == 0:
- logging.critical("Missing config file - is this a repo directory?")
- sys.exit(2)
+ else:
+ logging.warning(_("No 'config.py' found, using defaults."))
for k in ('mirrors', 'install_list', 'uninstall_list', 'serverwebroot', 'servergitroot'):
if k in config:
if not type(config[k]) in (str, list, tuple):
- logging.warn('"' + k + '" will be in random order!'
- + ' Use () or [] brackets if order is important!')
+ logging.warning(
+ _("'{field}' will be in random order! Use () or [] brackets if order is important!")
+ .format(field=k))
# smartcardoptions must be a list since its command line args for Popen
if 'smartcardoptions' in config:
if any(k in config for k in ["keystore", "keystorepass", "keypass"]):
st = os.stat(config_file)
if st.st_mode & stat.S_IRWXG or st.st_mode & stat.S_IRWXO:
- logging.warn("unsafe permissions on {0} (should be 0600)!".format(config_file))
+ logging.warning(_("unsafe permissions on '{config_file}' (should be 0600)!")
+ .format(config_file=config_file))
fill_config_defaults(config)
- for k in ["keystorepass", "keypass"]:
- if k in config:
- write_password_file(k)
-
for k in ["repo_description", "archive_description"]:
if k in config:
config[k] = clean_description(config[k])
elif all(isinstance(item, str) for item in config['serverwebroot']):
roots = config['serverwebroot']
else:
- raise TypeError('only accepts strings, lists, and tuples')
+ raise TypeError(_('only accepts strings, lists, and tuples'))
rootlist = []
for rootstr in roots:
# since this is used with rsync, where trailing slashes have
elif all(isinstance(item, str) for item in config['servergitmirrors']):
roots = config['servergitmirrors']
else:
- raise TypeError('only accepts strings, lists, and tuples')
+ raise TypeError(_('only accepts strings, lists, and tuples'))
config['servergitmirrors'] = roots
return config
'''Check whether the version of aapt is new enough'''
output = subprocess.check_output([aapt, 'version'], universal_newlines=True)
if output is None or output == '':
- logging.error(aapt + ' failed to execute!')
+ logging.error(_("'{path}' failed to execute!").format(path=aapt))
else:
m = re.match(r'.*v([0-9]+)\.([0-9]+)[.-]?([0-9.-]*)', output)
if m:
bugfix = m.group(3)
# the Debian package has the version string like "v0.2-23.0.2"
if '.' not in bugfix and LooseVersion('.'.join((major, minor, bugfix))) < LooseVersion('0.2.2166767'):
- logging.warning(aapt + ' is too old, fdroid requires build-tools-23.0.0 or newer!')
+ logging.warning(_("'{aapt}' is too old, fdroid requires build-tools-23.0.0 or newer!")
+ .format(aapt=aapt))
else:
- logging.warning('Unknown version of aapt, might cause problems: ' + output)
+ logging.warning(_('Unknown version of aapt, might cause problems: ') + output)
def test_sdk_exists(thisconfig):
test_aapt_version(thisconfig['aapt'])
return True
else:
- logging.error("'sdk_path' not set in config.py!")
+ logging.error(_("'sdk_path' not set in 'config.py'!"))
return False
if thisconfig['sdk_path'] == default_config['sdk_path']:
- logging.error('No Android SDK found!')
- logging.error('You can use ANDROID_HOME to set the path to your SDK, i.e.:')
+ logging.error(_('No Android SDK found!'))
+ logging.error(_('You can use ANDROID_HOME to set the path to your SDK, i.e.:'))
logging.error('\texport ANDROID_HOME=/opt/android-sdk')
return False
if not os.path.exists(thisconfig['sdk_path']):
- logging.critical('Android SDK path "' + thisconfig['sdk_path'] + '" does not exist!')
+ logging.critical(_("Android SDK path '{path}' does not exist!")
+ .format(path=thisconfig['sdk_path']))
return False
if not os.path.isdir(thisconfig['sdk_path']):
- logging.critical('Android SDK path "' + thisconfig['sdk_path'] + '" is not a directory!')
+ logging.critical(_("Android SDK path '{path}' is not a directory!")
+ .format(path=thisconfig['sdk_path']))
return False
for d in ['build-tools', 'platform-tools', 'tools']:
if not os.path.isdir(os.path.join(thisconfig['sdk_path'], d)):
- logging.critical('Android SDK path "%s" does not contain "%s/"!' % (
- thisconfig['sdk_path'], d))
+ logging.critical(_("Android SDK '{path}' does not have '{dirname}' installed!")
+ .format(path=thisconfig['sdk_path'], dirname=d))
return False
return True
def ensure_build_tools_exists(thisconfig):
    """Verify that the configured Android build-tools version is installed.

    :param thisconfig: config dict providing 'sdk_path' and 'build_tools'
    :raises FDroidException: when the SDK itself or the requested
                             build-tools version directory is missing
    """
    if not test_sdk_exists(thisconfig):
        raise FDroidException(_("Android SDK not found!"))
    versioned_build_tools = os.path.join(
        thisconfig['sdk_path'], 'build-tools', thisconfig['build_tools'])
    if not os.path.isdir(versioned_build_tools):
        raise FDroidException(
            _("Android Build Tools path '{path}' does not exist!")
            .format(path=versioned_build_tools))
def get_local_metadata_files():
def read_pkg_args(args, allow_vercodes=False):
"""
- Given the arguments in the form of multiple appid:[vc] strings, this returns
- a dictionary with the set of vercodes specified for each package.
+ :param args: arguments in the form of multiple appid:[vc] strings
+ :returns: a dictionary with the set of vercodes specified for each package
"""
vercodes = {}
if len(apps) != len(vercodes):
for p in vercodes:
if p not in allapps:
- logging.critical("No such package: %s" % p)
- raise FDroidException("Found invalid app ids in arguments")
+ logging.critical(_("No such package: %s") % p)
+ raise FDroidException(_("Found invalid appids in arguments"))
if not apps:
- raise FDroidException("No packages specified")
+ raise FDroidException(_("No packages specified"))
error = False
for appid, app in apps.items():
allvcs = [b.versionCode for b in app.builds]
for v in vercodes[appid]:
if v not in allvcs:
- logging.critical("No such vercode %s for app %s" % (v, appid))
+ logging.critical(_("No such versionCode {versionCode} for app {appid}")
+ .format(versionCode=v, appid=appid))
if error:
- raise FDroidException("Found invalid vercodes for some apps")
+ raise FDroidException(_("Found invalid versionCodes for some apps"))
return apps
try:
result = (m.group(1), m.group(2))
except AttributeError:
- raise FDroidException("Invalid name for published file: %s" % filename)
+ raise FDroidException(_("Invalid name for published file: %s") % filename)
return result
# Raw strings: '\.' in a plain string literal is an invalid escape sequence
# (DeprecationWarning since Python 3.6).
apk_release_filename = re.compile(r'(?P<appid>[a-zA-Z0-9_\.]+)_(?P<vercode>[0-9]+)\.apk')
apk_release_filename_with_sigfp = re.compile(r'(?P<appid>[a-zA-Z0-9_\.]+)_(?P<vercode>[0-9]+)_(?P<sigfp>[0-9a-f]{7})\.apk')


def apk_parse_release_filename(apkname):
    """Parses the name of an APK file according to F-Droid's APK naming
    scheme and returns the tokens.

    WARNING: Returned values don't necessarily represent the APK's actual
    properties, they are just parsed from the file name.

    :returns: A triplet containing (appid, versionCode, signer), where appid
        should be the package name, versionCode should be the string
        representation of the APK's version and signer should be the first
        7 hex digits of the sha256 signing key fingerprint which was used
        to sign this APK.  Each element is None if it could not be parsed.
    """
    # Try the more specific pattern (with signer fingerprint) first, since
    # the plain pattern would not match such names anyway.
    m = apk_release_filename_with_sigfp.match(apkname)
    if m:
        return m.group('appid'), m.group('vercode'), m.group('sigfp')
    m = apk_release_filename.match(apkname)
    if m:
        return m.group('appid'), m.group('vercode'), None
    return None, None, None
+
+
def get_release_filename(app, build):
    """Return the canonical published filename for a build's artifact.

    The extension comes from build.output when set, otherwise '.apk'.
    """
    base = "%s_%s" % (app.id, build.versionCode)
    if build.output:
        return base + "." + get_file_extension(build.output)
    return base + ".apk"
def get_toolsversion_logname(app, build):
    """Return the log filename recording toolchain versions for a build."""
    return str(app.id) + '_' + str(build.versionCode) + '_toolsversion.log'
+
+
def getsrcname(app, build):
    """Return the filename for a build's source tarball."""
    return str(app.id) + '_' + str(build.versionCode) + '_src.tar.gz'
raise VCSException("Authentication is not supported for git-svn")
self.username, remote = remote.split('@')
if ':' not in self.username:
- raise VCSException("Password required with username")
+ raise VCSException(_("Password required with username"))
self.username, self.password = self.username.split(':')
self.remote = remote
    def repotype(self):
        """Return the VCS type identifier (e.g. 'git'); None in this base class."""
        return None
- # Take the local repository to a clean version of the given revision, which
- # is specificed in the VCS's native format. Beforehand, the repository can
- # be dirty, or even non-existent. If the repository does already exist
- # locally, it will be updated from the origin, but only once in the
- # lifetime of the vcs object.
- # None is acceptable for 'rev' if you know you are cloning a clean copy of
- # the repo - otherwise it must specify a valid revision.
def gotorevision(self, rev, refresh=True):
+ """Take the local repository to a clean version of the given
+ revision, which is specificed in the VCS's native
+ format. Beforehand, the repository can be dirty, or even
+ non-existent. If the repository does already exist locally, it
+ will be updated from the origin, but only once in the lifetime
+ of the vcs object. None is acceptable for 'rev' if you know
+ you are cloning a clean copy of the repo - otherwise it must
+ specify a valid revision.
+ """
if self.clone_failed:
- raise VCSException("Downloading the repository already failed once, not trying again.")
+ raise VCSException(_("Downloading the repository already failed once, not trying again."))
# The .fdroidvcs-id file for a repo tells us what VCS type
# and remote that directory was created from, allowing us to drop it
if exc is not None:
raise exc
- # Derived classes need to implement this. It's called once basic checking
- # has been performend.
- def gotorevisionx(self, rev):
    def gotorevisionx(self, rev):  # pylint: disable=unused-argument
        """Derived classes need to implement this.

        It's called once basic checking has been performed.

        :param rev: the revision to check out, in the VCS's native format
        :raises VCSException: always, in this base class
        """
        raise VCSException("This VCS type doesn't define gotorevisionx")
# Initialise and update submodules
rtags.append(tag)
return rtags
- # Get a list of all the known tags, sorted from newest to oldest
    def latesttags(self):
        """Get a list of all the known tags, sorted from newest to oldest

        :raises VCSException: always in this base class; VCS types that
            support tags are expected to override this.
        """
        raise VCSException('latesttags not supported for this vcs type')
- # Get current commit reference (hash, revision, etc)
    def getref(self):
        """Get current commit reference (hash, revision, etc)

        :raises VCSException: always in this base class; VCS types that
            support refs are expected to override this.
        """
        raise VCSException('getref not supported for this vcs type')
- # Returns the srclib (name, path) used in setting up the current
- # revision, or None.
    def getsrclib(self):
        """Returns the srclib (name, path) used in setting up the current revision, or None."""
        return self.srclib
def repotype(self):
return 'git'
- # If the local directory exists, but is somehow not a git repository, git
- # will traverse up the directory tree until it finds one that is (i.e.
- # fdroidserver) and then we'll proceed to destroy it! This is called as
- # a safety check.
def checkrepo(self):
+ """If the local directory exists, but is somehow not a git repository,
+ git will traverse up the directory tree until it finds one
+ that is (i.e. fdroidserver) and then we'll proceed to destroy
+ it! This is called as a safety check.
+
+ """
+
p = FDroidPopen(['git', 'rev-parse', '--show-toplevel'], cwd=self.local, output=False)
result = p.output.rstrip()
if not result.endswith(self.local):
p = FDroidPopen(['git', 'submodule', 'foreach', '--recursive',
'git', 'reset', '--hard'], cwd=self.local, output=False)
if p.returncode != 0:
- raise VCSException("Git reset failed", p.output)
+ raise VCSException(_("Git reset failed"), p.output)
# Remove untracked files now, in case they're tracked in the target
# revision (it happens!)
p = FDroidPopen(['git', 'submodule', 'foreach', '--recursive',
'git', 'clean', '-dffx'], cwd=self.local, output=False)
if p.returncode != 0:
- raise VCSException("Git clean failed", p.output)
+ raise VCSException(_("Git clean failed"), p.output)
if not self.refreshed:
# Get latest commits and tags from remote
p = FDroidPopen(['git', 'fetch', 'origin'], cwd=self.local)
if p.returncode != 0:
- raise VCSException("Git fetch failed", p.output)
+ raise VCSException(_("Git fetch failed"), p.output)
p = FDroidPopen(['git', 'fetch', '--prune', '--tags', 'origin'], cwd=self.local, output=False)
if p.returncode != 0:
- raise VCSException("Git fetch failed", p.output)
+ raise VCSException(_("Git fetch failed"), p.output)
# Recreate origin/HEAD as git clone would do it, in case it disappeared
p = FDroidPopen(['git', 'remote', 'set-head', 'origin', '--auto'], cwd=self.local, output=False)
if p.returncode != 0:
lines = p.output.splitlines()
if 'Multiple remote HEAD branches' not in lines[0]:
- raise VCSException("Git remote set-head failed", p.output)
+ raise VCSException(_("Git remote set-head failed"), p.output)
branch = lines[1].split(' ')[-1]
p2 = FDroidPopen(['git', 'remote', 'set-head', 'origin', branch], cwd=self.local, output=False)
if p2.returncode != 0:
- raise VCSException("Git remote set-head failed", p.output + '\n' + p2.output)
+ raise VCSException(_("Git remote set-head failed"), p.output + '\n' + p2.output)
self.refreshed = True
# origin/HEAD is the HEAD of the remote, e.g. the "default branch" on
# a github repo. Most of the time this is the same as origin/master.
rev = rev or 'origin/HEAD'
p = FDroidPopen(['git', 'checkout', '-f', rev], cwd=self.local, output=False)
if p.returncode != 0:
- raise VCSException("Git checkout of '%s' failed" % rev, p.output)
+ raise VCSException(_("Git checkout of '%s' failed") % rev, p.output)
# Get rid of any uncontrolled files left behind
p = FDroidPopen(['git', 'clean', '-dffx'], cwd=self.local, output=False)
if p.returncode != 0:
- raise VCSException("Git clean failed", p.output)
+ raise VCSException(_("Git clean failed"), p.output)
def initsubmodules(self):
self.checkrepo()
submfile = os.path.join(self.local, '.gitmodules')
if not os.path.isfile(submfile):
- raise VCSException("No git submodules available")
+ raise VCSException(_("No git submodules available"))
# fix submodules not accessible without an account and public key auth
with open(submfile, 'r') as f:
p = FDroidPopen(['git', 'submodule', 'sync'], cwd=self.local, output=False)
if p.returncode != 0:
- raise VCSException("Git submodule sync failed", p.output)
+ raise VCSException(_("Git submodule sync failed"), p.output)
p = FDroidPopen(['git', 'submodule', 'update', '--init', '--force', '--recursive'], cwd=self.local)
if p.returncode != 0:
- raise VCSException("Git submodule update failed", p.output)
+ raise VCSException(_("Git submodule update failed"), p.output)
def _gettags(self):
self.checkrepo()
def repotype(self):
return 'git-svn'
- # If the local directory exists, but is somehow not a git repository, git
- # will traverse up the directory tree until it finds one that is (i.e.
- # fdroidserver) and then we'll proceed to destory it! This is called as
- # a safety check.
def checkrepo(self):
+ """If the local directory exists, but is somehow not a git repository,
+ git will traverse up the directory tree until it finds one that
+ is (i.e. fdroidserver) and then we'll proceed to destory it!
+ This is called as a safety check.
+
+ """
p = FDroidPopen(['git', 'rev-parse', '--show-toplevel'], cwd=self.local, output=False)
result = p.output.rstrip()
if not result.endswith(self.local):
# Check out the git rev equivalent to the svn rev
p = FDroidPopen(['git', 'checkout', git_rev], cwd=self.local, output=False)
if p.returncode != 0:
- raise VCSException("Git checkout of '%s' failed" % rev, p.output)
+ raise VCSException(_("Git checkout of '%s' failed") % rev, p.output)
# Get rid of any uncontrolled files left behind
p = FDroidPopen(['git', 'clean', '-dffx'], cwd=self.local, output=False)
if p.returncode != 0:
- raise VCSException("Git clean failed", p.output)
+ raise VCSException(_("Git clean failed"), p.output)
def _gettags(self):
self.checkrepo()
os.path.join(app_dir, 'res'),
os.path.join(app_dir, 'src', 'main', 'res'),
]:
- for r, d, f in os.walk(res_dir):
- if os.path.basename(r) == 'values':
- xmlfiles += [os.path.join(r, x) for x in f if x.endswith('.xml')]
+ for root, dirs, files in os.walk(res_dir):
+ if os.path.basename(root) == 'values':
+ xmlfiles += [os.path.join(root, x) for x in files if x.endswith('.xml')]
name = string[len('@string/'):]
if not os.path.isfile(path):
continue
- logging.debug("Parsing manifest at {0}".format(path))
+ logging.debug(_("Parsing manifest at '{path}'").format(path=path))
version = None
vercode = None
package = None
if string_is_integer(a):
vercode = a
except Exception:
- logging.warning("Problem with xml at {0}".format(path))
+ logging.warning(_("Problem with xml at '{path}'").format(path=path))
# Remember package name, may be defined separately from version+vercode
if package is None:
max_version = "Unknown"
if max_package and not is_valid_package_name(max_package):
- raise FDroidException("Invalid package name {0}".format(max_package))
+ raise FDroidException(_("Invalid package name {0}").format(max_package))
return (max_version, max_vercode, max_package)
return re.match("[A-Za-z_][A-Za-z_0-9.]+$", name)
-class FDroidException(Exception):
-
- def __init__(self, value, detail=None):
- self.value = value
- self.detail = detail
-
- def shortened_detail(self):
- if len(self.detail) < 16000:
- return self.detail
- return '[...]\n' + self.detail[-16000:]
-
- def get_wikitext(self):
- ret = repr(self.value) + "\n"
- if self.detail:
- ret += "=detail=\n"
- ret += "<pre>\n" + self.shortened_detail() + "</pre>\n"
- return ret
-
- def __str__(self):
- ret = self.value
- if self.detail:
- ret += "\n==== detail begin ====\n%s\n==== detail end ====" % self.detail.strip()
- return ret
-
-
-class VCSException(FDroidException):
- pass
-
-
-class BuildException(FDroidException):
- pass
-
-
-# Get the specified source library.
-# Returns the path to it. Normally this is the path to be used when referencing
-# it, which may be a subdirectory of the actual project. If you want the base
-# directory of the project, pass 'basepath=True'.
def getsrclib(spec, srclib_dir, subdir=None, basepath=False,
raw=False, prepare=True, preponly=False, refresh=True,
build=None):
+ """Get the specified source library.
+ Returns the path to it. Normally this is the path to be used when
+ referencing it, which may be a subdirectory of the actual project. If
+ you want the base directory of the project, pass 'basepath=True'.
+
+ """
number = None
subdir = None
if raw:
gradle_version_regex = re.compile(r"[^/]*'com\.android\.tools\.build:gradle:([^\.]+\.[^\.]+).*'.*")
-# Prepare the source code for a particular build
-# 'vcs' - the appropriate vcs object for the application
-# 'app' - the application details from the metadata
-# 'build' - the build details from the metadata
-# 'build_dir' - the path to the build directory, usually
-# 'build/app.id'
-# 'srclib_dir' - the path to the source libraries directory, usually
-# 'build/srclib'
-# 'extlib_dir' - the path to the external libraries directory, usually
-# 'build/extlib'
-# Returns the (root, srclibpaths) where:
-# 'root' is the root directory, which may be the same as 'build_dir' or may
-# be a subdirectory of it.
-# 'srclibpaths' is information on the srclibs being used
def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver=False, refresh=True):
+ """ Prepare the source code for a particular build
+
+ :param vcs: the appropriate vcs object for the application
+ :param app: the application details from the metadata
+ :param build: the build details from the metadata
+ :param build_dir: the path to the build directory, usually 'build/app.id'
+ :param srclib_dir: the path to the source libraries directory, usually 'build/srclib'
+ :param extlib_dir: the path to the external libraries directory, usually 'build/extlib'
+
+ Returns the (root, srclibpaths) where:
+ :param root: is the root directory, which may be the same as 'build_dir' or may
+ be a subdirectory of it.
+ :param srclibpaths: is information on the srclibs being used
+ """
# Optionally, the actual app source can be in a subdirectory
if build.subdir:
# Initialise submodules if required
if build.submodules:
- logging.info("Initialising submodules")
+ logging.info(_("Initialising submodules"))
vcs.initsubmodules()
# Check that a subdir (if we're using one) exists. This has to happen
# Delete unwanted files
if build.rm:
- logging.info("Removing specified files")
+ logging.info(_("Removing specified files"))
for part in getpaths(build_dir, build.rm):
dest = os.path.join(build_dir, part)
logging.info("Removing {0}".format(part))
return (root_dir, srclibpaths)
-# Extend via globbing the paths from a field and return them as a map from
-# original path to resulting paths
def getpaths_map(build_dir, globpaths):
+ """Extend via globbing the paths from a field and return them as a map from original path to resulting paths"""
paths = dict()
for p in globpaths:
p = p.strip()
return paths
-# Extend via globbing the paths from a field and return them as a set
def getpaths(build_dir, globpaths):
+ """Extend via globbing the paths from a field and return them as a set"""
paths_map = getpaths_map(build_dir, globpaths)
paths = set()
for k, v in paths_map.items():
class KnownApks:
+ """permanent store of existing APKs with the date they were added
+
+ This is currently the only way to permanently store the "updated"
+ date of APKs.
+ """
def __init__(self):
+ '''Load filename/date info about previously seen APKs
+
+ Since the appid and date strings both will never have spaces,
+ this is parsed as a list from the end to allow the filename to
+ have any combo of spaces.
+ '''
+
self.path = os.path.join('stats', 'known_apks.txt')
self.apks = {}
if os.path.isfile(self.path):
if len(t) == 2:
self.apks[t[0]] = (t[1], None)
else:
- self.apks[t[0]] = (t[1], datetime.strptime(t[2], '%Y-%m-%d'))
+ appid = t[-2]
+ date = datetime.strptime(t[-1], '%Y-%m-%d')
+ filename = line[0:line.rfind(appid) - 1]
+ self.apks[filename] = (appid, date)
self.changed = False
def writeifchanged(self):
for line in sorted(lst, key=natural_key):
f.write(line + '\n')
- def recordapk(self, apk, app, default_date=None):
+ def recordapk(self, apkName, app, default_date=None):
'''
Record an apk (if it's new, otherwise does nothing)
Returns the date it was added as a datetime instance
'''
- if apk not in self.apks:
+ if apkName not in self.apks:
if default_date is None:
default_date = datetime.utcnow()
- self.apks[apk] = (app, default_date)
+ self.apks[apkName] = (app, default_date)
self.changed = True
- _, added = self.apks[apk]
+ _, added = self.apks[apkName]
return added
- # Look up information - given the 'apkname', returns (app id, date added/None).
- # Or returns None for an unknown apk.
def getapp(self, apkname):
+ """Look up information - given the 'apkname', returns (app id, date added/None).
+
+ Or returns None for an unknown apk.
+ """
if apkname in self.apks:
return self.apks[apkname]
return None
- # Get the most recent 'num' apps added to the repo, as a list of package ids
- # with the most recent first.
def getlatest(self, num):
+ """Get the most recent 'num' apps added to the repo, as a list of package ids with the most recent first"""
apps = {}
for apk, app in self.apks.items():
appid, added = app
else:
apps[appid] = added
sortedapps = sorted(apps.items(), key=operator.itemgetter(1))[-num:]
- lst = [app for app, _ in sortedapps]
+ lst = [app for app, _ignored in sortedapps]
lst.reverse()
return lst
def get_file_extension(filename):
    """Return the normalized (lowercase, no dot) file extension.

    Accepts str or bytes; the result can be the empty string but never None.
    """
    name = filename.decode('utf-8') if isinstance(filename, bytes) else filename
    ext = os.path.splitext(name)[1]
    return ext[1:].lower()
-def isApkAndDebuggable(apkfile, config):
def get_apk_debuggable_aapt(apkfile):
    """Use aapt to check whether an APK's manifest sets android:debuggable.

    :param apkfile: full path to the APK to check
    :raises FDroidException: if aapt cannot dump the manifest
    """
    p = SdkToolsPopen(['aapt', 'dump', 'xmltree', apkfile, 'AndroidManifest.xml'],
                      output=False)
    if p.returncode != 0:
        raise FDroidException(_("Failed to get APK manifest information"))
    # a line ending in 0x0 means debuggable is explicitly set to false
    return any('android:debuggable' in line and not line.endswith('0x0')
               for line in p.output.splitlines())
+
+
def get_apk_debuggable_androguard(apkfile):
    """Use androguard to check whether an APK's manifest sets android:debuggable.

    :param apkfile: full path to the APK to check
    :raises FDroidException: if the androguard library is not installed
    """
    try:
        from androguard.core.bytecodes.apk import APK
    except ImportError:
        # wrapped in _() for consistency with the rest of the user-facing messages
        raise FDroidException(_("androguard library is not installed and aapt not present"))

    apkobject = APK(apkfile)
    if apkobject.is_valid_APK():
        debuggable = apkobject.get_element("application", "debuggable")
        if debuggable is not None:
            return bool(strtobool(debuggable))
    return False
+
+
def isApkAndDebuggable(apkfile):
    """Returns True if the given file is an APK and is debuggable

    :param apkfile: full path to the apk to check"""
    if get_file_extension(apkfile) != 'apk':
        return False
    # A bare `if SdkToolsPopen(...)` is always truthy (it returns a
    # PopenResult object), and a missing aapt raises FDroidException out of
    # find_sdk_tools_cmd, so the androguard fallback would be unreachable.
    # Test for a working aapt explicitly so the fallback can actually run,
    # as get_apk_debuggable_androguard's error message promises.
    try:
        if SdkToolsPopen(['aapt', 'version'], output=False).returncode == 0:
            return get_apk_debuggable_aapt(apkfile)
    except FDroidException:
        pass
    return get_apk_debuggable_androguard(apkfile)
+
+
def get_apk_id_aapt(apkfile):
    """Extract identification information from an APK using aapt.

    :param apkfile: path to an APK file.
    :returns: triplet (appid, version code, version name), all as strings
        parsed from aapt's 'package:' badging line
    :raises FDroidException: if no matching 'package:' line is found,
        i.e. the file is not a valid APK
    """
    r = re.compile("package: name='(?P<appid>.*)' versionCode='(?P<vercode>.*)' versionName='(?P<vername>.*)' platformBuildVersionName='.*'")
    p = SdkToolsPopen(['aapt', 'dump', 'badging', apkfile], output=False)
    for line in p.output.splitlines():
        m = r.match(line)
        if m:
            return m.group('appid'), m.group('vercode'), m.group('vername')
    raise FDroidException(_("Reading packageName/versionCode/versionName failed, APK invalid: '{apkfilename}'")
                          .format(apkfilename=apkfile))
class PopenResult:
config[cmd] = find_sdk_tools_cmd(commands[0])
abscmd = config[cmd]
if abscmd is None:
- logging.critical("Could not find '%s' on your system" % cmd)
- sys.exit(1)
+ raise FDroidException(_("Could not find '{command}' on your system").format(command=cmd))
if cmd == 'aapt':
test_aapt_version(config['aapt'])
return FDroidPopen([abscmd] + commands[1:],
cwd=cwd, output=output)
-def FDroidPopenBytes(commands, cwd=None, output=True, stderr_to_stdout=True):
+def FDroidPopenBytes(commands, cwd=None, envs=None, output=True, stderr_to_stdout=True):
"""
Run a command and capture the possibly huge output as bytes.
:param commands: command and argument list like in subprocess.Popen
:param cwd: optionally specifies a working directory
+ :param envs: a optional dictionary of environment variables and their values
:returns: A PopenResult.
"""
if env is None:
set_FDroidPopen_env()
+ process_env = env.copy()
+ if envs is not None and len(envs) > 0:
+ process_env.update(envs)
+
if cwd:
cwd = os.path.normpath(cwd)
logging.debug("Directory: %s" % cwd)
result = PopenResult()
p = None
try:
- p = subprocess.Popen(commands, cwd=cwd, shell=False, env=env,
+ p = subprocess.Popen(commands, cwd=cwd, shell=False, env=process_env,
stdout=subprocess.PIPE, stderr=stderr_param)
except OSError as e:
raise BuildException("OSError while trying to execute " +
result.returncode = p.wait()
result.output = buf.getvalue()
buf.close()
+ # make sure all filestreams of the subprocess are closed
+ for streamvar in ['stdin', 'stdout', 'stderr']:
+ if hasattr(p, streamvar):
+ stream = getattr(p, streamvar)
+ if stream:
+ stream.close()
return result
-def FDroidPopen(commands, cwd=None, output=True, stderr_to_stdout=True):
def FDroidPopen(commands, cwd=None, envs=None, output=True, stderr_to_stdout=True):
    """
    Run a command and capture the possibly huge output as a str.

    :param commands: command and argument list like in subprocess.Popen
    :param cwd: optionally specifies a working directory
    :param envs: an optional dictionary of environment variables and their values
    :param output: passed through to FDroidPopenBytes (controls output capture)
    :param stderr_to_stdout: when True, stderr is merged into stdout
    :returns: A PopenResult whose output is decoded as UTF-8, with
        undecodable bytes silently dropped ('ignore')
    """
    result = FDroidPopenBytes(commands, cwd, envs, output, stderr_to_stdout)
    result.output = result.output.decode('utf-8', 'ignore')
    return result
apk_sigfile = re.compile(r'META-INF/[0-9A-Za-z]+\.(SF|RSA|DSA|EC)')
def signer_fingerprint_short(sig):
    """Obtain shortened sha256 signing-key fingerprint for pkcs7 signature.

    Returns only the first 7 hexadecimal digits of the full sha256
    signing-key fingerprint for a given pkcs7 signature.

    :param sig: Contents of an APK signing certificate.
    :returns: shortened signing-key fingerprint.
    """
    full_fingerprint = signer_fingerprint(sig)
    return full_fingerprint[:7]
+
+
def signer_fingerprint(sig):
    """Obtain sha256 signing-key fingerprint for pkcs7 signature.

    Extracts the certificate from the signature blob and returns the
    hexadecimal sha256 digest of its DER encoding.

    :param sig: Contents of an APK signature.
    :returns: hexadecimal sha256 signing-key fingerprint string.
    """
    der_certificate = get_certificate(sig)
    digest = hashlib.sha256(der_certificate)
    return digest.hexdigest()
+
+
def apk_signer_fingerprint(apk_path):
    """Obtain sha256 signing-key fingerprint for APK.

    Extracts hexadecimal sha256 signing-key fingerprint string
    for a given APK.

    :param apk_path: path to APK
    :returns: signature fingerprint as a hex string, or None when the APK
        does not contain exactly one signing certificate
    """

    with zipfile.ZipFile(apk_path, 'r') as apk:
        certs = [n for n in apk.namelist() if CERT_PATH_REGEX.match(n)]

        # exactly one signature block file is required for a defined result
        if len(certs) < 1:
            logging.error("Found no signing certificates on %s" % apk_path)
            return None
        if len(certs) > 1:
            logging.error("Found multiple signing certificates on %s" % apk_path)
            return None

        cert = apk.read(certs[0])
        return signer_fingerprint(cert)
+
+
def apk_signer_fingerprint_short(apk_path):
    """Obtain shortened sha256 signing-key fingerprint for APK.

    Returns only the first 7 hexadecimal digits of the full sha256
    signing-key fingerprint for a given APK.

    :param apk_path: path to APK
    :returns: shortened signing-key fingerprint
    """
    fingerprint = apk_signer_fingerprint(apk_path)
    return fingerprint[:7]
+
+
def metadata_get_sigdir(appid, vercode=None):
    """Get signature directory for app"""
    parts = ['metadata', appid, 'signatures']
    if vercode:
        parts.append(vercode)
    return os.path.join(*parts)
+
+
def metadata_find_developer_signature(appid, vercode=None):
    """Tries to find the developer signature for given appid.

    This picks the first signature file found in metadata and returns its
    signature.

    :param appid: app id string
    :param vercode: optional version code; when given, only that version's
        signature directory is searched
    :returns: sha256 signing key fingerprint of the developer signing key.
        None in case no signature can be found.
    :raises FDroidException: if a signature directory contains more than
        one signature file
    """

    # fetch list of dirs for all versions of signatures
    appversigdirs = []
    if vercode:
        appversigdirs.append(metadata_get_sigdir(appid, vercode))
    else:
        appsigdir = metadata_get_sigdir(appid)
        if os.path.isdir(appsigdir):
            # only version-code-named subdirectories hold signatures
            numre = re.compile('[0-9]+')
            for ver in os.listdir(appsigdir):
                if numre.match(ver):
                    appversigdir = os.path.join(appsigdir, ver)
                    appversigdirs.append(appversigdir)

    for sigdir in appversigdirs:
        sigs = glob.glob(os.path.join(sigdir, '*.DSA')) + \
            glob.glob(os.path.join(sigdir, '*.EC')) + \
            glob.glob(os.path.join(sigdir, '*.RSA'))
        if len(sigs) > 1:
            raise FDroidException('ambiguous signatures, please make sure there is only one signature in \'{}\'. (The signature has to be the App maintainers signature for version of the APK.)'.format(sigdir))
        for sig in sigs:
            with open(sig, 'rb') as f:
                return signer_fingerprint(f.read())
    return None
+
+
def metadata_find_signing_files(appid, vercode):
    """Gets a list of signed manifests and signatures.

    :param appid: app id string
    :param vercode: app version code
    :returns: a list of triplets for each signing key with following paths:
        (signature_file, signed_file, manifest_file)
    """
    ret = []
    sigdir = metadata_get_sigdir(appid, vercode)
    sigs = glob.glob(os.path.join(sigdir, '*.DSA')) + \
        glob.glob(os.path.join(sigdir, '*.EC')) + \
        glob.glob(os.path.join(sigdir, '*.RSA'))
    # raw string: '\.' in a plain string literal is an invalid escape sequence
    extre = re.compile(r'(\.DSA|\.EC|\.RSA)$')
    for sig in sigs:
        # a signature only counts when its .SF and MANIFEST.MF exist too
        sf = extre.sub('.SF', sig)
        if os.path.isfile(sf):
            mf = os.path.join(sigdir, 'MANIFEST.MF')
            if os.path.isfile(mf):
                ret.append((sig, sf, mf))
    return ret
+
+
def metadata_find_developer_signing_files(appid, vercode):
    """Get developer signature files for specified app from metadata.

    :returns: A triplet of paths for signing files from metadata:
        (signature_file, signed_file, manifest_file), or None unless
        exactly one signing key was found
    """
    candidates = metadata_find_signing_files(appid, vercode)
    if candidates and len(candidates) == 1:
        return candidates[0]
    return None
+
+
def apk_strip_signatures(signed_apk, strip_manifest=False):
    """Removes signatures from APK.

    :param signed_apk: path to apk file.
    :param strip_manifest: when set to True also the manifest file will
        be removed from the APK.
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        tmp_apk = os.path.join(tmpdir, 'tmp.apk')
        # move the original aside, then rebuild it without the signatures
        os.rename(signed_apk, tmp_apk)
        with ZipFile(tmp_apk, 'r') as in_apk:
            with ZipFile(signed_apk, 'w') as out_apk:
                for info in in_apk.infolist():
                    if apk_sigfile.match(info.filename):
                        continue
                    if strip_manifest and info.filename == 'META-INF/MANIFEST.MF':
                        continue
                    out_apk.writestr(info.filename, in_apk.read(info.filename))
+
+
def apk_implant_signatures(apkpath, signaturefile, signedfile, manifest):
    """Implants a signature from metadata into an APK.

    Note: this changes the supplied APK in place. So copy it if you
    need the original to be preserved.

    :param apkpath: location of the apk
    :param signaturefile: path to the signature block file (.DSA/.EC/.RSA)
    :param signedfile: path to the signed manifest file (.SF)
    :param manifest: path to the MANIFEST.MF file
    :raises BuildException: if zipalign fails on the re-signed APK
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        apkwithnewsig = os.path.join(tmpdir, 'newsig.apk')
        with ZipFile(apkpath, 'r') as in_apk:
            with ZipFile(apkwithnewsig, 'w') as out_apk:
                # write the new signature files first ...
                for sig_file in [signaturefile, signedfile, manifest]:
                    out_apk.write(sig_file, arcname='META-INF/' +
                                  os.path.basename(sig_file))
                # ... then copy everything except the old signature files
                for f in in_apk.infolist():
                    if not apk_sigfile.match(f.filename):
                        if f.filename != 'META-INF/MANIFEST.MF':
                            buf = in_apk.read(f.filename)
                            out_apk.writestr(f.filename, buf)
        os.remove(apkpath)
        p = SdkToolsPopen(['zipalign', '-v', '4', apkwithnewsig, apkpath])
        if p.returncode != 0:
            raise BuildException("Failed to align application")
+
+
def apk_extract_signatures(apkpath, outdir, manifest=True):
    """Extracts a signature files from APK and puts them into target directory.

    :param apkpath: location of the apk
    :param outdir: folder where the extracted signature files will be stored
    :param manifest: (optionally) disable extracting manifest file
    """
    with ZipFile(apkpath, 'r') as in_apk:
        for info in in_apk.infolist():
            is_signature = bool(apk_sigfile.match(info.filename))
            is_manifest = manifest and info.filename == 'META-INF/MANIFEST.MF'
            if not (is_signature or is_manifest):
                continue
            newpath = os.path.join(outdir, os.path.basename(info.filename))
            with open(newpath, 'wb') as out_file:
                out_file.write(in_apk.read(info.filename))
+
+
def verify_apks(signed_apk, unsigned_apk, tmp_dir):
    """Verify that two apks are the same

    One of the inputs is signed, the other is unsigned. The signature metadata
    is transferred from the signed to the unsigned apk, and then jarsigner is
    used to verify that the signature from the signed apk is also valid for
    the unsigned one.  If the APK given as unsigned actually does have a
    signature, it will be stripped out and ignored.

    There are two SHA1 git commit IDs that fdroidserver includes in the builds
    it makes: fdroidserverid and buildserverid.  Originally, these were inserted
    into AndroidManifest.xml, but that makes the build not reproducible. So
    instead they are included as separate files in the APK's META-INF/ folder.
    If those files exist in the signed APK, they will be part of the signature
    and need to also be included in the unsigned APK for it to validate.

    :param signed_apk: Path to a signed apk file
    :param unsigned_apk: Path to an unsigned apk file expected to match it
    :param tmp_dir: Path to directory for temporary files
    :returns: None if the verification is successful, otherwise a string
              describing what went wrong.
    """

    if not os.path.isfile(signed_apk):
        return 'can not verify: file does not exists: {}'.format(signed_apk)

    if not os.path.isfile(unsigned_apk):
        return 'can not verify: file does not exists: {}'.format(unsigned_apk)

    with ZipFile(signed_apk, 'r') as signed:
        # collect the signature files plus the server-id marker files, since
        # those are covered by the signature and must be copied along with it
        meta_inf_files = ['META-INF/MANIFEST.MF']
        for f in signed.namelist():
            if apk_sigfile.match(f) \
               or f in ['META-INF/fdroidserverid', 'META-INF/buildserverid']:
                meta_inf_files.append(f)
        # MANIFEST.MF plus at least one .SF and one signature block file
        if len(meta_inf_files) < 3:
            return "Signature files missing from {0}".format(signed_apk)

        tmp_apk = os.path.join(tmp_dir, 'sigcp_' + os.path.basename(unsigned_apk))
        with ZipFile(unsigned_apk, 'r') as unsigned:
            # only read the signature from the signed APK, everything else from unsigned
            with ZipFile(tmp_apk, 'w') as tmp:
                for filename in meta_inf_files:
                    tmp.writestr(signed.getinfo(filename), signed.read(filename))
                for info in unsigned.infolist():
                    if info.filename in meta_inf_files:
                        logging.warning('Ignoring %s from %s',
                                        info.filename, unsigned_apk)
                        continue
                    if info.filename in tmp.namelist():
                        return "duplicate filename found: " + info.filename
                    tmp.writestr(info, unsigned.read(info.filename))

    verified = verify_apk_signature(tmp_apk)

    if not verified:
        logging.info("...NOT verified - {0}".format(tmp_apk))
        return compare_apks(signed_apk, tmp_apk, tmp_dir,
                            os.path.dirname(unsigned_apk))

    logging.info("...successfully verified")
    return None
def verify_jar_signature(jar):
    """Verifies the signature of a given JAR file.

    jarsigner is very shitty: unsigned JARs pass as "verified"! So
    this has to turn on -strict then check for result 4, since this
    does not expect the signature to be from a CA-signed certificate.

    :raises: VerificationException() if the JAR's signature could not be verified

    """

    returncode = subprocess.call([config['jarsigner'], '-strict', '-verify', jar])
    if returncode != 4:
        raise VerificationException(_("The repository's index could not be verified."))
+
+
def verify_apk_signature(apk, min_sdk_version=None):
    """verify the signature on an APK

    Try to use apksigner whenever possible since jarsigner is very
    shitty: unsigned APKs pass as "verified"! Warning, this does
    not work on JARs with apksigner >= 0.7 (build-tools 26.0.1)

    :returns: boolean whether the APK was verified
    """
    if set_command_in_config('apksigner'):
        args = [config['apksigner'], 'verify']
        if min_sdk_version:
            args.append('--min-sdk-version=' + min_sdk_version)
        return subprocess.call(args + [apk]) == 0

    logging.warning("Using Java's jarsigner, not recommended for verifying APKs! Use apksigner")
    try:
        verify_jar_signature(apk)
    except Exception:
        return False
    return True
+
+
def verify_old_apk_signature(apk):
    """verify the signature on an archived APK, supporting deprecated algorithms

    F-Droid aims to keep every single binary that it ever published. Therefore,
    it needs to be able to verify APK signatures that include deprecated/removed
    algorithms.  For example, jarsigner treats an MD5 signature as unsigned.

    jarsigner passes unsigned APKs as "verified"! So this has to turn
    on -strict then check for result 4.

    :returns: boolean whether the APK was verified
    """

    # temporarily relax the JVM's disabled-algorithms policy for this one run
    _java_security = os.path.join(os.getcwd(), '.java.security')
    with open(_java_security, 'w') as fp:
        fp.write('jdk.jar.disabledAlgorithms=MD2, RSA keySize < 1024')

    try:
        return subprocess.call([config['jarsigner'], '-J-Djava.security.properties=' + _java_security,
                                '-strict', '-verify', apk]) == 4
    finally:
        # do not leave the security-properties override lying around in cwd
        os.remove(_java_security)
+
+
apk_badchars = re.compile('''[/ :;'"]''')
-def compare_apks(apk1, apk2, tmp_dir):
+def compare_apks(apk1, apk2, tmp_dir, log_dir=None):
"""Compare two apks
Returns None if the apk content is the same (apart from the signing key),
trying to do the comparison.
"""
+ if not log_dir:
+ log_dir = tmp_dir
+
absapk1 = os.path.abspath(apk1)
absapk2 = os.path.abspath(apk2)
- # try to find diffoscope in the path, if it hasn't been manually configed
- if 'diffoscope' not in config:
- tmp = find_command('diffoscope')
- if tmp is not None:
- config['diffoscope'] = tmp
- if 'diffoscope' in config:
- htmlfile = absapk1 + '.diffoscope.html'
- textfile = absapk1 + '.diffoscope.txt'
+ if set_command_in_config('diffoscope'):
+ logfilename = os.path.join(log_dir, os.path.basename(absapk1))
+ htmlfile = logfilename + '.diffoscope.html'
+ textfile = logfilename + '.diffoscope.txt'
if subprocess.call([config['diffoscope'],
'--max-report-size', '12345678', '--max-diff-block-lines', '100',
'--html', htmlfile, '--text', textfile,
cwd=os.path.join(apk2dir, 'jar-xf')) != 0:
return("Failed to unpack " + apk2)
- # try to find apktool in the path, if it hasn't been manually configed
- if 'apktool' not in config:
- tmp = find_command('apktool')
- if tmp is not None:
- config['apktool'] = tmp
- if 'apktool' in config:
+ if set_command_in_config('apktool'):
if subprocess.call([config['apktool'], 'd', os.path.abspath(apk1), '--output', 'apktool'],
cwd=apk1dir) != 0:
return("Failed to unpack " + apk1)
p = FDroidPopen(['diff', '-r', apk1dir, apk2dir], output=False)
lines = p.output.splitlines()
if len(lines) != 1 or 'META-INF' not in lines[0]:
- meld = find_command('meld')
- if meld is not None:
- p = FDroidPopen(['meld', apk1dir, apk2dir], output=False)
+ if set_command_in_config('meld'):
+ p = FDroidPopen([config['meld'], apk1dir, apk2dir], output=False)
return("Unexpected diff output - " + p.output)
# since everything verifies, delete the comparison to keep cruft down
return None
def set_command_in_config(command):
    '''Try to find specified command in the path, if it hasn't been
    manually set in config.py.  If found, it is added to the config
    dict.  The return value says whether the command is available.

    '''
    if command in config:
        return True
    found = find_command(command)
    if found is None:
        return False
    config[command] = found
    return True
+
+
def find_command(command):
'''find the full path of a command, or None if it can't be found in the PATH'''
if not os.path.exists(keystoredir):
os.makedirs(keystoredir, mode=0o700)
- write_password_file("keystorepass", localconfig['keystorepass'])
- write_password_file("keypass", localconfig['keypass'])
+ env_vars = {
+ 'FDROID_KEY_STORE_PASS': localconfig['keystorepass'],
+ 'FDROID_KEY_PASS': localconfig['keypass'],
+ }
p = FDroidPopen([config['keytool'], '-genkey',
'-keystore', localconfig['keystore'],
'-alias', localconfig['repo_keyalias'],
'-keyalg', 'RSA', '-keysize', '4096',
'-sigalg', 'SHA256withRSA',
'-validity', '10000',
- '-storepass:file', config['keystorepassfile'],
- '-keypass:file', config['keypassfile'],
- '-dname', localconfig['keydname']])
- # TODO keypass should be sent via stdin
+ '-storepass:env', 'FDROID_KEY_STORE_PASS',
+ '-keypass:env', 'FDROID_KEY_PASS',
+ '-dname', localconfig['keydname']], envs=env_vars)
if p.returncode != 0:
raise BuildException("Failed to generate key", p.output)
os.chmod(localconfig['keystore'], 0o0600)
p = FDroidPopen([config['keytool'], '-list', '-v',
'-keystore', localconfig['keystore'],
'-alias', localconfig['repo_keyalias'],
- '-storepass:file', config['keystorepassfile']])
+ '-storepass:env', 'FDROID_KEY_STORE_PASS'], envs=env_vars)
logging.info(p.output.strip() + '\n\n')
# get the public key
p = FDroidPopenBytes([config['keytool'], '-exportcert',
'-keystore', localconfig['keystore'],
'-alias', localconfig['repo_keyalias'],
- '-storepass:file', config['keystorepassfile']]
+ '-storepass:env', 'FDROID_KEY_STORE_PASS']
+ config['smartcardoptions'],
- output=False, stderr_to_stdout=False)
+ envs=env_vars, output=False, stderr_to_stdout=False)
if p.returncode != 0 or len(p.output) < 20:
raise BuildException("Failed to get public key", p.output)
pubkey = p.output
return " ".join(ret)
-def write_to_config(thisconfig, key, value=None):
- '''write a key/value to the local config.py'''
def get_certificate(certificate_file):
    """
    Extracts a certificate from the given file.
    :param certificate_file: file bytes (as string) representing the certificate
    :return: A binary representation of the certificate's public key, or None in case of error
    """
    content_info = decoder.decode(certificate_file, asn1Spec=rfc2315.ContentInfo())[0]
    if content_info.getComponentByName('contentType') != rfc2315.signedData:
        return None
    signed_data = decoder.decode(content_info.getComponentByName('content'),
                                 asn1Spec=rfc2315.SignedData())[0]
    try:
        certificates = signed_data.getComponentByName('certificates')
        cert = certificates[0].getComponentByName('certificate')
    except PyAsn1Error:
        logging.error("Certificates not found.")
        return None
    return encoder.encode(cert)
+
+
def load_stats_fdroid_signing_key_fingerprints():
    """Load signing-key fingerprints stored by fdroid publish from file.

    :returns: dict of dictionaries containing the signing-key fingerprints,
        or an empty dict if stats/publishsigkeys.jar does not exist
    :raises FDroidException: if the JAR's signature does not validate, or
        its signing key does not match repo_key_sha256 from config.py
    """
    jar_file = os.path.join('stats', 'publishsigkeys.jar')
    if not os.path.isfile(jar_file):
        return {}
    # jarsigner -strict returns 4 for a valid signature that is not from a
    # CA-signed certificate, which is the expected case here
    cmd = [config['jarsigner'], '-strict', '-verify', jar_file]
    p = FDroidPopen(cmd, output=False)
    if p.returncode != 4:
        raise FDroidException("Signature validation of '{}' failed! "
                              "Please run publish again to rebuild this file.".format(jar_file))

    jar_sigkey = apk_signer_fingerprint(jar_file)
    repo_key_sig = config.get('repo_key_sha256')
    if repo_key_sig:
        if jar_sigkey != repo_key_sig:
            raise FDroidException("Signature key fingerprint of file '{}' does not match repo_key_sha256 in config.py (found fingerprint: '{}')".format(jar_file, jar_sigkey))
    else:
        logging.warning("repo_key_sha256 not in config.py, setting it to the signature key fingerprint of '{}'".format(jar_file))
        config['repo_key_sha256'] = jar_sigkey
        write_to_config(config, 'repo_key_sha256')

    with zipfile.ZipFile(jar_file, 'r') as f:
        return json.loads(str(f.read('publishsigkeys.json'), 'utf-8'))
+
+
def write_to_config(thisconfig, key, value=None, config_file=None):
    '''write a key/value to the local config.py

    NOTE: only supports writing string variables.

    :param thisconfig: config dictionary
    :param key: variable name in config.py to be overwritten/added
    :param value: optional value to be written, instead of fetched
        from 'thisconfig' dictionary.
    :param config_file: optional path of the config file to edit,
        defaults to 'config.py' in the current directory
    '''
    if value is None:
        origkey = key + '_orig'
        value = thisconfig[origkey] if origkey in thisconfig else thisconfig[key]
    cfg = config_file if config_file else 'config.py'

    # load config file, create one if it doesn't exist
    if not os.path.exists(cfg):
        open(cfg, 'a').close()
        logging.info("Creating empty " + cfg)
    with open(cfg, 'r', encoding="utf-8") as f:
        lines = f.readlines()

    # make sure the file ends with a carriage return
    if len(lines) > 0:
        if not lines[-1].endswith('\n'):
            lines[-1] += '\n'

    # regex for finding and replacing python string variable
    # definitions/initializations, for both quoting styles
    pattern = re.compile(r'^[\s#]*' + re.escape(key) + r'\s*=\s*"[^"]*"')
    repl = key + ' = "' + value + '"'
    pattern2 = re.compile(r'^[\s#]*' + re.escape(key) + r"\s*=\s*'[^']*'")
    repl2 = key + " = '" + value + "'"

    # If we replaced this line once, we make sure won't be a
    # second instance of this line for this key in the document.
    didRepl = False
    # edit config file
    with open(cfg, 'w', encoding="utf-8") as f:
        for line in lines:
            if pattern.match(line) or pattern2.match(line):
                if not didRepl:
                    # use a callable replacement so that backslashes in the
                    # value (e.g. Windows paths) are not interpreted as
                    # regex escape sequences by re.sub()
                    line = pattern.sub(lambda m: repl, line)
                    line = pattern2.sub(lambda m: repl2, line)
                    f.write(line)
                    didRepl = True
            else:
                f.write(line)
        if not didRepl:
            f.write('\n')
            f.write(repl)
            f.write('\n')
def parse_xml(path):
def is_repo_file(filename):
    '''Whether the file in a repo is a build product to be delivered to users'''
    if isinstance(filename, str):
        filename = filename.encode('utf-8', errors="surrogateescape")
    # index and metadata files live next to the APKs but are not products
    index_files = [
        b'index.jar',
        b'index_unsigned.jar',
        b'index.xml',
        b'index.html',
        b'index-v1.jar',
        b'index-v1.json',
        b'categories.txt',
    ]
    if not os.path.isfile(filename):
        return False
    if filename.endswith(b'.asc') or filename.endswith(b'.sig'):
        return False
    return os.path.basename(filename) not in index_files