import hashlib
import socket
import base64
+import zipfile
+import tempfile
+import json
import xml.etree.ElementTree as XMLElementTree
from binascii import hexlify
-from datetime import datetime
+from datetime import datetime, timedelta
from distutils.version import LooseVersion
from queue import Queue
from zipfile import ZipFile
import fdroidserver.metadata
from fdroidserver import _
-from fdroidserver.exception import FDroidException, VCSException, BuildException
+from fdroidserver.exception import FDroidException, VCSException, NoSubmodulesException,\
+ BuildException, VerificationException
from .asynchronousfilereader import AsynchronousFileReader
+# this is the build-tools version, aapt has a separate version that
+# has to be manually set in test_aapt_version()
+MINIMUM_AAPT_VERSION = '26.0.0'
# A signature block file with a .DSA, .RSA, or .EC extension
CERT_PATH_REGEX = re.compile(r'^META-INF/.*\.(DSA|EC|RSA)$')
'r13b': None,
'r14b': None,
'r15c': None,
+ 'r16': None,
},
'qt_sdk_path': None,
- 'build_tools': "25.0.2",
+ 'build_tools': MINIMUM_AAPT_VERSION,
'force_build_tools': False,
'java_paths': None,
'ant': "ant",
def setup_global_opts(parser):
+ try: # the buildserver VM might not have PIL installed
+ from PIL import PngImagePlugin
+ logger = logging.getLogger(PngImagePlugin.__name__)
+ logger.setLevel(logging.INFO) # tame the "STREAM" debug messages
+ except ImportError:
+ pass
+
parser.add_argument("-v", "--verbose", action="store_true", default=False,
help=_("Spew out even more information than normal"))
parser.add_argument("-q", "--quiet", action="store_true", default=False,
help=_("Restrict output to warnings and errors"))
+def _add_java_paths_to_config(pathlist, thisconfig):
+ def path_version_key(s):
+ versionlist = []
+ for u in re.split('[^0-9]+', s):
+ try:
+ versionlist.append(int(u))
+ except ValueError:
+ pass
+ return versionlist
+
+ for d in sorted(pathlist, key=path_version_key):
+ if os.path.islink(d):
+ continue
+ j = os.path.basename(d)
+ # the last one found will be the canonical one, so order appropriately
+ for regex in [
+ r'^1\.([6-9])\.0\.jdk$', # OSX
+ r'^jdk1\.([6-9])\.0_[0-9]+.jdk$', # OSX and Oracle tarball
+ r'^jdk1\.([6-9])\.0_[0-9]+$', # Oracle Windows
+ r'^jdk([6-9])-openjdk$', # Arch
+ r'^java-([6-9])-openjdk$', # Arch
+ r'^java-([6-9])-jdk$', # Arch (oracle)
+ r'^java-1\.([6-9])\.0-.*$', # RedHat
+ r'^java-([6-9])-oracle$', # Debian WebUpd8
+ r'^jdk-([6-9])-oracle-.*$', # Debian make-jpkg
+ r'^java-([6-9])-openjdk-[^c][^o][^m].*$', # Debian
+ ]:
+ m = re.match(regex, j)
+ if not m:
+ continue
+ for p in [d, os.path.join(d, 'Contents', 'Home')]:
+ if os.path.exists(os.path.join(p, 'bin', 'javac')):
+ thisconfig['java_paths'][m.group(1)] = p
+
+
def fill_config_defaults(thisconfig):
for k, v in default_config.items():
if k not in thisconfig:
pathlist.append(os.getenv('JAVA_HOME'))
if os.getenv('PROGRAMFILES') is not None:
pathlist += glob.glob(os.path.join(os.getenv('PROGRAMFILES'), 'Java', 'jdk1.[6-9].*'))
- for d in sorted(pathlist):
- if os.path.islink(d):
- continue
- j = os.path.basename(d)
- # the last one found will be the canonical one, so order appropriately
- for regex in [
- r'^1\.([6-9])\.0\.jdk$', # OSX
- r'^jdk1\.([6-9])\.0_[0-9]+.jdk$', # OSX and Oracle tarball
- r'^jdk1\.([6-9])\.0_[0-9]+$', # Oracle Windows
- r'^jdk([6-9])-openjdk$', # Arch
- r'^java-([6-9])-openjdk$', # Arch
- r'^java-([6-9])-jdk$', # Arch (oracle)
- r'^java-1\.([6-9])\.0-.*$', # RedHat
- r'^java-([6-9])-oracle$', # Debian WebUpd8
- r'^jdk-([6-9])-oracle-.*$', # Debian make-jpkg
- r'^java-([6-9])-openjdk-[^c][^o][^m].*$', # Debian
- ]:
- m = re.match(regex, j)
- if not m:
- continue
- for p in [d, os.path.join(d, 'Contents', 'Home')]:
- if os.path.exists(os.path.join(p, 'bin', 'javac')):
- thisconfig['java_paths'][m.group(1)] = p
+ _add_java_paths_to_config(pathlist, thisconfig)
for java_version in ('7', '8', '9'):
if java_version not in thisconfig['java_paths']:
return config
+def assert_config_keystore(config):
+    """Check whether keystore is configured correctly and raise exception if not."""
+
+ nosigningkey = False
+ if 'repo_keyalias' not in config:
+ nosigningkey = True
+ logging.critical(_("'repo_keyalias' not found in config.py!"))
+ if 'keystore' not in config:
+ nosigningkey = True
+ logging.critical(_("'keystore' not found in config.py!"))
+ elif not os.path.exists(config['keystore']):
+ nosigningkey = True
+ logging.critical("'" + config['keystore'] + "' does not exist!")
+ if 'keystorepass' not in config:
+ nosigningkey = True
+ logging.critical(_("'keystorepass' not found in config.py!"))
+ if 'keypass' not in config:
+ nosigningkey = True
+ logging.critical(_("'keypass' not found in config.py!"))
+ if nosigningkey:
+ raise FDroidException("This command requires a signing key, " +
+ "you can create one using: fdroid update --create-key")
+
+
def find_sdk_tools_cmd(cmd):
'''find a working path to a tool from the Android SDK'''
minor = m.group(2)
bugfix = m.group(3)
# the Debian package has the version string like "v0.2-23.0.2"
- if '.' not in bugfix and LooseVersion('.'.join((major, minor, bugfix))) < LooseVersion('0.2.2166767'):
- logging.warning(_("'{aapt}' is too old, fdroid requires build-tools-23.0.0 or newer!")
- .format(aapt=aapt))
+ too_old = False
+ if '.' in bugfix:
+ if LooseVersion(bugfix) < LooseVersion(MINIMUM_AAPT_VERSION):
+ too_old = True
+ elif LooseVersion('.'.join((major, minor, bugfix))) < LooseVersion('0.2.4062713'):
+ too_old = True
+ if too_old:
+ logging.warning(_("'{aapt}' is too old, fdroid requires build-tools-{version} or newer!")
+ .format(aapt=aapt, version=MINIMUM_AAPT_VERSION))
else:
logging.warning(_('Unknown version of aapt, might cause problems: ') + output)
versioned_build_tools = os.path.join(build_tools, thisconfig['build_tools'])
if not os.path.isdir(versioned_build_tools):
raise FDroidException(
- _("Android Build Tools path '{path}' does not exist!")
+ _("Android build-tools path '{path}' does not exist!")
.format(path=versioned_build_tools))
return glob.glob('.fdroid.[a-jl-z]*[a-rt-z]')
-def read_pkg_args(args, allow_vercodes=False):
+def read_pkg_args(appid_versionCode_pairs, allow_vercodes=False):
"""
- :param args: arguments in the form of multiple appid:[vc] strings
+    :param appid_versionCode_pairs: arguments in the form of multiple appid:[vc] strings
:returns: a dictionary with the set of vercodes specified for each package
"""
-
vercodes = {}
- if not args:
+ if not appid_versionCode_pairs:
return vercodes
- for p in args:
+ for p in appid_versionCode_pairs:
if allow_vercodes and ':' in p:
package, vercode = p.split(':')
else:
return vercodes
-def read_app_args(args, allapps, allow_vercodes=False):
- """
- On top of what read_pkg_args does, this returns the whole app metadata, but
- limiting the builds list to the builds matching the vercodes specified.
+def read_app_args(appid_versionCode_pairs, allapps, allow_vercodes=False):
+ """Build a list of App instances for processing
+
+ On top of what read_pkg_args does, this returns the whole app
+ metadata, but limiting the builds list to the builds matching the
+ appid_versionCode_pairs and vercodes specified. If no appid_versionCode_pairs are specified, then
+ all App and Build instances are returned.
+
"""
- vercodes = read_pkg_args(args, allow_vercodes)
+ vercodes = read_pkg_args(appid_versionCode_pairs, allow_vercodes)
if not vercodes:
return allapps
def has_extension(filename, ext):
- _, f_ext = get_extension(filename)
+ _ignored, f_ext = get_extension(filename)
return ext == f_ext
return result
+apk_release_filename = re.compile('(?P<appid>[a-zA-Z0-9_\.]+)_(?P<vercode>[0-9]+)\.apk')
+apk_release_filename_with_sigfp = re.compile('(?P<appid>[a-zA-Z0-9_\.]+)_(?P<vercode>[0-9]+)_(?P<sigfp>[0-9a-f]{7})\.apk')
+
+
+def apk_parse_release_filename(apkname):
+    """Parses the name of an APK file according to the F-Droid APK naming
+    scheme and returns the tokens.
+
+    WARNING: Returned values don't necessarily represent the APK's actual
+    properties, they are just parsed from the file name.
+
+    :returns: A triplet containing (appid, versionCode, signer), where appid
+        should be the package name, versionCode should be the integer
+        representation of the APK's version and signer should be the first 7 hex
+        digits of the sha256 signing key fingerprint which was used to sign
+        this APK.
+ """
+ m = apk_release_filename_with_sigfp.match(apkname)
+ if m:
+ return m.group('appid'), m.group('vercode'), m.group('sigfp')
+ m = apk_release_filename.match(apkname)
+ if m:
+ return m.group('appid'), m.group('vercode'), None
+ return None, None, None
+
+
def get_release_filename(app, build):
if build.output:
return "%s_%s.%s" % (app.id, build.versionCode, get_file_extension(build.output))
def repotype(self):
return None
- # Take the local repository to a clean version of the given revision, which
- # is specificed in the VCS's native format. Beforehand, the repository can
- # be dirty, or even non-existent. If the repository does already exist
- # locally, it will be updated from the origin, but only once in the
- # lifetime of the vcs object.
- # None is acceptable for 'rev' if you know you are cloning a clean copy of
- # the repo - otherwise it must specify a valid revision.
+ def clientversion(self):
+ versionstr = FDroidPopen(self.clientversioncmd()).output
+ return versionstr[0:versionstr.find('\n')]
+
+ def clientversioncmd(self):
+ return None
+
def gotorevision(self, rev, refresh=True):
+ """Take the local repository to a clean version of the given
+        revision, which is specified in the VCS's native
+ format. Beforehand, the repository can be dirty, or even
+ non-existent. If the repository does already exist locally, it
+ will be updated from the origin, but only once in the lifetime
+ of the vcs object. None is acceptable for 'rev' if you know
+ you are cloning a clean copy of the repo - otherwise it must
+ specify a valid revision.
+ """
if self.clone_failed:
raise VCSException(_("Downloading the repository already failed once, not trying again."))
if exc is not None:
raise exc
- # Derived classes need to implement this. It's called once basic checking
- # has been performend.
def gotorevisionx(self, rev): # pylint: disable=unused-argument
+ """Derived classes need to implement this.
+
+ It's called once basic checking has been performed.
+ """
raise VCSException("This VCS type doesn't define gotorevisionx")
# Initialise and update submodules
rtags.append(tag)
return rtags
- # Get a list of all the known tags, sorted from newest to oldest
def latesttags(self):
+ """Get a list of all the known tags, sorted from newest to oldest"""
raise VCSException('latesttags not supported for this vcs type')
- # Get current commit reference (hash, revision, etc)
def getref(self):
+ """Get current commit reference (hash, revision, etc)"""
raise VCSException('getref not supported for this vcs type')
- # Returns the srclib (name, path) used in setting up the current
- # revision, or None.
def getsrclib(self):
+ """Returns the srclib (name, path) used in setting up the current revision, or None."""
return self.srclib
def repotype(self):
return 'git'
- # If the local directory exists, but is somehow not a git repository, git
- # will traverse up the directory tree until it finds one that is (i.e.
- # fdroidserver) and then we'll proceed to destroy it! This is called as
- # a safety check.
+ def clientversioncmd(self):
+ return ['git', '--version']
+
+ def git(self, args, envs=dict(), cwd=None, output=True):
+ '''Prevent git fetch/clone/submodule from hanging at the username/password prompt
+
+ While fetch/pull/clone respect the command line option flags,
+ it seems that submodule commands do not. They do seem to
+ follow whatever is in env vars, if the version of git is new
+ enough. So we just throw the kitchen sink at it to see what
+ sticks.
+
+ Also, because of CVE-2017-1000117, block all SSH URLs.
+ '''
+ #
+ # supported in git >= 2.3
+ git_config = [
+ '-c', 'core.sshCommand=false',
+ '-c', 'url.https://.insteadOf=ssh://',
+ ]
+ for domain in ('bitbucket.org', 'github.com', 'gitlab.com'):
+ git_config.append('-c')
+ git_config.append('url.https://u:p@' + domain + '/.insteadOf=git@' + domain + ':')
+ git_config.append('-c')
+ git_config.append('url.https://u:p@' + domain + '.insteadOf=git://' + domain)
+ git_config.append('-c')
+ git_config.append('url.https://u:p@' + domain + '.insteadOf=https://' + domain)
+ envs.update({
+ 'GIT_TERMINAL_PROMPT': '0',
+ 'GIT_SSH': 'false', # for git < 2.3
+ })
+ return FDroidPopen(['git', ] + git_config + args,
+ envs=envs, cwd=cwd, output=output)
+
def checkrepo(self):
+ """If the local directory exists, but is somehow not a git repository,
+ git will traverse up the directory tree until it finds one
+ that is (i.e. fdroidserver) and then we'll proceed to destroy
+ it! This is called as a safety check.
+
+ """
+
p = FDroidPopen(['git', 'rev-parse', '--show-toplevel'], cwd=self.local, output=False)
result = p.output.rstrip()
if not result.endswith(self.local):
def gotorevisionx(self, rev):
if not os.path.exists(self.local):
# Brand new checkout
- p = FDroidPopen(['git', 'clone', self.remote, self.local])
+ p = self.git(['clone', self.remote, self.local])
if p.returncode != 0:
self.clone_failed = True
raise VCSException("Git clone failed", p.output)
raise VCSException(_("Git clean failed"), p.output)
if not self.refreshed:
# Get latest commits and tags from remote
- p = FDroidPopen(['git', 'fetch', 'origin'], cwd=self.local)
+ p = self.git(['fetch', 'origin'], cwd=self.local)
if p.returncode != 0:
raise VCSException(_("Git fetch failed"), p.output)
- p = FDroidPopen(['git', 'fetch', '--prune', '--tags', 'origin'], cwd=self.local, output=False)
+ p = self.git(['fetch', '--prune', '--tags', 'origin'], output=False, cwd=self.local)
if p.returncode != 0:
raise VCSException(_("Git fetch failed"), p.output)
# Recreate origin/HEAD as git clone would do it, in case it disappeared
self.checkrepo()
submfile = os.path.join(self.local, '.gitmodules')
if not os.path.isfile(submfile):
- raise VCSException(_("No git submodules available"))
+ raise NoSubmodulesException(_("No git submodules available"))
# fix submodules not accessible without an account and public key auth
with open(submfile, 'r') as f:
lines = f.readlines()
with open(submfile, 'w') as f:
for line in lines:
- if 'git@github.com' in line:
- line = line.replace('git@github.com:', 'https://github.com/')
- if 'git@gitlab.com' in line:
- line = line.replace('git@gitlab.com:', 'https://gitlab.com/')
+ for domain in ('bitbucket.org', 'github.com', 'gitlab.com'):
+ line = re.sub('git@' + domain + ':', 'https://u:p@' + domain + '/', line)
f.write(line)
p = FDroidPopen(['git', 'submodule', 'sync'], cwd=self.local, output=False)
if p.returncode != 0:
raise VCSException(_("Git submodule sync failed"), p.output)
- p = FDroidPopen(['git', 'submodule', 'update', '--init', '--force', '--recursive'], cwd=self.local)
+ p = self.git(['submodule', 'update', '--init', '--force', '--recursive'], cwd=self.local)
if p.returncode != 0:
raise VCSException(_("Git submodule update failed"), p.output)
def repotype(self):
return 'git-svn'
- # If the local directory exists, but is somehow not a git repository, git
- # will traverse up the directory tree until it finds one that is (i.e.
- # fdroidserver) and then we'll proceed to destory it! This is called as
- # a safety check.
+ def clientversioncmd(self):
+ return ['git', 'svn', '--version']
+
def checkrepo(self):
+ """If the local directory exists, but is somehow not a git repository,
+ git will traverse up the directory tree until it finds one that
+        is (i.e. fdroidserver) and then we'll proceed to destroy it!
+ This is called as a safety check.
+
+ """
p = FDroidPopen(['git', 'rev-parse', '--show-toplevel'], cwd=self.local, output=False)
result = p.output.rstrip()
if not result.endswith(self.local):
raise VCSException('Repository mismatch')
+ def git(self, args, envs=dict(), cwd=None, output=True):
+ '''Prevent git fetch/clone/submodule from hanging at the username/password prompt
+ '''
+ # CVE-2017-1000117 block all SSH URLs (supported in git >= 2.3)
+ config = ['-c', 'core.sshCommand=false']
+ envs.update({
+ 'GIT_TERMINAL_PROMPT': '0',
+ 'GIT_SSH': 'false', # for git < 2.3
+ 'SVN_SSH': 'false',
+ })
+ return FDroidPopen(['git', ] + config + args,
+ envs=envs, cwd=cwd, output=output)
+
def gotorevisionx(self, rev):
if not os.path.exists(self.local):
# Brand new checkout
- gitsvn_args = ['git', 'svn', 'clone']
+ gitsvn_args = ['svn', 'clone']
if ';' in self.remote:
remote_split = self.remote.split(';')
for i in remote_split[1:]:
elif i.startswith('branches='):
gitsvn_args.extend(['-b', i[9:]])
gitsvn_args.extend([remote_split[0], self.local])
- p = FDroidPopen(gitsvn_args, output=False)
+ p = self.git(gitsvn_args, output=False)
if p.returncode != 0:
self.clone_failed = True
raise VCSException("Git svn clone failed", p.output)
else:
gitsvn_args.extend([self.remote, self.local])
- p = FDroidPopen(gitsvn_args, output=False)
+ p = self.git(gitsvn_args, output=False)
if p.returncode != 0:
self.clone_failed = True
raise VCSException("Git svn clone failed", p.output)
else:
self.checkrepo()
# Discard any working tree changes
- p = FDroidPopen(['git', 'reset', '--hard'], cwd=self.local, output=False)
+ p = self.git(['reset', '--hard'], cwd=self.local, output=False)
if p.returncode != 0:
raise VCSException("Git reset failed", p.output)
# Remove untracked files now, in case they're tracked in the target
# revision (it happens!)
- p = FDroidPopen(['git', 'clean', '-dffx'], cwd=self.local, output=False)
+ p = self.git(['clean', '-dffx'], cwd=self.local, output=False)
if p.returncode != 0:
raise VCSException("Git clean failed", p.output)
if not self.refreshed:
# Get new commits, branches and tags from repo
- p = FDroidPopen(['git', 'svn', 'fetch'], cwd=self.local, output=False)
+ p = self.git(['svn', 'fetch'], cwd=self.local, output=False)
if p.returncode != 0:
raise VCSException("Git svn fetch failed")
- p = FDroidPopen(['git', 'svn', 'rebase'], cwd=self.local, output=False)
+ p = self.git(['svn', 'rebase'], cwd=self.local, output=False)
if p.returncode != 0:
raise VCSException("Git svn rebase failed", p.output)
self.refreshed = True
nospaces_rev = rev.replace(' ', '%20')
# Try finding a svn tag
for treeish in ['origin/', '']:
- p = FDroidPopen(['git', 'checkout', treeish + 'tags/' + nospaces_rev], cwd=self.local, output=False)
+ p = self.git(['checkout', treeish + 'tags/' + nospaces_rev], cwd=self.local, output=False)
if p.returncode == 0:
break
if p.returncode != 0:
svn_rev = svn_rev if svn_rev[0] == 'r' else 'r' + svn_rev
- p = FDroidPopen(['git', 'svn', 'find-rev', '--before', svn_rev, treeish], cwd=self.local, output=False)
+ p = self.git(['svn', 'find-rev', '--before', svn_rev, treeish], cwd=self.local, output=False)
git_rev = p.output.rstrip()
if p.returncode == 0 and git_rev:
if p.returncode != 0 or not git_rev:
# Try a plain git checkout as a last resort
- p = FDroidPopen(['git', 'checkout', rev], cwd=self.local, output=False)
+ p = self.git(['checkout', rev], cwd=self.local, output=False)
if p.returncode != 0:
raise VCSException("No git treeish found and direct git checkout of '%s' failed" % rev, p.output)
else:
# Check out the git rev equivalent to the svn rev
- p = FDroidPopen(['git', 'checkout', git_rev], cwd=self.local, output=False)
+ p = self.git(['checkout', git_rev], cwd=self.local, output=False)
if p.returncode != 0:
raise VCSException(_("Git checkout of '%s' failed") % rev, p.output)
# Get rid of any uncontrolled files left behind
- p = FDroidPopen(['git', 'clean', '-dffx'], cwd=self.local, output=False)
+ p = self.git(['clean', '-dffx'], cwd=self.local, output=False)
if p.returncode != 0:
raise VCSException(_("Git clean failed"), p.output)
def repotype(self):
return 'hg'
+ def clientversioncmd(self):
+ return ['hg', '--version']
+
def gotorevisionx(self, rev):
if not os.path.exists(self.local):
- p = FDroidPopen(['hg', 'clone', self.remote, self.local], output=False)
+ p = FDroidPopen(['hg', 'clone', '--ssh', 'false', self.remote, self.local], output=False)
if p.returncode != 0:
self.clone_failed = True
raise VCSException("Hg clone failed", p.output)
raise VCSException("Unexpected output from hg status -uS: " + line)
FDroidPopen(['rm', '-rf', line[2:]], cwd=self.local, output=False)
if not self.refreshed:
- p = FDroidPopen(['hg', 'pull'], cwd=self.local, output=False)
+ p = FDroidPopen(['hg', 'pull', '--ssh', 'false'], cwd=self.local, output=False)
if p.returncode != 0:
raise VCSException("Hg pull failed", p.output)
self.refreshed = True
def repotype(self):
return 'bzr'
+ def clientversioncmd(self):
+ return ['bzr', '--version']
+
+ def bzr(self, args, envs=dict(), cwd=None, output=True):
+ '''Prevent bzr from ever using SSH to avoid security vulns'''
+ envs.update({
+ 'BZR_SSH': 'false',
+ })
+ return FDroidPopen(['bzr', ] + args, envs=envs, cwd=cwd, output=output)
+
def gotorevisionx(self, rev):
if not os.path.exists(self.local):
- p = FDroidPopen(['bzr', 'branch', self.remote, self.local], output=False)
+ p = self.bzr(['branch', self.remote, self.local], output=False)
if p.returncode != 0:
self.clone_failed = True
raise VCSException("Bzr branch failed", p.output)
else:
- p = FDroidPopen(['bzr', 'clean-tree', '--force', '--unknown', '--ignored'], cwd=self.local, output=False)
+ p = self.bzr(['clean-tree', '--force', '--unknown', '--ignored'], cwd=self.local, output=False)
if p.returncode != 0:
raise VCSException("Bzr revert failed", p.output)
if not self.refreshed:
- p = FDroidPopen(['bzr', 'pull'], cwd=self.local, output=False)
+ p = self.bzr(['pull'], cwd=self.local, output=False)
if p.returncode != 0:
raise VCSException("Bzr update failed", p.output)
self.refreshed = True
revargs = list(['-r', rev] if rev else [])
- p = FDroidPopen(['bzr', 'revert'] + revargs, cwd=self.local, output=False)
+ p = self.bzr(['revert'] + revargs, cwd=self.local, output=False)
if p.returncode != 0:
raise VCSException("Bzr revert of '%s' failed" % rev, p.output)
def _gettags(self):
- p = FDroidPopen(['bzr', 'tags'], cwd=self.local, output=False)
+ p = self.bzr(['tags'], cwd=self.local, output=False)
return [tag.split(' ')[0].strip() for tag in
p.output.splitlines()]
# Remove forced debuggable flags
logging.debug("Removing debuggable flags from %s" % root_dir)
for root, dirs, files in os.walk(root_dir):
- if 'AndroidManifest.xml' in files:
+ if 'AndroidManifest.xml' in files and os.path.isfile(os.path.join(root, 'AndroidManifest.xml')):
regsub_file(r'android:debuggable="[^"]*"',
'',
os.path.join(root, 'AndroidManifest.xml'))
vercode = None
package = None
+ flavour = None
+ if app.builds and 'gradle' in app.builds[-1] and app.builds[-1].gradle:
+ flavour = app.builds[-1].gradle[-1]
+
if has_extension(path, 'gradle'):
with open(path, 'r') as f:
+ inside_flavour_group = 0
+ inside_required_flavour = 0
for line in f:
if gradle_comment.match(line):
continue
- # Grab first occurence of each to avoid running into
- # alternative flavours and builds.
- if not package:
- matches = psearch_g(line)
- if matches:
- s = matches.group(2)
- if app_matches_packagename(app, s):
- package = s
- if not version:
- matches = vnsearch_g(line)
- if matches:
- version = matches.group(2)
- if not vercode:
- matches = vcsearch_g(line)
- if matches:
- vercode = matches.group(1)
+
+ if inside_flavour_group > 0:
+ if inside_required_flavour > 0:
+ matches = psearch_g(line)
+ if matches:
+ s = matches.group(2)
+ if app_matches_packagename(app, s):
+ package = s
+
+ matches = vnsearch_g(line)
+ if matches:
+ version = matches.group(2)
+
+ matches = vcsearch_g(line)
+ if matches:
+ vercode = matches.group(1)
+
+ if '{' in line:
+ inside_required_flavour += 1
+ if '}' in line:
+ inside_required_flavour -= 1
+ else:
+ if flavour and (flavour in line):
+ inside_required_flavour = 1
+
+ if '{' in line:
+ inside_flavour_group += 1
+ if '}' in line:
+ inside_flavour_group -= 1
+ else:
+ if "productFlavors" in line:
+ inside_flavour_group = 1
+ if not package:
+ matches = psearch_g(line)
+ if matches:
+ s = matches.group(2)
+ if app_matches_packagename(app, s):
+ package = s
+ if not version:
+ matches = vnsearch_g(line)
+ if matches:
+ version = matches.group(2)
+ if not vercode:
+ matches = vcsearch_g(line)
+ if matches:
+ vercode = matches.group(1)
else:
try:
xml = parse_xml(path)
return re.match("[A-Za-z_][A-Za-z_0-9.]+$", name)
-# Get the specified source library.
-# Returns the path to it. Normally this is the path to be used when referencing
-# it, which may be a subdirectory of the actual project. If you want the base
-# directory of the project, pass 'basepath=True'.
def getsrclib(spec, srclib_dir, subdir=None, basepath=False,
raw=False, prepare=True, preponly=False, refresh=True,
build=None):
+ """Get the specified source library.
+ Returns the path to it. Normally this is the path to be used when
+ referencing it, which may be a subdirectory of the actual project. If
+ you want the base directory of the project, pass 'basepath=True'.
+
+ """
number = None
subdir = None
if raw:
dest = os.path.join(build_dir, part)
logging.info("Removing {0}".format(part))
if os.path.lexists(dest):
- if os.path.islink(dest):
- FDroidPopen(['unlink', dest], output=False)
+ # rmtree can only handle directories that are not symlinks, so catch anything else
+ if not os.path.isdir(dest) or os.path.islink(dest):
+ os.remove(dest)
else:
- FDroidPopen(['rm', '-rf', dest], output=False)
+ shutil.rmtree(dest)
else:
logging.info("...but it didn't exist")
return (root_dir, srclibpaths)
-# Extend via globbing the paths from a field and return them as a map from
-# original path to resulting paths
def getpaths_map(build_dir, globpaths):
+ """Extend via globbing the paths from a field and return them as a map from original path to resulting paths"""
paths = dict()
for p in globpaths:
p = p.strip()
return paths
-# Extend via globbing the paths from a field and return them as a set
def getpaths(build_dir, globpaths):
+ """Extend via globbing the paths from a field and return them as a set"""
paths_map = getpaths_map(build_dir, globpaths)
paths = set()
for k, v in paths_map.items():
return [int(sp) if sp.isdigit() else sp for sp in re.split(r'(\d+)', s)]
+def check_system_clock(dt_obj, path):
+ """Check if system clock is updated based on provided date
+
+ If an APK has files newer than the system time, suggest updating
+ the system clock. This is useful for offline systems, used for
+ signing, which do not have another source of clock sync info. It
+ has to be more than 24 hours newer because ZIP/APK files do not
+ store timezone info
+
+ """
+ checkdt = dt_obj - timedelta(1)
+ if datetime.today() < checkdt:
+ logging.warning(_('System clock is older than date in {path}!').format(path=path)
+ + '\n' + _('Set clock to that time using:') + '\n'
+ + 'sudo date -s "' + str(dt_obj) + '"')
+
+
class KnownApks:
"""permanent store of existing APKs with the date they were added
"""
def __init__(self):
+ '''Load filename/date info about previously seen APKs
+
+ Since the appid and date strings both will never have spaces,
+ this is parsed as a list from the end to allow the filename to
+ have any combo of spaces.
+ '''
+
self.path = os.path.join('stats', 'known_apks.txt')
self.apks = {}
if os.path.isfile(self.path):
if len(t) == 2:
self.apks[t[0]] = (t[1], None)
else:
- self.apks[t[0]] = (t[1], datetime.strptime(t[2], '%Y-%m-%d'))
+ appid = t[-2]
+ date = datetime.strptime(t[-1], '%Y-%m-%d')
+ filename = line[0:line.rfind(appid) - 1]
+ self.apks[filename] = (appid, date)
+ check_system_clock(date, self.path)
self.changed = False
def writeifchanged(self):
default_date = datetime.utcnow()
self.apks[apkName] = (app, default_date)
self.changed = True
- _, added = self.apks[apkName]
+ _ignored, added = self.apks[apkName]
return added
- # Look up information - given the 'apkname', returns (app id, date added/None).
- # Or returns None for an unknown apk.
def getapp(self, apkname):
+ """Look up information - given the 'apkname', returns (app id, date added/None).
+
+ Or returns None for an unknown apk.
+ """
if apkname in self.apks:
return self.apks[apkname]
return None
- # Get the most recent 'num' apps added to the repo, as a list of package ids
- # with the most recent first.
def getlatest(self, num):
+ """Get the most recent 'num' apps added to the repo, as a list of package ids with the most recent first"""
apps = {}
for apk, app in self.apks.items():
appid, added = app
.format(apkfilename=apkfile))
+def get_minSdkVersion_aapt(apkfile):
+ """Extract the minimum supported Android SDK from an APK using aapt
+
+ :param apkfile: path to an APK file.
+ :returns: the integer representing the SDK version
+ """
+ r = re.compile(r"^sdkVersion:'([0-9]+)'")
+ p = SdkToolsPopen(['aapt', 'dump', 'badging', apkfile], output=False)
+ for line in p.output.splitlines():
+ m = r.match(line)
+ if m:
+ return int(m.group(1))
+ raise FDroidException(_('Reading minSdkVersion failed: "{apkfilename}"')
+ .format(apkfilename=apkfile))
+
+
class PopenResult:
def __init__(self):
self.returncode = None
raise BuildException("OSError while trying to execute " +
' '.join(commands) + ': ' + str(e))
+ # TODO are these AsynchronousFileReader threads always exiting?
if not stderr_to_stdout and options.verbose:
stderr_queue = Queue()
stderr_reader = AsynchronousFileReader(p.stderr, stderr_queue)
apk_sigfile = re.compile(r'META-INF/[0-9A-Za-z]+\.(SF|RSA|DSA|EC)')
+def signer_fingerprint_short(sig):
+ """Obtain shortened sha256 signing-key fingerprint for pkcs7 signature.
+
+ Extracts the first 7 hexadecimal digits of sha256 signing-key fingerprint
+ for a given pkcs7 signature.
+
+ :param sig: Contents of an APK signing certificate.
+ :returns: shortened signing-key fingerprint.
+ """
+ return signer_fingerprint(sig)[:7]
+
+
+def signer_fingerprint(sig):
+ """Obtain sha256 signing-key fingerprint for pkcs7 signature.
+
+ Extracts hexadecimal sha256 signing-key fingerprint string
+ for a given pkcs7 signature.
+
+    :param sig: Contents of an APK signature.
+    :returns: sha256 signature fingerprint.
+ """
+ cert_encoded = get_certificate(sig)
+ return hashlib.sha256(cert_encoded).hexdigest()
+
+
+def apk_signer_fingerprint(apk_path):
+ """Obtain sha256 signing-key fingerprint for APK.
+
+ Extracts hexadecimal sha256 signing-key fingerprint string
+ for a given APK.
+
+    :param apk_path: path to APK
+ :returns: signature fingerprint
+ """
+
+ with zipfile.ZipFile(apk_path, 'r') as apk:
+ certs = [n for n in apk.namelist() if CERT_PATH_REGEX.match(n)]
+
+ if len(certs) < 1:
+ logging.error("Found no signing certificates on %s" % apk_path)
+ return None
+ if len(certs) > 1:
+ logging.error("Found multiple signing certificates on %s" % apk_path)
+ return None
+
+ cert = apk.read(certs[0])
+ return signer_fingerprint(cert)
+
+
+def apk_signer_fingerprint_short(apk_path):
+ """Obtain shortened sha256 signing-key fingerprint for APK.
+
+ Extracts the first 7 hexadecimal digits of sha256 signing-key fingerprint
+ for a given pkcs7 APK.
+
+ :param apk_path: path to APK
+ :returns: shortened signing-key fingerprint
+ """
+ return apk_signer_fingerprint(apk_path)[:7]
+
+
def metadata_get_sigdir(appid, vercode=None):
"""Get signature directory for app"""
if vercode:
return os.path.join('metadata', appid, 'signatures')
+def metadata_find_developer_signature(appid, vercode=None):
+    """Tries to find the developer signature for the given appid.
+
+    This picks the first signature file found in metadata and returns its
+    signing-key fingerprint.
+
+    :param appid: app id string
+    :param vercode: app version code; if None, all version dirs are searched
+    :returns: sha256 signing key fingerprint of the developer signing key.
+        None in case no signature can be found.
+    :raises FDroidException: if a signature dir contains more than one
+        signature file
+    """
+
+    # fetch list of dirs for all versions of signatures
+    appversigdirs = []
+    if vercode:
+        appversigdirs.append(metadata_get_sigdir(appid, vercode))
+    else:
+        appsigdir = metadata_get_sigdir(appid)
+        if os.path.isdir(appsigdir):
+            numre = re.compile('[0-9]+')
+            for ver in os.listdir(appsigdir):
+                if numre.match(ver):
+                    appversigdir = os.path.join(appsigdir, ver)
+                    appversigdirs.append(appversigdir)
+
+    for sigdir in appversigdirs:
+        sigs = glob.glob(os.path.join(sigdir, '*.DSA')) + \
+            glob.glob(os.path.join(sigdir, '*.EC')) + \
+            glob.glob(os.path.join(sigdir, '*.RSA'))
+        if len(sigs) > 1:
+            raise FDroidException('ambiguous signatures, please make sure there is only one signature in \'{}\'. (The signature has to be the App maintainers signature for version of the APK.)'.format(sigdir))
+        for sig in sigs:
+            with open(sig, 'rb') as f:
+                return signer_fingerprint(f.read())
+    return None
+
+
+def metadata_find_signing_files(appid, vercode):
+    """Gets a list of signed manifests and signatures from the metadata dir.
+
+    :param appid: app id string
+    :param vercode: app version code
+    :returns: a list of triplets for each signing key with following paths:
+        (signature_file, signed_file, manifest_file)
+    """
+    ret = []
+    sigdir = metadata_get_sigdir(appid, vercode)
+    sigs = glob.glob(os.path.join(sigdir, '*.DSA')) + \
+        glob.glob(os.path.join(sigdir, '*.EC')) + \
+        glob.glob(os.path.join(sigdir, '*.RSA'))
+    # raw string: '\.' in a plain string is a deprecated invalid escape
+    extre = re.compile(r'(\.DSA|\.EC|\.RSA)$')
+    for sig in sigs:
+        sf = extre.sub('.SF', sig)
+        if os.path.isfile(sf):
+            mf = os.path.join(sigdir, 'MANIFEST.MF')
+            if os.path.isfile(mf):
+                ret.append((sig, sf, mf))
+    return ret
+
+
+def metadata_find_developer_signing_files(appid, vercode):
+    """Get developer signature files for specified app from metadata.
+
+    :param appid: app id string
+    :param vercode: app version code
+    :returns: a triplet of paths for signing files from metadata:
+        (signature_file, signed_file, manifest_file), or None unless
+        exactly one signing key was found
+    """
+    signingfiles = metadata_find_signing_files(appid, vercode)
+    if len(signingfiles) == 1:
+        return signingfiles[0]
+    return None
+
+
+def apk_strip_signatures(signed_apk, strip_manifest=False):
+    """Removes signatures from APK.
+
+    :param signed_apk: path to apk file.
+    :param strip_manifest: when set to True also the manifest file will
+                           be removed from the APK.
+    """
+    with tempfile.TemporaryDirectory() as tmpdir:
+        tmp_apk = os.path.join(tmpdir, 'tmp.apk')
+        shutil.move(signed_apk, tmp_apk)
+        # rebuild the APK in place, leaving out the signature entries
+        with ZipFile(tmp_apk, 'r') as in_apk:
+            with ZipFile(signed_apk, 'w') as out_apk:
+                for info in in_apk.infolist():
+                    if apk_sigfile.match(info.filename):
+                        continue
+                    if strip_manifest and info.filename == 'META-INF/MANIFEST.MF':
+                        continue
+                    out_apk.writestr(info, in_apk.read(info.filename))
+
+
+def apk_implant_signatures(apkpath, signaturefile, signedfile, manifest):
+    """Implants a signature from metadata into an APK.
+
+    Note: this changes the supplied APK in place. So copy it if you
+    need the original to be preserved.
+
+    :param apkpath: location of the apk
+    :param signaturefile: path to the signature block file (.DSA/.EC/.RSA)
+    :param signedfile: path to the signed manifest file (.SF)
+    :param manifest: path to the MANIFEST.MF file
+    :raises BuildException: if zipalign fails
+    """
+    # get list of available signature files in metadata
+    with tempfile.TemporaryDirectory() as tmpdir:
+        apkwithnewsig = os.path.join(tmpdir, 'newsig.apk')
+        with ZipFile(apkpath, 'r') as in_apk:
+            with ZipFile(apkwithnewsig, 'w') as out_apk:
+                # write the signature entries first ...
+                for sig_file in [signaturefile, signedfile, manifest]:
+                    with open(sig_file, 'rb') as fp:
+                        buf = fp.read()
+                    info = zipfile.ZipInfo('META-INF/' + os.path.basename(sig_file))
+                    info.compress_type = zipfile.ZIP_DEFLATED
+                    info.create_system = 0  # "Windows" aka "FAT", what Android SDK uses
+                    out_apk.writestr(info, buf)
+                # ... then copy everything else from the original APK,
+                # skipping any old signature files and the old manifest
+                for info in in_apk.infolist():
+                    if not apk_sigfile.match(info.filename):
+                        if info.filename != 'META-INF/MANIFEST.MF':
+                            buf = in_apk.read(info.filename)
+                            out_apk.writestr(info, buf)
+        os.remove(apkpath)
+        # zipalign writes the aligned copy back to the original path
+        p = SdkToolsPopen(['zipalign', '-v', '4', apkwithnewsig, apkpath])
+        if p.returncode != 0:
+            raise BuildException("Failed to align application")
+
+
def apk_extract_signatures(apkpath, outdir, manifest=True):
"""Extracts a signature files from APK and puts them into target directory.
out_file.write(in_apk.read(f.filename))
+def sign_apk(unsigned_path, signed_path, keyalias):
+    """Sign and zipalign an unsigned APK, then save to a new file, deleting the unsigned
+
+    android-18 (4.3) finally added support for reasonable hash
+    algorithms, like SHA-256, before then, the only options were MD5
+    and SHA1 :-/ This aims to use SHA-256 when the APK does not target
+    older Android versions, and is therefore safe to do so.
+
+    https://issuetracker.google.com/issues/36956587
+    https://android-review.googlesource.com/c/platform/libcore/+/44491
+
+    :param unsigned_path: path to the unsigned APK; removed on success
+    :param signed_path: destination path for the signed, zipaligned APK
+    :param keyalias: alias of the signing key in the configured keystore
+    :raises BuildException: if jarsigner or zipalign fails
+    """
+
+    if get_minSdkVersion_aapt(unsigned_path) < 18:
+        signature_algorithm = ['-sigalg', 'SHA1withRSA', '-digestalg', 'SHA1']
+    else:
+        signature_algorithm = ['-sigalg', 'SHA256withRSA', '-digestalg', 'SHA-256']
+
+    # jarsigner signs in place, so sign first, then zipalign into signed_path;
+    # passwords go through the environment to keep them off the command line
+    p = FDroidPopen([config['jarsigner'], '-keystore', config['keystore'],
+                     '-storepass:env', 'FDROID_KEY_STORE_PASS',
+                     '-keypass:env', 'FDROID_KEY_PASS']
+                    + signature_algorithm + [unsigned_path, keyalias],
+                    envs={
+                        'FDROID_KEY_STORE_PASS': config['keystorepass'],
+                        'FDROID_KEY_PASS': config['keypass'], })
+    if p.returncode != 0:
+        raise BuildException(_("Failed to sign application"), p.output)
+
+    p = SdkToolsPopen(['zipalign', '-v', '4', unsigned_path, signed_path])
+    if p.returncode != 0:
+        raise BuildException(_("Failed to zipalign application"))
+    os.remove(unsigned_path)
+
+
def verify_apks(signed_apk, unsigned_apk, tmp_dir):
    """Verify that two apks are the same
    describing what went wrong.
    """
-    signed = ZipFile(signed_apk, 'r')
-    meta_inf_files = ['META-INF/MANIFEST.MF']
-    for f in signed.namelist():
-        if apk_sigfile.match(f) \
-           or f in ['META-INF/fdroidserverid', 'META-INF/buildserverid']:
-            meta_inf_files.append(f)
-    if len(meta_inf_files) < 3:
-        return "Signature files missing from {0}".format(signed_apk)
-
-    tmp_apk = os.path.join(tmp_dir, 'sigcp_' + os.path.basename(unsigned_apk))
-    unsigned = ZipFile(unsigned_apk, 'r')
-    # only read the signature from the signed APK, everything else from unsigned
-    with ZipFile(tmp_apk, 'w') as tmp:
-        for filename in meta_inf_files:
-            tmp.writestr(signed.getinfo(filename), signed.read(filename))
-        for info in unsigned.infolist():
-            if info.filename in meta_inf_files:
-                logging.warning('Ignoring ' + info.filename + ' from ' + unsigned_apk)
-                continue
-            if info.filename in tmp.namelist():
-                return "duplicate filename found: " + info.filename
-            tmp.writestr(info, unsigned.read(info.filename))
-    unsigned.close()
-    signed.close()
+    if not os.path.isfile(signed_apk):
+        return 'can not verify: file does not exists: {}'.format(signed_apk)
+
+    if not os.path.isfile(unsigned_apk):
+        return 'can not verify: file does not exists: {}'.format(unsigned_apk)
+
+    with ZipFile(signed_apk, 'r') as signed:
+        meta_inf_files = ['META-INF/MANIFEST.MF']
+        for f in signed.namelist():
+            if apk_sigfile.match(f) \
+               or f in ['META-INF/fdroidserverid', 'META-INF/buildserverid']:
+                meta_inf_files.append(f)
+        # need at least MANIFEST.MF plus a .SF and a signature block file
+        if len(meta_inf_files) < 3:
+            return "Signature files missing from {0}".format(signed_apk)
+
+        tmp_apk = os.path.join(tmp_dir, 'sigcp_' + os.path.basename(unsigned_apk))
+        with ZipFile(unsigned_apk, 'r') as unsigned:
+            # only read the signature from the signed APK, everything else from unsigned
+            with ZipFile(tmp_apk, 'w') as tmp:
+                for filename in meta_inf_files:
+                    tmp.writestr(signed.getinfo(filename), signed.read(filename))
+                for info in unsigned.infolist():
+                    if info.filename in meta_inf_files:
+                        logging.warning('Ignoring %s from %s',
+                                        info.filename, unsigned_apk)
+                        continue
+                    if info.filename in tmp.namelist():
+                        return "duplicate filename found: " + info.filename
+                    tmp.writestr(info, unsigned.read(info.filename))
    verified = verify_apk_signature(tmp_apk)
    return None
-def verify_apk_signature(apk, jar=False):
+def verify_jar_signature(jar):
+    """Verifies the signature of a given JAR file.
+
+    jarsigner reports even unsigned JARs as "verified"! So this has to
+    turn on -strict, then check for exit code 4, since this does not
+    expect the signature to be from a CA-signed certificate.
+
+    :param jar: path to the JAR file to verify
+    :raises: VerificationException() if the JAR's signature could not be verified
+
+    """
+
+    error = _('JAR signature failed to verify: {path}').format(path=jar)
+    try:
+        output = subprocess.check_output([config['jarsigner'], '-strict', '-verify', jar],
+                                         stderr=subprocess.STDOUT)
+        # exit code 0 also happens for unsigned JARs, so treat it as failure
+        raise VerificationException(error + '\n' + output.decode('utf-8'))
+    except subprocess.CalledProcessError as e:
+        if e.returncode == 4:
+            # 4 means signed, though not with a CA-signed certificate
+            logging.debug(_('JAR signature verified: {path}').format(path=jar))
+        else:
+            raise VerificationException(error + '\n' + e.output.decode('utf-8'))
+
+
+def verify_apk_signature(apk, min_sdk_version=None):
    """verify the signature on an APK
    Try to use apksigner whenever possible since jarsigner is very
-    shitty: unsigned APKs pass as "verified"! So this has to turn on
-    -strict then check for result 4.
+    shitty: unsigned APKs pass as "verified"! Warning, this does
+    not work on JARs with apksigner >= 0.7 (build-tools 26.0.1)
-    You can set :param: jar to True if you want to use this method
-    to verify jar signatures.
+    :returns: boolean whether the APK was verified
    """
    if set_command_in_config('apksigner'):
        args = [config['apksigner'], 'verify']
-        if jar:
-            args += ['--min-sdk-version=1']
-        return subprocess.call(args + [apk]) == 0
+        if min_sdk_version:
+            # NOTE(review): string concatenation here — min_sdk_version is
+            # presumably passed as a str, not an int; confirm at call sites
+            args += ['--min-sdk-version=' + min_sdk_version]
+        if options.verbose:
+            args += ['--verbose']
+        try:
+            output = subprocess.check_output(args + [apk])
+            if options.verbose:
+                logging.debug(apk + ': ' + output.decode('utf-8'))
+            return True
+        except subprocess.CalledProcessError as e:
+            logging.error('\n' + apk + ': ' + e.output.decode('utf-8'))
    else:
-        logging.warning("Using Java's jarsigner, not recommended for verifying APKs! Use apksigner")
-        return subprocess.call([config['jarsigner'], '-strict', '-verify', apk]) == 4
+        # warn only once per run, not per APK
+        if not config.get('jarsigner_warning_displayed'):
+            config['jarsigner_warning_displayed'] = True
+            logging.warning(_("Using Java's jarsigner, not recommended for verifying APKs! Use apksigner"))
+        try:
+            verify_jar_signature(apk)
+            return True
+        except Exception as e:
+            logging.error(e)
+    return False
def verify_old_apk_signature(apk):
    jarsigner passes unsigned APKs as "verified"! So this has to turn
    on -strict then check for result 4.
+    :returns: boolean whether the APK was verified
    """
    _java_security = os.path.join(os.getcwd(), '.java.security')
    with open(_java_security, 'w') as fp:
        fp.write('jdk.jar.disabledAlgorithms=MD2, RSA keySize < 1024')
-    return subprocess.call([config['jarsigner'], '-J-Djava.security.properties=' + _java_security,
-                            '-strict', '-verify', apk]) == 4
+    try:
+        # NOTE(review): the .java.security file is written to the current
+        # working directory and never removed — confirm that is intended
+        cmd = [
+            config['jarsigner'],
+            '-J-Djava.security.properties=' + _java_security,
+            '-strict', '-verify', apk
+        ]
+        output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+    except subprocess.CalledProcessError as e:
+        if e.returncode != 4:
+            output = e.output
+        else:
+            # 4 from jarsigner -strict means signed, though not CA-signed
+            logging.debug(_('JAR signature verified: {path}').format(path=apk))
+            return True
+
+    # exit code 0 (unsigned passes as "verified") or any other code: failure
+    logging.error(_('Old APK signature failed to verify: {path}').format(path=apk)
+                  + '\n' + output.decode('utf-8'))
+    return False
apk_badchars = re.compile('''[/ :;'"]''')
return encoder.encode(cert)
+def load_stats_fdroid_signing_key_fingerprints():
+    """Load signing-key fingerprints stored by fdroid publish from file.
+
+    :returns: the signing-key fingerprints parsed from
+        stats/publishsigkeys.json inside the JAR, or an empty dict if
+        the JAR does not exist
+    :raises FDroidException: if the JAR's signature does not validate, or
+        its signing key does not match repo_key_sha256 from config.py
+    """
+    jar_file = os.path.join('stats', 'publishsigkeys.jar')
+    if not os.path.isfile(jar_file):
+        return {}
+    cmd = [config['jarsigner'], '-strict', '-verify', jar_file]
+    p = FDroidPopen(cmd, output=False)
+    # jarsigner -strict exits with 4 for a valid, non-CA-signed signature
+    if p.returncode != 4:
+        raise FDroidException("Signature validation of '{}' failed! "
+                              "Please run publish again to rebuild this file.".format(jar_file))
+
+    jar_sigkey = apk_signer_fingerprint(jar_file)
+    repo_key_sig = config.get('repo_key_sha256')
+    if repo_key_sig:
+        if jar_sigkey != repo_key_sig:
+            raise FDroidException("Signature key fingerprint of file '{}' does not match repo_key_sha256 in config.py (found fingerprint: '{}')".format(jar_file, jar_sigkey))
+    else:
+        logging.warning("repo_key_sha256 not in config.py, setting it to the signature key fingerprint of '{}'".format(jar_file))
+        config['repo_key_sha256'] = jar_sigkey
+        write_to_config(config, 'repo_key_sha256')
+
+    with zipfile.ZipFile(jar_file, 'r') as f:
+        return json.loads(str(f.read('publishsigkeys.json'), 'utf-8'))
+
+
def write_to_config(thisconfig, key, value=None, config_file=None):
'''write a key/value to the local config.py
# load config file, create one if it doesn't exist
if not os.path.exists(cfg):
- os.mknod(cfg)
+ open(cfg, 'a').close()
logging.info("Creating empty " + cfg)
with open(cfg, 'r', encoding="utf-8") as f:
lines = f.readlines()
return False
+def local_rsync(options, fromdir, todir):
+    '''Rsync method for local to local copying of things
+
+    This is an rsync wrapper with all the settings for safe use within
+    the various fdroidserver use cases. This uses stricter rsync
+    checking on all files since people using offline mode are already
+    prioritizing security above ease and speed.
+
+    '''
+    rsyncargs = ['rsync', '--recursive', '--safe-links', '--times', '--perms',
+                 '--one-file-system', '--delete', '--chmod=Da+rx,Fa-x,a+r,u+w']
+    if not options.no_checksum:
+        rsyncargs.append('--checksum')
+    if options.verbose:
+        rsyncargs.append('--verbose')
+    if options.quiet:
+        rsyncargs.append('--quiet')
+    cmd = rsyncargs + [fromdir, todir]
+    logging.debug(' '.join(cmd))
+    if subprocess.call(cmd) != 0:
+        raise FDroidException()
+
+
def get_per_app_repos():
'''per-app repos are dirs named with the packageName of a single app'''
b'index-v1.json',
b'categories.txt',
]
+
+
+def get_examples_dir():
+    '''Return the dir where the fdroidserver example files are available
+
+    Handles three install layouts: a local git repo egg-link, a .egg
+    install, and a UNIX-style install with files under share/doc.
+
+    :returns: path to the examples dir (existence is not guaranteed for
+        the .egg/UNIX layouts)
+    '''
+    examplesdir = None
+    tmp = os.path.dirname(sys.argv[0])
+    if os.path.basename(tmp) == 'bin':
+        egg_links = glob.glob(os.path.join(tmp, '..',
+                              'local/lib/python3.*/site-packages/fdroidserver.egg-link'))
+        if egg_links:
+            # installed from local git repo; close the egg-link file
+            # explicitly instead of leaking the file handle
+            with open(egg_links[0]) as fp:
+                examplesdir = os.path.join(fp.readline().rstrip(), 'examples')
+        else:
+            # try .egg layout
+            examplesdir = os.path.dirname(os.path.dirname(__file__)) + '/share/doc/fdroidserver/examples'
+            if not os.path.exists(examplesdir):  # use UNIX layout
+                examplesdir = os.path.dirname(tmp) + '/share/doc/fdroidserver/examples'
+    else:
+        # we're running straight out of the git repo
+        prefix = os.path.normpath(os.path.join(os.path.dirname(__file__), '..'))
+        examplesdir = prefix + '/examples'
+
+    return examplesdir