import zipfile
import hashlib
import pickle
-from datetime import datetime, timedelta
+import time
+from datetime import datetime
from argparse import ArgumentParser
import collections
from binascii import hexlify
-from PIL import Image
+from PIL import Image, PngImagePlugin
import logging
-from . import btlog
+from . import _
from . import common
from . import index
from . import metadata
from .common import SdkToolsPopen
from .exception import BuildException, FDroidException
-METADATA_VERSION = 18
+METADATA_VERSION = 19
# less than the valid range of versionCode, i.e. Java's Integer.MIN_VALUE
UNSET_VERSION_CODE = -0x100000000
SCREENSHOT_DIRS = ('phoneScreenshots', 'sevenInchScreenshots',
'tenInchScreenshots', 'tvScreenshots', 'wearScreenshots')
+BLANK_PNG_INFO = PngImagePlugin.PngInfo()
+
def dpi_to_px(density):
    """Convert a screen density (dpi) to the matching icon size in pixels.

    48px is the baseline launcher-icon size at the baseline density of
    160dpi; other densities scale linearly from that.
    """
    baseline_px, baseline_dpi = 48, 160
    return int(density) * baseline_px / baseline_dpi
if app.Disabled:
wikidata += '{{Disabled|' + app.Disabled + '}}\n'
if app.AntiFeatures:
- for af in app.AntiFeatures:
+ for af in sorted(app.AntiFeatures):
wikidata += '{{AntiFeature|' + af + '}}\n'
if app.RequiresRoot:
requiresroot = 'Yes'
else:
requiresroot = 'No'
- wikidata += '{{App|id=%s|name=%s|added=%s|lastupdated=%s|source=%s|tracker=%s|web=%s|changelog=%s|donate=%s|flattr=%s|bitcoin=%s|litecoin=%s|license=%s|root=%s|author=%s|email=%s}}\n' % (
+ wikidata += '{{App|id=%s|name=%s|added=%s|lastupdated=%s|source=%s|tracker=%s|web=%s|changelog=%s|donate=%s|flattr=%s|liberapay=%s|bitcoin=%s|litecoin=%s|license=%s|root=%s|author=%s|email=%s}}\n' % (
appid,
app.Name,
app.added.strftime('%Y-%m-%d') if app.added else '',
app.Changelog,
app.Donate,
app.FlattrID,
+ app.LiberapayID,
app.Bitcoin,
app.Litecoin,
app.License,
im.thumbnail((size, size), Image.ANTIALIAS)
logging.debug("%s was too large at %s - new size is %s" % (
iconpath, oldsize, im.size))
- im.save(iconpath, "PNG")
+ im.save(iconpath, "PNG", optimize=True,
+ pnginfo=BLANK_PNG_INFO, icc_profile=None)
except Exception as e:
- logging.error("Failed resizing {0} - {1}".format(iconpath, e))
+ logging.error(_("Failed resizing {path}: {error}".format(path=iconpath, error=e)))
finally:
if fp:
certs = [n for n in apk.namelist() if common.CERT_PATH_REGEX.match(n)]
if len(certs) < 1:
- logging.error("Found no signing certificates on %s" % apkpath)
+ logging.error(_("No signing certificates found in {path}").format(path=apkpath))
return None
if len(certs) > 1:
- logging.error("Found multiple signing certificates on %s" % apkpath)
+ logging.error(_("Found multiple signing certificates in {path}").format(path=apkpath))
return None
cert = apk.read(certs[0])
def get_cache():
-    """
+    """Get the cached dict of the APK index
+
Gather information about all the apk files in the repo directory,
-    using cached data if possible.
+    using cached data if possible. Some of the index operations take a
+    long time, like calculating the SHA-256 and verifying the APK
+    signature.
+
+    The cache is invalidated if the metadata version is different, or
+    the 'allow_disabled_algorithms' config/option is different. In
+    those cases, there is no easy way to know what has changed from
+    the cache, so just rerun the whole thing.
+
:return: apkcache
+
"""
apkcachefile = get_cache_file()
+ # either the command-line option or the config setting enables this;
+ # the combined value is what gets compared against the stored cache
+ ada = options.allow_disabled_algorithms or config['allow_disabled_algorithms']
+ # --clean forces a full rescan by never reading an existing cache file
if not options.clean and os.path.exists(apkcachefile):
with open(apkcachefile, 'rb') as cf:
+ # NOTE(review): pickle is only safe because the cache file is
+ # local state written by this tool itself, never untrusted input
apkcache = pickle.load(cf, encoding='utf-8')
-    if apkcache.get("METADATA_VERSION") != METADATA_VERSION:
+    if apkcache.get("METADATA_VERSION") != METADATA_VERSION \
+       or apkcache.get('allow_disabled_algorithms') != ada:
apkcache = {}
else:
apkcache = {}
+ # stamp the cache with the two invalidation triggers so the next
+ # run can compare them against its own values
+    apkcache["METADATA_VERSION"] = METADATA_VERSION
+    apkcache['allow_disabled_algorithms'] = ada
+
return apkcache
cache_path = os.path.dirname(apkcachefile)
if not os.path.exists(cache_path):
os.makedirs(cache_path)
- apkcache["METADATA_VERSION"] = METADATA_VERSION
with open(apkcachefile, 'wb') as cf:
pickle.dump(apkcache, cf)
return sha.hexdigest()
-def has_old_openssl(filename):
-    '''checks for known vulnerable openssl versions in the APK'''
+def has_known_vulnerability(filename):
+    """checks for known vulnerabilities in the APK
+
+    Checks OpenSSL .so files in the APK to see if they are a known vulnerable
+    version. Google also enforces this:
+    https://support.google.com/faqs/answer/6376725?hl=en
+
+    Checks whether there are more than one classes.dex or AndroidManifest.xml
+    files, which is invalid and an essential part of the "Master Key" attack.
+    http://www.saurik.com/id/17
+
+    Janus is similar to Master Key but is perhaps easier to scan for.
+    https://www.guardsquare.com/en/blog/new-android-vulnerability-allows-attackers-modify-apps-without-affecting-their-signatures
+    """
+
+    found_vuln = False
# statically load this pattern
-    if not hasattr(has_old_openssl, "pattern"):
-        has_old_openssl.pattern = re.compile(b'.*OpenSSL ([01][0-9a-z.-]+)')
+    if not hasattr(has_known_vulnerability, "pattern"):
+        has_known_vulnerability.pattern = re.compile(b'.*OpenSSL ([01][0-9a-z.-]+)')
+
+ # Janus check: a real APK must start with the ZIP local-file-header
+ # magic; anything prepended before it is suspect
+    with open(filename.encode(), 'rb') as fp:
+        first4 = fp.read(4)
+    if first4 != b'\x50\x4b\x03\x04':
+        raise FDroidException(_('{path} has bad file signature "{pattern}", possible Janus exploit!')
+                              .format(path=filename, pattern=first4.decode().replace('\n', ' ')) + '\n'
+                              + 'https://www.guardsquare.com/en/blog/new-android-vulnerability-allows-attackers-modify-apps-without-affecting-their-signatures')
+ # names seen so far, so duplicate entries (Master Key) can be detected
+    files_in_apk = set()
with zipfile.ZipFile(filename) as zf:
for name in zf.namelist():
if name.endswith('libcrypto.so') or name.endswith('libssl.so'):
+ # NOTE(review): 'lib' is bound by context elided from this hunk —
+ # presumably zf.open(name) in a loop; confirm against the full file
chunk = lib.read(4096)
if chunk == b'':
break
-                        m = has_old_openssl.pattern.search(chunk)
+                        m = has_known_vulnerability.pattern.search(chunk)
if m:
version = m.group(1).decode('ascii')
-                            if version.startswith('1.0.1') and version[5] >= 'r' \
-                               or version.startswith('1.0.2') and version[5] >= 'f':
-                                logging.debug('"%s" contains recent %s (%s)', filename, name, version)
+ # patched releases: 1.0.1r+, 1.0.2f+, or any 1.1+ series;
+ # the added len() guards avoid IndexError on short strings
+ # like plain "1.0.1" that the old code crashed on
+                            if (version.startswith('1.0.1') and len(version) > 5 and version[5] >= 'r') \
+                               or (version.startswith('1.0.2') and len(version) > 5 and version[5] >= 'f') \
+                               or re.match(r'[1-9]\.[1-9]\.[0-9].*', version):
+                                logging.debug(_('"{path}" contains recent {name} ({version})')
+                                              .format(path=filename, name=name, version=version))
else:
-                                logging.warning('"%s" contains outdated %s (%s)', filename, name, version)
-                                return True
+                                logging.warning(_('"{path}" contains outdated {name} ({version})')
+                                                .format(path=filename, name=name, version=version))
+ # keep scanning instead of returning early, so every
+ # problem in the APK gets logged in one pass
+                                found_vuln = True
break
-    return False
+            elif name == 'AndroidManifest.xml' or name == 'classes.dex' or name.endswith('.so'):
+                if name in files_in_apk:
+                    logging.warning(_('{apkfilename} has multiple {name} files, looks like Master Key exploit!')
+                                    .format(apkfilename=filename, name=name))
+                    found_vuln = True
+                files_in_apk.add(name)
+    return found_vuln
def insert_obbs(repodir, apps, apks):
"""
def obbWarnDelete(f, msg):
+ # Warn about an unexpected/invalid OBB file; delete it only when the
+ # --delete-unknown option was given on the command line.
-    logging.warning(msg + f)
+    logging.warning(msg + ' ' + f)
if options.delete_unknown:
-        logging.error("Deleting unknown file: " + f)
+        logging.error(_("Deleting unknown file: {path}").format(path=f))
os.remove(f)
obbs = []
# obbfile looks like: [main|patch].<expansion-version>.<package-name>.obb
chunks = obbfile.split('.')
if chunks[0] != 'main' and chunks[0] != 'patch':
- obbWarnDelete(f, 'OBB filename must start with "main." or "patch.": ')
+ obbWarnDelete(f, _('OBB filename must start with "main." or "patch.":'))
continue
if not re.match(r'^-?[0-9]+$', chunks[1]):
- obbWarnDelete('The OBB version code must come after "' + chunks[0] + '.": ')
+ obbWarnDelete(f, _('The OBB version code must come after "{name}.":')
+ .format(name=chunks[0]))
continue
versionCode = int(chunks[1])
packagename = ".".join(chunks[2:-1])
highestVersionCode = java_Integer_MIN_VALUE
if packagename not in currentPackageNames:
- obbWarnDelete(f, "OBB's packagename does not match a supported APK: ")
+ obbWarnDelete(f, _("OBB's packagename does not match a supported APK:"))
continue
for apk in apks:
if packagename == apk['packageName'] and apk['versionCode'] > highestVersionCode:
highestVersionCode = apk['versionCode']
if versionCode > highestVersionCode:
- obbWarnDelete(f, 'OBB file has newer versionCode(' + str(versionCode)
- + ') than any APK: ')
+ obbWarnDelete(f, _('OBB file has newer versionCode({integer}) than any APK:')
+ .format(integer=str(versionCode)))
continue
obbsha256 = sha256sum(f)
obbs.append((packagename, versionCode, obbfile, obbsha256))
break
+def translate_per_build_anti_features(apps, apks):
+    """Grab the anti-features list from the build metadata
+
+    For most Anti-Features, they are really most applicable per-APK,
+    not for an app. An app can fix a vulnerability, add/remove
+    tracking, etc. This reads the 'antifeatures' list from the Build
+    entries in the fdroiddata metadata file, then transforms it into
+    the 'antiFeatures' list of unique items for the index.
+
+    The field key is all lower case in the metadata file to match the
+    rest of the Build fields. It is 'antiFeatures' camel case in the
+    implementation, index, and fdroidclient since it is translated
+    from the build 'antifeatures' field, not directly included.
+
+    """
+
+ # first pass: build packageName -> {versionCode: [anti-feature, ...]}
+ # from the per-build metadata
+    antiFeatures = dict()
+    for packageName, app in apps.items():
+        d = dict()
+        for build in app['builds']:
+            afl = build.get('antifeatures')
+            if afl:
+ # cast to int so the key matches the int versionCode
+ # stored in each apk dict below
+                d[int(build.versionCode)] = afl
+        if len(d) > 0:
+            antiFeatures[packageName] = d
+
+ # second pass: merge each matching build's list into the APK's
+ # 'antiFeatures' set (a set, so entries stay unique)
+    for apk in apks:
+        d = antiFeatures.get(apk['packageName'])
+        if d:
+            afl = d.get(apk['versionCode'])
+            if afl:
+                apk['antiFeatures'].update(afl)
+
+
+
def _get_localized_dict(app, locale):
'''get the dict to add localized store metadata to'''
if 'localized' not in app:
app[key] = text
+def _strip_and_copy_image(inpath, outpath):
+    """Remove any metadata from image and copy it to new path
+
+    Sadly, image metadata like EXIF can be used to exploit devices.
+    It is not used at all in the F-Droid ecosystem, so its much safer
+    just to remove it entirely.
+
+    :param inpath: path of the source image file
+    :param outpath: destination file, or a directory (then the source
+                    file's basename is kept)
+    :raises FDroidException: if the extension is not png/jpg/jpeg
+    """
+
+    extension = common.get_extension(inpath)[1]
+    if os.path.isdir(outpath):
+        outpath = os.path.join(outpath, os.path.basename(inpath))
+    if extension == 'png':
+ # re-encoding with a blank PngInfo and no ICC profile drops all
+ # ancillary metadata chunks from the output PNG
+        with open(inpath, 'rb') as fp:
+            in_image = Image.open(fp)
+            in_image.save(outpath, "PNG", optimize=True,
+                          pnginfo=BLANK_PNG_INFO, icc_profile=None)
+    elif extension == 'jpg' or extension == 'jpeg':
+ # rebuild the image from raw pixel data so EXIF and other
+ # metadata segments are never copied into the output JPEG
+        with open(inpath, 'rb') as fp:
+            in_image = Image.open(fp)
+            data = list(in_image.getdata())
+            out_image = Image.new(in_image.mode, in_image.size)
+            out_image.putdata(data)
+            out_image.save(outpath, "JPEG", optimize=True)
+    else:
+        raise FDroidException(_('Unsupported file type "{extension}" for repo graphic')
+                              .format(extension=extension))
+
+
def copy_triple_t_store_metadata(apps):
"""Include store metadata from the app's source repo
base, extension = common.get_extension(f)
dirname = os.path.basename(root)
- if dirname in GRAPHIC_NAMES and extension in ALLOWED_EXTENSIONS:
+ if extension in ALLOWED_EXTENSIONS \
+ and (dirname in GRAPHIC_NAMES or dirname in SCREENSHOT_DIRS):
if segments[-2] == 'listing':
locale = segments[-3]
else:
locale = segments[-2]
- destdir = os.path.join('repo', packageName, locale)
+ destdir = os.path.join('repo', packageName, locale, dirname)
os.makedirs(destdir, mode=0o755, exist_ok=True)
sourcefile = os.path.join(root, f)
- destfile = os.path.join(destdir, dirname + '.' + extension)
+ destfile = os.path.join(destdir, os.path.basename(f))
logging.debug('copying ' + sourcefile + ' ' + destfile)
- shutil.copy(sourcefile, destfile)
+ _strip_and_copy_image(sourcefile, destfile)
def insert_localized_app_metadata(apps):
"""
- sourcedirs = glob.glob(os.path.join('build', '[A-Za-z]*', 'fastlane', 'metadata', 'android', '[a-z][a-z]*'))
+ sourcedirs = glob.glob(os.path.join('build', '[A-Za-z]*', 'src', '[A-Za-z]*', 'fastlane', 'metadata', 'android', '[a-z][a-z]*'))
+ sourcedirs += glob.glob(os.path.join('build', '[A-Za-z]*', 'fastlane', 'metadata', 'android', '[a-z][a-z]*'))
sourcedirs += glob.glob(os.path.join('build', '[A-Za-z]*', 'metadata', '[a-z][a-z]*'))
sourcedirs += glob.glob(os.path.join('metadata', '[A-Za-z]*', '[a-z][a-z]*'))
continue
locale = segments[-1]
destdir = os.path.join('repo', packageName, locale)
+
+ # flavours specified in build receipt
+ build_flavours = ""
+ if apps[packageName] and 'builds' in apps[packageName] and len(apps[packageName].builds) > 0\
+ and 'gradle' in apps[packageName].builds[-1]:
+ build_flavours = apps[packageName].builds[-1].gradle
+
+ if len(segments) >= 5 and segments[4] == "fastlane" and segments[3] not in build_flavours:
+ logging.debug("ignoring due to wrong flavour")
+ continue
+
for f in files:
if f in ('description.txt', 'full_description.txt'):
_set_localized_text_entry(apps[packageName], locale, 'description',
if base in GRAPHIC_NAMES and extension in ALLOWED_EXTENSIONS:
os.makedirs(destdir, mode=0o755, exist_ok=True)
logging.debug('copying ' + os.path.join(root, f) + ' ' + destdir)
- shutil.copy(os.path.join(root, f), destdir)
+ _strip_and_copy_image(os.path.join(root, f), destdir)
for d in dirs:
if d in SCREENSHOT_DIRS:
+ if locale == 'images':
+ locale = segments[-2]
+ destdir = os.path.join('repo', packageName, locale)
for f in glob.glob(os.path.join(root, d, '*.*')):
- _, extension = common.get_extension(f)
+ _ignored, extension = common.get_extension(f)
if extension in ALLOWED_EXTENSIONS:
screenshotdestdir = os.path.join(destdir, d)
os.makedirs(screenshotdestdir, mode=0o755, exist_ok=True)
logging.debug('copying ' + f + ' ' + screenshotdestdir)
- shutil.copy(f, screenshotdestdir)
+ _strip_and_copy_image(f, screenshotdestdir)
repofiles = sorted(glob.glob(os.path.join('repo', '[A-Za-z]*', '[a-z][a-z][A-Z-.@]*')))
for d in repofiles:
base, extension = common.get_extension(filename)
if packageName not in apps:
- logging.warning('Found "%s" graphic without metadata for app "%s"!'
- % (filename, packageName))
+ logging.warning(_('Found "{path}" graphic without metadata for app "{name}"!')
+ .format(path=filename, name=packageName))
continue
graphics = _get_localized_dict(apps[packageName], locale)
if extension not in ALLOWED_EXTENSIONS:
- logging.warning('Only PNG and JPEG are supported for graphics, found: ' + f)
+ logging.warning(_('Only PNG and JPEG are supported for graphics, found: {path}').format(path=f))
elif base in GRAPHIC_NAMES:
# there can only be zero or one of these per locale
graphics[base] = filename
elif screenshotdir in SCREENSHOT_DIRS:
# there can any number of these per locale
- logging.debug('adding to ' + screenshotdir + ': ' + f)
+ logging.debug(_('adding to {name}: {path}').format(name=screenshotdir, path=f))
if screenshotdir not in graphics:
graphics[screenshotdir] = []
graphics[screenshotdir].append(filename)
else:
- logging.warning('Unsupported graphics file found: ' + f)
+ logging.warning(_('Unsupported graphics file found: {path}').format(path=f))
def scan_repo_files(apkcache, repodir, knownapks, use_date_from_file=False):
filename = os.path.join(repodir, name)
name_utf8 = name.decode('utf-8')
if filename.endswith(b'_src.tar.gz'):
- logging.debug('skipping source tarball: ' + filename.decode('utf-8'))
+ logging.debug(_('skipping source tarball: {path}')
+ .format(path=filename.decode('utf-8')))
continue
if not common.is_repo_file(filename):
continue
stat = os.stat(filename)
if stat.st_size == 0:
- raise FDroidException(filename + ' is zero size!')
+ raise FDroidException(_('{path} is zero size!')
+ .format(path=filename))
shasum = sha256sum(filename)
usecache = False
else:
repo_file['added'] = datetime(*a[:6])
if repo_file.get('hash') == shasum:
- logging.debug("Reading " + name_utf8 + " from cache")
+ logging.debug(_("Reading {apkfilename} from cache")
+ .format(apkfilename=name_utf8))
usecache = True
else:
- logging.debug("Ignoring stale cache data for " + name)
+ logging.debug(_("Ignoring stale cache data for {apkfilename}")
+ .format(apkfilename=name_utf8))
if not usecache:
- logging.debug("Processing " + name_utf8)
+ logging.debug(_("Processing {apkfilename}").format(apkfilename=name_utf8))
repo_file = collections.OrderedDict()
repo_file['name'] = os.path.splitext(name_utf8)[0]
# TODO rename apkname globally to something more generic
if use_date_from_file:
timestamp = stat.st_ctime
- default_date_param = datetime.fromtimestamp(timestamp).utctimetuple()
+ default_date_param = time.gmtime(time.mktime(datetime.fromtimestamp(timestamp).timetuple()))
else:
default_date_param = None
return repo_files, cachechanged
+def scan_apk(apk_file):
+    """
+    Scans an APK file and returns dictionary with metadata of the APK.
+
+    Attention: This does *not* verify that the APK signature is correct.
+
+    :param apk_file: The (ideally absolute) path to the APK file
+    :raises BuildException: if the signature or signer fingerprint cannot be read
+    :raises FDroidException: if has_known_vulnerability() finds a bad ZIP file signature
+    :return A dict containing APK metadata
+    """
+    apk = {
+        'hash': sha256sum(apk_file),
+        'hashType': 'sha256',
+        'uses-permission': [],
+        'uses-permission-sdk-23': [],
+        'features': [],
+        'icons_src': {},
+        'icons': {},
+        'antiFeatures': set(),
+    }
+
+ # prefer aapt when the SDK build tools are available, otherwise fall
+ # back to the pure-Python androguard parser
+    if SdkToolsPopen(['aapt', 'version'], output=False):
+        scan_apk_aapt(apk, apk_file)
+    else:
+        scan_apk_androguard(apk, apk_file)
+
+    # Get the signature, or rather the signing key fingerprints
+    logging.debug('Getting signature of {0}'.format(os.path.basename(apk_file)))
+    apk['sig'] = getsig(apk_file)
+    if not apk['sig']:
+        raise BuildException("Failed to get apk signature")
+ # the fingerprint helper gets an absolute path, built from the cwd
+    apk['signer'] = common.apk_signer_fingerprint(os.path.join(os.getcwd(),
+                                                               apk_file))
+    if not apk.get('signer'):
+        raise BuildException("Failed to get apk signing key fingerprint")
+
+    # Get size of the APK
+    apk['size'] = os.path.getsize(apk_file)
+
+    if 'minSdkVersion' not in apk:
+ # assume the lowest possible API level when the APK declares none
+        logging.warning("No SDK version information found in {0}".format(apk_file))
+        apk['minSdkVersion'] = 1
+
+    # Check for known vulnerabilities
+    if has_known_vulnerability(apk_file):
+        apk['antiFeatures'].add('KnownVuln')
+
+    return apk
+
+
def scan_apk_aapt(apk, apkfile):
p = SdkToolsPopen(['aapt', 'dump', 'badging', apkfile], output=False)
if p.returncode != 0:
if options.delete_unknown:
if os.path.exists(apkfile):
- logging.error("Failed to get apk information, deleting " + apkfile)
+ logging.error(_("Failed to get apk information, deleting {path}").format(path=apkfile))
os.remove(apkfile)
else:
logging.error("Could not find {0} to remove it".format(apkfile))
else:
- logging.error("Failed to get apk information, skipping " + apkfile)
- raise BuildException("Invalid APK")
+ logging.error(_("Failed to get apk information, skipping {path}").format(path=apkfile))
+ raise BuildException(_("Invalid APK"))
for line in p.output.splitlines():
if line.startswith("package:"):
try:
else:
if options.delete_unknown:
if os.path.exists(apkfile):
- logging.error("Failed to get apk information, deleting " + apkfile)
+ logging.error(_("Failed to get apk information, deleting {path}")
+ .format(path=apkfile))
os.remove(apkfile)
else:
- logging.error("Could not find {0} to remove it".format(apkfile))
+ logging.error(_("Could not find {path} to remove it")
+ .format(path=apkfile))
else:
- logging.error("Failed to get apk information, skipping " + apkfile)
- raise BuildException("Invaild APK")
+ logging.error(_("Failed to get apk information, skipping {path}")
+ .format(path=apkfile))
+ raise BuildException(_("Invalid APK"))
except ImportError:
raise FDroidException("androguard library is not installed and aapt not present")
except FileNotFoundError:
- logging.error("Could not open apk file for analysis")
- raise BuildException("Invalid APK")
+ logging.error(_("Could not open apk file for analysis"))
+ raise BuildException(_("Invalid APK"))
apk['packageName'] = apkobject.get_package()
apk['versionCode'] = int(apkobject.get_androidversion_code())
apk['features'].append(feature)
-def scan_apk(apkcache, apkfilename, repodir, knownapks, use_date_from_apk):
- """Scan the apk with the given filename in the given repo directory.
+def process_apk(apkcache, apkfilename, repodir, knownapks, use_date_from_apk=False,
+ allow_disabled_algorithms=False, archive_bad_sig=False):
+ """Processes the apk with the given filename in the given repo directory.
This also extracts the icons.
:param knownapks: known apks info
:param use_date_from_apk: use date from APK (instead of current date)
for newly added APKs
+ :param allow_disabled_algorithms: allow APKs with valid signatures that include
+ disabled algorithms in the signature (e.g. MD5)
+ :param archive_bad_sig: move APKs with a bad signature to the archive
:returns: (skip, apk, cachechanged) where skip is a boolean indicating whether to skip this apk,
apk is the scanned apk information, and cachechanged is True if the apkcache got changed.
"""
- if ' ' in apkfilename:
- if options.rename_apks:
- newfilename = apkfilename.replace(' ', '_')
- os.rename(os.path.join(repodir, apkfilename),
- os.path.join(repodir, newfilename))
- apkfilename = newfilename
- else:
- logging.critical("Spaces in filenames are not allowed.")
- return True, None, False
-
+ apk = {}
apkfile = os.path.join(repodir, apkfilename)
- shasum = sha256sum(apkfile)
cachechanged = False
usecache = False
if apkfilename in apkcache:
apk = apkcache[apkfilename]
- if apk.get('hash') == shasum:
- logging.debug("Reading " + apkfilename + " from cache")
+ if apk.get('hash') == sha256sum(apkfile):
+ logging.debug(_("Reading {apkfilename} from cache")
+ .format(apkfilename=apkfilename))
usecache = True
else:
- logging.debug("Ignoring stale cache data for " + apkfilename)
+ logging.debug(_("Ignoring stale cache data for {apkfilename}")
+ .format(apkfilename=apkfilename))
if not usecache:
- logging.debug("Processing " + apkfilename)
- apk = {}
- apk['hash'] = shasum
- apk['hashType'] = 'sha256'
- apk['uses-permission'] = []
- apk['uses-permission-sdk-23'] = []
- apk['features'] = []
- apk['icons_src'] = {}
- apk['icons'] = {}
- apk['antiFeatures'] = set()
+ logging.debug(_("Processing {apkfilename}").format(apkfilename=apkfilename))
try:
- if SdkToolsPopen(['aapt', 'version'], output=False):
- scan_apk_aapt(apk, apkfile)
- else:
- scan_apk_androguard(apk, apkfile)
+ apk = scan_apk(apkfile)
except BuildException:
+ logging.warning(_("Skipping '{apkfilename}' with invalid signature!")
+ .format(apkfilename=apkfilename))
return True, None, False
- if 'minSdkVersion' not in apk:
- logging.warn("No SDK version information found in {0}".format(apkfile))
- apk['minSdkVersion'] = 1
-
# Check for debuggable apks...
if common.isApkAndDebuggable(apkfile):
logging.warning('{0} is set to android:debuggable="true"'.format(apkfile))
- # Get the signature (or md5 of, to be precise)...
- logging.debug('Getting signature of {0}'.format(apkfile))
- apk['sig'] = getsig(os.path.join(os.getcwd(), apkfile))
- if not apk['sig']:
- logging.critical("Failed to get apk signature")
- return True, None, False
-
if options.rename_apks:
n = apk['packageName'] + '_' + str(apk['versionCode']) + '.apk'
std_short_name = os.path.join(repodir, n)
srcfilename = apkfilename[:-4] + "_src.tar.gz"
if os.path.exists(os.path.join(repodir, srcfilename)):
apk['srcname'] = srcfilename
- apk['size'] = os.path.getsize(apkfile)
- # verify the jar signature is correct
+ # verify the jar signature is correct, allow deprecated
+ # algorithms only if the APK is in the archive.
+ skipapk = False
if not common.verify_apk_signature(apkfile):
- return True, None, False
+ if repodir == 'archive' or allow_disabled_algorithms:
+ if common.verify_old_apk_signature(apkfile):
+ apk['antiFeatures'].update(['KnownVuln', 'DisabledAlgorithm'])
+ else:
+ skipapk = True
+ else:
+ skipapk = True
- if has_old_openssl(apkfile):
- apk['antiFeatures'].add('KnownVuln')
+ if skipapk:
+ if archive_bad_sig:
+ logging.warning(_('Archiving {apkfilename} with invalid signature!')
+ .format(apkfilename=apkfilename))
+ move_apk_between_sections(repodir, 'archive', apk)
+ else:
+ logging.warning(_('Skipping {apkfilename} with invalid signature!')
+ .format(apkfilename=apkfilename))
+ return True, None, False
apkzip = zipfile.ZipFile(apkfile, 'r')
- # if an APK has files newer than the system time, suggest updating
- # the system clock. This is useful for offline systems, used for
- # signing, which do not have another source of clock sync info. It
- # has to be more than 24 hours newer because ZIP/APK files do not
- # store timezone info
manifest = apkzip.getinfo('AndroidManifest.xml')
if manifest.date_time[1] == 0: # month can't be zero
- logging.debug('AndroidManifest.xml has no date')
+ logging.debug(_('AndroidManifest.xml has no date'))
else:
- dt_obj = datetime(*manifest.date_time)
- checkdt = dt_obj - timedelta(1)
- if datetime.today() < checkdt:
- logging.warn('System clock is older than manifest in: '
- + apkfilename
- + '\nSet clock to that time using:\n'
- + 'sudo date -s "' + str(dt_obj) + '"')
-
- iconfilename = "%s.%s.png" % (
- apk['packageName'],
- apk['versionCode'])
-
- # Extract the icon file...
- empty_densities = []
- for density in screen_densities:
- if density not in apk['icons_src']:
- empty_densities.append(density)
- continue
- iconsrc = apk['icons_src'][density]
- icon_dir = get_icon_dir(repodir, density)
- icondest = os.path.join(icon_dir, iconfilename)
-
- try:
- with open(icondest, 'wb') as f:
- f.write(get_icon_bytes(apkzip, iconsrc))
- apk['icons'][density] = iconfilename
- except (zipfile.BadZipFile, ValueError, KeyError) as e:
- logging.warning("Error retrieving icon file: %s" % (icondest))
- del apk['icons_src'][density]
- empty_densities.append(density)
-
- if '-1' in apk['icons_src']:
- iconsrc = apk['icons_src']['-1']
- iconpath = os.path.join(
- get_icon_dir(repodir, '0'), iconfilename)
- with open(iconpath, 'wb') as f:
- f.write(get_icon_bytes(apkzip, iconsrc))
- try:
- im = Image.open(iconpath)
- dpi = px_to_dpi(im.size[0])
- for density in screen_densities:
- if density in apk['icons']:
- break
- if density == screen_densities[-1] or dpi >= int(density):
- apk['icons'][density] = iconfilename
- shutil.move(iconpath,
- os.path.join(get_icon_dir(repodir, density), iconfilename))
- empty_densities.remove(density)
- break
- except Exception as e:
- logging.warn("Failed reading {0} - {1}".format(iconpath, e))
-
- if apk['icons']:
- apk['icon'] = iconfilename
-
- apkzip.close()
-
- # First try resizing down to not lose quality
- last_density = None
- for density in screen_densities:
- if density not in empty_densities:
- last_density = density
- continue
- if last_density is None:
- continue
- logging.debug("Density %s not available, resizing down from %s"
- % (density, last_density))
-
- last_iconpath = os.path.join(
- get_icon_dir(repodir, last_density), iconfilename)
- iconpath = os.path.join(
- get_icon_dir(repodir, density), iconfilename)
- fp = None
- try:
- fp = open(last_iconpath, 'rb')
- im = Image.open(fp)
-
- size = dpi_to_px(density)
+ common.check_system_clock(datetime(*manifest.date_time), apkfilename)
- im.thumbnail((size, size), Image.ANTIALIAS)
- im.save(iconpath, "PNG")
- empty_densities.remove(density)
- except Exception as e:
- logging.warning("Invalid image file at %s: %s" % (last_iconpath, e))
- finally:
- if fp:
- fp.close()
-
- # Then just copy from the highest resolution available
- last_density = None
- for density in reversed(screen_densities):
- if density not in empty_densities:
- last_density = density
- continue
- if last_density is None:
- continue
- logging.debug("Density %s not available, copying from lower density %s"
- % (density, last_density))
-
- shutil.copyfile(
- os.path.join(get_icon_dir(repodir, last_density), iconfilename),
- os.path.join(get_icon_dir(repodir, density), iconfilename))
-
- empty_densities.remove(density)
-
- for density in screen_densities:
- icon_dir = get_icon_dir(repodir, density)
- icondest = os.path.join(icon_dir, iconfilename)
- resize_icon(icondest, density)
+ # extract icons from APK zip file
+ iconfilename = "%s.%s.png" % (apk['packageName'], apk['versionCode'])
+ try:
+ empty_densities = extract_apk_icons(iconfilename, apk, apkzip, repodir)
+ finally:
+ apkzip.close() # ensure that APK zip file gets closed
- # Copy from icons-mdpi to icons since mdpi is the baseline density
- baseline = os.path.join(get_icon_dir(repodir, '160'), iconfilename)
- if os.path.isfile(baseline):
- apk['icons']['0'] = iconfilename
- shutil.copyfile(baseline,
- os.path.join(get_icon_dir(repodir, '0'), iconfilename))
+ # resize existing icons for densities missing in the APK
+ fill_missing_icon_densities(empty_densities, iconfilename, apk, repodir)
if use_date_from_apk and manifest.date_time[1] != 0:
default_date_param = datetime(*manifest.date_time)
return False, apk, cachechanged
-def scan_apks(apkcache, repodir, knownapks, use_date_from_apk=False):
- """Scan the apks in the given repo directory.
+def process_apks(apkcache, repodir, knownapks, use_date_from_apk=False):
+ """Processes the apks in the given repo directory.
This also extracts the icons.
apks = []
for apkfile in sorted(glob.glob(os.path.join(repodir, '*.apk'))):
apkfilename = apkfile[len(repodir) + 1:]
- (skip, apk, cachechanged) = scan_apk(apkcache, apkfilename, repodir, knownapks, use_date_from_apk)
+ ada = options.allow_disabled_algorithms or config['allow_disabled_algorithms']
+ (skip, apk, cachethis) = process_apk(apkcache, apkfilename, repodir, knownapks,
+ use_date_from_apk, ada, True)
if skip:
continue
apks.append(apk)
+ cachechanged = cachechanged or cachethis
return apks, cachechanged
+def extract_apk_icons(icon_filename, apk, apkzip, repo_dir):
+    """
+    Extracts icons from the given APK zip in various densities,
+    saves them into given repo directory
+    and stores their names in the APK metadata dictionary.
+
+    :param icon_filename: A string representing the icon's file name
+    :param apk: A populated dictionary containing APK metadata.
+               Needs to have 'icons_src' key
+    :param apkzip: An opened zipfile.ZipFile of the APK file
+    :param repo_dir: The directory of the APK's repository
+    :return: A list of icon densities that are missing
+    """
+    empty_densities = []
+    for density in screen_densities:
+        if density not in apk['icons_src']:
+            empty_densities.append(density)
+            continue
+        icon_src = apk['icons_src'][density]
+        icon_dir = get_icon_dir(repo_dir, density)
+        icon_dest = os.path.join(icon_dir, icon_filename)
+
+        # Extract the icon files per density
+        if icon_src.endswith('.xml'):
+ # an XML icon cannot be served directly; look for a rendered
+ # .png with the same base name in a matching density directory
+            png = os.path.basename(icon_src)[:-4] + '.png'
+            for f in apkzip.namelist():
+                if f.endswith(png):
+                    m = re.match(r'res/(drawable|mipmap)-(x*[hlm]dpi).*/', f)
+                    if m and screen_resolutions[m.group(2)] == density:
+                        icon_src = f
+            if icon_src.endswith('.xml'):
+ # no rendered PNG was found for this density
+                empty_densities.append(density)
+                continue
+        try:
+            with open(icon_dest, 'wb') as f:
+                f.write(get_icon_bytes(apkzip, icon_src))
+            apk['icons'][density] = icon_filename
+        except (zipfile.BadZipFile, ValueError, KeyError) as e:
+            logging.warning("Error retrieving icon file: %s %s", icon_dest, e)
+            del apk['icons_src'][density]
+            empty_densities.append(density)
+
+ # density '-1' is a single icon with no declared density — presumably
+ # set by the scan step (TODO confirm); measure its pixel width to
+ # pick the best density bin for it
+    if '-1' in apk['icons_src']:
+        icon_src = apk['icons_src']['-1']
+        icon_path = os.path.join(get_icon_dir(repo_dir, '0'), icon_filename)
+        with open(icon_path, 'wb') as f:
+            f.write(get_icon_bytes(apkzip, icon_src))
+        try:
+            im = Image.open(icon_path)
+            dpi = px_to_dpi(im.size[0])
+            for density in screen_densities:
+                if density in apk['icons']:
+                    break
+                if density == screen_densities[-1] or dpi >= int(density):
+                    apk['icons'][density] = icon_filename
+                    shutil.move(icon_path,
+                                os.path.join(get_icon_dir(repo_dir, density), icon_filename))
+                    empty_densities.remove(density)
+                    break
+        except Exception as e:
+            logging.warning(_("Failed reading {path}: {error}")
+                            .format(path=icon_path, error=e))
+
+    if apk['icons']:
+        apk['icon'] = icon_filename
+
+    return empty_densities
+
+
+def fill_missing_icon_densities(empty_densities, icon_filename, apk, repo_dir):
+    """
+    Resize existing icons for densities missing in the APK to ensure all densities are available
+
+    :param empty_densities: A list of icon densities that are missing
+    :param icon_filename: A string representing the icon's file name
+    :param apk: A populated dictionary containing APK metadata. Needs to have 'icons' key
+    :param repo_dir: The directory of the APK's repository
+    """
+    # First try resizing down to not lose quality
+ # NOTE(review): this pass assumes screen_densities iterates from the
+ # highest density downwards (matching the "resizing down" message) —
+ # confirm against the module-level screen_densities definition
+    last_density = None
+    for density in screen_densities:
+        if density not in empty_densities:
+            last_density = density
+            continue
+        if last_density is None:
+            continue
+        logging.debug("Density %s not available, resizing down from %s", density, last_density)
+
+        last_icon_path = os.path.join(get_icon_dir(repo_dir, last_density), icon_filename)
+        icon_path = os.path.join(get_icon_dir(repo_dir, density), icon_filename)
+        fp = None
+        try:
+            fp = open(last_icon_path, 'rb')
+            im = Image.open(fp)
+
+            size = dpi_to_px(density)
+
+ # strip PNG metadata/ICC on re-save, same policy as resize_icon()
+            im.thumbnail((size, size), Image.ANTIALIAS)
+            im.save(icon_path, "PNG", optimize=True,
+                    pnginfo=BLANK_PNG_INFO, icc_profile=None)
+            empty_densities.remove(density)
+        except Exception as e:
+            logging.warning("Invalid image file at %s: %s", last_icon_path, e)
+        finally:
+            if fp:
+                fp.close()
+
+    # Then just copy from the highest resolution available
+    last_density = None
+    for density in reversed(screen_densities):
+        if density not in empty_densities:
+            last_density = density
+            continue
+
+        if last_density is None:
+            continue
+
+        shutil.copyfile(
+            os.path.join(get_icon_dir(repo_dir, last_density), icon_filename),
+            os.path.join(get_icon_dir(repo_dir, density), icon_filename)
+        )
+        empty_densities.remove(density)
+
+ # finally normalize every density's icon to its expected pixel size
+    for density in screen_densities:
+        icon_dir = get_icon_dir(repo_dir, density)
+        icon_dest = os.path.join(icon_dir, icon_filename)
+        resize_icon(icon_dest, density)
+
+    # Copy from icons-mdpi to icons since mdpi is the baseline density
+    baseline = os.path.join(get_icon_dir(repo_dir, '160'), icon_filename)
+    if os.path.isfile(baseline):
+        apk['icons']['0'] = icon_filename
+        shutil.copyfile(baseline, os.path.join(get_icon_dir(repo_dir, '0'), icon_filename))
+
+
+
+
def apply_info_from_latest_apk(apps, apks):
"""
Some information from the apks needs to be applied up to the application level.
def archive_old_apks(apps, apks, archapks, repodir, archivedir, defaultkeepversions):
+ def filter_apk_list_sorted(apk_list):
+ res = []
+ for apk in apk_list:
+ if apk['packageName'] == appid:
+ res.append(apk)
+
+ # Sort the apk list by version code. First is highest/newest.
+ return sorted(res, key=lambda apk: apk['versionCode'], reverse=True)
+
for appid, app in apps.items():
if app.ArchivePolicy:
else:
keepversions = defaultkeepversions
- def filter_apk_list_sorted(apk_list):
- res = []
- for apk in apk_list:
- if apk['packageName'] == appid:
- res.append(apk)
-
- # Sort the apk list by version code. First is highest/newest.
- return sorted(res, key=lambda apk: apk['versionCode'], reverse=True)
-
- def move_file(from_dir, to_dir, filename, ignore_missing):
- from_path = os.path.join(from_dir, filename)
- if ignore_missing and not os.path.exists(from_path):
- return
- to_path = os.path.join(to_dir, filename)
- shutil.move(from_path, to_path)
-
- logging.debug("Checking archiving for {0} - apks:{1}, keepversions:{2}, archapks:{3}"
- .format(appid, len(apks), keepversions, len(archapks)))
+ logging.debug(_("Checking archiving for {appid} - apks:{integer}, keepversions:{keep}, archapks:{arch}")
+ .format(appid=appid, integer=len(apks), keep=keepversions, arch=len(archapks)))
- if len(apks) > keepversions:
- apklist = filter_apk_list_sorted(apks)
+ current_app_apks = filter_apk_list_sorted(apks)
+ if len(current_app_apks) > keepversions:
# Move back the ones we don't want.
- for apk in apklist[keepversions:]:
- logging.info("Moving " + apk['apkName'] + " to archive")
- move_file(repodir, archivedir, apk['apkName'], False)
- move_file(repodir, archivedir, apk['apkName'] + '.asc', True)
- for density in all_screen_densities:
- repo_icon_dir = get_icon_dir(repodir, density)
- archive_icon_dir = get_icon_dir(archivedir, density)
- if density not in apk['icons']:
- continue
- move_file(repo_icon_dir, archive_icon_dir, apk['icons'][density], True)
- if 'srcname' in apk:
- move_file(repodir, archivedir, apk['srcname'], False)
+ for apk in current_app_apks[keepversions:]:
+ move_apk_between_sections(repodir, archivedir, apk)
archapks.append(apk)
apks.remove(apk)
- elif len(apks) < keepversions and len(archapks) > 0:
- required = keepversions - len(apks)
- archapklist = filter_apk_list_sorted(archapks)
- # Move forward the ones we want again.
- for apk in archapklist[:required]:
- logging.info("Moving " + apk['apkName'] + " from archive")
- move_file(archivedir, repodir, apk['apkName'], False)
- move_file(archivedir, repodir, apk['apkName'] + '.asc', True)
- for density in all_screen_densities:
- repo_icon_dir = get_icon_dir(repodir, density)
- archive_icon_dir = get_icon_dir(archivedir, density)
- if density not in apk['icons']:
- continue
- move_file(archive_icon_dir, repo_icon_dir, apk['icons'][density], True)
- if 'srcname' in apk:
- move_file(archivedir, repodir, apk['srcname'], False)
- archapks.remove(apk)
- apks.append(apk)
+
+ current_app_archapks = filter_apk_list_sorted(archapks)
+ if len(current_app_apks) < keepversions and len(current_app_archapks) > 0:
+ kept = 0
+            # Move forward the ones we want again, except those carrying
+            # the DisabledAlgorithm anti-feature
+ for apk in current_app_archapks:
+ if 'DisabledAlgorithm' not in apk['antiFeatures']:
+ move_apk_between_sections(archivedir, repodir, apk)
+ archapks.remove(apk)
+ apks.append(apk)
+ kept += 1
+ if kept == keepversions:
+ break
+
+
def move_apk_between_sections(from_dir, to_dir, apk):
    """Move an APK and its associated files between repo and archive.

    Moves the APK file itself, its detached signature (.asc, if one
    exists), any extracted per-density icons, and the source tarball
    for apps built from source ('srcname', if present).

    :param from_dir: section directory the APK currently lives in
    :param to_dir: section directory the APK should be moved to
    :param apk: a populated APK metadata dict; needs 'apkName', and
                optionally 'icons' and 'srcname'
    """

    def _move_file(from_dir, to_dir, filename, ignore_missing):
        # Move one file, optionally tolerating its absence (used for
        # optional artifacts like .asc signatures and icons).
        from_path = os.path.join(from_dir, filename)
        if ignore_missing and not os.path.exists(from_path):
            return
        to_path = os.path.join(to_dir, filename)
        # makedirs with exist_ok handles nested icon dirs whose parents
        # may not exist yet, and avoids the exists()/mkdir() race
        os.makedirs(to_dir, exist_ok=True)
        shutil.move(from_path, to_path)

    if from_dir == to_dir:
        # moving a file onto itself would be a no-op (or an error)
        return

    logging.info("Moving %s from %s to %s" % (apk['apkName'], from_dir, to_dir))
    _move_file(from_dir, to_dir, apk['apkName'], False)
    # the detached signature may legitimately be absent
    _move_file(from_dir, to_dir, apk['apkName'] + '.asc', True)
    for density in all_screen_densities:
        from_icon_dir = get_icon_dir(from_dir, density)
        to_icon_dir = get_icon_dir(to_dir, density)
        # 'icons' maps density -> icon filename; skip densities we never
        # extracted an icon for
        if density not in apk.get('icons', {}):
            continue
        _move_file(from_icon_dir, to_icon_dir, apk['icons'][density], True)
    if 'srcname' in apk:
        _move_file(from_dir, to_dir, apk['srcname'], False)
def add_apks_to_per_app_repos(repodir, apks):
apks_per_app[apk['packageName']] = apk
if not os.path.exists(apk['per_app_icons']):
- logging.info('Adding new repo for only ' + apk['packageName'])
+ logging.info(_('Adding new repo for only {name}').format(name=apk['packageName']))
os.makedirs(apk['per_app_icons'])
apkpath = os.path.join(repodir, apk['apkName'])
shutil.copy(apkascpath, apk['per_app_repo'])
def create_metadata_from_template(apk):
    '''create a new metadata file using internal or external template

    Generate skeleton metadata for APKs that are missing it. Though the
    template file is YAML, this uses neither pyyaml nor ruamel.yaml
    since those impose things on the metadata file made from the
    template: field sort order, empty field value, formatting, etc.

    :param apk: a populated APK metadata dict; 'packageName' is
                required, 'name' is used for Name/Summary when present
    '''

    # determine the display name once; both branches fall back to the
    # package name when the APK carries no name
    if 'name' in apk and apk['name'] != '':
        name = apk['name']
    else:
        logging.warning(_('{appid} does not have a name! Using package name instead.')
                        .format(appid=apk['packageName']))
        name = apk['packageName']

    if os.path.exists('template.yml'):
        with open('template.yml') as f:
            metatxt = f.read()
        if 'name' in apk and apk['name'] != '':
            # only fill fields left empty (or holding placeholder chars)
            pattern = r'''^(((Auto)?Name|Summary):)[ '"\.]*$'''
        else:
            # no real name: overwrite the fields entirely
            pattern = r'^(((Auto)?Name|Summary):).*$'
        # a callable repl keeps re.sub from interpreting backslashes or
        # group references that might occur in the app name
        metatxt = re.sub(pattern,
                         lambda m: m.group(1) + ' ' + name,
                         metatxt,
                         flags=re.IGNORECASE | re.MULTILINE)
        with open(os.path.join('metadata', apk['packageName'] + '.yml'), 'w') as f:
            f.write(metatxt)
    else:
        import yaml  # lazy import: only this branch needs pyyaml
        app = dict()
        app['Categories'] = [os.path.basename(os.getcwd())]
        # include some blanks as part of the template
        app['AuthorName'] = ''
        app['Summary'] = ''
        app['WebSite'] = ''
        app['IssueTracker'] = ''
        app['SourceCode'] = ''
        app['CurrentVersionCode'] = 2147483647  # Java's Integer.MAX_VALUE
        app['Name'] = name
        with open(os.path.join('metadata', apk['packageName'] + '.yml'), 'w') as f:
            yaml.dump(app, f, default_flow_style=False)
    logging.info(_("Generated skeleton metadata for {appid}").format(appid=apk['packageName']))
+
config = None
options = None
parser = ArgumentParser()
common.setup_global_opts(parser)
parser.add_argument("--create-key", action="store_true", default=False,
- help="Create a repo signing key in a keystore")
+ help=_("Add a repo signing key to an unsigned repo"))
parser.add_argument("-c", "--create-metadata", action="store_true", default=False,
- help="Create skeleton metadata files that are missing")
+ help=_("Add skeleton metadata files for APKs that are missing them"))
parser.add_argument("--delete-unknown", action="store_true", default=False,
- help="Delete APKs and/or OBBs without metadata from the repo")
+ help=_("Delete APKs and/or OBBs without metadata from the repo"))
parser.add_argument("-b", "--buildreport", action="store_true", default=False,
- help="Report on build data status")
+ help=_("Report on build data status"))
parser.add_argument("-i", "--interactive", default=False, action="store_true",
- help="Interactively ask about things that need updating.")
+ help=_("Interactively ask about things that need updating."))
parser.add_argument("-I", "--icons", action="store_true", default=False,
- help="Resize all the icons exceeding the max pixel size and exit")
+ help=_("Resize all the icons exceeding the max pixel size and exit"))
parser.add_argument("-e", "--editor", default="/etc/alternatives/editor",
- help="Specify editor to use in interactive mode. Default " +
- "is /etc/alternatives/editor")
+ help=_("Specify editor to use in interactive mode. Default " +
+ "is {path}").format(path='/etc/alternatives/editor'))
parser.add_argument("-w", "--wiki", default=False, action="store_true",
- help="Update the wiki")
+ help=_("Update the wiki"))
parser.add_argument("--pretty", action="store_true", default=False,
- help="Produce human-readable index.xml")
+ help=_("Produce human-readable XML/JSON for index files"))
parser.add_argument("--clean", action="store_true", default=False,
- help="Clean update - don't uses caches, reprocess all apks")
+ help=_("Clean update - don't uses caches, reprocess all APKs"))
parser.add_argument("--nosign", action="store_true", default=False,
- help="When configured for signed indexes, create only unsigned indexes at this stage")
+ help=_("When configured for signed indexes, create only unsigned indexes at this stage"))
parser.add_argument("--use-date-from-apk", action="store_true", default=False,
- help="Use date from apk instead of current time for newly added apks")
+ help=_("Use date from APK instead of current time for newly added APKs"))
parser.add_argument("--rename-apks", action="store_true", default=False,
- help="Rename APK files that do not match package.name_123.apk")
+ help=_("Rename APK files that do not match package.name_123.apk"))
+ parser.add_argument("--allow-disabled-algorithms", action="store_true", default=False,
+ help=_("Include APKs that are signed with disabled algorithms like MD5"))
metadata.add_metadata_arguments(parser)
options = parser.parse_args()
metadata.warnings_action = options.W
config = common.read_config(options)
if not ('jarsigner' in config and 'keytool' in config):
- raise FDroidException('Java JDK not found! Install in standard location or set java_paths!')
+ raise FDroidException(_('Java JDK not found! Install in standard location or set java_paths!'))
repodirs = ['repo']
if config['archive_older'] != 0:
for k in ['repo_icon', 'archive_icon']:
if k in config:
if not os.path.exists(config[k]):
- logging.critical(k + ' "' + config[k] + '" does not exist! Correct it in config.py.')
+ logging.critical(_('{name} "{path}" does not exist! Correct it in config.py.')
+ .format(name=k, path=config[k]))
sys.exit(1)
# if the user asks to create a keystore, do it now, reusing whatever it can
if options.create_key:
if os.path.exists(config['keystore']):
- logging.critical("Cowardily refusing to overwrite existing signing key setup!")
+ logging.critical(_("Cowardily refusing to overwrite existing signing key setup!"))
logging.critical("\t'" + config['keystore'] + "'")
sys.exit(1)
delete_disabled_builds(apps, apkcache, repodirs)
# Scan all apks in the main repo
- apks, cachechanged = scan_apks(apkcache, repodirs[0], knownapks, options.use_date_from_apk)
+ apks, cachechanged = process_apks(apkcache, repodirs[0], knownapks, options.use_date_from_apk)
files, fcachechanged = scan_repo_files(apkcache, repodirs[0], knownapks,
options.use_date_from_apk)
cachechanged = cachechanged or fcachechanged
apks += files
- # Generate warnings for apk's with no metadata (or create skeleton
- # metadata files, if requested on the command line)
- newmetadata = False
for apk in apks:
if apk['packageName'] not in apps:
if options.create_metadata:
- if 'name' not in apk:
- logging.error(apk['packageName'] + ' does not have a name! Skipping...')
- continue
- f = open(os.path.join('metadata', apk['packageName'] + '.txt'), 'w', encoding='utf8')
- f.write("License:Unknown\n")
- f.write("Web Site:\n")
- f.write("Source Code:\n")
- f.write("Issue Tracker:\n")
- f.write("Changelog:\n")
- f.write("Summary:" + apk['name'] + "\n")
- f.write("Description:\n")
- f.write(apk['name'] + "\n")
- f.write(".\n")
- f.write("Name:" + apk['name'] + "\n")
- f.close()
- logging.info("Generated skeleton metadata for " + apk['packageName'])
- newmetadata = True
+ create_metadata_from_template(apk)
+ apps = metadata.read_metadata()
else:
- msg = apk['apkName'] + " (" + apk['packageName'] + ") has no metadata!"
+ msg = _("{apkfilename} ({appid}) has no metadata!") \
+ .format(apkfilename=apk['apkName'], appid=apk['packageName'])
if options.delete_unknown:
- logging.warn(msg + "\n\tdeleting: repo/" + apk['apkName'])
+ logging.warn(msg + '\n\t' + _("deleting: repo/{apkfilename}")
+ .format(apkfilename=apk['apkName']))
rmf = os.path.join(repodirs[0], apk['apkName'])
if not os.path.exists(rmf):
- logging.error("Could not find {0} to remove it".format(rmf))
+ logging.error(_("Could not find {path} to remove it").format(path=rmf))
else:
os.remove(rmf)
else:
- logging.warn(msg + "\n\tUse `fdroid update -c` to create it.")
-
- # update the metadata with the newly created ones included
- if newmetadata:
- apps = metadata.read_metadata()
+ logging.warn(msg + '\n\t' + _("Use `fdroid update -c` to create it."))
copy_triple_t_store_metadata(apps)
insert_obbs(repodirs[0], apps, apks)
insert_localized_app_metadata(apps)
+ translate_per_build_anti_features(apps, apks)
# Scan the archive repo for apks as well
if len(repodirs) > 1:
- archapks, cc = scan_apks(apkcache, repodirs[1], knownapks, options.use_date_from_apk)
+ archapks, cc = process_apks(apkcache, repodirs[1], knownapks, options.use_date_from_apk)
if cc:
cachechanged = True
else:
if os.path.isdir(repodir):
index.make(appdict, [appid], apks, repodir, False)
else:
- logging.info('Skipping index generation for ' + appid)
+ logging.info(_('Skipping index generation for {appid}').format(appid=appid))
return
if len(repodirs) > 1:
git_remote = config.get('binary_transparency_remote')
if git_remote or os.path.isdir(os.path.join('binary_transparency', '.git')):
+ from . import btlog
btlog.make_binary_transparency_log(repodirs)
if config['update_stats']:
if options.wiki:
update_wiki(apps, sortedids, apks + archapks)
- logging.info("Finished.")
+ logging.info(_("Finished"))
if __name__ == "__main__":