-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
#
# update.py - part of the FDroid server tools
# Copyright (C) 2010-2015, Ciaran Gultnieks, ciaran@ciarang.com
import zipfile
import hashlib
import pickle
-import urlparse
+import urllib.parse
from datetime import datetime, timedelta
from xml.dom.minidom import Document
from argparse import ArgumentParser
from pyasn1.error import PyAsn1Error
from pyasn1.codec.der import decoder, encoder
from pyasn1_modules import rfc2315
-from hashlib import md5
from binascii import hexlify, unhexlify
from PIL import Image
import logging
-import common
-import metadata
-from common import FDroidPopen, SdkToolsPopen
-from metadata import MetaDataException
+from . import common
+from . import metadata
+from .common import FDroidPopen, FDroidPopenBytes, SdkToolsPopen
+from .metadata import MetaDataException
+
+METADATA_VERSION = 16
screen_densities = ['640', '480', '320', '240', '160', '120']
if validapks == 0 and not app.Disabled:
wikidata += '\n[[Category:Apps with no packages]]\n'
if cantupdate and not app.Disabled:
- wikidata += "\n[[Category:Apps we can't update]]\n"
+ wikidata += "\n[[Category:Apps we cannot update]]\n"
if buildfails and not app.Disabled:
wikidata += "\n[[Category:Apps with failing builds]]\n"
elif not gotcurrentver and not cantupdate and not app.Disabled and app.UpdateCheckMode != "Static":
:param apkcache: current apk cache information
:param repodirs: the repo directories to process
"""
- for appid, app in apps.iteritems():
+ for appid, app in apps.items():
for build in app.builds:
if not build.disable:
continue
if not os.path.isfile(iconpath):
return
+ fp = None
try:
- im = Image.open(iconpath)
+ fp = open(iconpath, 'rb')
+ im = Image.open(fp)
size = dpi_to_px(density)
if any(length > size for length in im.size):
except Exception as e:
logging.error("Failed resizing {0} - {1}".format(iconpath, e))
+ finally:
+ if fp:
+ fp.close()
+
def resize_all_icons(repodirs):
"""Resize all icons that exceed the max size
cert_encoded = encoder.encode(certificates)[4:]
- return md5(cert_encoded.encode('hex')).hexdigest()
+ return hashlib.md5(hexlify(cert_encoded)).hexdigest()
+
+
def get_icon_bytes(apkzip, iconsrc):
    """ZIP has no official encoding, UTF-* and CP437 are defacto"""
    try:
        data = apkzip.read(iconsrc)
    except KeyError:
        # Retry with the name round-tripped through CP437, the legacy
        # ZIP filename encoding, in case the archive stored it that way.
        fallback_name = iconsrc.encode('utf-8').decode('cp437')
        data = apkzip.read(fallback_name)
    return data
+
+
def sha256sum(filename):
    '''Calculate the sha256 of the given file'''
    hasher = hashlib.sha256()
    with open(filename, 'rb') as infile:
        # Feed the file through in fixed-size chunks to keep memory flat.
        for chunk in iter(lambda: infile.read(16384), b''):
            hasher.update(chunk)
    return hasher.hexdigest()
+
+
def insert_obbs(repodir, apps, apks):
    """Scans the .obb files in a given repo directory and adds them to the
    relevant APK instances. OBB files have versionCodes like APK
    files, and they are loosely associated. If there is an OBB file
    present, then any APK with the same or higher versionCode will use
    that OBB file. There are two OBB types: main and patch, each APK
    can only have one of each.

    https://developer.android.com/google/play/expansion-files.html

    :param repodir: repo directory to scan
    :param apps: list of current, valid apps
    :param apks: current information on all APKs

    """

    def obbWarnDelete(f, msg):
        # Warn about an unrecognized OBB; remove it too when the user
        # asked for that via --delete-unknown (module-level `options`).
        logging.warning(msg + f)
        if options.delete_unknown:
            logging.error("Deleting unknown file: " + f)
            os.remove(f)

    obbs = []
    java_Integer_MIN_VALUE = -pow(2, 31)
    for f in glob.glob(os.path.join(repodir, '*.obb')):
        obbfile = os.path.basename(f)
        # obbfile looks like: [main|patch].<expansion-version>.<package-name>.obb
        chunks = obbfile.split('.')
        if chunks[0] != 'main' and chunks[0] != 'patch':
            obbWarnDelete(f, 'OBB filename must start with "main." or "patch.": ')
            continue
        if not re.match(r'^-?[0-9]+$', chunks[1]):
            # BUGFIX: the file path argument was missing here, which made
            # obbWarnDelete() raise TypeError instead of reporting the file.
            obbWarnDelete(f, 'The OBB version code must come after "' + chunks[0] + '.": ')
            continue
        versioncode = int(chunks[1])
        packagename = ".".join(chunks[2:-1])
        highestVersionCode = java_Integer_MIN_VALUE
        if packagename not in apps.keys():
            obbWarnDelete(f, "OBB's packagename does not match a supported APK: ")
            continue
        for apk in apks:
            if packagename == apk['id'] and apk['versioncode'] > highestVersionCode:
                highestVersionCode = apk['versioncode']
        if versioncode > highestVersionCode:
            obbWarnDelete(f, 'OBB file has newer versioncode(' + str(versioncode)
                          + ') than any APK: ')
            continue
        obbsha256 = sha256sum(f)
        obbs.append((packagename, versioncode, obbfile, obbsha256))

    for apk in apks:
        # Highest-versioned OBBs first, so each APK picks up the newest
        # main/patch OBB whose versioncode it is eligible for.
        for (packagename, versioncode, obbfile, obbsha256) in sorted(obbs, reverse=True):
            if versioncode <= apk['versioncode'] and packagename == apk['id']:
                if obbfile.startswith('main.') and 'obbMainFile' not in apk:
                    apk['obbMainFile'] = obbfile
                    apk['obbMainFileSha256'] = obbsha256
                elif obbfile.startswith('patch.') and 'obbPatchFile' not in apk:
                    apk['obbPatchFile'] = obbfile
                    apk['obbPatchFileSha256'] = obbsha256
            if 'obbMainFile' in apk and 'obbPatchFile' in apk:
                break
+
+
+def scan_apks(apps, apkcache, repodir, knownapks, use_date_from_apk=False):
"""Scan the apks in the given repo directory.
This also extracts the icons.
:param apkcache: current apk cache information
:param repodir: repo directory to scan
:param knownapks: known apks info
+ :param use_date_from_apk: use date from APK (instead of current date)
+ for newly added APKs
:returns: (apks, cachechanged) where apks is a list of apk information,
and cachechanged is True if the apkcache got changed.
"""
icon_pat = re.compile(".*application-icon-([0-9]+):'([^']+?)'.*")
icon_pat_nodpi = re.compile(".*icon='([^']+?)'.*")
sdkversion_pat = re.compile(".*'([0-9]*)'.*")
- string_pat = re.compile(".*'([^']*)'.*")
+ string_pat = re.compile(".* name='([^']*)'.*")
for apkfile in glob.glob(os.path.join(repodir, '*.apk')):
apkfilename = apkfile[len(repodir) + 1:]
logging.critical("Spaces in filenames are not allowed.")
sys.exit(1)
- # Calculate the sha256...
- sha = hashlib.sha256()
- with open(apkfile, 'rb') as f:
- while True:
- t = f.read(16384)
- if len(t) == 0:
- break
- sha.update(t)
- shasum = sha.hexdigest()
+ shasum = sha256sum(apkfile)
usecache = False
if apkfilename in apkcache:
logging.error(line.replace('sdkVersion:', '')
+ ' is not a valid minSdkVersion!')
else:
- apk['sdkversion'] = m.group(1)
+ apk['minSdkVersion'] = m.group(1)
+ # if target not set, default to min
+ if 'targetSdkVersion' not in apk:
+ apk['targetSdkVersion'] = m.group(1)
+ elif line.startswith("targetSdkVersion:"):
+ m = re.match(sdkversion_pat, line)
+ if m is None:
+ logging.error(line.replace('targetSdkVersion:', '')
+ + ' is not a valid targetSdkVersion!')
+ else:
+ apk['targetSdkVersion'] = m.group(1)
elif line.startswith("maxSdkVersion:"):
- apk['maxsdkversion'] = re.match(sdkversion_pat, line).group(1)
+ apk['maxSdkVersion'] = re.match(sdkversion_pat, line).group(1)
elif line.startswith("native-code:"):
apk['nativecode'] = []
for arch in line[13:].split(' '):
perm = perm[16:]
apk['features'].add(perm)
- if 'sdkversion' not in apk:
+ if 'minSdkVersion' not in apk:
logging.warn("No SDK version information found in {0}".format(apkfile))
- apk['sdkversion'] = 0
+ apk['minSdkVersion'] = 1
# Check for debuggable apks...
if common.isApkDebuggable(apkfile, config):
- logging.warn('{0} is set to android:debuggable="true"'.format(apkfile))
+ logging.warning('{0} is set to android:debuggable="true"'.format(apkfile))
# Get the signature (or md5 of, to be precise)...
logging.debug('Getting signature of {0}'.format(apkfile))
# has to be more than 24 hours newer because ZIP/APK files do not
# store timezone info
manifest = apkzip.getinfo('AndroidManifest.xml')
- dt_obj = datetime(*manifest.date_time)
- checkdt = dt_obj - timedelta(1)
- if datetime.today() < checkdt:
- logging.warn('System clock is older than manifest in: '
- + apkfilename + '\nSet clock to that time using:\n'
- + 'sudo date -s "' + str(dt_obj) + '"')
+ if manifest.date_time[1] == 0: # month can't be zero
+ logging.debug('AndroidManifest.xml has no date')
+ else:
+ dt_obj = datetime(*manifest.date_time)
+ checkdt = dt_obj - timedelta(1)
+ if datetime.today() < checkdt:
+ logging.warn('System clock is older than manifest in: '
+ + apkfilename
+ + '\nSet clock to that time using:\n'
+ + 'sudo date -s "' + str(dt_obj) + '"')
iconfilename = "%s.%s.png" % (
apk['id'],
try:
with open(icondest, 'wb') as f:
- f.write(apkzip.read(iconsrc))
+ f.write(get_icon_bytes(apkzip, iconsrc))
apk['icons'][density] = iconfilename
except:
iconpath = os.path.join(
get_icon_dir(repodir, '0'), iconfilename)
with open(iconpath, 'wb') as f:
- f.write(apkzip.read(iconsrc))
+ f.write(get_icon_bytes(apkzip, iconsrc))
try:
im = Image.open(iconpath)
dpi = px_to_dpi(im.size[0])
get_icon_dir(repodir, last_density), iconfilename)
iconpath = os.path.join(
get_icon_dir(repodir, density), iconfilename)
+ fp = None
try:
- im = Image.open(last_iconpath)
- except:
- logging.warn("Invalid image file at %s" % last_iconpath)
- continue
+ fp = open(last_iconpath, 'rb')
+ im = Image.open(fp)
- size = dpi_to_px(density)
+ size = dpi_to_px(density)
- im.thumbnail((size, size), Image.ANTIALIAS)
- im.save(iconpath, "PNG")
- empty_densities.remove(density)
+ im.thumbnail((size, size), Image.ANTIALIAS)
+ im.save(iconpath, "PNG")
+ empty_densities.remove(density)
+ except:
+ logging.warning("Invalid image file at %s" % last_iconpath)
+ finally:
+ if fp:
+ fp.close()
# Then just copy from the highest resolution available
last_density = None
# Record in known apks, getting the added date at the same time..
added = knownapks.recordapk(apk['apkname'], apk['id'])
if added:
+ if use_date_from_apk and manifest.date_time[1] != 0:
+ added = datetime(*manifest.date_time).timetuple()
+ logging.debug("Using date from APK")
+
apk['added'] = added
apkcache[apkfilename] = apk
def cert_fingerprint(data):
    """Return the SHA-256 fingerprint of the given certificate data,
    formatted as space-separated uppercase hex byte pairs."""
    digest = hashlib.sha256(data).digest()
    return " ".join("%02X" % byte for byte in bytearray(digest))
if 'repo_pubkey' in config:
pubkey = unhexlify(config['repo_pubkey'])
else:
- p = FDroidPopen([config['keytool'], '-exportcert',
- '-alias', config['repo_keyalias'],
- '-keystore', config['keystore'],
- '-storepass:file', config['keystorepassfile']]
- + config['smartcardoptions'], output=False)
+ p = FDroidPopenBytes([config['keytool'], '-exportcert',
+ '-alias', config['repo_keyalias'],
+ '-keystore', config['keystore'],
+ '-storepass:file', config['keystorepassfile']]
+ + config['smartcardoptions'],
+ output=False, stderr_to_stdout=False)
if p.returncode != 0 or len(p.output) < 20:
msg = "Failed to get repo pubkey!"
if config['keystore'] == 'NONE':
mirrorcheckfailed = False
for mirror in config.get('mirrors', []):
- base = os.path.basename(urlparse.urlparse(mirror).path.rstrip('/'))
+ base = os.path.basename(urllib.parse.urlparse(mirror).path.rstrip('/'))
if config.get('nonstandardwebroot') is not True and base != 'fdroid':
logging.error("mirror '" + mirror + "' does not end with 'fdroid'!")
mirrorcheckfailed = True
repoel.setAttribute("icon", os.path.basename(config['archive_icon']))
repoel.setAttribute("url", config['archive_url'])
addElement('description', config['archive_description'], doc, repoel)
- urlbasepath = os.path.basename(urlparse.urlparse(config['archive_url']).path)
+ urlbasepath = os.path.basename(urllib.parse.urlparse(config['archive_url']).path)
for mirror in config.get('mirrors', []):
- addElement('mirror', urlparse.urljoin(mirror, urlbasepath), doc, repoel)
+ addElement('mirror', urllib.parse.urljoin(mirror, urlbasepath), doc, repoel)
else:
repoel.setAttribute("name", config['repo_name'])
repoel.setAttribute("icon", os.path.basename(config['repo_icon']))
repoel.setAttribute("url", config['repo_url'])
addElement('description', config['repo_description'], doc, repoel)
- urlbasepath = os.path.basename(urlparse.urlparse(config['repo_url']).path)
+ urlbasepath = os.path.basename(urllib.parse.urlparse(config['repo_url']).path)
for mirror in config.get('mirrors', []):
- addElement('mirror', urlparse.urljoin(mirror, urlbasepath), doc, repoel)
+ addElement('mirror', urllib.parse.urljoin(mirror, urlbasepath), doc, repoel)
- repoel.setAttribute("version", "15")
+ repoel.setAttribute("version", str(METADATA_VERSION))
repoel.setAttribute("timestamp", str(int(time.time())))
nosigningkey = False
logging.warning("\tfdroid update --create-key")
sys.exit(1)
- repoel.setAttribute("pubkey", extract_pubkey())
+ repoel.setAttribute("pubkey", extract_pubkey().decode('utf-8'))
root.appendChild(repoel)
for appid in sortedids:
apkel.appendChild(hashel)
addElement('sig', apk['sig'], doc, apkel)
addElement('size', str(apk['size']), doc, apkel)
- addElement('sdkver', str(apk['sdkversion']), doc, apkel)
- if 'maxsdkversion' in apk:
- addElement('maxsdkver', str(apk['maxsdkversion']), doc, apkel)
+ addElement('sdkver', str(apk['minSdkVersion']), doc, apkel)
+ if 'targetSdkVersion' in apk:
+ addElement('targetSdkVersion', str(apk['targetSdkVersion']), doc, apkel)
+ if 'maxSdkVersion' in apk:
+ addElement('maxsdkver', str(apk['maxSdkVersion']), doc, apkel)
+ addElementNonEmpty('obbMainFile', apk.get('obbMainFile'), doc, apkel)
+ addElementNonEmpty('obbMainFileSha256', apk.get('obbMainFileSha256'), doc, apkel)
+ addElementNonEmpty('obbPatchFile', apk.get('obbPatchFile'), doc, apkel)
+ addElementNonEmpty('obbPatchFileSha256', apk.get('obbPatchFileSha256'), doc, apkel)
if 'added' in apk:
addElement('added', time.strftime('%Y-%m-%d', apk['added']), doc, apkel)
addElementNonEmpty('permissions', ','.join(apk['permissions']), doc, apkel)
os.symlink(sigfile_path, siglinkname)
if options.pretty:
- output = doc.toprettyxml()
+ output = doc.toprettyxml(encoding='utf-8')
else:
- output = doc.toxml()
+ output = doc.toxml(encoding='utf-8')
with open(os.path.join(repodir, 'index.xml'), 'wb') as f:
f.write(output)
catdata = ''
for cat in categories:
catdata += cat + '\n'
- with open(os.path.join(repodir, 'categories.txt'), 'w') as f:
+ with open(os.path.join(repodir, 'categories.txt'), 'w', encoding='utf8') as f:
f.write(catdata)
def archive_old_apks(apps, apks, archapks, repodir, archivedir, defaultkeepversions):
- for appid, app in apps.iteritems():
+ for appid, app in apps.items():
if app.ArchivePolicy:
keepversions = int(app.ArchivePolicy[:-9])
to_path = os.path.join(to_dir, filename)
shutil.move(from_path, to_path)
+ logging.debug("Checking archiving for {0} - apks:{1}, keepversions:{2}, archapks:{3}"
+ .format(appid, len(apks), keepversions, len(archapks)))
+
if len(apks) > keepversions:
apklist = filter_apk_list_sorted(apks)
# Move back the ones we don't want.
parser.add_argument("-c", "--create-metadata", action="store_true", default=False,
help="Create skeleton metadata files that are missing")
parser.add_argument("--delete-unknown", action="store_true", default=False,
- help="Delete APKs without metadata from the repo")
+ help="Delete APKs and/or OBBs without metadata from the repo")
parser.add_argument("-b", "--buildreport", action="store_true", default=False,
help="Report on build data status")
parser.add_argument("-i", "--interactive", default=False, action="store_true",
help="Clean update - don't uses caches, reprocess all apks")
parser.add_argument("--nosign", action="store_true", default=False,
help="When configured for signed indexes, create only unsigned indexes at this stage")
+ parser.add_argument("--use-date-from-apk", action="store_true", default=False,
+ help="Use date from apk instead of current time for newly added apks")
options = parser.parse_args()
config = common.read_config(options)
# Generate a list of categories...
categories = set()
- for app in apps.itervalues():
+ for app in apps.values():
categories.update(app.Categories)
# Read known apks data (will be updated and written back when we've finished)
apkcachefile = os.path.join('tmp', 'apkcache')
if not options.clean and os.path.exists(apkcachefile):
with open(apkcachefile, 'rb') as cf:
- apkcache = pickle.load(cf)
+ apkcache = pickle.load(cf, encoding='utf-8')
+ if apkcache.get("METADATA_VERSION") != METADATA_VERSION:
+ apkcache = {}
else:
apkcache = {}
delete_disabled_builds(apps, apkcache, repodirs)
# Scan all apks in the main repo
- apks, cachechanged = scan_apks(apps, apkcache, repodirs[0], knownapks)
+ apks, cachechanged = scan_apks(apps, apkcache, repodirs[0], knownapks, options.use_date_from_apk)
# Generate warnings for apk's with no metadata (or create skeleton
# metadata files, if requested on the command line)
if 'name' not in apk:
logging.error(apk['id'] + ' does not have a name! Skipping...')
continue
- f = open(os.path.join('metadata', apk['id'] + '.txt'), 'w')
+ f = open(os.path.join('metadata', apk['id'] + '.txt'), 'w', encoding='utf8')
f.write("License:Unknown\n")
f.write("Web Site:\n")
f.write("Source Code:\n")
if newmetadata:
apps = metadata.read_metadata()
+ insert_obbs(repodirs[0], apps, apks)
+
# Scan the archive repo for apks as well
if len(repodirs) > 1:
- archapks, cc = scan_apks(apps, apkcache, repodirs[1], knownapks)
+ archapks, cc = scan_apks(apps, apkcache, repodirs[1], knownapks, options.use_date_from_apk)
if cc:
cachechanged = True
else:
# level. When doing this, we use the info from the most recent version's apk.
# We deal with figuring out when the app was added and last updated at the
# same time.
- for appid, app in apps.iteritems():
+ for appid, app in apps.items():
bestver = 0
for apk in apks + archapks:
if apk['id'] == appid:
# Sort the app list by name, then the web site doesn't have to by default.
# (we had to wait until we'd scanned the apks to do this, because mostly the
# name comes from there!)
- sortedids = sorted(apps.iterkeys(), key=lambda appid: apps[appid].Name.upper())
+ sortedids = sorted(apps.keys(), key=lambda appid: apps[appid].Name.upper())
# APKs are placed into multiple repos based on the app package, providing
# per-app subscription feeds for nightly builds and things like it
if config['per_app_repos']:
add_apks_to_per_app_repos(repodirs[0], apks)
- for appid, app in apps.iteritems():
+ for appid, app in apps.items():
repodir = os.path.join(appid, 'fdroid', 'repo')
appdict = dict()
appdict[appid] = app
# Generate latest apps data for widget
if os.path.exists(os.path.join('stats', 'latestapps.txt')):
data = ''
- for line in file(os.path.join('stats', 'latestapps.txt')):
- appid = line.rstrip()
- data += appid + "\t"
- app = apps[appid]
- data += app.Name + "\t"
- if app.icon is not None:
- data += app.icon + "\t"
- data += app.License + "\n"
- with open(os.path.join(repodirs[0], 'latestapps.dat'), 'w') as f:
+ with open(os.path.join('stats', 'latestapps.txt'), 'r', encoding='utf8') as f:
+ for line in f:
+ appid = line.rstrip()
+ data += appid + "\t"
+ app = apps[appid]
+ data += app.Name + "\t"
+ if app.icon is not None:
+ data += app.icon + "\t"
+ data += app.License + "\n"
+ with open(os.path.join(repodirs[0], 'latestapps.dat'), 'w', encoding='utf8') as f:
f.write(data)
if cachechanged:
+ apkcache["METADATA_VERSION"] = METADATA_VERSION
with open(apkcachefile, 'wb') as cf:
pickle.dump(apkcache, cf)