if jni_components == ['yes']:
jni_components = ['']
- cmd = [os.path.join(config['ndk_path'], "ndk-build"), "-j1"]
+ cmd = [os.path.join(config['ndk_path'], "ndk-build"), "-j4"]
for d in jni_components:
if d:
logging.info("Building native code in '%s'" % d)
allapps = metadata.read_metadata(xref=not options.onserver)
apps = common.read_app_args(args, allapps, True)
- apps = [app for app in apps if (options.force or not app['Disabled']) and
- len(app['Repo Type']) > 0 and len(app['builds']) > 0]
+ for appid, app in apps.items():
+ if (app['Disabled'] and not options.force) or not app['Repo Type'] or not app['builds']:
+ del apps[appid]
- if len(apps) == 0:
+ if not apps:
raise FDroidException("No apps to process.")
if options.latest:
- for app in apps:
+ for app in apps.itervalues():
for build in reversed(app['builds']):
if build['disable']:
continue
# Build applications...
failed_apps = {}
build_succeeded = []
- for app in apps:
+ for appid, app in apps.iteritems():
first = True
if app['Repo Type'] == 'srclib':
build_dir = os.path.join('build', 'srclib', app['Repo'])
else:
- build_dir = os.path.join('build', app['id'])
+ build_dir = os.path.join('build', appid)
# Set up vcs interface and make sure we have the latest code...
logging.debug("Getting {0} vcs interface for {1}"
build_succeeded.append(app)
wikilog = "Build succeeded"
except BuildException as be:
- logfile = open(os.path.join(log_dir, app['id'] + '.log'), 'a+')
+ logfile = open(os.path.join(log_dir, appid + '.log'), 'a+')
logfile.write(str(be))
logfile.close()
- print("Could not build app %s due to BuildException: %s" % (app['id'], be))
+ print("Could not build app %s due to BuildException: %s" % (appid, be))
if options.stop:
sys.exit(1)
- failed_apps[app['id']] = be
+ failed_apps[appid] = be
wikilog = be.get_wikitext()
except VCSException as vcse:
reason = str(vcse).split('\n', 1)[0] if options.verbose else str(vcse)
logging.error("VCS error while building app %s: %s" % (
- app['id'], reason))
+ appid, reason))
if options.stop:
sys.exit(1)
- failed_apps[app['id']] = vcse
+ failed_apps[appid] = vcse
wikilog = str(vcse)
except Exception as e:
logging.error("Could not build app %s due to unknown error: %s" % (
- app['id'], traceback.format_exc()))
+ appid, traceback.format_exc()))
if options.stop:
sys.exit(1)
- failed_apps[app['id']] = e
+ failed_apps[appid] = e
wikilog = str(e)
if options.wiki and wikilog:
try:
# Write a page with the last build log for this version code
- lastbuildpage = app['id'] + '/lastbuild_' + thisbuild['vercode']
+ lastbuildpage = appid + '/lastbuild_' + thisbuild['vercode']
newpage = site.Pages[lastbuildpage]
txt = "Build completed at " + time.strftime("%Y-%m-%d %H:%M:%SZ", time.gmtime()) + "\n\n" + wikilog
newpage.save(txt, summary='Build log')
# Redirect from /lastbuild to the most recent build log
- newpage = site.Pages[app['id'] + '/lastbuild']
+ newpage = site.Pages[appid + '/lastbuild']
newpage.save('#REDIRECT [[' + lastbuildpage + ']]', summary='Update redirect')
except:
logging.error("Error while attempting to publish build log")
.format(common.getappname(app), version))
return
- for app in apps:
+ for appid, app in apps.iteritems():
if options.autoonly and app['Auto Update Mode'] in ('None', 'Static'):
- logging.debug("Nothing to do for {0}...".format(app['id']))
+ logging.debug("Nothing to do for {0}...".format(appid))
continue
- logging.info("Processing " + app['id'] + '...')
+ logging.info("Processing " + appid + '...')
checkupdates_app(app)
if not vercodes:
return allapps
- apps = [app for app in allapps if app['id'] in vercodes]
+ apps = {}
+ for appid, app in allapps.iteritems():
+ if appid in vercodes:
+ apps[appid] = app
if len(apps) != len(vercodes):
- allids = [app["id"] for app in allapps]
+ allids = [appid for appid in allapps]
raise FDroidException("No packages specified")
error = False
- for app in apps:
- vc = vercodes[app['id']]
+ for appid, app in apps.iteritems():
+ vc = vercodes[appid]
if not vc:
continue
app['builds'] = [b for b in app['builds'] if b['vercode'] in vc]
- if len(app['builds']) != len(vercodes[app['id']]):
+ if len(app['builds']) != len(vercodes[appid]):
error = True
allvcs = [b['vercode'] for b in app['builds']]
- for v in vercodes[app['id']]:
+ for v in vercodes[appid]:
if v not in allvcs:
- logging.critical("No such vercode %s for app %s" % (v, app['id']))
+ logging.critical("No such vercode %s for app %s" % (v, appid))
if error:
raise FDroidException("Found invalid vercodes for some apps")
gitsvn_cmd += ' -t %s' % i[5:]
elif i.startswith('branches='):
gitsvn_cmd += ' -b %s' % i[9:]
- p = SilentPopen([gitsvn_cmd + " %s %s" % (remote_split[0], self.local)], shell=True)
- if p.returncode != 0:
+ if subprocess.call([gitsvn_cmd + " %s %s" % (remote_split[0], self.local)], shell=True) != 0:
self.clone_failed = True
- raise VCSException("Git svn clone failed", p.output)
+ raise VCSException("Git svn clone failed")
else:
- p = SilentPopen([gitsvn_cmd + " %s %s" % (self.remote, self.local)], shell=True)
- if p.returncode != 0:
+ if subprocess.call([gitsvn_cmd + " %s %s" % (self.remote, self.local)], shell=True) != 0:
self.clone_failed = True
- raise VCSException("Git svn clone failed", p.output)
+ raise VCSException("Git svn clone failed")
self.checkrepo()
else:
self.checkrepo()
sys.exit(1)
# Make sure it's actually new...
- for app in apps:
- if app['id'] == package:
- logging.error("Package " + package + " already exists")
- sys.exit(1)
+ if package in apps:
+ logging.error("Package " + package + " already exists")
+ sys.exit(1)
# Construct the metadata...
- app = metadata.parse_metadata(None)
- app['id'] = package
+ app = metadata.parse_metadata(None)[1]
app['Web Site'] = website
app['Source Code'] = sourcecode
if issuetracker:
allapps = metadata.read_metadata(xref=False)
apps = common.read_app_args(args, allapps, False)
- for app in apps:
- appid = app['id']
+ for appid, app in apps.iteritems():
lastcommit = ''
if app['Disabled']:
# their source repository.
read_srclibs()
- apps = []
+ apps = {}
for basedir in ('metadata', 'tmp'):
if not os.path.exists(basedir):
os.makedirs(basedir)
for metafile in sorted(glob.glob(os.path.join('metadata', '*.txt'))):
- appinfo = parse_metadata(metafile)
+ appid, appinfo = parse_metadata(metafile)
check_metadata(appinfo)
- apps.append(appinfo)
+ apps[appid] = appinfo
if xref:
# Parse all descriptions at load time, just to ensure cross-referencing
# errors are caught early rather than when they hit the build server.
- def linkres(link):
- for app in apps:
- if app['id'] == link:
- return ("fdroid.app:" + link, "Dummy name - don't know yet")
- raise MetaDataException("Cannot resolve app id " + link)
- for app in apps:
+ def linkres(appid):
+ if appid in apps:
+ return ("fdroid.app:" + appid, "Dummy name - don't know yet")
+ raise MetaDataException("Cannot resolve app id " + appid)
+
+ for appid, app in apps.iteritems():
try:
description_html(app['Description'], linkres)
except MetaDataException, e:
- raise MetaDataException("Problem with description of " + app['id'] +
+ raise MetaDataException("Problem with description of " + appid +
" - " + str(e))
return apps
#
# Known keys not originating from the metadata are:
#
-# 'id' - the application's package ID
# 'builds' - a list of dictionaries containing build information
# for each defined build
# 'comments' - a list of comments from the metadata file. Each is
#
def parse_metadata(metafile):
+ appid = None
linedesc = None
def add_buildflag(p, thisbuild):
if metafile:
if not isinstance(metafile, file):
metafile = open(metafile, "r")
- thisinfo['id'] = metafile.name[9:-4]
- else:
- thisinfo['id'] = None
+ appid = metafile.name[9:-4]
thisinfo.update(app_defaults)
+ thisinfo['id'] = appid
# General defaults...
thisinfo['builds'] = []
for build in thisinfo['builds']:
fill_build_defaults(build)
- return thisinfo
+ return (appid, thisinfo)
# Write a metadata file.
allapps = metadata.read_metadata()
vercodes = common.read_pkg_args(args, True)
allaliases = []
- for app in allapps:
+ for appid in allapps:
m = md5.new()
- m.update(app['id'])
+ m.update(appid)
keyalias = m.hexdigest()[:8]
if keyalias in allaliases:
logging.error("There is a keyalias collision - publishing halted")
allapps = metadata.read_metadata(xref=True)
apps = common.read_app_args(args, allapps, False)
- for app in apps:
- logging.info("Writing " + app['id'])
- metadata.write_metadata(os.path.join('metadata', app['id']) + '.txt', app)
+ for appid, app in apps.iteritems():
+ logging.info("Writing " + appid)
+ metadata.write_metadata(os.path.join('metadata', appid) + '.txt', app)
logging.info("Finished.")
srclib_dir = os.path.join(build_dir, 'srclib')
extlib_dir = os.path.join(build_dir, 'extlib')
- for app in apps:
+ for appid, app in apps.iteritems():
if app['Disabled']:
- logging.info("Skipping %s: disabled" % app['id'])
+ logging.info("Skipping %s: disabled" % appid)
continue
if not app['builds']:
- logging.info("Skipping %s: no builds specified" % app['id'])
+ logging.info("Skipping %s: no builds specified" % appid)
continue
- logging.info("Processing " + app['id'])
+ logging.info("Processing " + appid)
try:
- build_dir = 'build/' + app['id']
+ build_dir = 'build/' + appid
# Set up vcs interface and make sure we have the latest code...
vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir)
# Do the scan...
buildprobs = common.scan_source(build_dir, root_dir, thisbuild)
for problem in buildprobs:
- problems.append(problem + ' in ' + app['id']
+ problems.append(problem + ' in ' + appid
+ ' ' + thisbuild['version'])
except BuildException as be:
- msg = "Could not scan app %s due to BuildException: %s" % (app['id'], be)
+ msg = "Could not scan app %s due to BuildException: %s" % (appid, be)
problems.append(msg)
except VCSException as vcse:
- msg = "VCS error while scanning app %s: %s" % (app['id'], vcse)
+ msg = "VCS error while scanning app %s: %s" % (appid, vcse)
problems.append(msg)
except Exception:
- msg = "Could not scan app %s due to unknown error: %s" % (app['id'], traceback.format_exc())
+ msg = "Could not scan app %s due to unknown error: %s" % (appid, traceback.format_exc())
problems.append(msg)
logging.info("Finished:")
sys.exit(1)
# Get all metadata-defined apps...
- metaapps = [a for a in metadata.read_metadata() if not a['Disabled']]
+ metaapps = [a for a in metadata.read_metadata().itervalues() if not a['Disabled']]
statsdir = 'stats'
logsdir = os.path.join(statsdir, 'logs')
yield os.path.join(repodir, "icons")
-def update_wiki(apps, apks):
+def update_wiki(apps, sortedids, apks):
"""Update the wiki
:param apps: fully populated list of all applications
site.login(config['wiki_user'], config['wiki_password'])
generated_pages = {}
generated_redirects = {}
- for app in apps:
+
+ for appid in sortedids:
+ app = apps[appid]
+
wikidata = ''
if app['Disabled']:
wikidata += '{{Disabled|' + app['Disabled'] + '}}\n'
for af in app['AntiFeatures'].split(','):
wikidata += '{{AntiFeature|' + af + '}}\n'
wikidata += '{{App|id=%s|name=%s|added=%s|lastupdated=%s|source=%s|tracker=%s|web=%s|donate=%s|flattr=%s|bitcoin=%s|litecoin=%s|dogecoin=%s|license=%s|root=%s}}\n' % (
- app['id'],
+ appid,
app['Name'],
time.strftime('%Y-%m-%d', app['added']) if 'added' in app else '',
time.strftime('%Y-%m-%d', app['lastupdated']) if 'lastupdated' in app else '',
wikidata += "This app provides: %s" % ', '.join(app['Summary'].split(','))
wikidata += app['Summary']
- wikidata += " - [https://f-droid.org/repository/browse/?fdid=" + app['id'] + " view in repository]\n\n"
+ wikidata += " - [https://f-droid.org/repository/browse/?fdid=" + appid + " view in repository]\n\n"
wikidata += "=Description=\n"
wikidata += metadata.description_wiki(app['Description']) + "\n"
wikidata += "=Maintainer Notes=\n"
if 'Maintainer Notes' in app:
wikidata += metadata.description_wiki(app['Maintainer Notes']) + "\n"
- wikidata += "\nMetadata: [https://gitlab.com/fdroid/fdroiddata/blob/master/metadata/{0}.txt current] [https://gitlab.com/fdroid/fdroiddata/commits/master/metadata/{0}.txt history]\n".format(app['id'])
+ wikidata += "\nMetadata: [https://gitlab.com/fdroid/fdroiddata/blob/master/metadata/{0}.txt current] [https://gitlab.com/fdroid/fdroiddata/commits/master/metadata/{0}.txt history]\n".format(appid)
# Get a list of all packages for this application...
apklist = []
cantupdate = False
buildfails = False
for apk in apks:
- if apk['id'] == app['id']:
+ if apk['id'] == appid:
if str(apk['versioncode']) == app['Current Version Code']:
gotcurrentver = True
apklist.append(apk)
buildfails = True
apklist.append({'versioncode': int(thisbuild['vercode']),
'version': thisbuild['version'],
- 'buildproblem': "The build for this version appears to have failed. Check the [[{0}/lastbuild_{1}|build log]].".format(app['id'], thisbuild['vercode'])
+ 'buildproblem': "The build for this version appears to have failed. Check the [[{0}/lastbuild_{1}|build log]].".format(appid, thisbuild['vercode'])
})
if app['Current Version Code'] == '0':
cantupdate = True
# We can't have underscores in the page name, even if they're in
# the package ID, because MediaWiki messes with them...
- pagename = app['id'].replace('_', ' ')
+ pagename = appid.replace('_', ' ')
# Drop a trailing newline, because mediawiki is going to drop it anyway
# and it we don't we'll think the page has changed when it hasn't...
:param apkcache: current apk cache information
:param repodirs: the repo directories to process
"""
- for app in apps:
+ for appid, app in apps.iteritems():
for build in app['builds']:
if build['disable']:
- apkfilename = app['id'] + '_' + str(build['vercode']) + '.apk'
+ apkfilename = appid + '_' + str(build['vercode']) + '.apk'
for repodir in repodirs:
apkpath = os.path.join(repodir, apkfilename)
ascpath = apkpath + ".asc"
repo_pubkey_fingerprint = None
-def make_index(apps, apks, repodir, archive, categories):
+def make_index(apps, sortedids, apks, repodir, archive, categories):
"""Make a repo index.
:param apps: fully populated apps list
root.appendChild(repoel)
- for app in apps:
+ for appid in sortedids:
+ app = apps[appid]
if app['Disabled'] is not None:
continue
# Get a list of the apks for this app...
apklist = []
for apk in apks:
- if apk['id'] == app['id']:
+ if apk['id'] == appid:
apklist.append(apk)
if len(apklist) == 0:
if app['icon']:
addElement('icon', app['icon'], doc, apel)
- def linkres(link):
- for app in apps:
- if app['id'] == link:
- return ("fdroid.app:" + link, app['Name'])
- raise MetaDataException("Cannot resolve app id " + link)
+ def linkres(appid):
+ if appid in apps:
+ return ("fdroid.app:" + appid, apps[appid]['Name'])
+ raise MetaDataException("Cannot resolve app id " + appid)
+
addElement('desc',
metadata.description_html(app['Description'], linkres),
doc, apel)
def archive_old_apks(apps, apks, archapks, repodir, archivedir, defaultkeepversions):
- for app in apps:
+ for appid, app in apps.iteritems():
# Get a list of the apks for this app...
apklist = []
for apk in apks:
- if apk['id'] == app['id']:
+ if apk['id'] == appid:
apklist.append(apk)
# Sort the apk list into version order...
# Generate a list of categories...
categories = set()
- for app in apps:
+ for app in apps.itervalues():
categories.update(app['Categories'])
# Read known apks data (will be updated and written back when we've finished)
# metadata files, if requested on the command line)
newmetadata = False
for apk in apks:
- found = False
- for app in apps:
- if app['id'] == apk['id']:
- found = True
- break
- if not found:
+ if apk['id'] not in apps:
if options.create_metadata:
if 'name' not in apk:
logging.error(apk['id'] + ' does not have a name! Skipping...')
# level. When doing this, we use the info from the most recent version's apk.
# We deal with figuring out when the app was added and last updated at the
# same time.
- for app in apps:
+ for appid, app in apps.iteritems():
bestver = 0
added = None
lastupdated = None
for apk in apks + archapks:
- if apk['id'] == app['id']:
+ if apk['id'] == appid:
if apk['versioncode'] > bestver:
bestver = apk['versioncode']
bestapk = apk
if added:
app['added'] = added
else:
- logging.warn("Don't know when " + app['id'] + " was added")
+ logging.warn("Don't know when " + appid + " was added")
if lastupdated:
app['lastupdated'] = lastupdated
else:
- logging.warn("Don't know when " + app['id'] + " was last updated")
+ logging.warn("Don't know when " + appid + " was last updated")
if bestver == 0:
if app['Name'] is None:
- app['Name'] = app['id']
+ app['Name'] = appid
app['icon'] = None
- logging.warn("Application " + app['id'] + " has no packages")
+ logging.warn("Application " + appid + " has no packages")
else:
if app['Name'] is None:
app['Name'] = bestapk['name']
# Sort the app list by name, then the web site doesn't have to by default.
# (we had to wait until we'd scanned the apks to do this, because mostly the
# name comes from there!)
- apps = sorted(apps, key=lambda app: app['Name'].upper())
+ sortedids = sorted(apps.iterkeys(), key=lambda appid: apps[appid]['Name'].upper())
if len(repodirs) > 1:
archive_old_apks(apps, apks, archapks, repodirs[0], repodirs[1], config['archive_older'])
# Make the index for the main repo...
- make_index(apps, apks, repodirs[0], False, categories)
+ make_index(apps, sortedids, apks, repodirs[0], False, categories)
# If there's an archive repo, make the index for it. We already scanned it
# earlier on.
if len(repodirs) > 1:
- make_index(apps, archapks, repodirs[1], True, categories)
+ make_index(apps, sortedids, archapks, repodirs[1], True, categories)
if config['update_stats']:
for line in file(os.path.join('stats', 'latestapps.txt')):
appid = line.rstrip()
data += appid + "\t"
- for app in apps:
- if app['id'] == appid:
- data += app['Name'] + "\t"
- if app['icon'] is not None:
- data += app['icon'] + "\t"
- data += app['License'] + "\n"
- break
+ app = apps[appid]
+ data += app['Name'] + "\t"
+ if app['icon'] is not None:
+ data += app['icon'] + "\t"
+ data += app['License'] + "\n"
f = open(os.path.join(repodirs[0], 'latestapps.dat'), 'w')
f.write(data)
f.close()
# Update the wiki...
if options.wiki:
- update_wiki(apps, apks + archapks)
+ update_wiki(apps, sortedids, apks + archapks)
logging.info("Finished.")