From: Daniel Martí
Date: Sat, 28 Nov 2015 12:09:47 +0000 (+0100)
Subject: Rework app into a class
X-Git-Tag: 0.6.0~88
X-Git-Url: http://www.chiark.greenend.org.uk/ucgi/~ianmdlvl/git?a=commitdiff_plain;h=ab614ab4420cf550dc794e5746d2964270a7f073;p=fdroidserver.git

Rework app into a class

This simplifies usage: it goes from app['Foo'] to app.Foo.

It also lets static analyzers detect invalid attributes, since the set of attributes is now limited by the class definition.

As a bonus, setting the default field values is now done in the constructor, not separately and manually. (A minimal sketch of the new App class is appended after the diff.)
---
diff --git a/fdroidserver/build.py b/fdroidserver/build.py index 45d9d429..5e8054da 100644 --- a/fdroidserver/build.py +++ b/fdroidserver/build.py @@ -320,11 +320,11 @@ def build_server(app, thisbuild, vcs, build_dir, output_dir, force): ftp.mkdir('metadata') ftp.mkdir('srclibs') ftp.chdir('metadata') - ftp.put(os.path.join('metadata', app['id'] + '.txt'), - app['id'] + '.txt') + ftp.put(os.path.join('metadata', app.id + '.txt'), + app.id + '.txt') # And patches if there are any... - if os.path.exists(os.path.join('metadata', app['id'])): - send_dir(os.path.join('metadata', app['id'])) + if os.path.exists(os.path.join('metadata', app.id)): + send_dir(os.path.join('metadata', app.id)) ftp.chdir(homedir) # Create the build directory... @@ -375,7 +375,7 @@ def build_server(app, thisbuild, vcs, build_dir, output_dir, force): # (no need if it's a srclib) if (not basesrclib) and os.path.exists(build_dir): ftp.chdir(homedir + '/build') - fv = '.fdroidvcs-' + app['id'] + fv = '.fdroidvcs-' + app.id ftp.put(os.path.join('build', fv), fv) send_dir(build_dir) @@ -389,7 +389,7 @@ def build_server(app, thisbuild, vcs, build_dir, output_dir, force): cmdline += ' --force --test' if options.verbose: cmdline += ' --verbose' - cmdline += " %s:%s" % (app['id'], thisbuild['vercode']) + cmdline += " %s:%s" % (app.id, thisbuild['vercode']) chan.exec_command('bash -c ". ~/.bsenv && ' + cmdline + '"') output = '' while not chan.exit_status_ready(): @@ -406,7 +406,7 @@ def build_server(app, thisbuild, vcs, build_dir, output_dir, force): if returncode != 0: raise BuildException( "Build.py failed on server for {0}:{1}".format( - app['id'], thisbuild['version']), output) + app.id, thisbuild['version']), output) # Retrieve the built files... logging.info("Retrieving build output...") @@ -423,7 +423,7 @@ def build_server(app, thisbuild, vcs, build_dir, output_dir, force): except: raise BuildException( "Build failed for %s:%s - missing output files".format( - app['id'], thisbuild['version']), output) + app.id, thisbuild['version']), output) ftp.close() finally: @@ -543,7 +543,7 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d if p is not None and p.returncode != 0: raise BuildException("Error cleaning %s:%s" % - (app['id'], thisbuild['version']), p.output) + (app.id, thisbuild['version']), p.output) for root, dirs, files in os.walk(build_dir): @@ -612,7 +612,7 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d if p.returncode != 0: raise BuildException("Error running build command for %s:%s" % - (app['id'], thisbuild['version']), p.output) + (app.id, thisbuild['version']), p.output) # Build native stuff if required... 
if thisbuild['buildjni'] and thisbuild['buildjni'] != ['no']: @@ -640,7 +640,7 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d del manifest_text p = FDroidPopen(cmd, cwd=os.path.join(root_dir, d)) if p.returncode != 0: - raise BuildException("NDK build failed for %s:%s" % (app['id'], thisbuild['version']), p.output) + raise BuildException("NDK build failed for %s:%s" % (app.id, thisbuild['version']), p.output) p = None # Build the release... @@ -702,7 +702,7 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d raise BuildException("Distribute build failed") cid = bconfig.get('app', 'package.domain') + '.' + bconfig.get('app', 'package.name') - if cid != app['id']: + if cid != app.id: raise BuildException("Package ID mismatch between metadata and spec") orientation = bconfig.get('app', 'orientation', 'landscape') @@ -712,7 +712,7 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d cmd = ['./build.py' '--dir', root_dir, '--name', bconfig.get('app', 'title'), - '--package', app['id'], + '--package', app.id, '--version', bconfig.get('app', 'version'), '--orientation', orientation ] @@ -759,8 +759,8 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d bindir = os.path.join(root_dir, 'bin') if p is not None and p.returncode != 0: - raise BuildException("Build failed for %s:%s" % (app['id'], thisbuild['version']), p.output) - logging.info("Successfully built version " + thisbuild['version'] + ' of ' + app['id']) + raise BuildException("Build failed for %s:%s" % (app.id, thisbuild['version']), p.output) + logging.info("Successfully built version " + thisbuild['version'] + ' of ' + app.id) if thisbuild['type'] == 'maven': stdout_apk = '\n'.join([ @@ -860,8 +860,8 @@ def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir, extlib_d raise BuildException("Could not find version information in build in output") if not foundid: raise BuildException("Could not find package ID in output") - if foundid != app['id']: - raise BuildException("Wrong package ID - build " + foundid + " but expected " + app['id']) + if foundid != app.id: + raise BuildException("Wrong package ID - build " + foundid + " but expected " + app.id) # Some apps (e.g. Timeriffic) have had the bonkers idea of # including the entire changelog in the version number. 
Remove @@ -941,7 +941,7 @@ def trybuild(app, thisbuild, build_dir, output_dir, also_check_dir, srclib_dir, return False logging.info("Building version %s (%s) of %s" % ( - thisbuild['version'], thisbuild['vercode'], app['id'])) + thisbuild['version'], thisbuild['vercode'], app.id)) if server: # When using server mode, still keep a local cache of the repo, by @@ -1051,7 +1051,7 @@ def main(): apps = common.read_app_args(options.appid, allapps, True) for appid, app in apps.items(): - if (app['Disabled'] and not options.force) or not app['Repo Type'] or not app['builds']: + if (app.Disabled and not options.force) or not app.RepoType or not app.builds: del apps[appid] if not apps: @@ -1059,10 +1059,10 @@ def main(): if options.latest: for app in apps.itervalues(): - for build in reversed(app['builds']): + for build in reversed(app.builds): if build['disable'] and not options.force: continue - app['builds'] = [build] + app.builds = [build] break if options.wiki: @@ -1078,7 +1078,7 @@ def main(): first = True - for thisbuild in app['builds']: + for thisbuild in app.builds: wikilog = None try: @@ -1086,15 +1086,15 @@ def main(): # the source repo. We can reuse it on subsequent builds, if # there are any. if first: - if app['Repo Type'] == 'srclib': - build_dir = os.path.join('build', 'srclib', app['Repo']) + if app.RepoType == 'srclib': + build_dir = os.path.join('build', 'srclib', app.Repo) else: build_dir = os.path.join('build', appid) # Set up vcs interface and make sure we have the latest code... logging.debug("Getting {0} vcs interface for {1}" - .format(app['Repo Type'], app['Repo'])) - vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir) + .format(app.RepoType, app.Repo)) + vcs = common.getvcs(app.RepoType, app.Repo, build_dir) first = False @@ -1105,17 +1105,17 @@ def main(): options.server, options.force, options.onserver, options.refresh): - if app.get('Binaries', None): + if app.Binaries is not None: # This is an app where we build from source, and # verify the apk contents against a developer's # binary. We get that binary now, and save it # alongside our built one in the 'unsigend' # directory. 
- url = app['Binaries'] + url = app.Binaries url = url.replace('%v', thisbuild['version']) url = url.replace('%c', str(thisbuild['vercode'])) logging.info("...retrieving " + url) - of = "{0}_{1}.apk.binary".format(app['id'], thisbuild['vercode']) + of = "{0}_{1}.apk.binary".format(app.id, thisbuild['vercode']) of = os.path.join(output_dir, of) net.download_file(url, local_filename=of) @@ -1159,7 +1159,7 @@ def main(): logging.error("Error while attempting to publish build log") for app in build_succeeded: - logging.info("success: %s" % (app['id'])) + logging.info("success: %s" % (app.id)) if not options.verbose: for fa in failed_apps: diff --git a/fdroidserver/checkupdates.py b/fdroidserver/checkupdates.py index c68b7553..685d59fe 100644 --- a/fdroidserver/checkupdates.py +++ b/fdroidserver/checkupdates.py @@ -43,10 +43,10 @@ def check_http(app): try: - if 'Update Check Data' not in app: + if not app.UpdateCheckData: raise FDroidException('Missing Update Check Data') - urlcode, codeex, urlver, verex = app['Update Check Data'].split('|') + urlcode, codeex, urlver, verex = app.UpdateCheckData.split('|') vercode = "99999999" if len(urlcode) > 0: @@ -76,7 +76,7 @@ def check_http(app): return (version, vercode) except FDroidException: - msg = "Could not complete http check for app {0} due to unknown error: {1}".format(app['id'], traceback.format_exc()) + msg = "Could not complete http check for app {0} due to unknown error: {1}".format(app.id, traceback.format_exc()) return (None, msg) @@ -90,28 +90,28 @@ def check_tags(app, pattern): try: - if app['Repo Type'] == 'srclib': - build_dir = os.path.join('build', 'srclib', app['Repo']) - repotype = common.getsrclibvcs(app['Repo']) + if app.RepoType == 'srclib': + build_dir = os.path.join('build', 'srclib', app.Repo) + repotype = common.getsrclibvcs(app.Repo) else: - build_dir = os.path.join('build', app['id']) - repotype = app['Repo Type'] + build_dir = os.path.join('build', app.id) + repotype = app.RepoType if repotype not in ('git', 'git-svn', 'hg', 'bzr'): return (None, 'Tags update mode only works for git, hg, bzr and git-svn repositories currently', None) - if repotype == 'git-svn' and ';' not in app['Repo']: + if repotype == 'git-svn' and ';' not in app.Repo: return (None, 'Tags update mode used in git-svn, but the repo was not set up with tags', None) # Set up vcs interface and make sure we have the latest code... 
- vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir) + vcs = common.getvcs(app.RepoType, app.Repo, build_dir) vcs.gotorevision(None) flavours = [] - if len(app['builds']) > 0: - if app['builds'][-1]['gradle']: - flavours = app['builds'][-1]['gradle'] + if len(app.builds) > 0: + if app.builds[-1]['gradle']: + flavours = app.builds[-1]['gradle'] hpak = None htag = None @@ -161,10 +161,10 @@ def check_tags(app, pattern): return (None, "Couldn't find any version information", None) except VCSException as vcse: - msg = "VCS error while scanning app {0}: {1}".format(app['id'], vcse) + msg = "VCS error while scanning app {0}: {1}".format(app.id, vcse) return (None, msg, None) except Exception: - msg = "Could not scan app {0} due to unknown error: {1}".format(app['id'], traceback.format_exc()) + msg = "Could not scan app {0} due to unknown error: {1}".format(app.id, traceback.format_exc()) return (None, msg, None) @@ -178,15 +178,15 @@ def check_repomanifest(app, branch=None): try: - if app['Repo Type'] == 'srclib': - build_dir = os.path.join('build', 'srclib', app['Repo']) - repotype = common.getsrclibvcs(app['Repo']) + if app.RepoType == 'srclib': + build_dir = os.path.join('build', 'srclib', app.Repo) + repotype = common.getsrclibvcs(app.Repo) else: - build_dir = os.path.join('build', app['id']) - repotype = app['Repo Type'] + build_dir = os.path.join('build', app.id) + repotype = app.RepoType # Set up vcs interface and make sure we have the latest code... - vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir) + vcs = common.getvcs(app.RepoType, app.Repo, build_dir) if repotype == 'git': if branch: @@ -200,9 +200,9 @@ def check_repomanifest(app, branch=None): vcs.gotorevision(None) flavours = [] - if len(app['builds']) > 0: - if app['builds'][-1]['gradle']: - flavours = app['builds'][-1]['gradle'] + if len(app.builds) > 0: + if app.builds[-1]['gradle']: + flavours = app.builds[-1]['gradle'] hpak = None hver = None @@ -229,38 +229,38 @@ def check_repomanifest(app, branch=None): return (None, "Couldn't find any version information") except VCSException as vcse: - msg = "VCS error while scanning app {0}: {1}".format(app['id'], vcse) + msg = "VCS error while scanning app {0}: {1}".format(app.id, vcse) return (None, msg) except Exception: - msg = "Could not scan app {0} due to unknown error: {1}".format(app['id'], traceback.format_exc()) + msg = "Could not scan app {0} due to unknown error: {1}".format(app.id, traceback.format_exc()) return (None, msg) def check_repotrunk(app, branch=None): try: - if app['Repo Type'] == 'srclib': - build_dir = os.path.join('build', 'srclib', app['Repo']) - repotype = common.getsrclibvcs(app['Repo']) + if app.RepoType == 'srclib': + build_dir = os.path.join('build', 'srclib', app.Repo) + repotype = common.getsrclibvcs(app.Repo) else: - build_dir = os.path.join('build', app['id']) - repotype = app['Repo Type'] + build_dir = os.path.join('build', app.id) + repotype = app.RepoType if repotype not in ('git-svn', ): return (None, 'RepoTrunk update mode only makes sense in git-svn repositories') # Set up vcs interface and make sure we have the latest code... 
- vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir) + vcs = common.getvcs(app.RepoType, app.Repo, build_dir) vcs.gotorevision(None) ref = vcs.getref() return (ref, ref) except VCSException as vcse: - msg = "VCS error while scanning app {0}: {1}".format(app['id'], vcse) + msg = "VCS error while scanning app {0}: {1}".format(app.id, vcse) return (None, msg) except Exception: - msg = "Could not scan app {0} due to unknown error: {1}".format(app['id'], traceback.format_exc()) + msg = "Could not scan app {0} due to unknown error: {1}".format(app.id, traceback.format_exc()) return (None, msg) @@ -269,7 +269,7 @@ def check_repotrunk(app, branch=None): # the details of the current version. def check_gplay(app): time.sleep(15) - url = 'https://play.google.com/store/apps/details?id=' + app['id'] + url = 'https://play.google.com/store/apps/details?id=' + app.id headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:18.0) Gecko/20100101 Firefox/18.0'} req = urllib2.Request(url, None, headers) try: @@ -308,14 +308,14 @@ def dirs_with_manifest(startdir): # subdir relative to the build dir if found, None otherwise. def possible_subdirs(app): - if app['Repo Type'] == 'srclib': - build_dir = os.path.join('build', 'srclib', app['Repo']) + if app.RepoType == 'srclib': + build_dir = os.path.join('build', 'srclib', app.Repo) else: - build_dir = os.path.join('build', app['id']) + build_dir = os.path.join('build', app.id) flavours = [] - if len(app['builds']) > 0: - build = app['builds'][-1] + if len(app.builds) > 0: + build = app.builds[-1] if build['gradle']: flavours = build['gradle'] @@ -330,24 +330,24 @@ def possible_subdirs(app): def fetch_autoname(app, tag): - if not app["Repo Type"] or app['Update Check Mode'] in ('None', 'Static'): + if not app.RepoType or app.UpdateCheckMode in ('None', 'Static'): return None - if app['Repo Type'] == 'srclib': - build_dir = os.path.join('build', 'srclib', app['Repo']) + if app.RepoType == 'srclib': + build_dir = os.path.join('build', 'srclib', app.Repo) else: - build_dir = os.path.join('build', app['id']) + build_dir = os.path.join('build', app.id) try: - vcs = common.getvcs(app["Repo Type"], app["Repo"], build_dir) + vcs = common.getvcs(app.RepoType, app.Repo, build_dir) vcs.gotorevision(tag) except VCSException: return None flavours = [] - if len(app['builds']) > 0: - if app['builds'][-1]['gradle']: - flavours = app['builds'][-1]['gradle'] + if len(app.builds) > 0: + if app.builds[-1]['gradle']: + flavours = app.builds[-1]['gradle'] logging.debug("...fetch auto name from " + build_dir) new_name = None @@ -362,8 +362,8 @@ def fetch_autoname(app, tag): commitmsg = None if new_name: logging.debug("...got autoname '" + new_name + "'") - if new_name != app['Auto Name']: - app['Auto Name'] = new_name + if new_name != app.AutoName: + app.AutoName = new_name if not commitmsg: commitmsg = "Set autoname of {0}".format(common.getappname(app)) else: @@ -382,7 +382,7 @@ def checkupdates_app(app, first=True): msg = None vercode = None noverok = False - mode = app['Update Check Mode'] + mode = app.UpdateCheckMode if mode.startswith('Tags'): pattern = mode[5:] if len(mode) > 4 else None (version, vercode, tag) = check_tags(app, pattern) @@ -408,9 +408,9 @@ def checkupdates_app(app, first=True): version = None msg = 'Invalid update check method' - if version and vercode and app['Vercode Operation']: + if version and vercode and app.VercodeOperation: oldvercode = str(int(vercode)) - op = app['Vercode Operation'].replace("%c", oldvercode) + op = 
app.VercodeOperation.replace("%c", oldvercode) vercode = str(eval(op)) logging.debug("Applied vercode operation: %s -> %s" % (oldvercode, vercode)) @@ -422,16 +422,16 @@ def checkupdates_app(app, first=True): updating = False if version is None: - logmsg = "...{0} : {1}".format(app['id'], msg) + logmsg = "...{0} : {1}".format(app.id, msg) if noverok: logging.info(logmsg) else: logging.warn(logmsg) - elif vercode == app['Current Version Code']: + elif vercode == app.CurrentVersionCode: logging.info("...up to date") else: - app['Current Version'] = version - app['Current Version Code'] = str(int(vercode)) + app.CurrentVersion = version + app.CurrentVersionCode = str(int(vercode)) updating = True commitmsg = fetch_autoname(app, tag) @@ -443,7 +443,7 @@ def checkupdates_app(app, first=True): commitmsg = 'Update CV of %s to %s' % (name, ver) if options.auto: - mode = app['Auto Update Mode'] + mode = app.AutoUpdateMode if mode in ('None', 'Static'): pass elif mode.startswith('Version '): @@ -457,13 +457,13 @@ def checkupdates_app(app, first=True): suffix = '' gotcur = False latest = None - for build in app['builds']: - if int(build['vercode']) >= int(app['Current Version Code']): + for build in app.builds: + if int(build['vercode']) >= int(app.CurrentVersionCode): gotcur = True if not latest or int(build['vercode']) > int(latest['vercode']): latest = build - if int(latest['vercode']) > int(app['Current Version Code']): + if int(latest['vercode']) > int(app.CurrentVersionCode): logging.info("Refusing to auto update, since the latest build is newer") if not gotcur: @@ -471,21 +471,21 @@ def checkupdates_app(app, first=True): if 'origlines' in newbuild: del newbuild['origlines'] newbuild['disable'] = False - newbuild['vercode'] = app['Current Version Code'] - newbuild['version'] = app['Current Version'] + suffix + newbuild['vercode'] = app.CurrentVersionCode + newbuild['version'] = app.CurrentVersion + suffix logging.info("...auto-generating build for " + newbuild['version']) commit = pattern.replace('%v', newbuild['version']) commit = commit.replace('%c', newbuild['vercode']) newbuild['commit'] = commit - app['builds'].append(newbuild) + app.builds.append(newbuild) name = common.getappname(app) ver = common.getcvname(app) commitmsg = "Update %s to %s" % (name, ver) else: - logging.warn('Invalid auto update mode "' + mode + '" on ' + app['id']) + logging.warn('Invalid auto update mode "' + mode + '" on ' + app.id) if commitmsg: - metadatapath = os.path.join('metadata', app['id'] + '.txt') + metadatapath = os.path.join('metadata', app.id + '.txt') with open(metadatapath, 'w') as f: metadata.write_metadata('txt', f, app) if options.commit: @@ -537,7 +537,7 @@ def main(): else: logging.info("{0} encountered a problem: {1}".format(common.getappname(app), reason)) if version is not None: - stored = app['Current Version'] + stored = app.CurrentVersion if not stored: logging.info("{0} has no Current Version but has version {1} on the Play Store" .format(common.getappname(app), version)) @@ -555,7 +555,7 @@ def main(): for appid, app in apps.iteritems(): - if options.autoonly and app['Auto Update Mode'] in ('None', 'Static'): + if options.autoonly and app.AutoUpdateMode in ('None', 'Static'): logging.debug("Nothing to do for {0}...".format(appid)) continue diff --git a/fdroidserver/common.py b/fdroidserver/common.py index d7260839..87fd3c28 100644 --- a/fdroidserver/common.py +++ b/fdroidserver/common.py @@ -363,10 +363,10 @@ def read_app_args(args, allapps, allow_vercodes=False): vc = vercodes[appid] if 
not vc: continue - app['builds'] = [b for b in app['builds'] if b['vercode'] in vc] - if len(app['builds']) != len(vercodes[appid]): + app.builds = [b for b in app.builds if b['vercode'] in vc] + if len(app.builds) != len(vercodes[appid]): error = True - allvcs = [b['vercode'] for b in app['builds']] + allvcs = [b['vercode'] for b in app.builds] for v in vercodes[appid]: if v not in allvcs: logging.critical("No such vercode %s for app %s" % (v, appid)) @@ -419,23 +419,23 @@ def apknameinfo(filename): def getapkname(app, build): - return "%s_%s.apk" % (app['id'], build['vercode']) + return "%s_%s.apk" % (app.id, build['vercode']) def getsrcname(app, build): - return "%s_%s_src.tar.gz" % (app['id'], build['vercode']) + return "%s_%s_src.tar.gz" % (app.id, build['vercode']) def getappname(app): - if app['Name']: - return app['Name'] - if app['Auto Name']: - return app['Auto Name'] - return app['id'] + if app.Name: + return app.Name + if app.AutoName: + return app.AutoName + return app.id def getcvname(app): - return '%s (%s)' % (app['Current Version'], app['Current Version Code']) + return '%s (%s)' % (app.CurrentVersion, app.CurrentVersionCode) def getvcs(vcstype, remote, local): @@ -1026,7 +1026,7 @@ psearch_g = re.compile(r'.*(packageName|applicationId) *=* *["\']([^"]+)["\'].*' def app_matches_packagename(app, package): if not package: return False - appid = app['Update Check Name'] or app['id'] + appid = app.UpdateCheckName or app.id if appid is None or appid == "Ignore": return True return appid == package @@ -1037,7 +1037,7 @@ def app_matches_packagename(app, package): # All values returned are strings. def parse_androidmanifests(paths, app): - ignoreversions = app['Update Check Ignore'] + ignoreversions = app.UpdateCheckIgnore ignoresearch = re.compile(ignoreversions).search if ignoreversions else None if not paths: @@ -1277,7 +1277,7 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver= p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=root_dir) if p.returncode != 0: raise BuildException("Error running init command for %s:%s" % - (app['id'], build['version']), p.output) + (app.id, build['version']), p.output) # Apply patches if any if build['patch']: @@ -1285,7 +1285,7 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver= for patch in build['patch']: patch = patch.strip() logging.info("Applying " + patch) - patch_path = os.path.join('metadata', app['id'], patch) + patch_path = os.path.join('metadata', app.id, patch) p = FDroidPopen(['patch', '-p1', '-i', os.path.abspath(patch_path)], cwd=build_dir) if p.returncode != 0: raise BuildException("Failed to apply patch %s" % patch_path) @@ -1460,7 +1460,7 @@ def prepare_source(vcs, app, build, build_dir, srclib_dir, extlib_dir, onserver= p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=root_dir) if p.returncode != 0: raise BuildException("Error running prebuild command for %s:%s" % - (app['id'], build['version']), p.output) + (app.id, build['version']), p.output) # Generate (or update) the ant build file, build.xml... if build['update'] and build['update'] != ['no'] and build['type'] == 'ant': diff --git a/fdroidserver/import.py b/fdroidserver/import.py index 4b419f5a..4ead1547 100644 --- a/fdroidserver/import.py +++ b/fdroidserver/import.py @@ -79,20 +79,20 @@ def get_metadata_from_url(app, url): # Figure out what kind of project it is... 
projecttype = None - app['Web Site'] = url # by default, we might override it + app.WebSite = url # by default, we might override it if url.startswith('git://'): projecttype = 'git' repo = url repotype = 'git' - app['Source Code'] = "" - app['Web Site'] = "" + app.SourceCode = "" + app.WebSite = "" elif url.startswith('https://github.com'): projecttype = 'github' repo = url repotype = 'git' - app['Source Code'] = url - app['Issue Tracker'] = url + '/issues' - app['Web Site'] = "" + app.SourceCode = url + app.IssueTracker = url + '/issues' + app.WebSite = "" elif url.startswith('https://gitlab.com/'): projecttype = 'gitlab' # git can be fussy with gitlab URLs unless they end in .git @@ -101,16 +101,16 @@ def get_metadata_from_url(app, url): else: repo = url + '.git' repotype = 'git' - app['Source Code'] = url + '/tree/HEAD' - app['Issue Tracker'] = url + '/issues' + app.SourceCode = url + '/tree/HEAD' + app.IssueTracker = url + '/issues' elif url.startswith('https://bitbucket.org/'): if url.endswith('/'): url = url[:-1] projecttype = 'bitbucket' - app['Source Code'] = url + '/src' - app['Issue Tracker'] = url + '/issues' + app.SourceCode = url + '/src' + app.IssueTracker = url + '/issues' # Figure out the repo type and adddress... - repotype, repo = getrepofrompage(app['Source Code']) + repotype, repo = getrepofrompage(app.SourceCode) if not repotype: logging.error("Unable to determine vcs type. " + repo) sys.exit(1) @@ -139,8 +139,8 @@ def get_metadata_from_url(app, url): vcs.gotorevision(options.rev) root_dir = get_subdir(build_dir) - app['Repo Type'] = repotype - app['Repo'] = repo + app.RepoType = repotype + app.Repo = repo return root_dir, build_dir @@ -175,8 +175,8 @@ def main(): apps = metadata.read_metadata() package, app = metadata.get_default_app_info() - app['id'] = None - app['Update Check Mode'] = "Tags" + app.id = None + app.UpdateCheckMode = "Tags" root_dir = None build_dir = None @@ -185,7 +185,7 @@ def main(): root_dir, build_dir = get_metadata_from_url(app, options.url) elif os.path.isdir('.git'): if options.url: - app['Web Site'] = options.url + app.WebSite = options.url root_dir = get_subdir(os.getcwd()) else: logging.error("Specify project url.") @@ -238,7 +238,7 @@ def main(): continue build[flag] = value - app['builds'].append(build) + app.builds.append(build) # Keep the repo directory to save bandwidth... 
if not os.path.exists('build'): @@ -246,7 +246,7 @@ def main(): if build_dir is not None: shutil.move(build_dir, os.path.join('build', package)) with open('build/.fdroidvcs-' + package, 'w') as f: - f.write(app['Repo Type'] + ' ' + app['Repo']) + f.write(app.RepoType + ' ' + app.Repo) metadatapath = os.path.join('metadata', package + '.txt') with open(metadatapath, 'w') as f: diff --git a/fdroidserver/lint.py b/fdroidserver/lint.py index f7d79255..fae800e8 100644 --- a/fdroidserver/lint.py +++ b/fdroidserver/lint.py @@ -106,7 +106,7 @@ regex_checks = { def check_regexes(app): for f, checks in regex_checks.iteritems(): for m, r in checks: - v = app[f] + v = app.get_field(f) if type(v) == str: if v is None: continue @@ -132,27 +132,27 @@ def get_lastbuild(builds): def check_ucm_tags(app): - lastbuild = get_lastbuild(app['builds']) + lastbuild = get_lastbuild(app.builds) if (lastbuild is not None and lastbuild['commit'] - and app['Update Check Mode'] == 'RepoManifest' + and app.UpdateCheckMode == 'RepoManifest' and not lastbuild['commit'].startswith('unknown') - and lastbuild['vercode'] == app['Current Version Code'] + and lastbuild['vercode'] == app.CurrentVersionCode and not lastbuild['forcevercode'] and any(s in lastbuild['commit'] for s in '.,_-/')): yield "Last used commit '%s' looks like a tag, but Update Check Mode is '%s'" % ( - lastbuild['commit'], app['Update Check Mode']) + lastbuild['commit'], app.UpdateCheckMode) def check_char_limits(app): limits = config['char_limits'] - summ_chars = len(app['Summary']) + summ_chars = len(app.Summary) if summ_chars > limits['Summary']: yield "Summary of length %s is over the %i char limit" % ( summ_chars, limits['Summary']) - desc_charcount = sum(len(l) for l in app['Description']) + desc_charcount = sum(len(l) for l in app.Description) if desc_charcount > limits['Description']: yield "Description of length %s is over the %i char limit" % ( desc_charcount, limits['Description']) @@ -168,31 +168,28 @@ def check_old_links(app): 'gitorious.org', 'code.google.com', ] - if any(s in app['Repo'] for s in usual_sites): + if any(s in app.Repo for s in usual_sites): for f in ['Web Site', 'Source Code', 'Issue Tracker', 'Changelog']: - if any(s in app[f] for s in old_sites): - yield "App is in '%s' but has a link to '%s'" % (app['Repo'], app[f]) + v = app.get_field(f) + if any(s in v for s in old_sites): + yield "App is in '%s' but has a link to '%s'" % (app.Repo, v) def check_useless_fields(app): - if app['Update Check Name'] == app['id']: + if app.UpdateCheckName == app.id: yield "Update Check Name is set to the known app id - it can be removed" filling_ucms = re.compile(r'^(Tags.*|RepoManifest.*)') def check_checkupdates_ran(app): - if filling_ucms.match(app['Update Check Mode']): - if all(app[f] == metadata.app_defaults[f] for f in [ - 'Auto Name', - 'Current Version', - 'Current Version Code', - ]): + if filling_ucms.match(app.UpdateCheckMode): + if not app.AutoName and not app.CurrentVersion and app.CurrentVersionCode == '0': yield "UCM is set but it looks like checkupdates hasn't been run yet" def check_empty_fields(app): - if not app['Categories']: + if not app.Categories: yield "Categories are not set" all_categories = Set([ @@ -217,37 +214,37 @@ all_categories = Set([ def check_categories(app): - for categ in app['Categories']: + for categ in app.Categories: if categ not in all_categories: yield "Category '%s' is not valid" % categ def check_duplicates(app): - if app['Name'] and app['Name'] == app['Auto Name']: - yield "Name '%s' is just the 
auto name - remove it" % app['Name'] + if app.Name and app.Name == app.AutoName: + yield "Name '%s' is just the auto name - remove it" % app.Name links_seen = set() for f in ['Source Code', 'Web Site', 'Issue Tracker', 'Changelog']: - if not app[f]: + v = app.get_field(f) + if not v: continue - v = app[f].lower() + v = v.lower() if v in links_seen: yield "Duplicate link in '%s': %s" % (f, v) else: links_seen.add(v) - name = app['Name'] or app['Auto Name'] - if app['Summary'] and name: - if app['Summary'].lower() == name.lower(): - yield "Summary '%s' is just the app's name" % app['Summary'] + name = app.Name or app.AutoName + if app.Summary and name: + if app.Summary.lower() == name.lower(): + yield "Summary '%s' is just the app's name" % app.Summary - desc = app['Description'] - if app['Summary'] and desc and len(desc) == 1: - if app['Summary'].lower() == desc[0].lower(): - yield "Description '%s' is just the app's summary" % app['Summary'] + if app.Summary and app.Description and len(app.Description) == 1: + if app.Summary.lower() == app.Description[0].lower(): + yield "Description '%s' is just the app's summary" % app.Summary seenlines = set() - for l in app['Description']: + for l in app.Description: if len(l) < 1: continue if l in seenlines: @@ -259,7 +256,7 @@ desc_url = re.compile(r'(^|[^[])\[([^ ]+)( |\]|$)') def check_mediawiki_links(app): - wholedesc = ' '.join(app['Description']) + wholedesc = ' '.join(app.Description) for um in desc_url.finditer(wholedesc): url = um.group(1) for m, r in http_checks: @@ -271,7 +268,7 @@ def check_bulleted_lists(app): validchars = ['*', '#'] lchar = '' lcount = 0 - for l in app['Description']: + for l in app.Description: if len(l) < 1: lcount = 0 continue @@ -287,7 +284,7 @@ def check_bulleted_lists(app): def check_builds(app): - for build in app['builds']: + for build in app.builds: if build['disable']: continue for s in ['master', 'origin', 'HEAD', 'default', 'trunk']: @@ -318,7 +315,7 @@ def main(): apps = common.read_app_args(options.appid, allapps, False) for appid, app in apps.iteritems(): - if app['Disabled']: + if app.Disabled: continue warns = [] diff --git a/fdroidserver/metadata.py b/fdroidserver/metadata.py index fa7b56ed..34059d8a 100644 --- a/fdroidserver/metadata.py +++ b/fdroidserver/metadata.py @@ -53,43 +53,128 @@ class MetaDataException(Exception): def __str__(self): return self.value -# In the order in which they are laid out on files -app_defaults = OrderedDict([ - ('Disabled', None), - ('AntiFeatures', []), - ('Provides', None), - ('Categories', ['None']), - ('License', 'Unknown'), - ('Web Site', ''), - ('Source Code', ''), - ('Issue Tracker', ''), - ('Changelog', ''), - ('Donate', None), - ('FlattrID', None), - ('Bitcoin', None), - ('Litecoin', None), - ('Name', None), - ('Auto Name', ''), - ('Summary', ''), - ('Description', []), - ('Requires Root', False), - ('Repo Type', ''), - ('Repo', ''), - ('Binaries', None), - ('Maintainer Notes', []), - ('Archive Policy', None), - ('Auto Update Mode', 'None'), - ('Update Check Mode', 'None'), - ('Update Check Ignore', None), - ('Vercode Operation', None), - ('Update Check Name', None), - ('Update Check Data', None), - ('Current Version', ''), - ('Current Version Code', '0'), - ('No Source Since', ''), +app_fields = set([ + 'Disabled', + 'AntiFeatures', + 'Provides', + 'Categories', + 'License', + 'Web Site', + 'Source Code', + 'Issue Tracker', + 'Changelog', + 'Donate', + 'FlattrID', + 'Bitcoin', + 'Litecoin', + 'Name', + 'Auto Name', + 'Summary', + 'Description', + 'Requires 
Root', + 'Repo Type', + 'Repo', + 'Binaries', + 'Maintainer Notes', + 'Archive Policy', + 'Auto Update Mode', + 'Update Check Mode', + 'Update Check Ignore', + 'Vercode Operation', + 'Update Check Name', + 'Update Check Data', + 'Current Version', + 'Current Version Code', + 'No Source Since', + + 'comments', # For formats that don't do inline comments + 'builds', # For formats that do builds as a list ]) +class App(): + + def __init__(self): + self.Disabled = None + self.AntiFeatures = [] + self.Provides = None + self.Categories = ['None'] + self.License = 'Unknown' + self.WebSite = '' + self.SourceCode = '' + self.IssueTracker = '' + self.Changelog = '' + self.Donate = None + self.FlattrID = None + self.Bitcoin = None + self.Litecoin = None + self.Name = None + self.AutoName = '' + self.Summary = '' + self.Description = [] + self.RequiresRoot = False + self.RepoType = '' + self.Repo = '' + self.Binaries = None + self.MaintainerNotes = [] + self.ArchivePolicy = None + self.AutoUpdateMode = 'None' + self.UpdateCheckMode = 'None' + self.UpdateCheckIgnore = None + self.VercodeOperation = None + self.UpdateCheckName = None + self.UpdateCheckData = None + self.CurrentVersion = '' + self.CurrentVersionCode = '0' + self.NoSourceSince = '' + + self.id = None + self.metadatapath = None + self.builds = [] + self.comments = {} + self.added = None + self.lastupdated = None + + @classmethod + def field_to_attr(cls, f): + return f.replace(' ', '') + + @classmethod + def attr_to_field(cls, k): + if k in app_fields: + return k + f = re.sub(r'([a-z])([A-Z])', r'\1 \2', k) + return f + + def field_dict(self): + return {App.attr_to_field(k): v for k, v in self.__dict__.iteritems()} + + def get_field(self, f): + if f not in app_fields: + raise MetaDataException('Unrecognised app field: ' + f) + k = App.field_to_attr(f) + return getattr(self, k) + + def set_field(self, f, v): + if f not in app_fields: + raise MetaDataException('Unrecognised app field: ' + f) + k = App.field_to_attr(f) + self.__dict__[k] = v + + def append_field(self, f, v): + if f not in app_fields: + raise MetaDataException('Unrecognised app field: ' + f) + k = App.field_to_attr(f) + if k not in self.__dict__: + self.__dict__[k] = [v] + else: + self.__dict__[k].append(v) + + def update_fields(self, d): + for f, v in d.iteritems(): + self.set_field(f, v) + + # In the order in which they are laid out on files # Sorted by their action and their place in the build timeline # These variables can have varying datatypes. For example, anything with @@ -238,13 +323,13 @@ valuetypes = { # Check an app's metadata information for integrity errors -def check_metadata(info): +def check_metadata(app): for v in valuetypes: for field in v.fields: - v.check(info[field], info['id']) - for build in info['builds']: + v.check(app.get_field(field), app.id) + for build in app.builds: for attr in v.attrs: - v.check(build[attr], info['id']) + v.check(build[attr], app.id) # Formatter for descriptions. 
Create an instance, and call parseline() with @@ -519,11 +604,11 @@ def read_metadata(xref=True): + glob.glob(os.path.join('metadata', '*.json')) + glob.glob(os.path.join('metadata', '*.xml')) + glob.glob(os.path.join('metadata', '*.yaml'))): - appid, appinfo = parse_metadata(metadatapath) - if appid in apps: - raise MetaDataException("Found multiple metadata files for " + appid) - check_metadata(appinfo) - apps[appid] = appinfo + app = parse_metadata(metadatapath) + if app.id in apps: + raise MetaDataException("Found multiple metadata files for " + app.id) + check_metadata(app) + apps[app.id] = app if xref: # Parse all descriptions at load time, just to ensure cross-referencing @@ -535,7 +620,7 @@ def read_metadata(xref=True): for appid, app in apps.iteritems(): try: - description_html(app['Description'], linkres) + description_html(app.Description, linkres) except MetaDataException, e: raise MetaDataException("Problem with description of " + appid + " - " + str(e)) @@ -555,7 +640,7 @@ def metafieldtype(name): return 'buildv2' if name == 'Use Built': return 'obsolete' - if name not in app_defaults: + if name not in app_fields: return 'unknown' return 'string' @@ -603,44 +688,38 @@ def get_default_app_info(metadatapath=None): else: appid, _ = common.get_extension(os.path.basename(metadatapath)) - thisinfo = {} - thisinfo.update(app_defaults) - thisinfo['metadatapath'] = metadatapath + app = App() + app.metadatapath = metadatapath if appid is not None: - thisinfo['id'] = appid - - # General defaults... - thisinfo['builds'] = [] - thisinfo['comments'] = dict() + app.id = appid - return appid, thisinfo + return app def sorted_builds(builds): return sorted(builds, key=lambda build: int(build['vercode'])) -def post_metadata_parse(thisinfo): +def post_metadata_parse(app): - supported_metadata = app_defaults.keys() + ['comments', 'builds', 'id', 'metadatapath'] - for k, v in thisinfo.iteritems(): - if k not in supported_metadata: - raise MetaDataException("Unrecognised metadata: {0}: {1}" - .format(k, v)) + for f in app_fields: + v = app.get_field(f) if type(v) in (float, int): - thisinfo[k] = str(v) + app.set_field(f, str(v)) # convert to the odd internal format - for k in ('Description', 'Maintainer Notes'): - if isinstance(thisinfo[k], basestring): - text = thisinfo[k].rstrip().lstrip() - thisinfo[k] = text.split('\n') + for f in ('Description', 'Maintainer Notes'): + v = app.get_field(f) + if isinstance(v, basestring): + text = v.rstrip().lstrip() + app.set_field(f, text.split('\n')) supported_flags = (flag_defaults.keys() - + ['vercode', 'version', 'versionCode', 'versionName']) + + ['vercode', 'version', 'versionCode', 'versionName', + 'type', 'ndk_path']) esc_newlines = re.compile('\\\\( |\\n)') - for build in thisinfo['builds']: + for build in app.builds: for k, v in build.items(): if k not in supported_flags: raise MetaDataException("Unrecognised build flag: {0}={1}" @@ -683,13 +762,13 @@ def post_metadata_parse(thisinfo): if isinstance(v, bool): build[k] = 'yes' if v else 'no' - if not thisinfo['Description']: - thisinfo['Description'].append('No description available') + if not app.Description: + app.Description = ['No description available'] - for build in thisinfo['builds']: + for build in app.builds: fill_build_defaults(build) - thisinfo['builds'] = sorted_builds(thisinfo['builds']) + app.builds = sorted_builds(app.builds) # Parse metadata for a single application. 
@@ -772,7 +851,7 @@ def parse_metadata(metadatapath): def parse_json_metadata(metadatapath): - appid, thisinfo = get_default_app_info(metadatapath) + app = get_default_app_info(metadatapath) # fdroid metadata is only strings and booleans, no floats or ints. And # json returns unicode, and fdroidserver still uses plain python strings @@ -781,15 +860,15 @@ def parse_json_metadata(metadatapath): object_hook=_decode_dict, parse_int=lambda s: s, parse_float=lambda s: s) - thisinfo.update(jsoninfo) - post_metadata_parse(thisinfo) + app.update_fields(jsoninfo) + post_metadata_parse(app) - return (appid, thisinfo) + return app def parse_xml_metadata(metadatapath): - appid, thisinfo = get_default_app_info(metadatapath) + app = get_default_app_info(metadatapath) tree = ElementTree.ElementTree(file=metadatapath) root = tree.getroot() @@ -798,54 +877,46 @@ def parse_xml_metadata(metadatapath): logging.critical(metadatapath + ' does not have root as !') sys.exit(1) - supported_metadata = app_defaults.keys() for child in root: if child.tag != 'builds': # builds does not have name="" attrib name = child.attrib['name'] - if name not in supported_metadata: - raise MetaDataException("Unrecognised metadata: <" - + child.tag + ' name="' + name + '">' - + child.text - + "') if child.tag == 'string': - thisinfo[name] = child.text + app.set_field(name, child.text) elif child.tag == 'string-array': items = [] for item in child: items.append(item.text) - thisinfo[name] = items + app.set_field(name, items) elif child.tag == 'builds': - builds = [] for build in child: builddict = dict() for key in build: builddict[key.tag] = key.text - builds.append(builddict) - thisinfo['builds'] = builds + app.builds.append(builddict) # TODO handle this using 0: - wikidata += "This application has partially or entirely been missing source code since version " + app['No Source Since'] + ".\n\n" - if len(app['Current Version']) > 0: - wikidata += "The current (recommended) version is " + app['Current Version'] - wikidata += " (version code " + app['Current Version Code'] + ").\n\n" + wikidata += " (Check mode: " + app.UpdateCheckMode + ") " + wikidata += " (Auto-update mode: " + app.AutoUpdateMode + ")\n\n" + if len(app.NoSourceSince) > 0: + wikidata += "This application has partially or entirely been missing source code since version " + app.NoSourceSince + ".\n\n" + if len(app.CurrentVersion) > 0: + wikidata += "The current (recommended) version is " + app.CurrentVersion + wikidata += " (version code " + app.CurrentVersionCode + ").\n\n" validapks = 0 for apk in apklist: wikidata += "==" + apk['version'] + "==\n" @@ -200,21 +200,21 @@ def update_wiki(apps, sortedids, apks): wikidata += "Version code: " + str(apk['versioncode']) + '\n' wikidata += '\n[[Category:' + wikicat + ']]\n' - if len(app['No Source Since']) > 0: + if len(app.NoSourceSince) > 0: wikidata += '\n[[Category:Apps missing source code]]\n' - if validapks == 0 and not app['Disabled']: + if validapks == 0 and not app.Disabled: wikidata += '\n[[Category:Apps with no packages]]\n' - if cantupdate and not app['Disabled']: + if cantupdate and not app.Disabled: wikidata += "\n[[Category:Apps we can't update]]\n" - if buildfails and not app['Disabled']: + if buildfails and not app.Disabled: wikidata += "\n[[Category:Apps with failing builds]]\n" - elif not gotcurrentver and not cantupdate and not app['Disabled'] and app['Update Check Mode'] != "Static": + elif not gotcurrentver and not cantupdate and not app.Disabled and app.UpdateCheckMode != "Static": wikidata += 
'\n[[Category:Apps to Update]]\n' - if app['Disabled']: + if app.Disabled: wikidata += '\n[[Category:Apps that are disabled]]\n' - if app['Update Check Mode'] == 'None' and not app['Disabled']: + if app.UpdateCheckMode == 'None' and not app.Disabled: wikidata += '\n[[Category:Apps with no update check]]\n' - for appcat in app['Categories']: + for appcat in app.Categories: wikidata += '\n[[Category:{0}]]\n'.format(appcat) # We can't have underscores in the page name, even if they're in @@ -231,7 +231,7 @@ def update_wiki(apps, sortedids, apks): # Make a redirect from the name to the ID too, unless there's # already an existing page with the name and it isn't a redirect. noclobber = False - apppagename = app['Name'].replace('_', ' ') + apppagename = app.Name.replace('_', ' ') apppagename = apppagename.replace('{', '') apppagename = apppagename.replace('}', ' ') apppagename = apppagename.replace(':', ' ') @@ -290,7 +290,7 @@ def delete_disabled_builds(apps, apkcache, repodirs): :param repodirs: the repo directories to process """ for appid, app in apps.iteritems(): - for build in app['builds']: + for build in app.builds: if not build['disable']: continue apkfilename = appid + '_' + str(build['vercode']) + '.apk' @@ -805,7 +805,7 @@ def make_index(apps, sortedids, apks, repodir, archive, categories): for appid in sortedids: app = apps[appid] - if app['Disabled'] is not None: + if app.Disabled is not None: continue # Get a list of the apks for this app... @@ -818,57 +818,57 @@ def make_index(apps, sortedids, apks, repodir, archive, categories): continue apel = doc.createElement("application") - apel.setAttribute("id", app['id']) + apel.setAttribute("id", app.id) root.appendChild(apel) - addElement('id', app['id'], doc, apel) - if 'added' in app: - addElement('added', time.strftime('%Y-%m-%d', app['added']), doc, apel) - if 'lastupdated' in app: - addElement('lastupdated', time.strftime('%Y-%m-%d', app['lastupdated']), doc, apel) - addElement('name', app['Name'], doc, apel) - addElement('summary', app['Summary'], doc, apel) - if app['icon']: - addElement('icon', app['icon'], doc, apel) + addElement('id', app.id, doc, apel) + if app.added: + addElement('added', time.strftime('%Y-%m-%d', app.added), doc, apel) + if app.lastupdated: + addElement('lastupdated', time.strftime('%Y-%m-%d', app.lastupdated), doc, apel) + addElement('name', app.Name, doc, apel) + addElement('summary', app.Summary, doc, apel) + if app.icon: + addElement('icon', app.icon, doc, apel) def linkres(appid): if appid in apps: - return ("fdroid.app:" + appid, apps[appid]['Name']) + return ("fdroid.app:" + appid, apps[appid].Name) raise MetaDataException("Cannot resolve app id " + appid) addElement('desc', - metadata.description_html(app['Description'], linkres), + metadata.description_html(app.Description, linkres), doc, apel) - addElement('license', app['License'], doc, apel) - if 'Categories' in app and app['Categories']: - addElement('categories', ','.join(app["Categories"]), doc, apel) + addElement('license', app.License, doc, apel) + if app.Categories: + addElement('categories', ','.join(app.Categories), doc, apel) # We put the first (primary) category in LAST, which will have # the desired effect of making clients that only understand one # category see that one. 
- addElement('category', app["Categories"][0], doc, apel) - addElement('web', app['Web Site'], doc, apel) - addElement('source', app['Source Code'], doc, apel) - addElement('tracker', app['Issue Tracker'], doc, apel) - addElementNonEmpty('changelog', app['Changelog'], doc, apel) - addElementNonEmpty('donate', app['Donate'], doc, apel) - addElementNonEmpty('bitcoin', app['Bitcoin'], doc, apel) - addElementNonEmpty('litecoin', app['Litecoin'], doc, apel) - addElementNonEmpty('flattr', app['FlattrID'], doc, apel) + addElement('category', app.Categories[0], doc, apel) + addElement('web', app.WebSite, doc, apel) + addElement('source', app.SourceCode, doc, apel) + addElement('tracker', app.IssueTracker, doc, apel) + addElementNonEmpty('changelog', app.Changelog, doc, apel) + addElementNonEmpty('donate', app.Donate, doc, apel) + addElementNonEmpty('bitcoin', app.Bitcoin, doc, apel) + addElementNonEmpty('litecoin', app.Litecoin, doc, apel) + addElementNonEmpty('flattr', app.FlattrID, doc, apel) # These elements actually refer to the current version (i.e. which # one is recommended. They are historically mis-named, and need # changing, but stay like this for now to support existing clients. - addElement('marketversion', app['Current Version'], doc, apel) - addElement('marketvercode', app['Current Version Code'], doc, apel) + addElement('marketversion', app.CurrentVersion, doc, apel) + addElement('marketvercode', app.CurrentVersionCode, doc, apel) - if app['AntiFeatures']: - af = app['AntiFeatures'] + if app.AntiFeatures: + af = app.AntiFeatures if af: addElementNonEmpty('antifeatures', ','.join(af), doc, apel) - if app['Provides']: - pv = app['Provides'].split(',') + if app.Provides: + pv = app.Provides.split(',') addElementNonEmpty('provides', ','.join(pv), doc, apel) - if app['Requires Root']: + if app.RequiresRoot: addElement('requirements', 'root', doc, apel) # Sort the apk list into version order, just so the web site @@ -888,7 +888,7 @@ def make_index(apps, sortedids, apks, repodir, archive, categories): # find the APK for the "Current Version" if current_version_code < apk['versioncode']: current_version_code = apk['versioncode'] - if current_version_code < int(app['Current Version Code']): + if current_version_code < int(app.CurrentVersionCode): current_version_file = apk['apkname'] apkel = doc.createElement("package") @@ -920,8 +920,8 @@ def make_index(apps, sortedids, apks, repodir, archive, categories): if current_version_file is not None \ and config['make_current_version_link'] \ and repodir == 'repo': # only create these - sanitized_name = re.sub('''[ '"&%?+=/]''', '', - app[config['current_version_name_source']]) + namefield = config['current_version_name_source'] + sanitized_name = re.sub('''[ '"&%?+=/]''', '', app.get_field(namefield)) apklinkname = sanitized_name + '.apk' current_version_path = os.path.join(repodir, current_version_file) if os.path.islink(apklinkname): @@ -996,8 +996,8 @@ def archive_old_apks(apps, apks, archapks, repodir, archivedir, defaultkeepversi for appid, app in apps.iteritems(): - if app['Archive Policy']: - keepversions = int(app['Archive Policy'][:-9]) + if app.ArchivePolicy: + keepversions = int(app.ArchivePolicy[:-9]) else: keepversions = defaultkeepversions @@ -1163,7 +1163,7 @@ def main(): # Generate a list of categories... 
categories = set() for app in apps.itervalues(): - categories.update(app['Categories']) + categories.update(app.Categories) # Read known apks data (will be updated and written back when we've finished) knownapks = common.KnownApks() @@ -1234,8 +1234,6 @@ def main(): # same time. for appid, app in apps.iteritems(): bestver = 0 - added = None - lastupdated = None for apk in apks + archapks: if apk['id'] == appid: if apk['versioncode'] > bestver: @@ -1243,34 +1241,30 @@ def main(): bestapk = apk if 'added' in apk: - if not added or apk['added'] < added: - added = apk['added'] - if not lastupdated or apk['added'] > lastupdated: - lastupdated = apk['added'] + if not app.added or apk['added'] < app.added: + app.added = apk['added'] + if not app.lastupdated or apk['added'] > app.lastupdated: + app.lastupdated = apk['added'] - if added: - app['added'] = added - else: + if not app.added: logging.debug("Don't know when " + appid + " was added") - if lastupdated: - app['lastupdated'] = lastupdated - else: + if not app.lastupdated: logging.debug("Don't know when " + appid + " was last updated") if bestver == 0: - if app['Name'] is None: - app['Name'] = app['Auto Name'] or appid - app['icon'] = None + if app.Name is None: + app.Name = app.AutoName or appid + app.icon = None logging.debug("Application " + appid + " has no packages") else: - if app['Name'] is None: - app['Name'] = bestapk['name'] - app['icon'] = bestapk['icon'] if 'icon' in bestapk else None + if app.Name is None: + app.Name = bestapk['name'] + app.icon = bestapk['icon'] if 'icon' in bestapk else None # Sort the app list by name, then the web site doesn't have to by default. # (we had to wait until we'd scanned the apks to do this, because mostly the # name comes from there!) - sortedids = sorted(apps.iterkeys(), key=lambda appid: apps[appid]['Name'].upper()) + sortedids = sorted(apps.iterkeys(), key=lambda appid: apps[appid].Name.upper()) # APKs are placed into multiple repos based on the app package, providing # per-app subscription feeds for nightly builds and things like it @@ -1309,10 +1303,10 @@ def main(): appid = line.rstrip() data += appid + "\t" app = apps[appid] - data += app['Name'] + "\t" - if app['icon'] is not None: - data += app['icon'] + "\t" - data += app['License'] + "\n" + data += app.Name + "\t" + if app.icon is not None: + data += app.icon + "\t" + data += app.License + "\n" with open(os.path.join(repodirs[0], 'latestapps.dat'), 'w') as f: f.write(data) diff --git a/tests/common.TestCase b/tests/common.TestCase index 4e602aac..d2aec276 100755 --- a/tests/common.TestCase +++ b/tests/common.TestCase @@ -116,8 +116,8 @@ class CommonTest(unittest.TestCase): config['sdk_path'] = os.getenv('ANDROID_HOME') config['build_tools'] = 'FAKE_BUILD_TOOLS_VERSION' fdroidserver.common.config = config - app = dict() - app['id'] = 'org.fdroid.froid' + app = fdroidserver.metadata.App() + app.id = 'org.fdroid.froid' build = dict(fdroidserver.metadata.flag_defaults) build['commit'] = 'master' build['forceversion'] = True diff --git a/tests/import.TestCase b/tests/import.TestCase index 97d9225d..1d00a688 100755 --- a/tests/import.TestCase +++ b/tests/import.TestCase @@ -30,12 +30,12 @@ class ImportTest(unittest.TestCase): fdroidserver.common.config['sdk_path'] = '/fake/path/to/android-sdk' url = 'https://gitlab.com/fdroid/fdroidclient' - appid, app = fdroidserver.metadata.get_default_app_info() - app['Update Check Mode'] = "Tags" + app = fdroidserver.metadata.get_default_app_info() + app.UpdateCheckMode = "Tags" root_dir, src_dir = 
import_proxy.get_metadata_from_url(app, url) - self.assertEquals(app['Repo Type'], 'git') - self.assertEquals(app['Web Site'], 'https://gitlab.com/fdroid/fdroidclient') - self.assertEquals(app['Repo'], 'https://gitlab.com/fdroid/fdroidclient.git') + self.assertEquals(app.RepoType, 'git') + self.assertEquals(app.WebSite, 'https://gitlab.com/fdroid/fdroidclient') + self.assertEquals(app.Repo, 'https://gitlab.com/fdroid/fdroidclient.git') if __name__ == "__main__": diff --git a/tests/metadata.TestCase b/tests/metadata.TestCase index e81d0158..1dc77cc4 100755 --- a/tests/metadata.TestCase +++ b/tests/metadata.TestCase @@ -39,10 +39,15 @@ class MetadataTest(unittest.TestCase): apps = fdroidserver.metadata.read_metadata(xref=True) for appid in ('org.smssecure.smssecure', 'org.adaway', 'net.osmand.plus', 'org.videolan.vlc'): - with open(os.path.join('metadata', appid + '.pickle'), 'r') as f: + app = apps[appid] + savepath = os.path.join('metadata', appid + '.pickle') + self.assertTrue(appid in apps) + with open(savepath, 'r') as f: frompickle = pickle.load(f) - self.assertTrue(appid in apps.keys()) - self.assertEquals(apps[appid], frompickle) + frommeta = app.field_dict() + self.assertEquals(frommeta, frompickle) + # with open(savepath, 'wb') as f: + # pickle.dump(app, f) if __name__ == "__main__": diff --git a/tests/metadata/net.osmand.plus.pickle b/tests/metadata/net.osmand.plus.pickle index bab0dc72..5bc7edd3 100644 --- a/tests/metadata/net.osmand.plus.pickle +++ b/tests/metadata/net.osmand.plus.pickle @@ -491,12 +491,16 @@ S'metadata/net.osmand.plus.xml' p178 sS'Disabled' p179 -NsS'Update Check Name' +NsS'added' p180 -NsS'Vercode Operation' +NsS'lastupdated' p181 -NsS'Current Version' +NsS'Update Check Name' p182 -S'1.9.5' +NsS'Vercode Operation' p183 +NsS'Current Version' +p184 +S'1.9.5' +p185 s. diff --git a/tests/metadata/org.adaway.pickle b/tests/metadata/org.adaway.pickle index 55f4599a..7ae60cd3 100644 --- a/tests/metadata/org.adaway.pickle +++ b/tests/metadata/org.adaway.pickle @@ -2290,4 +2290,8 @@ NsS'Update Check Name' p483 NsS'Vercode Operation' p484 -Ns. \ No newline at end of file +NsS'added' +p485 +NsS'lastupdated' +p486 +Ns. diff --git a/tests/metadata/org.smssecure.smssecure.pickle b/tests/metadata/org.smssecure.smssecure.pickle index de095d93..6f5565e5 100644 --- a/tests/metadata/org.smssecure.smssecure.pickle +++ b/tests/metadata/org.smssecure.smssecure.pickle @@ -772,4 +772,8 @@ NsS'Update Check Name' p227 NsS'Vercode Operation' p228 -Ns. \ No newline at end of file +NsS'added' +p229 +NsS'lastupdated' +p230 +Ns. diff --git a/tests/metadata/org.videolan.vlc.pickle b/tests/metadata/org.videolan.vlc.pickle index 51b8ddf7..a62fd27e 100644 --- a/tests/metadata/org.videolan.vlc.pickle +++ b/tests/metadata/org.videolan.vlc.pickle @@ -5610,14 +5610,18 @@ S'metadata/org.videolan.vlc.yaml' p1519 sS'Disabled' p1520 -NsS'Update Check Name' +NsS'added' p1521 -NsS'Vercode Operation' +NsS'lastupdated' p1522 -S'%c + 5' +NsS'Update Check Name' p1523 -sS'Current Version' +NsS'Vercode Operation' p1524 -S'1.2.6' +S'%c + 5' p1525 +sS'Current Version' +p1526 +S'1.2.6' +p1527 s.
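
For orientation, here is a minimal, self-contained sketch of the App pattern this commit introduces in fdroidserver/metadata.py. It is not the full class: only a handful of fields are shown and MetaDataException is replaced by a plain ValueError, but the constructor-set defaults and the field-name/attribute-name conversion mirror the diff above.

import re

# Reduced field set for illustration; the real app_fields in metadata.py
# lists every supported metadata field plus 'comments' and 'builds'.
app_fields = set([
    'Disabled',
    'Categories',
    'Web Site',
    'Source Code',
    'Repo Type',
    'Repo',
    'Update Check Mode',
    'Current Version',
    'Current Version Code',
])


class App(object):

    def __init__(self):
        # Defaults now live in the constructor instead of a separate
        # app_defaults dict, so every instance starts fully populated.
        self.Disabled = None
        self.Categories = ['None']
        self.WebSite = ''
        self.SourceCode = ''
        self.RepoType = ''
        self.Repo = ''
        self.UpdateCheckMode = 'None'
        self.CurrentVersion = ''
        self.CurrentVersionCode = '0'
        self.id = None
        self.builds = []

    @classmethod
    def field_to_attr(cls, f):
        # 'Update Check Mode' -> 'UpdateCheckMode'
        return f.replace(' ', '')

    @classmethod
    def attr_to_field(cls, k):
        # 'UpdateCheckMode' -> 'Update Check Mode'; names that are already
        # valid field names (e.g. 'Repo') pass through unchanged.
        if k in app_fields:
            return k
        return re.sub(r'([a-z])([A-Z])', r'\1 \2', k)

    def get_field(self, f):
        if f not in app_fields:
            raise ValueError('Unrecognised app field: ' + f)
        return getattr(self, App.field_to_attr(f))

    def set_field(self, f, v):
        if f not in app_fields:
            raise ValueError('Unrecognised app field: ' + f)
        setattr(self, App.field_to_attr(f), v)


if __name__ == '__main__':
    app = App()
    app.id = 'org.example.app'
    app.set_field('Repo Type', 'git')   # parser-style access by field name
    assert app.RepoType == 'git'        # attribute access, as used in build.py
    assert App.attr_to_field('CurrentVersionCode') == 'Current Version Code'

As the diff shows, callers that know the attribute name (build.py, checkupdates.py, update.py) now use plain attribute access, while code that still works with the old space-separated field names (the txt/xml/json parsers and lint.py) goes through get_field()/set_field().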