from argparse import ArgumentParser
import logging
+from . import _
from . import common
from . import metadata
from .exception import BuildException, VCSException
def get_gradle_compile_commands(build):
- compileCommands = ['compile', 'releaseCompile']
+ compileCommands = ['compile', 'releaseCompile',
+ 'provided', 'releaseProvided',
+ 'apk', 'releaseApk',
+ 'implementation', 'releaseImplementation',
+ 'api', 'releaseApi',
+ 'compileOnly', 'releaseCompileOnly',
+ 'runtimeOnly', 'releaseRuntimeOnly']
if build.gradle and build.gradle != ['yes']:
compileCommands += [flavor + 'Compile' for flavor in build.gradle]
compileCommands += [flavor + 'ReleaseCompile' for flavor in build.gradle]
return [re.compile(r'\s*' + c, re.IGNORECASE) for c in compileCommands]
-def scan_source(build_dir, build):
+def scan_source(build_dir, build=metadata.Build()):
"""Scan the source code in the given directory (and all subdirectories)
and return the number of fatal problems encountered
"""
]
}
+ whitelisted = [
+ 'firebase-jobdispatcher', # https://github.com/firebase/firebase-jobdispatcher-android/blob/master/LICENSE
+ 'com.firebaseui', # https://github.com/firebase/FirebaseUI-Android/blob/master/LICENSE
+ 'geofire-android' # https://github.com/firebase/geofire-java/blob/master/LICENSE
+ ]
+
+ def is_whitelisted(s):
+ return any(wl in s for wl in whitelisted)
+
def suspects_found(s):
for n, r in usual_suspects.items():
- if r.match(s):
+ if r.match(s) and not is_whitelisted(s):
yield n
gradle_mavenrepo = re.compile(r'maven *{ *(url)? *[\'"]?([^ \'"]*)[\'"]?')
return any(command.match(line) for command in gradle_compile_commands)
# Iterate through all files in the source code
- for dirpath, dirnames, filenames in os.walk(build_dir, topdown=True):
+ for root, dirs, files in os.walk(build_dir, topdown=True):
# It's topdown, so checking the basename is enough
for ignoredir in ('.hg', '.git', '.svn', '.bzr'):
- if ignoredir in dirnames:
- dirnames.remove(ignoredir)
+ if ignoredir in dirs:
+ dirs.remove(ignoredir)
- for curfile in filenames:
+ for curfile in files:
if curfile in ['.DS_Store']:
continue
# Path (relative) to the file
- filepath = os.path.join(dirpath, curfile)
+ filepath = os.path.join(root, curfile)
if os.path.islink(filepath):
continue
path_in_build_dir = os.path.relpath(filepath, build_dir)
- _, ext = common.get_extension(path_in_build_dir)
+ _ignored, ext = common.get_extension(path_in_build_dir)
if ext == 'so':
count += handleproblem('shared library', path_in_build_dir, filepath)
elif ext == 'jar':
for name in suspects_found(curfile):
- count += handleproblem('usual supect \'%s\'' % name, path_in_build_dir, filepath)
- warnproblem('JAR file', path_in_build_dir)
+ count += handleproblem('usual suspect \'%s\'' % name, path_in_build_dir, filepath)
+ if curfile == 'gradle-wrapper.jar':
+ removeproblem('gradle-wrapper.jar', path_in_build_dir, filepath)
+ else:
+ warnproblem('JAR file', path_in_build_dir)
elif ext == 'aar':
warnproblem('AAR file', path_in_build_dir)
for i, line in enumerate(lines):
if is_used_by_gradle(line):
for name in suspects_found(line):
- count += handleproblem('usual supect \'%s\' at line %d' % (name, i + 1), path_in_build_dir, filepath)
+ count += handleproblem('usual suspect \'%s\' at line %d' % (name, i + 1), path_in_build_dir, filepath)
noncomment_lines = [l for l in lines if not common.gradle_comment.match(l)]
joined = re.sub(r'[\n\r\s]+', ' ', ' '.join(noncomment_lines))
for m in gradle_mavenrepo.finditer(joined):
# Parse command line...
parser = ArgumentParser(usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]")
common.setup_global_opts(parser)
- parser.add_argument("appid", nargs='*', help="app-id with optional versionCode in the form APPID[:VERCODE]")
+ parser.add_argument("appid", nargs='*', help=_("applicationId with optional versionCode in the form APPID[:VERCODE]"))
metadata.add_metadata_arguments(parser)
options = parser.parse_args()
metadata.warnings_action = options.W
for appid, app in apps.items():
if app.Disabled:
- logging.info("Skipping %s: disabled" % appid)
- continue
- if not app.builds:
- logging.info("Skipping %s: no builds specified" % appid)
+ logging.info(_("Skipping {appid}: disabled").format(appid=appid))
continue
- logging.info("Processing " + appid)
-
try:
-
if app.RepoType == 'srclib':
build_dir = os.path.join('build', 'srclib', app.Repo)
else:
build_dir = os.path.join('build', appid)
+ if app.builds:
+ logging.info(_("Processing {appid}").format(appid=appid))
+ else:
+ logging.info(_("{appid}: no builds specified, running on current source state")
+ .format(appid=appid))
+ count = scan_source(build_dir)
+ if count > 0:
+ logging.warn(_('Scanner found {count} problems in {appid}:')
+ .format(count=count, appid=appid))
+ probcount += count
+ continue
+
# Set up vcs interface and make sure we have the latest code...
vcs = common.getvcs(app.RepoType, app.Repo, build_dir)
if build.disable:
logging.info("...skipping version %s - %s" % (
build.versionName, build.get('disable', build.commit[1:])))
- else:
- logging.info("...scanning version " + build.versionName)
+ continue
- # Prepare the source code...
- common.prepare_source(vcs, app, build,
- build_dir, srclib_dir,
- extlib_dir, False)
+ logging.info("...scanning version " + build.versionName)
+ # Prepare the source code...
+ common.prepare_source(vcs, app, build,
+ build_dir, srclib_dir,
+ extlib_dir, False)
- # Do the scan...
- count = scan_source(build_dir, build)
- if count > 0:
- logging.warn('Scanner found %d problems in %s (%s)' % (
- count, appid, build.versionCode))
- probcount += count
+ count = scan_source(build_dir, build)
+ if count > 0:
+ logging.warn(_('Scanner found {count} problems in {appid}:{versionCode}:')
+ .format(count=count, appid=appid, versionCode=build.versionCode))
+ probcount += count
except BuildException as be:
logging.warn("Could not scan app %s due to BuildException: %s" % (
appid, traceback.format_exc()))
probcount += 1
- logging.info("Finished:")
- print("%d problems found" % probcount)
+ logging.info(_("Finished"))
+ print(_("%d problems found") % probcount)
if __name__ == "__main__":