mirror: verify index signature if fingerprint included in URL
author Hans-Christoph Steiner <hans@eds.org>
Wed, 29 Nov 2017 15:54:27 +0000 (16:54 +0100)
committer Hans-Christoph Steiner <hans@eds.org>
Wed, 29 Nov 2017 15:54:27 +0000 (16:54 +0100)
This keeps the old handling without signature checks as a fallback, so that
mirroring still works without requiring Java/apksigner or the whole config.py
parsing machinery.
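
For reference, a minimal standalone sketch of the URL handling this change introduces: pulling an optional ?fingerprint= parameter out of the mirror URL with urllib.parse and rebuilding per-section URLs the way _append_to_url_path does. The URL and fingerprint below are dummy placeholder values, and the verified path is only indicated in a comment, since index.download_repo_index lives in fdroidserver and needs apksigner/jarsigner.

    #!/usr/bin/env python3
    # Sketch only; placeholder values, not fdroidserver itself.

    import posixpath
    import urllib.parse

    # Example mirror URL; the fingerprint here is a dummy placeholder.
    url = 'https://example.org/fdroid?fingerprint=0123456789ABCDEF'

    scheme, hostname, path, params, query, fragment = urllib.parse.urlparse(url)
    fingerprint = urllib.parse.parse_qs(query).get('fingerprint')


    def _append_to_url_path(*args):
        '''Append path components to the URL, keeping scheme/host/query the same.'''
        newpath = posixpath.join(path.rstrip('/'), *args)
        return urllib.parse.urlunparse((scheme, hostname, newpath, params, query, fragment))


    if fingerprint:
        # Verified path: fdroidserver would call index.download_repo_index() on
        # _append_to_url_path('repo'), which checks the index signing key fingerprint.
        print('would verify index signed by', fingerprint[0])
    else:
        # Unverified fallback: fetch index-v1.jar directly, as the old code did.
        print('would fetch', _append_to_url_path('repo', 'index-v1.jar'))
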

fdroidserver/mirror.py

index 624085026c94ca4466ef35c59cf1397143194d74..1b7ce1a9a07ffdbe4ad8d0993cac5cfd5867cadf 100644
@@ -1,20 +1,18 @@
 #!/usr/bin/env python3
 
-import io
 import ipaddress
-import json
 import logging
 import os
+import posixpath
 import socket
 import subprocess
 import sys
-import zipfile
 from argparse import ArgumentParser
-from urllib.parse import urlparse
+import urllib.parse
 
 from . import _
 from . import common
-from . import net
+from . import index
 from . import update
 
 options = None
@@ -25,10 +23,12 @@ def main():
 
     parser = ArgumentParser(usage=_("%(prog)s [options] url"))
     common.setup_global_opts(parser)
-    parser.add_argument("url", nargs='?', help=_("Base URL to mirror"))
+    parser.add_argument("url", nargs='?',
+                        help=_('Base URL to mirror, can include the index signing key '
+                               + 'using the query string: ?fingerprint='))
     parser.add_argument("--archive", action='store_true', default=False,
                         help=_("Also mirror the full archive section"))
-    parser.add_argument("--output-dir", default=os.getcwd(),
+    parser.add_argument("--output-dir", default=None,
                         help=_("The directory to write the mirror to"))
     options = parser.parse_args()
 
@@ -37,11 +37,36 @@ def main():
         parser.print_help()
         sys.exit(1)
 
-    baseurl = options.url
-    basedir = options.output_dir
+    scheme, hostname, path, params, query, fragment = urllib.parse.urlparse(options.url)
+    fingerprint = urllib.parse.parse_qs(query).get('fingerprint')
+
+    def _append_to_url_path(*args):
+        '''Append the list of path components to URL, keeping the rest the same'''
+        newpath = posixpath.join(path, *args)
+        return urllib.parse.urlunparse((scheme, hostname, newpath, params, query, fragment))
+
+    if fingerprint:
+        config = common.read_config(options)
+        if not ('jarsigner' in config or 'apksigner' in config):
+            logging.error(_('Java JDK not found! Install in standard location or set java_paths!'))
+            sys.exit(1)
+
+        def _get_index(section, etag=None):
+            url = _append_to_url_path(section)
+            return index.download_repo_index(url, etag=etag)
+    else:
+        def _get_index(section, etag=None):
+            import io
+            import json
+            import zipfile
+            from . import net
+            url = _append_to_url_path(section, 'index-v1.jar')
+            content, etag = net.http_get(url)
+            with zipfile.ZipFile(io.BytesIO(content)) as zip:
+                jsoncontents = zip.open('index-v1.json').read()
+            data = json.loads(jsoncontents.decode('utf-8'))
+            return data, etag
 
-    url = urlparse(baseurl)
-    hostname = url.netloc
     ip = None
     try:
         ip = ipaddress.ip_address(hostname)
@@ -53,39 +78,40 @@ def main():
                 'A full mirror of f-droid.org requires more than 200GB.'))
         sys.exit(1)
 
-    path = url.path.rstrip('/')
+    path = path.rstrip('/')
     if path.endswith('repo') or path.endswith('archive'):
-        logging.error(_('Do not include "{path}" in URL!').format(path=path.split('/')[-1]))
-        sys.exit(1)
+        logging.warning(_('Do not include "{path}" in URL!')
+                        .format(path=path.split('/')[-1]))
     elif not path.endswith('fdroid'):
         logging.warning(_('{url} does not end with "fdroid", check the URL path!')
-                        .format(url=baseurl))
+                        .format(url=options.url))
 
     icondirs = ['icons', ]
     for density in update.screen_densities:
         icondirs.append('icons-' + density)
 
+    if options.output_dir:
+        basedir = options.output_dir
+    else:
+        basedir = os.path.join(os.getcwd(), hostname, path.strip('/'))
+        os.makedirs(basedir, exist_ok=True)
+
     if options.archive:
         sections = ('repo', 'archive')
     else:
         sections = ('repo', )
 
     for section in sections:
-        sectionurl = baseurl + '/' + section
         sectiondir = os.path.join(basedir, section)
-        repourl = sectionurl + '/index-v1.jar'
 
-        content, etag = net.http_get(repourl)
-        with zipfile.ZipFile(io.BytesIO(content)) as zip:
-            jsoncontents = zip.open('index-v1.json').read()
+        data, etag = _get_index(section)
 
         os.makedirs(sectiondir, exist_ok=True)
         os.chdir(sectiondir)
         for icondir in icondirs:
             os.makedirs(os.path.join(sectiondir, icondir), exist_ok=True)
 
-        data = json.loads(jsoncontents.decode('utf-8'))
-        urls = ''
+        urls = []
         for packageName, packageList in data['packages'].items():
             for package in packageList:
                 to_fetch = []
@@ -98,9 +124,8 @@ def main():
                 for f in to_fetch:
                     if not os.path.exists(f) \
                        or (f.endswith('.apk') and os.path.getsize(f) != package['size']):
-                        url = sectionurl + '/' + f
-                        urls += url + '\n'
-                        urls += url + '.asc\n'
+                        urls.append(_append_to_url_path(section, f))
+                        urls.append(_append_to_url_path(section, f + '.asc'))
 
         for app in data['apps']:
             localized = app.get('localized')
@@ -109,18 +134,19 @@ def main():
                     for k in update.GRAPHIC_NAMES:
                         f = d.get(k)
                         if f:
-                            urls += '/'.join((sectionurl, locale, f)) + '\n'
+                            urls.append(_append_to_url_path(section, app['packageName'], locale, f))
                     for k in update.SCREENSHOT_DIRS:
                         filelist = d.get(k)
                         if filelist:
                             for f in filelist:
-                                urls += '/'.join((sectionurl, locale, k, f)) + '\n'
+                                urls.append(_append_to_url_path(section, app['packageName'], locale, k, f))
 
-        with open('.rsync-input-file', 'w') as fp:
-            fp.write(urls)
+        with open(urls_file, 'w') as fp:
+            for url in urls:
+                fp.write(url.split('?')[0] + '\n')  # wget puts query string in the filename
         subprocess.call(['wget', '--continue', '--user-agent="fdroid mirror"',
-                         '--input-file=.rsync-input-file'])
-        os.remove('.rsync-input-file')
+                         '--input-file=' + urls_file])
+        os.remove(urls_file)
 
         urls = dict()
         for app in data['apps']:
@@ -129,17 +155,18 @@ def main():
                 continue
             icon = app['icon']
             for icondir in icondirs:
-                url = sectionurl + '/' + icondir + '/' + icon
+                url = _append_to_url_path(section, icondir, icon)
                 if icondir not in urls:
-                    urls[icondir] = ''
-                urls[icondir] += url + '\n'
+                    urls[icondir] = []
+                urls[icondir].append(url)
 
         for icondir in icondirs:
             os.chdir(os.path.join(basedir, section, icondir))
-            with open('.rsync-input-file', 'w') as fp:
-                fp.write(urls[icondir])
-            subprocess.call(['wget', '--continue', '--input-file=.rsync-input-file'])
-            os.remove('.rsync-input-file')
+            with open(urls_file, 'w') as fp:
+                for url in urls[icondir]:
+                    fp.write(url.split('?')[0] + '\n')  # wget puts query string in the filename
+            subprocess.call(['wget', '--continue', '--input-file=' + urls_file])
+            os.remove(urls_file)
 
 
 if __name__ == "__main__":