#!/usr/bin/env python3
#
# server.py - part of the FDroid server tools
# Copyright (C) 2010-15, Ciaran Gultnieks, ciaran@ciarang.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import sys
import glob
import hashlib
import os
import paramiko
import pwd
import re
import subprocess
import time
from argparse import ArgumentParser
import logging
import shutil

from . import _
from . import common
from .exception import FDroidException

config = None
options = None

BINARY_TRANSPARENCY_DIR = 'binary_transparency'

AUTO_S3CFG = '.fdroid-server-update-s3cfg'
USER_S3CFG = 's3cfg'
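
# S3 deployment reads its settings from config.py; a minimal sketch of the
# relevant entries (placeholder values, not working credentials):
#
#   awsbucket = 'fdroid.example.com'
#   awsaccesskeyid = 'AKIA...'
#   awssecretkey = 'secret key value'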


def update_awsbucket(repo_section):
    '''
    Upload the contents of the directory `repo_section` (including
    subdirectories) to the AWS S3 "bucket". The contents of that subdir of the
    bucket will first be deleted.

    Requires AWS credentials set in config.py: awsaccesskeyid, awssecretkey
    '''

    logging.debug('Syncing "' + repo_section + '" to Amazon S3 bucket "'
                  + config['awsbucket'] + '"')

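    # prefer the s3cmd command line tool when it is available, otherwise fall
    # back to the Apache libcloud Python library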
    if common.set_command_in_config('s3cmd'):
        update_awsbucket_s3cmd(repo_section)
    else:
        update_awsbucket_libcloud(repo_section)


def update_awsbucket_s3cmd(repo_section):
    '''upload using the CLI tool s3cmd, which provides rsync-like sync

    The upload is done in multiple passes to reduce the chance of
    interfering with an existing client-server interaction.  In the
    first pass, only new files are uploaded.  In the second pass,
    changed files are uploaded, overwriting what is on the server.  On
    the third/last pass, the indexes are uploaded, and any removed
    files are deleted from the server.  The last pass is the only pass
    to use a full MD5 checksum of all files to detect changes.
    '''

    logging.debug(_('Using s3cmd to sync with: {url}')
                  .format(url=config['awsbucket']))

    if os.path.exists(USER_S3CFG):
        logging.info(_('Using "{path}" for configuring s3cmd.').format(path=USER_S3CFG))
        configfilename = USER_S3CFG
    else:
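        # no user-provided s3cfg, so write a temporary s3cmd config from the
        # config.py credentials; mode 0o600 keeps the AWS keys readable only
        # by the current user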
        fd = os.open(AUTO_S3CFG, os.O_CREAT | os.O_TRUNC | os.O_WRONLY, 0o600)
        logging.debug(_('Creating "{path}" for configuring s3cmd.').format(path=AUTO_S3CFG))
        os.write(fd, '[default]\n'.encode('utf-8'))
        os.write(fd, ('access_key = ' + config['awsaccesskeyid'] + '\n').encode('utf-8'))
        os.write(fd, ('secret_key = ' + config['awssecretkey'] + '\n').encode('utf-8'))
        os.close(fd)
        configfilename = AUTO_S3CFG

    s3bucketurl = 's3://' + config['awsbucket']
    s3cmd = [config['s3cmd'], '--config=' + configfilename]
    if subprocess.call(s3cmd + ['info', s3bucketurl]) != 0:
        logging.warning(_('Creating new S3 bucket: {url}')
                        .format(url=s3bucketurl))
        if subprocess.call(s3cmd + ['mb', s3bucketurl]) != 0:
            logging.error(_('Failed to create S3 bucket: {url}')
                          .format(url=s3bucketurl))
            raise FDroidException()

    s3cmd_sync = s3cmd + ['sync', '--acl-public']
    if options.verbose:
        s3cmd_sync += ['--verbose']
    if options.quiet:
        s3cmd_sync += ['--quiet']
    indexxml = os.path.join(repo_section, 'index.xml')
    indexjar = os.path.join(repo_section, 'index.jar')
    indexv1jar = os.path.join(repo_section, 'index-v1.jar')

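    # the index files are excluded from the first two passes and only synced
    # in the final pass, so a client never sees an index that refers to files
    # which have not been uploaded yet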
    s3url = s3bucketurl + '/fdroid/'
    logging.debug('s3cmd sync new files in ' + repo_section + ' to ' + s3url)
    logging.debug(_('Running first pass with MD5 checking disabled'))
    if subprocess.call(s3cmd_sync +
                       ['--no-check-md5', '--skip-existing',
                        '--exclude', indexxml,
                        '--exclude', indexjar,
                        '--exclude', indexv1jar,
                        repo_section, s3url]) != 0:
        raise FDroidException()
    logging.debug('s3cmd sync all files in ' + repo_section + ' to ' + s3url)
    if subprocess.call(s3cmd_sync +
                       ['--no-check-md5',
                        '--exclude', indexxml,
                        '--exclude', indexjar,
                        '--exclude', indexv1jar,
                        repo_section, s3url]) != 0:
        raise FDroidException()

    logging.debug(_('s3cmd sync indexes {path} to {url} and delete')
                  .format(path=repo_section, url=s3url))
    s3cmd_sync.append('--delete-removed')
    s3cmd_sync.append('--delete-after')
    if options.no_checksum:
        s3cmd_sync.append('--no-check-md5')
    else:
        s3cmd_sync.append('--check-md5')
    if subprocess.call(s3cmd_sync + [repo_section, s3url]) != 0:
        raise FDroidException()


def update_awsbucket_libcloud(repo_section):
    '''
    Upload the contents of the directory `repo_section` (including
    subdirectories) to the AWS S3 "bucket". The contents of that subdir of the
    bucket will first be deleted.

    Requires AWS credentials set in config.py: awsaccesskeyid, awssecretkey
    '''

    logging.debug(_('using Apache libcloud to sync with {url}')
                  .format(url=config['awsbucket']))

    import libcloud.security
    libcloud.security.VERIFY_SSL_CERT = True
    from libcloud.storage.types import Provider, ContainerDoesNotExistError
    from libcloud.storage.providers import get_driver

    if not config.get('awsaccesskeyid') or not config.get('awssecretkey'):
        raise FDroidException(
            _('To use awsbucket, awssecretkey and awsaccesskeyid must also be set in config.py!'))
    awsbucket = config['awsbucket']

    if os.path.exists(USER_S3CFG):
        raise FDroidException(_('"{path}" exists but s3cmd is not installed!')
                              .format(path=USER_S3CFG))

    cls = get_driver(Provider.S3)
    driver = cls(config['awsaccesskeyid'], config['awssecretkey'])
    try:
        container = driver.get_container(container_name=awsbucket)
    except ContainerDoesNotExistError:
        container = driver.create_container(container_name=awsbucket)
        logging.info(_('Created new container "{name}"')
                     .format(name=container.name))

    upload_dir = 'fdroid/' + repo_section
    objs = dict()
    for obj in container.list_objects():
        if obj.name.startswith(upload_dir + '/'):
            objs[obj.name] = obj

    for root, dirs, files in os.walk(os.path.join(os.getcwd(), repo_section)):
        for name in files:
            upload = False
            file_to_upload = os.path.join(root, name)
            object_name = 'fdroid/' + os.path.relpath(file_to_upload, os.getcwd())
            if object_name not in objs:
                upload = True
            else:
                obj = objs.pop(object_name)
                if obj.size != os.path.getsize(file_to_upload):
                    upload = True
                else:
                    # if the sizes match, then compare by MD5
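                    # (obj.hash is the S3 ETag, which for objects uploaded in
                    # a single part should equal the MD5 of the contents)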
                    md5 = hashlib.md5()
                    with open(file_to_upload, 'rb') as f:
                        while True:
                            data = f.read(8192)
                            if not data:
                                break
                            md5.update(data)
                    if obj.hash != md5.hexdigest():
                        s3url = 's3://' + awsbucket + '/' + obj.name
                        logging.info(' deleting ' + s3url)
                        if not driver.delete_object(obj):
                            logging.warning('Could not delete ' + s3url)
                        upload = True

            if upload:
                logging.debug(' uploading "' + file_to_upload + '"...')
                extra = {'acl': 'public-read'}
                if file_to_upload.endswith('.sig'):
                    extra['content_type'] = 'application/pgp-signature'
                elif file_to_upload.endswith('.asc'):
                    extra['content_type'] = 'application/pgp-signature'
                logging.info(' uploading ' + os.path.relpath(file_to_upload)
                             + ' to s3://' + awsbucket + '/' + object_name)
                with open(file_to_upload, 'rb') as iterator:
                    obj = driver.upload_object_via_stream(iterator=iterator,
                                                          container=container,
                                                          object_name=object_name,
                                                          extra=extra)
    # delete the remnants in the bucket, they do not exist locally
    while objs:
        object_name, obj = objs.popitem()
        s3url = 's3://' + awsbucket + '/' + object_name
        if object_name.startswith(upload_dir):
            logging.warning(' deleting ' + s3url)
            driver.delete_object(obj)
        else:
            logging.info(' skipping ' + s3url)


def update_serverwebroot(serverwebroot, repo_section):
    # use a checksum comparison for accurate comparisons on different
    # filesystems, for example, FAT has a low resolution timestamp
    rsyncargs = ['rsync', '--archive', '--delete-after', '--safe-links']
    if not options.no_checksum:
        rsyncargs.append('--checksum')
    if options.verbose:
        rsyncargs += ['--verbose']
    if options.quiet:
        rsyncargs += ['--quiet']
    if options.identity_file is not None:
        rsyncargs += ['-e', 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + options.identity_file]
    elif 'identity_file' in config:
        rsyncargs += ['-e', 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + config['identity_file']]
    indexxml = os.path.join(repo_section, 'index.xml')
    indexjar = os.path.join(repo_section, 'index.jar')
    indexv1jar = os.path.join(repo_section, 'index-v1.jar')
    # Upload the first time without the index files and delay the deletion as
    # much as possible; that keeps the repo functional while this update is
    # running.  Then once it is complete, rerun the command to upload the
    # index files.  Always using the same target with rsync allows for very
    # strict settings on the receiving server: you can literally specify the
    # one rsync command that is allowed to run in ~/.ssh/authorized_keys.
    # (serverwebroot is guaranteed to have a trailing slash in common.py)
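    # (As a sketch of such a restricted setup, not something this code
    # configures: the rrsync helper script shipped with rsync can be used as
    # a forced command in ~/.ssh/authorized_keys on the receiving server,
    # e.g. something like
    #   command="rrsync /var/www/fdroid",no-pty,no-port-forwarding ssh-ed25519 AAAA... deploykey
    # where the rrsync install path, webroot path and key are placeholders
    # that vary by system.)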
    logging.info('rsyncing ' + repo_section + ' to ' + serverwebroot)
    if subprocess.call(rsyncargs +
                       ['--exclude', indexxml, '--exclude', indexjar,
                        '--exclude', indexv1jar,
                        repo_section, serverwebroot]) != 0:
        raise FDroidException()
    if subprocess.call(rsyncargs + [repo_section, serverwebroot]) != 0:
        raise FDroidException()
    # upload "current version" symlinks if requested
    if config['make_current_version_link'] and repo_section == 'repo':
        links_to_upload = []
        for f in glob.glob('*.apk') \
                + glob.glob('*.apk.asc') + glob.glob('*.apk.sig'):
            if os.path.islink(f):
                links_to_upload.append(f)
        if len(links_to_upload) > 0:
            if subprocess.call(rsyncargs + links_to_upload + [serverwebroot]) != 0:
                raise FDroidException()


def sync_from_localcopy(repo_section, local_copy_dir):
    '''Syncs the repo from the "local copy dir" filesystem to this box

    In setups that use offline signing, this is the last step that
    syncs the repo from the "local copy dir" e.g. a thumb drive to the
    repo on the local filesystem.  That local repo is then used to
    push to all the servers that are configured.

    '''
    logging.info('Syncing from local_copy_dir to this repo.')
    # trailing slashes have a meaning in rsync which is not needed here, so
    # make sure both paths have exactly one trailing slash
    common.local_rsync(options,
                       os.path.join(local_copy_dir, repo_section).rstrip('/') + '/',
                       repo_section.rstrip('/') + '/')

    offline_copy = os.path.join(local_copy_dir, BINARY_TRANSPARENCY_DIR)
    if os.path.exists(os.path.join(offline_copy, '.git')):
        online_copy = os.path.join(os.getcwd(), BINARY_TRANSPARENCY_DIR)
        push_binary_transparency(offline_copy, online_copy)


def update_localcopy(repo_section, local_copy_dir):
    '''copy data from offline to the "local copy dir" filesystem

    This updates the copy of this repo used to shuttle data from an
    offline signing machine to the online machine, e.g. on a thumb
    drive.

    '''
    # local_copy_dir is guaranteed to have a trailing slash in main() below
    common.local_rsync(options, repo_section, local_copy_dir)

    offline_copy = os.path.join(os.getcwd(), BINARY_TRANSPARENCY_DIR)
    if os.path.isdir(os.path.join(offline_copy, '.git')):
        online_copy = os.path.join(local_copy_dir, BINARY_TRANSPARENCY_DIR)
        push_binary_transparency(offline_copy, online_copy)


def _get_size(start_path='.'):
    '''get size of all files in a dir https://stackoverflow.com/a/1392549'''
    total_size = 0
    for root, dirs, files in os.walk(start_path):
        for f in files:
            fp = os.path.join(root, f)
            total_size += os.path.getsize(fp)
    return total_size


def update_servergitmirrors(servergitmirrors, repo_section):
    '''update repo mirrors stored in git repos

    This is a hack to use public git repos as F-Droid repos.  It
    recreates the git repo from scratch each time, so that there is no
    history.  That keeps the size of the git repo small.  Services
    like GitHub or GitLab have a size limit of something like 1 gig.
    This git repo is only a git repo for the purpose of being hosted.
    For history, there is the archive section, and there is the binary
    transparency log.

    '''
    import git
    from clint.textui import progress
    if config.get('local_copy_dir') \
       and not config.get('sync_from_local_copy_dir'):
        logging.debug('Offline machine, skipping git mirror generation until `fdroid server update`')
        return

    # right now we support only 'repo' git-mirroring
    if repo_section == 'repo':
        git_mirror_path = 'git-mirror'
        dotgit = os.path.join(git_mirror_path, '.git')
        git_repodir = os.path.join(git_mirror_path, 'fdroid', repo_section)
        if not os.path.isdir(git_repodir):
            os.makedirs(git_repodir)
        if os.path.isdir(dotgit) and _get_size(git_mirror_path) > 1000000000:
            logging.warning('Deleting git-mirror history, repo is too big (1 gig max)')
            shutil.rmtree(dotgit)

        # rsync is very particular about trailing slashes
        common.local_rsync(options,
                           repo_section.rstrip('/') + '/',
                           git_repodir.rstrip('/') + '/')

        # use custom SSH command if identity_file specified
        ssh_cmd = 'ssh -oBatchMode=yes'
        if options.identity_file is not None:
            ssh_cmd += ' -oIdentitiesOnly=yes -i "%s"' % options.identity_file
        elif 'identity_file' in config:
            ssh_cmd += ' -oIdentitiesOnly=yes -i "%s"' % config['identity_file']

        repo = git.Repo.init(git_mirror_path)

        for remote_url in servergitmirrors:
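            # derive a short remote name from the URL, e.g. both
            # 'git@gitlab.com:user/repo.git' and 'https://gitlab.com/user/repo'
            # reduce to 'gitlab'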
            hostname = re.sub(r'\W*\w+\W+(\w+).*', r'\1', remote_url)
            r = git.remote.Remote(repo, hostname)
            if r in repo.remotes:
                r = repo.remote(hostname)
                if 'set_url' in dir(r):  # force remote URL if using GitPython 2.x
                    r.set_url(remote_url)
            else:
                repo.create_remote(hostname, remote_url)
            logging.info('Mirroring to: ' + remote_url)

        # sadly index.add doesn't allow the --all parameter
        logging.debug('Adding all files to git mirror')
        repo.git.add(all=True)
        logging.debug('Committing all files into git mirror')
        repo.index.commit("fdroidserver git-mirror")

        if options.verbose:
            bar = progress.Bar()

            class MyProgressPrinter(git.RemoteProgress):
                def update(self, op_code, current, maximum=None, message=None):
                    if isinstance(maximum, float):
                        bar.show(current, maximum)
            progress = MyProgressPrinter()
        else:
            progress = None

        # push for every remote. This will overwrite the git history
        for remote in repo.remotes:
            if remote.name == 'gitlab':
                logging.debug('Writing .gitlab-ci.yml to deploy to GitLab Pages')
                with open(os.path.join(git_mirror_path, ".gitlab-ci.yml"), "wt") as out_file:
                    out_file.write("""pages:
  script:
   - mkdir .public
   - cp -r * .public/
   - mv .public public
  artifacts:
    paths:
    - public
""")

                repo.git.add(all=True)
                repo.index.commit("fdroidserver git-mirror: Deploy to GitLab Pages")

            logging.debug(_('Pushing to {url}').format(url=remote.url))
            with repo.git.custom_environment(GIT_SSH_COMMAND=ssh_cmd):
                pushinfos = remote.push('master', force=True, set_upstream=True, progress=progress)
                for pushinfo in pushinfos:
                    if pushinfo.flags & (git.remote.PushInfo.ERROR
                                         | git.remote.PushInfo.REJECTED
                                         | git.remote.PushInfo.REMOTE_FAILURE
                                         | git.remote.PushInfo.REMOTE_REJECTED):
                        raise FDroidException(remote.url + ' push failed: ' + str(pushinfo.flags)
                                              + ' ' + pushinfo.summary)
                    else:
                        logging.debug(remote.url + ': ' + pushinfo.summary)

        if progress:
            bar.done()


def upload_to_android_observatory(repo_section):
    # depend on requests and lxml only if users enable AO
    import requests
    from lxml.html import fromstring

    if repo_section == 'repo':
        for f in glob.glob(os.path.join(repo_section, '*.apk')):
            fpath = f
            fname = os.path.basename(f)
            logging.info('Uploading ' + fname + ' to androidobservatory.org')

            # upload the file with a post request
            r = requests.post('https://androidobservatory.org/upload', files={'apk': (fname, open(fpath, 'rb'))})
            response = r.text
            page = r.url

            # from now on XPath will be used to retrieve the message in the HTML
            # androidobservatory doesn't have a nice API to talk with
            # so we must scrape the page content
            tree = fromstring(response)
            alert = tree.xpath("//html/body/div[@class='container content-container']/div[@class='alert alert-info']")[0]

            message = ""
            appurl = page
            for el in alert:
                # if the application was added successfully we retrieve the url
                # if the application was already uploaded we use the redirect page url
                if el.attrib.get("href") is not None:
                    appurl = page + el.attrib["href"][1:]
                    message += el.text.replace(" here", "") + el.tail
                else:
                    message += el.tail
            message = message.strip() + " " + appurl
            logging.info(message)


def upload_to_virustotal(repo_section, vt_apikey):
    import json
    import requests

    logging.getLogger("urllib3").setLevel(logging.WARNING)
    logging.getLogger("requests").setLevel(logging.WARNING)

    if repo_section == 'repo':
        if not os.path.exists('virustotal'):
            os.mkdir('virustotal')
        with open(os.path.join(repo_section, 'index-v1.json')) as fp:
            index = json.load(fp)
        for packageName, packages in index['packages'].items():
            for package in packages:
                outputfilename = os.path.join('virustotal',
                                              packageName + '_' + str(package.get('versionCode'))
                                              + '_' + package['hash'] + '.json')
                if os.path.exists(outputfilename):
                    logging.debug(package['apkName'] + ' results are in ' + outputfilename)
                    continue
                filename = package['apkName']
                repofilename = os.path.join(repo_section, filename)
                logging.info('Checking if ' + repofilename + ' is on virustotal')

                headers = {
                    "User-Agent": "F-Droid"
                }
                params = {
                    'apikey': vt_apikey,
                    'resource': package['hash'],
                }
                needs_file_upload = False
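                # look up the existing report for this file's hash first; in
                # the v2 API, response_code 0 means the file is not in the
                # VirusTotal dataset yet, and HTTP 204 means the public API
                # rate limit was hit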
                while True:
                    r = requests.post('https://www.virustotal.com/vtapi/v2/file/report',
                                      params=params, headers=headers)
                    if r.status_code == 200:
                        response = r.json()
                        if response['response_code'] == 0:
                            needs_file_upload = True
                        else:
                            response['filename'] = filename
                            response['packageName'] = packageName
                            response['versionCode'] = package.get('versionCode')
                            response['versionName'] = package.get('versionName')
                            with open(outputfilename, 'w') as fp:
                                json.dump(response, fp, indent=2, sort_keys=True)

                        if response.get('positives', 0) > 0:
                            logging.warning(repofilename + ' has been flagged by virustotal '
                                            + str(response['positives']) + ' times:'
                                            + '\n\t' + response['permalink'])
                        break
                    elif r.status_code == 204:
                        time.sleep(10)  # wait for public API rate limiting

                if needs_file_upload:
                    logging.info('Uploading ' + repofilename + ' to virustotal')
                    files = {
                        'file': (filename, open(repofilename, 'rb'))
                    }
                    r = requests.post('https://www.virustotal.com/vtapi/v2/file/scan',
                                      params=params, headers=headers, files=files)
                    response = r.json()

                    logging.info(response['verbose_msg'] + " " + response['permalink'])


def push_binary_transparency(git_repo_path, git_remote):
    '''push the binary transparency git repo to the specified remote.

    If the remote is a local directory, make sure it exists, and is a
    git repo.  This is used to move this git repo from an offline
    machine onto a flash drive, then onto the online machine. Also,
    this pulls because pushing to a non-bare git repo is error prone.

    This is also used in offline signing setups, where it then also
    creates a "local copy dir" git repo that serves to shuttle the git
    data from the offline machine to the online machine.  In that
    case, git_remote is a dir on the local file system, e.g. a thumb
    drive.

    '''
    import git

    logging.info(_('Pushing binary transparency log to {url}')
                 .format(url=git_remote))

    if os.path.isdir(os.path.dirname(git_remote)):
        # from offline machine to thumbdrive
        remote_path = os.path.abspath(git_repo_path)
        if not os.path.isdir(os.path.join(git_remote, '.git')):
            os.makedirs(git_remote, exist_ok=True)
            thumbdriverepo = git.Repo.init(git_remote)
            local = thumbdriverepo.create_remote('local', remote_path)
        else:
            thumbdriverepo = git.Repo(git_remote)
            local = git.remote.Remote(thumbdriverepo, 'local')
            if local in thumbdriverepo.remotes:
                local = thumbdriverepo.remote('local')
                if 'set_url' in dir(local):  # force remote URL if using GitPython 2.x
                    local.set_url(remote_path)
            else:
                local = thumbdriverepo.create_remote('local', remote_path)
        local.pull('master')
    else:
        # from online machine to remote on a server on the internet
        gitrepo = git.Repo(git_repo_path)
        origin = git.remote.Remote(gitrepo, 'origin')
        if origin in gitrepo.remotes:
            origin = gitrepo.remote('origin')
            if 'set_url' in dir(origin):  # added in GitPython 2.x
                origin.set_url(git_remote)
        else:
            origin = gitrepo.create_remote('origin', git_remote)
        origin.push('master')


def main():
    global config, options

    # Parse command line...
    parser = ArgumentParser()
    common.setup_global_opts(parser)
    parser.add_argument("command", help=_("command to execute, either 'init' or 'update'"))
    parser.add_argument("-i", "--identity-file", default=None,
                        help=_("Specify an identity file to provide to SSH for rsyncing"))
    parser.add_argument("--local-copy-dir", default=None,
                        help=_("Specify a local folder to sync the repo to"))
    parser.add_argument("--no-checksum", action="store_true", default=False,
                        help=_("Don't use rsync checksums"))
    options = parser.parse_args()

    config = common.read_config(options)

    if options.command != 'init' and options.command != 'update':
        logging.critical(_("The only commands currently supported are 'init' and 'update'"))
        sys.exit(1)

    if config.get('nonstandardwebroot') is True:
        standardwebroot = False
    else:
        standardwebroot = True

    for serverwebroot in config.get('serverwebroot', []):
        # this supports both an ssh host:path and just a path
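        # e.g. 'user@example.com:/var/www/fdroid' or just '/var/www/fdroid'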
        s = serverwebroot.rstrip('/').split(':')
        if len(s) == 1:
            fdroiddir = s[0]
        elif len(s) == 2:
            host, fdroiddir = s
        else:
            logging.error(_('Malformed serverwebroot line:') + ' ' + serverwebroot)
            sys.exit(1)
        repobase = os.path.basename(fdroiddir)
        if standardwebroot and repobase != 'fdroid':
            logging.error('serverwebroot path does not end with "fdroid", '
                          + 'perhaps you meant one of these:\n\t'
                          + serverwebroot.rstrip('/') + '/fdroid\n\t'
                          + serverwebroot.rstrip('/').rstrip(repobase) + 'fdroid')
            sys.exit(1)

    if options.local_copy_dir is not None:
        local_copy_dir = options.local_copy_dir
    elif config.get('local_copy_dir'):
        local_copy_dir = config['local_copy_dir']
    else:
        local_copy_dir = None
    if local_copy_dir is not None:
        fdroiddir = local_copy_dir.rstrip('/')
        if os.path.exists(fdroiddir) and not os.path.isdir(fdroiddir):
            logging.error(_('local_copy_dir must be directory, not a file!'))
            sys.exit(1)
        if not os.path.exists(os.path.dirname(fdroiddir)):
            logging.error(_('The root dir for local_copy_dir "{path}" does not exist!')
                          .format(path=os.path.dirname(fdroiddir)))
            sys.exit(1)
        if not os.path.isabs(fdroiddir):
            logging.error(_('local_copy_dir must be an absolute path!'))
            sys.exit(1)
        repobase = os.path.basename(fdroiddir)
        if standardwebroot and repobase != 'fdroid':
            logging.error(_('local_copy_dir does not end with "fdroid", '
                            + 'perhaps you meant: "{path}"')
                          .format(path=fdroiddir + '/fdroid'))
            sys.exit(1)
        if local_copy_dir[-1] != '/':
            local_copy_dir += '/'
        local_copy_dir = local_copy_dir.replace('//', '/')
        if not os.path.exists(fdroiddir):
            os.mkdir(fdroiddir)

    if not config.get('awsbucket') \
            and not config.get('serverwebroot') \
            and not config.get('servergitmirrors') \
            and not config.get('androidobservatory') \
            and not config.get('binary_transparency_remote') \
            and not config.get('virustotal_apikey') \
            and local_copy_dir is None:
        logging.warning(_('No option set! Edit your config.py to set at least one of these:')
                        + '\nserverwebroot, servergitmirrors, local_copy_dir, awsbucket, virustotal_apikey, androidobservatory, or binary_transparency_remote')
        sys.exit(1)

    repo_sections = ['repo']
    if config['archive_older'] != 0:
        repo_sections.append('archive')
        if not os.path.exists('archive'):
            os.mkdir('archive')
    if config['per_app_repos']:
        repo_sections += common.get_per_app_repos()

    if options.command == 'init':
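        # 'init' connects to each serverwebroot over SSH and creates the
        # remote directory layout (<webroot>, then <webroot>/repo etc.) so
        # that later 'update' runs have something to rsync into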
        ssh = paramiko.SSHClient()
        ssh.load_system_host_keys()
        for serverwebroot in config.get('serverwebroot', []):
            sshstr, remotepath = serverwebroot.rstrip('/').split(':')
            if sshstr.find('@') >= 0:
                username, hostname = sshstr.split('@')
            else:
                username = pwd.getpwuid(os.getuid())[0]  # get effective uid
                hostname = sshstr
            ssh.connect(hostname, username=username)
            sftp = ssh.open_sftp()
            if os.path.basename(remotepath) \
                    not in sftp.listdir(os.path.dirname(remotepath)):
                sftp.mkdir(remotepath, mode=0o755)
            for repo_section in repo_sections:
                repo_path = os.path.join(remotepath, repo_section)
                if os.path.basename(repo_path) \
                        not in sftp.listdir(remotepath):
                    sftp.mkdir(repo_path, mode=0o755)
            sftp.close()
            ssh.close()
    elif options.command == 'update':
        for repo_section in repo_sections:
            if local_copy_dir is not None:
                if config['sync_from_local_copy_dir']:
                    sync_from_localcopy(repo_section, local_copy_dir)
                else:
                    update_localcopy(repo_section, local_copy_dir)
            for serverwebroot in config.get('serverwebroot', []):
                update_serverwebroot(serverwebroot, repo_section)
            if config.get('servergitmirrors', []):
                # update_servergitmirrors handles multiple mirrors itself, so no loop is needed here
                servergitmirrors = config.get('servergitmirrors', [])
                update_servergitmirrors(servergitmirrors, repo_section)
            if config.get('awsbucket'):
                update_awsbucket(repo_section)
            if config.get('androidobservatory'):
                upload_to_android_observatory(repo_section)
            if config.get('virustotal_apikey'):
                upload_to_virustotal(repo_section, config.get('virustotal_apikey'))

            binary_transparency_remote = config.get('binary_transparency_remote')
            if binary_transparency_remote:
                push_binary_transparency(BINARY_TRANSPARENCY_DIR,
                                         binary_transparency_remote)

    sys.exit(0)


if __name__ == "__main__":
    main()