chiark / gitweb /
build: log vcs tools version on every build attempt
[fdroidserver.git] / fdroidserver / server.py
1 #!/usr/bin/env python3
2 #
3 # server.py - part of the FDroid server tools
4 # Copyright (C) 2010-15, Ciaran Gultnieks, ciaran@ciarang.com
5 #
6 # This program is free software: you can redistribute it and/or modify
7 # it under the terms of the GNU Affero General Public License as published by
8 # the Free Software Foundation, either version 3 of the License, or
9 # (at your option) any later version.
10 #
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 # GNU Affero General Public License for more details.
15 #
16 # You should have received a copy of the GNU Affero General Public License
17 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
18
19 import sys
20 import glob
21 import hashlib
22 import os
23 import paramiko
24 import pwd
25 import re
26 import subprocess
27 import time
28 from argparse import ArgumentParser
29 import logging
30 import shutil
31
32 from . import _
33 from . import common
34 from .exception import FDroidException
35
36 config = None
37 options = None
38
39 BINARY_TRANSPARENCY_DIR = 'binary_transparency'
40
41
def update_awsbucket(repo_section):
    '''Sync `repo_section` (recursively) into the configured AWS S3 bucket.

    The corresponding subdir of the bucket is brought in line with the
    local copy, removing anything stale on the remote side.

    Requires AWS credentials set in config.py: awsaccesskeyid, awssecretkey
    '''

    logging.debug('Syncing "' + repo_section + '" to Amazon S3 bucket "'
                  + config['awsbucket'] + '"')

    # prefer the s3cmd CLI tool when available, otherwise fall back to
    # the Apache libcloud Python library
    if common.set_command_in_config('s3cmd'):
        uploader = update_awsbucket_s3cmd
    else:
        uploader = update_awsbucket_libcloud
    uploader(repo_section)
58
59
def update_awsbucket_s3cmd(repo_section):
    '''upload using the CLI tool s3cmd, which provides rsync-like sync

    The upload is done in multiple passes to reduce the chance of
    interfering with an existing client-server interaction.  In the
    first pass, only new files are uploaded.  In the second pass,
    changed files are uploaded, overwriting what is on the server.  On
    the third/last pass, the indexes are uploaded, and any removed
    files are deleted from the server.  The last pass is the only pass
    to use a full MD5 checksum of all files to detect changes.

    Raises FDroidException if bucket creation or any sync pass fails.
    '''

    logging.debug(_('Using s3cmd to sync with: {url}')
                  .format(url=config['awsbucket']))

    # Write the AWS credentials to a file readable only by the owner
    # (0o600).  os.fdopen wraps the raw descriptor in a context manager
    # so it is closed even if one of the writes raises.
    configfilename = '.s3cfg'
    fd = os.open(configfilename, os.O_CREAT | os.O_TRUNC | os.O_WRONLY, 0o600)
    with os.fdopen(fd, 'w', encoding='utf-8') as fp:
        fp.write('[default]\n')
        fp.write('access_key = ' + config['awsaccesskeyid'] + '\n')
        fp.write('secret_key = ' + config['awssecretkey'] + '\n')

    s3bucketurl = 's3://' + config['awsbucket']
    s3cmd = [config['s3cmd'], '--config=' + configfilename]
    # `s3cmd info` fails when the bucket does not exist yet
    if subprocess.call(s3cmd + ['info', s3bucketurl]) != 0:
        logging.warning(_('Creating new S3 bucket: {url}')
                        .format(url=s3bucketurl))
        if subprocess.call(s3cmd + ['mb', s3bucketurl]) != 0:
            logging.error(_('Failed to create S3 bucket: {url}')
                          .format(url=s3bucketurl))
            raise FDroidException()

    s3cmd_sync = s3cmd + ['sync', '--acl-public']
    if options.verbose:
        s3cmd_sync += ['--verbose']
    if options.quiet:
        s3cmd_sync += ['--quiet']
    # the index files are uploaded last so clients never see an index
    # pointing at files that have not arrived yet
    indexxml = os.path.join(repo_section, 'index.xml')
    indexjar = os.path.join(repo_section, 'index.jar')
    indexv1jar = os.path.join(repo_section, 'index-v1.jar')

    s3url = s3bucketurl + '/fdroid/'
    logging.debug('s3cmd sync new files in ' + repo_section + ' to ' + s3url)
    logging.debug(_('Running first pass with MD5 checking disabled'))
    if subprocess.call(s3cmd_sync +
                       ['--no-check-md5', '--skip-existing',
                        '--exclude', indexxml,
                        '--exclude', indexjar,
                        '--exclude', indexv1jar,
                        repo_section, s3url]) != 0:
        raise FDroidException()
    logging.debug('s3cmd sync all files in ' + repo_section + ' to ' + s3url)
    if subprocess.call(s3cmd_sync +
                       ['--no-check-md5',
                        '--exclude', indexxml,
                        '--exclude', indexjar,
                        '--exclude', indexv1jar,
                        repo_section, s3url]) != 0:
        raise FDroidException()

    logging.debug(_('s3cmd sync indexes {path} to {url} and delete')
                  .format(path=repo_section, url=s3url))
    # final pass: full MD5 comparison, indexes included, stale files removed
    s3cmd_sync.append('--delete-removed')
    s3cmd_sync.append('--delete-after')
    if options.no_checksum:
        s3cmd_sync.append('--no-check-md5')
    else:
        s3cmd_sync.append('--check-md5')
    if subprocess.call(s3cmd_sync + [repo_section, s3url]) != 0:
        raise FDroidException()
130
131
def update_awsbucket_libcloud(repo_section):
    '''
    Upload the contents of the directory `repo_section` (including
    subdirectories) to the AWS S3 "bucket". The contents of that subdir of the
    bucket will first be deleted.

    Requires AWS credentials set in config.py: awsaccesskeyid, awssecretkey

    Raises FDroidException when the required credentials are missing.
    '''

    logging.debug(_('using Apache libcloud to sync with {url}')
                  .format(url=config['awsbucket']))

    import libcloud.security
    libcloud.security.VERIFY_SSL_CERT = True
    from libcloud.storage.types import Provider, ContainerDoesNotExistError
    from libcloud.storage.providers import get_driver

    if not config.get('awsaccesskeyid') or not config.get('awssecretkey'):
        raise FDroidException(
            _('To use awsbucket, awssecretkey and awsaccesskeyid must also be set in config.py!'))
    awsbucket = config['awsbucket']

    cls = get_driver(Provider.S3)
    driver = cls(config['awsaccesskeyid'], config['awssecretkey'])
    try:
        container = driver.get_container(container_name=awsbucket)
    except ContainerDoesNotExistError:
        container = driver.create_container(container_name=awsbucket)
        logging.info(_('Created new container "{name}"')
                     .format(name=container.name))

    # snapshot what is already on the server; entries are popped as the
    # matching local file is found, so the leftovers are stale remnants
    upload_dir = 'fdroid/' + repo_section
    objs = dict()
    for obj in container.list_objects():
        if obj.name.startswith(upload_dir + '/'):
            objs[obj.name] = obj

    for root, dirs, files in os.walk(os.path.join(os.getcwd(), repo_section)):
        for name in files:
            upload = False
            file_to_upload = os.path.join(root, name)
            object_name = 'fdroid/' + os.path.relpath(file_to_upload, os.getcwd())
            if object_name not in objs:
                upload = True
            else:
                obj = objs.pop(object_name)
                if obj.size != os.path.getsize(file_to_upload):
                    upload = True
                else:
                    # if the sizes match, then compare by MD5
                    md5 = hashlib.md5()
                    with open(file_to_upload, 'rb') as f:
                        while True:
                            data = f.read(8192)
                            if not data:
                                break
                            md5.update(data)
                    if obj.hash != md5.hexdigest():
                        s3url = 's3://' + awsbucket + '/' + obj.name
                        logging.info(' deleting ' + s3url)
                        # logging.warn() is deprecated, use logging.warning()
                        if not driver.delete_object(obj):
                            logging.warning('Could not delete ' + s3url)
                        upload = True

            if upload:
                logging.debug(' uploading "' + file_to_upload + '"...')
                extra = {'acl': 'public-read'}
                # both detached signature formats share one MIME type
                if file_to_upload.endswith(('.sig', '.asc')):
                    extra['content_type'] = 'application/pgp-signature'
                logging.info(' uploading ' + os.path.relpath(file_to_upload)
                             + ' to s3://' + awsbucket + '/' + object_name)
                with open(file_to_upload, 'rb') as iterator:
                    obj = driver.upload_object_via_stream(iterator=iterator,
                                                          container=container,
                                                          object_name=object_name,
                                                          extra=extra)
    # delete the remnants in the bucket, they do not exist locally
    while objs:
        object_name, obj = objs.popitem()
        s3url = 's3://' + awsbucket + '/' + object_name
        if object_name.startswith(upload_dir):
            logging.warning(' deleting ' + s3url)
            driver.delete_object(obj)
        else:
            logging.info(' skipping ' + s3url)
219
220
def update_serverwebroot(serverwebroot, repo_section):
    '''rsync one repo section to a single serverwebroot destination.

    Two rsync runs keep the repo consistent for clients: the first
    skips the index files and delays deletions, the second uploads
    everything including the indexes.
    '''
    # use a checksum comparison for accurate comparisons on different
    # filesystems, for example, FAT has a low resolution timestamp
    rsyncargs = ['rsync', '--archive', '--delete-after', '--safe-links']
    if not options.no_checksum:
        rsyncargs.append('--checksum')
    if options.verbose:
        rsyncargs.append('--verbose')
    if options.quiet:
        rsyncargs.append('--quiet')
    if options.identity_file is not None:
        rsyncargs += ['-e', 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + options.identity_file]
    elif 'identity_file' in config:
        rsyncargs += ['-e', 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + config['identity_file']]
    # build the --exclude arguments for all index files at once
    excludes = []
    for basename in ('index.xml', 'index.jar', 'index-v1.jar'):
        excludes += ['--exclude', os.path.join(repo_section, basename)]
    # Upload the first time without the index files and delay the deletion as
    # much as possible, that keeps the repo functional while this update is
    # running.  Then once it is complete, rerun the command again to upload
    # the index files.  Always using the same target with rsync allows for
    # very strict settings on the receiving server, you can literally specify
    # the one rsync command that is allowed to run in ~/.ssh/authorized_keys.
    # (serverwebroot is guaranteed to have a trailing slash in common.py)
    logging.info('rsyncing ' + repo_section + ' to ' + serverwebroot)
    if subprocess.call(rsyncargs + excludes
                       + [repo_section, serverwebroot]) != 0:
        raise FDroidException()
    if subprocess.call(rsyncargs + [repo_section, serverwebroot]) != 0:
        raise FDroidException()
    # upload "current version" symlinks if requested
    if config['make_current_version_link'] and repo_section == 'repo':
        links_to_upload = [
            f for f in glob.glob('*.apk')
            + glob.glob('*.apk.asc') + glob.glob('*.apk.sig')
            if os.path.islink(f)]
        if len(links_to_upload) > 0:
            if subprocess.call(rsyncargs + links_to_upload + [serverwebroot]) != 0:
                raise FDroidException()
263
264
def sync_from_localcopy(repo_section, local_copy_dir):
    '''Syncs the repo from "local copy dir" filesystem to this box

    In setups that use offline signing, this is the last step that
    syncs the repo from the "local copy dir" e.g. a thumb drive to the
    repo on the local filesystem.  That local repo is then used to
    push to all the servers that are configured.

    '''
    logging.info('Syncing from local_copy_dir to this repo.')
    # trailing slashes have a meaning in rsync which is not needed here, so
    # make sure both paths have exactly one trailing slash
    source = os.path.join(local_copy_dir, repo_section).rstrip('/') + '/'
    destination = repo_section.rstrip('/') + '/'
    common.local_rsync(options, source, destination)

    # pull in the binary transparency log too, when the local copy has one
    offline_copy = os.path.join(local_copy_dir, BINARY_TRANSPARENCY_DIR)
    if os.path.exists(os.path.join(offline_copy, '.git')):
        push_binary_transparency(offline_copy,
                                 os.path.join(os.getcwd(), BINARY_TRANSPARENCY_DIR))
285
286
def update_localcopy(repo_section, local_copy_dir):
    '''copy data from offline to the "local copy dir" filesystem

    This updates the copy of this repo used to shuttle data from an
    offline signing machine to the online machine, e.g. on a thumb
    drive.

    '''
    # local_copy_dir is guaranteed to have a trailing slash in main() below
    common.local_rsync(options, repo_section, local_copy_dir)

    # shuttle the binary transparency log as well, when one exists here
    bt_source = os.path.join(os.getcwd(), BINARY_TRANSPARENCY_DIR)
    bt_target = os.path.join(local_copy_dir, BINARY_TRANSPARENCY_DIR)
    if os.path.isdir(os.path.join(bt_source, '.git')):
        push_binary_transparency(bt_source, bt_target)
302
303
def _get_size(start_path='.'):
    '''get size of all files in a dir https://stackoverflow.com/a/1392549'''
    return sum(
        os.path.getsize(os.path.join(parent, fname))
        for parent, _dirs, fnames in os.walk(start_path)
        for fname in fnames)
312
313
def update_servergitmirrors(servergitmirrors, repo_section):
    '''update repo mirrors stored in git repos

    This is a hack to use public git repos as F-Droid repos.  It
    recreates the git repo from scratch each time, so that there is no
    history.  That keeps the size of the git repo small.  Services
    like GitHub or GitLab have a size limit of something like 1 gig.
    This git repo is only a git repo for the purpose of being hosted.
    For history, there is the archive section, and there is the binary
    transparency log.

    '''
    # third-party deps imported lazily, only needed when mirroring is used
    import git
    from clint.textui import progress
    # on an offline signing machine, defer the git work to the later
    # `fdroid server update` run on the online machine
    if config.get('local_copy_dir') \
       and not config.get('sync_from_local_copy_dir'):
        logging.debug('Offline machine, skipping git mirror generation until `fdroid server update`')
        return

    # right now we support only 'repo' git-mirroring
    if repo_section == 'repo':
        git_mirror_path = 'git-mirror'
        dotgit = os.path.join(git_mirror_path, '.git')
        git_repodir = os.path.join(git_mirror_path, 'fdroid', repo_section)
        if not os.path.isdir(git_repodir):
            os.makedirs(git_repodir)
        # drop the git history when it grows past hoster limits; the
        # working tree is kept and recommitted below as a fresh repo
        if os.path.isdir(dotgit) and _get_size(git_mirror_path) > 1000000000:
            logging.warning('Deleting git-mirror history, repo is too big (1 gig max)')
            shutil.rmtree(dotgit)

        # rsync is very particular about trailing slashes
        common.local_rsync(options,
                           repo_section.rstrip('/') + '/',
                           git_repodir.rstrip('/') + '/')

        # use custom SSH command if identity_file specified
        ssh_cmd = 'ssh -oBatchMode=yes'
        if options.identity_file is not None:
            ssh_cmd += ' -oIdentitiesOnly=yes -i "%s"' % options.identity_file
        elif 'identity_file' in config:
            ssh_cmd += ' -oIdentitiesOnly=yes -i "%s"' % config['identity_file']

        repo = git.Repo.init(git_mirror_path)

        for remote_url in servergitmirrors:
            # derive a git remote name from the URL's host component
            hostname = re.sub(r'\W*\w+\W+(\w+).*', r'\1', remote_url)
            r = git.remote.Remote(repo, hostname)
            if r in repo.remotes:
                r = repo.remote(hostname)
                if 'set_url' in dir(r):  # force remote URL if using GitPython 2.x
                    r.set_url(remote_url)
            else:
                repo.create_remote(hostname, remote_url)
            logging.info('Mirroring to: ' + remote_url)

        # sadly index.add don't allow the --all parameter
        logging.debug('Adding all files to git mirror')
        repo.git.add(all=True)
        logging.debug('Committing all files into git mirror')
        repo.index.commit("fdroidserver git-mirror")

        if options.verbose:
            bar = progress.Bar()

            class MyProgressPrinter(git.RemoteProgress):
                # GitPython calls this during push to report transfer progress
                def update(self, op_code, current, maximum=None, message=None):
                    if isinstance(maximum, float):
                        bar.show(current, maximum)
            # NOTE: this rebinds the name of the imported clint `progress`
            # module; progress.Bar was already captured in `bar` above
            progress = MyProgressPrinter()
        else:
            progress = None

        # push for every remote. This will overwrite the git history
        for remote in repo.remotes:
            if remote.name == 'gitlab':
                logging.debug('Writing .gitlab-ci.yml to deploy to GitLab Pages')
                with open(os.path.join(git_mirror_path, ".gitlab-ci.yml"), "wt") as out_file:
                    out_file.write("""pages:
  script:
   - mkdir .public
   - cp -r * .public/
   - mv .public public
  artifacts:
    paths:
    - public
""")

                repo.git.add(all=True)
                repo.index.commit("fdroidserver git-mirror: Deploy to GitLab Pages")

            logging.debug(_('Pushing to {url}').format(url=remote.url))
            with repo.git.custom_environment(GIT_SSH_COMMAND=ssh_cmd):
                pushinfos = remote.push('master', force=True, set_upstream=True, progress=progress)
                for pushinfo in pushinfos:
                    # any of these flags means the push did not fully succeed
                    if pushinfo.flags & (git.remote.PushInfo.ERROR
                                         | git.remote.PushInfo.REJECTED
                                         | git.remote.PushInfo.REMOTE_FAILURE
                                         | git.remote.PushInfo.REMOTE_REJECTED):
                        raise FDroidException(remote.url + ' push failed: ' + str(pushinfo.flags)
                                              + ' ' + pushinfo.summary)
                    else:
                        logging.debug(remote.url + ': ' + pushinfo.summary)

        # only set when options.verbose created a progress bar above
        if progress:
            bar.done()
419
420
def upload_to_android_observatory(repo_section):
    '''Upload each APK in the 'repo' section to androidobservatory.org.

    Scrapes the HTML response for the result message, since the site
    offers no machine-readable API.  The 'archive' section is skipped.
    '''
    # depend on requests and lxml only if users enable AO
    import requests
    from lxml.html import fromstring

    if repo_section == 'repo':
        for f in glob.glob(os.path.join(repo_section, '*.apk')):
            fpath = f
            fname = os.path.basename(f)
            logging.info('Uploading ' + fname + ' to androidobservatory.org')

            # upload the file with a post request; the context manager
            # guarantees the file handle is closed after the request
            with open(fpath, 'rb') as fp:
                r = requests.post('https://androidobservatory.org/upload',
                                  files={'apk': (fname, fp)})
            response = r.text
            page = r.url

            # from now on XPath will be used to retrieve the message in the HTML
            # androidobservatory doesn't have a nice API to talk with
            # so we must scrape the page content
            tree = fromstring(response)
            alert = tree.xpath("//html/body/div[@class='container content-container']/div[@class='alert alert-info']")[0]

            message = ""
            appurl = page
            for el in alert:
                # if the application was added successfully we retrieve the url
                # if the application was already uploaded we use the redirect page url
                if el.attrib.get("href") is not None:
                    appurl = page + el.attrib["href"][1:]
                    message += el.text.replace(" here", "") + el.tail
                else:
                    message += el.tail
            message = message.strip() + " " + appurl
            logging.info(message)
455
456
def upload_to_virustotal(repo_section, vt_apikey):
    '''Check each APK in the 'repo' section against VirusTotal, uploading it if unknown.

    Scan results are cached as JSON files in a local 'virustotal/' dir,
    named by packageName, versionCode and hash, so each APK is only
    queried once.  The 'archive' section is skipped.
    '''
    import json
    import requests

    # silence the per-request chatter from the HTTP libraries
    logging.getLogger("urllib3").setLevel(logging.WARNING)
    logging.getLogger("requests").setLevel(logging.WARNING)

    if repo_section == 'repo':
        if not os.path.exists('virustotal'):
            os.mkdir('virustotal')
        with open(os.path.join(repo_section, 'index-v1.json')) as fp:
            index = json.load(fp)
        for packageName, packages in index['packages'].items():
            for package in packages:
                outputfilename = os.path.join('virustotal',
                                              packageName + '_' + str(package.get('versionCode'))
                                              + '_' + package['hash'] + '.json')
                if os.path.exists(outputfilename):
                    logging.debug(package['apkName'] + ' results are in ' + outputfilename)
                    continue
                filename = package['apkName']
                repofilename = os.path.join(repo_section, filename)
                logging.info('Checking if ' + repofilename + ' is on virustotal')

                headers = {
                    "User-Agent": "F-Droid"
                }
                params = {
                    'apikey': vt_apikey,
                    'resource': package['hash'],
                }
                needs_file_upload = False
                while True:
                    r = requests.post('https://www.virustotal.com/vtapi/v2/file/report',
                                      params=params, headers=headers)
                    if r.status_code == 200:
                        response = r.json()
                        if response['response_code'] == 0:
                            needs_file_upload = True
                        else:
                            response['filename'] = filename
                            response['packageName'] = packageName
                            response['versionCode'] = package.get('versionCode')
                            response['versionName'] = package.get('versionName')
                            with open(outputfilename, 'w') as fp:
                                json.dump(response, fp, indent=2, sort_keys=True)

                        # default to 0: 'positives' is absent e.g. when the
                        # file is unknown, and None > 0 raises TypeError
                        if response.get('positives', 0) > 0:
                            logging.warning(repofilename + ' has been flagged by virustotal '
                                            + str(response['positives']) + ' times:'
                                            + '\n\t' + response['permalink'])
                        break
                    elif r.status_code == 204:
                        time.sleep(10)  # wait for public API rate limiting
                    # NOTE(review): any other status code retries immediately
                    # with no delay, matching the original behavior

                if needs_file_upload:
                    logging.info('Uploading ' + repofilename + ' to virustotal')
                    # open via context manager so the handle is always closed
                    with open(repofilename, 'rb') as fp:
                        files = {
                            'file': (filename, fp)
                        }
                        r = requests.post('https://www.virustotal.com/vtapi/v2/file/scan',
                                          params=params, headers=headers, files=files)
                    response = r.json()

                    logging.info(response['verbose_msg'] + " " + response['permalink'])
522
523
def push_binary_transparency(git_repo_path, git_remote):
    '''push the binary transparency git repo to the specified remote.

    If the remote is a local directory, make sure it exists, and is a
    git repo.  This is used to move this git repo from an offline
    machine onto a flash drive, then onto the online machine. Also,
    this pulls because pushing to a non-bare git repo is error prone.

    This is also used in offline signing setups, where it then also
    creates a "local copy dir" git repo that serves to shuttle the git
    data from the offline machine to the online machine.  In that
    case, git_remote is a dir on the local file system, e.g. a thumb
    drive.

    '''
    # GitPython imported lazily, only needed when this feature is enabled
    import git

    logging.info(_('Pushing binary transparency log to {url}')
                 .format(url=git_remote))

    # a remote whose parent dir exists locally is treated as a path on
    # this machine (e.g. a mounted thumb drive), not an ssh/https URL
    if os.path.isdir(os.path.dirname(git_remote)):
        # from offline machine to thumbdrive
        remote_path = os.path.abspath(git_repo_path)
        if not os.path.isdir(os.path.join(git_remote, '.git')):
            # first use: initialize a fresh repo on the thumb drive
            os.makedirs(git_remote, exist_ok=True)
            thumbdriverepo = git.Repo.init(git_remote)
            local = thumbdriverepo.create_remote('local', remote_path)
        else:
            thumbdriverepo = git.Repo(git_remote)
            local = git.remote.Remote(thumbdriverepo, 'local')
            if local in thumbdriverepo.remotes:
                local = thumbdriverepo.remote('local')
                if 'set_url' in dir(local):  # force remote URL if using GitPython 2.x
                    local.set_url(remote_path)
            else:
                local = thumbdriverepo.create_remote('local', remote_path)
        # pull rather than push: pushing into the checked-out branch of
        # a non-bare repo is error prone (see docstring)
        local.pull('master')
    else:
        # from online machine to remote on a server on the internet
        gitrepo = git.Repo(git_repo_path)
        origin = git.remote.Remote(gitrepo, 'origin')
        if origin in gitrepo.remotes:
            origin = gitrepo.remote('origin')
            if 'set_url' in dir(origin):  # added in GitPython 2.x
                origin.set_url(git_remote)
        else:
            origin = gitrepo.create_remote('origin', git_remote)
        origin.push('master')
572
573
def main():
    '''Parse arguments and run the 'init' or 'update' server command.

    'init' creates the remote directory layout over SFTP for each
    serverwebroot; 'update' pushes each repo section to every
    configured destination (serverwebroot, git mirrors, S3, local copy
    dir, androidobservatory, virustotal, binary transparency remote).
    '''
    global config, options

    # Parse command line...
    parser = ArgumentParser()
    common.setup_global_opts(parser)
    parser.add_argument("command", help=_("command to execute, either 'init' or 'update'"))
    parser.add_argument("-i", "--identity-file", default=None,
                        help=_("Specify an identity file to provide to SSH for rsyncing"))
    parser.add_argument("--local-copy-dir", default=None,
                        help=_("Specify a local folder to sync the repo to"))
    parser.add_argument("--no-checksum", action="store_true", default=False,
                        help=_("Don't use rsync checksums"))
    options = parser.parse_args()

    config = common.read_config(options)

    if options.command != 'init' and options.command != 'update':
        logging.critical(_("The only commands currently supported are 'init' and 'update'"))
        sys.exit(1)

    # unless explicitly disabled, repo dirs are required to be named 'fdroid'
    if config.get('nonstandardwebroot') is True:
        standardwebroot = False
    else:
        standardwebroot = True

    for serverwebroot in config.get('serverwebroot', []):
        # this supports both an ssh host:path and just a path
        s = serverwebroot.rstrip('/').split(':')
        if len(s) == 1:
            fdroiddir = s[0]
        elif len(s) == 2:
            host, fdroiddir = s
        else:
            logging.error(_('Malformed serverwebroot line:') + ' ' + serverwebroot)
            sys.exit(1)
        repobase = os.path.basename(fdroiddir)
        if standardwebroot and repobase != 'fdroid':
            # suggest replacing the last path component with 'fdroid';
            # slicing is used since str.rstrip(repobase) would strip any
            # of repobase's *characters*, not the suffix as a whole
            trimmed = serverwebroot.rstrip('/')
            logging.error('serverwebroot path does not end with "fdroid", '
                          + 'perhaps you meant one of these:\n\t'
                          + trimmed + '/fdroid\n\t'
                          + trimmed[:-len(repobase)] + 'fdroid')
            sys.exit(1)

    # command line overrides config.py for the local copy dir
    if options.local_copy_dir is not None:
        local_copy_dir = options.local_copy_dir
    elif config.get('local_copy_dir'):
        local_copy_dir = config['local_copy_dir']
    else:
        local_copy_dir = None
    if local_copy_dir is not None:
        fdroiddir = local_copy_dir.rstrip('/')
        if os.path.exists(fdroiddir) and not os.path.isdir(fdroiddir):
            logging.error(_('local_copy_dir must be directory, not a file!'))
            sys.exit(1)
        if not os.path.exists(os.path.dirname(fdroiddir)):
            logging.error(_('The root dir for local_copy_dir "{path}" does not exist!')
                          .format(path=os.path.dirname(fdroiddir)))
            sys.exit(1)
        if not os.path.isabs(fdroiddir):
            logging.error(_('local_copy_dir must be an absolute path!'))
            sys.exit(1)
        repobase = os.path.basename(fdroiddir)
        if standardwebroot and repobase != 'fdroid':
            logging.error(_('local_copy_dir does not end with "fdroid", '
                            + 'perhaps you meant: "{path}"')
                          .format(path=fdroiddir + '/fdroid'))
            sys.exit(1)
        # normalize to exactly one trailing slash, rsync cares about this
        if local_copy_dir[-1] != '/':
            local_copy_dir += '/'
        local_copy_dir = local_copy_dir.replace('//', '/')
        if not os.path.exists(fdroiddir):
            os.mkdir(fdroiddir)

    if not config.get('awsbucket') \
            and not config.get('serverwebroot') \
            and not config.get('servergitmirrors') \
            and not config.get('androidobservatory') \
            and not config.get('binary_transparency_remote') \
            and not config.get('virustotal_apikey') \
            and local_copy_dir is None:
        # logging.warn() is deprecated, use logging.warning()
        logging.warning(_('No option set! Edit your config.py to set at least one of these:')
                        + '\nserverwebroot, servergitmirrors, local_copy_dir, awsbucket, virustotal_apikey, androidobservatory, or binary_transparency_remote')
        sys.exit(1)

    repo_sections = ['repo']
    if config['archive_older'] != 0:
        repo_sections.append('archive')
        if not os.path.exists('archive'):
            os.mkdir('archive')
    if config['per_app_repos']:
        repo_sections += common.get_per_app_repos()

    if options.command == 'init':
        # create the remote directory layout over SFTP
        ssh = paramiko.SSHClient()
        ssh.load_system_host_keys()
        for serverwebroot in config.get('serverwebroot', []):
            sshstr, remotepath = serverwebroot.rstrip('/').split(':')
            if sshstr.find('@') >= 0:
                username, hostname = sshstr.split('@')
            else:
                username = pwd.getpwuid(os.getuid())[0]  # get effective uid
                hostname = sshstr
            ssh.connect(hostname, username=username)
            sftp = ssh.open_sftp()
            if os.path.basename(remotepath) \
                    not in sftp.listdir(os.path.dirname(remotepath)):
                sftp.mkdir(remotepath, mode=0o755)
            for repo_section in repo_sections:
                repo_path = os.path.join(remotepath, repo_section)
                if os.path.basename(repo_path) \
                        not in sftp.listdir(remotepath):
                    sftp.mkdir(repo_path, mode=0o755)
            sftp.close()
            ssh.close()
    elif options.command == 'update':
        for repo_section in repo_sections:
            if local_copy_dir is not None:
                if config['sync_from_local_copy_dir']:
                    sync_from_localcopy(repo_section, local_copy_dir)
                else:
                    update_localcopy(repo_section, local_copy_dir)
            for serverwebroot in config.get('serverwebroot', []):
                update_serverwebroot(serverwebroot, repo_section)
            if config.get('servergitmirrors', []):
                # update_servergitmirrors will take care of multiple mirrors so don't need a foreach
                servergitmirrors = config.get('servergitmirrors', [])
                update_servergitmirrors(servergitmirrors, repo_section)
            if config.get('awsbucket'):
                update_awsbucket(repo_section)
            if config.get('androidobservatory'):
                upload_to_android_observatory(repo_section)
            if config.get('virustotal_apikey'):
                upload_to_virustotal(repo_section, config.get('virustotal_apikey'))

            binary_transparency_remote = config.get('binary_transparency_remote')
            if binary_transparency_remote:
                push_binary_transparency(BINARY_TRANSPARENCY_DIR,
                                         binary_transparency_remote)

    sys.exit(0)
715
716
# standard script entry point: run main() only when executed directly
if __name__ == "__main__":
    main()