3 # server.py - part of the FDroid server tools
4 # Copyright (C) 2010-15, Ciaran Gultnieks, ciaran@ciarang.com
6 # This program is free software: you can redistribute it and/or modify
7 # it under the terms of the GNU Affero General Public License as published by
8 # the Free Software Foundation, either version 3 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU Affero General Public License for more details.
16 # You should have received a copy of the GNU Affero General Public License
17 # along with this program. If not, see <http://www.gnu.org/licenses/>.
28 from argparse import ArgumentParser
33 from .exception import FDroidException
# Subdirectory name holding the binary transparency log git repo, used both
# in the current working dir and inside local_copy_dir setups
# (see push_binary_transparency() and the localcopy sync helpers below).
BINARY_TRANSPARENCY_DIR = 'binary_transparency'
def update_awsbucket(repo_section):
    """Upload the contents of the directory `repo_section` (including
    subdirectories) to the AWS S3 "bucket".

    The contents of that subdir of the bucket will first be deleted.

    Requires AWS credentials set in config.py: awsaccesskeyid, awssecretkey
    """
    logging.debug('Syncing "' + repo_section + '" to Amazon S3 bucket "'
                  + config['awsbucket'] + '"')

    if common.set_command_in_config('s3cmd'):
        # prefer the s3cmd CLI tool when it is installed
        update_awsbucket_s3cmd(repo_section)
    else:
        # as visible, this call was dangling at the same indent as the `if`,
        # so both upload paths would run; restore the fallback branch so the
        # Apache libcloud path is only used when s3cmd is unavailable
        update_awsbucket_libcloud(repo_section)
def update_awsbucket_s3cmd(repo_section):
    '''upload using the CLI tool s3cmd, which provides rsync-like sync

    The upload is done in multiple passes to reduce the chance of
    interfering with an existing client-server interaction. In the
    first pass, only new files are uploaded. In the second pass,
    changed files are uploaded, overwriting what is on the server. On
    the third/last pass, the indexes are uploaded, and any removed
    files are deleted from the server. The last pass is the only pass
    to use a full MD5 checksum of all files to detect changes.
    '''
    # NOTE(review): this copy of the file appears to have lines elided
    # (e.g. os.close(fd), the `s3cmdargs = [...]` list header, and the
    # options.verbose/options.quiet conditionals) -- confirm against upstream.
    logging.debug('using s3cmd to sync with ' + config['awsbucket'])

    # write the AWS credentials into a private s3cmd config file (mode 0600)
    configfilename = '.s3cfg'
    fd = os.open(configfilename, os.O_CREAT | os.O_TRUNC | os.O_WRONLY, 0o600)
    os.write(fd, '[default]\n'.encode('utf-8'))
    os.write(fd, ('access_key = ' + config['awsaccesskeyid'] + '\n').encode('utf-8'))
    os.write(fd, ('secret_key = ' + config['awssecretkey'] + '\n').encode('utf-8'))

    # everything is uploaded under the fdroid/ prefix in the bucket
    s3url = 's3://' + config['awsbucket'] + '/fdroid/'
    '--config=' + configfilename,
    s3cmdargs += ['--verbose']
    s3cmdargs += ['--quiet']
    indexxml = os.path.join(repo_section, 'index.xml')
    indexjar = os.path.join(repo_section, 'index.jar')
    indexv1jar = os.path.join(repo_section, 'index-v1.jar')
    # first pass: only new files, indexes excluded so clients keep a
    # consistent view of the repo while the upload is in progress
    logging.debug('s3cmd sync new files in ' + repo_section + ' to ' + s3url)
    if subprocess.call(s3cmdargs +
                       ['--no-check-md5', '--skip-existing',
                        '--exclude', indexxml,
                        '--exclude', indexjar,
                        '--exclude', indexv1jar,
                        repo_section, s3url]) != 0:
        raise FDroidException()
    # second pass: changed files too, indexes still excluded
    logging.debug('s3cmd sync all files in ' + repo_section + ' to ' + s3url)
    if subprocess.call(s3cmdargs +
                       '--exclude', indexxml,
                       '--exclude', indexjar,
                       '--exclude', indexv1jar,
                       repo_section, s3url]) != 0:
        raise FDroidException()

    # final pass: upload the indexes and delete removed files, using full
    # MD5 checksums unless --no-checksum was requested
    logging.debug('s3cmd sync indexes ' + repo_section + ' to ' + s3url + ' and delete')
    s3cmdargs.append('--delete-removed')
    s3cmdargs.append('--delete-after')
    if options.no_checksum:
        s3cmdargs.append('--no-check-md5')
    s3cmdargs.append('--check-md5')
    if subprocess.call(s3cmdargs + [repo_section, s3url]) != 0:
        raise FDroidException()
def update_awsbucket_libcloud(repo_section):
    '''
    Upload the contents of the directory `repo_section` (including
    subdirectories) to the AWS S3 "bucket".

    The contents of that subdir of the bucket will first be deleted.

    Requires AWS credentials set in config.py: awsaccesskeyid, awssecretkey
    '''
    # NOTE(review): lines appear elided from this copy (e.g. the try: around
    # get_container, the `objs` dict setup, the inner `for name in files:`
    # loop header, the md5 computation, and the container=/extra= kwargs of
    # upload_object_via_stream) -- confirm against upstream.
    logging.debug('using Apache libcloud to sync with ' + config['awsbucket'])

    # import here so libcloud is only a hard dependency when awsbucket is used
    import libcloud.security
    libcloud.security.VERIFY_SSL_CERT = True
    from libcloud.storage.types import Provider, ContainerDoesNotExistError
    from libcloud.storage.providers import get_driver

    if not config.get('awsaccesskeyid') or not config.get('awssecretkey'):
        raise FDroidException(
            'To use awsbucket, you must set awssecretkey and awsaccesskeyid in config.py!')
    awsbucket = config['awsbucket']

    cls = get_driver(Provider.S3)
    driver = cls(config['awsaccesskeyid'], config['awssecretkey'])
    # create the bucket on first use
    container = driver.get_container(container_name=awsbucket)
    except ContainerDoesNotExistError:
        container = driver.create_container(container_name=awsbucket)
        logging.info('Created new container "' + container.name + '"')

    upload_dir = 'fdroid/' + repo_section
    # index the objects already in the bucket under upload_dir
    for obj in container.list_objects():
        if obj.name.startswith(upload_dir + '/'):

    # walk the local repo section, uploading new and changed files
    for root, _, files in os.walk(os.path.join(os.getcwd(), repo_section)):
            file_to_upload = os.path.join(root, name)
            object_name = 'fdroid/' + os.path.relpath(file_to_upload, os.getcwd())
            if object_name not in objs:
                obj = objs.pop(object_name)
                if obj.size != os.path.getsize(file_to_upload):
                # if the sizes match, then compare by MD5
                    with open(file_to_upload, 'rb') as f:
                    if obj.hash != md5.hexdigest():
                        s3url = 's3://' + awsbucket + '/' + obj.name
                        logging.info(' deleting ' + s3url)
                        if not driver.delete_object(obj):
                            logging.warn('Could not delete ' + s3url)
                logging.debug(' uploading "' + file_to_upload + '"...')
                extra = {'acl': 'public-read'}
                # detached PGP signatures get an explicit content type
                if file_to_upload.endswith('.sig'):
                    extra['content_type'] = 'application/pgp-signature'
                elif file_to_upload.endswith('.asc'):
                    extra['content_type'] = 'application/pgp-signature'
                logging.info(' uploading ' + os.path.relpath(file_to_upload)
                             + ' to s3://' + awsbucket + '/' + object_name)
                with open(file_to_upload, 'rb') as iterator:
                    obj = driver.upload_object_via_stream(iterator=iterator,
                                                          object_name=object_name,
    # delete the remnants in the bucket, they do not exist locally
        object_name, obj = objs.popitem()
        s3url = 's3://' + awsbucket + '/' + object_name
        if object_name.startswith(upload_dir):
            logging.warn(' deleting ' + s3url)
            driver.delete_object(obj)
            logging.info(' skipping ' + s3url)
def update_serverwebroot(serverwebroot, repo_section):
    """rsync repo_section to one serverwebroot target (ssh host:path or path)."""
    # NOTE(review): this copy appears to be missing the
    # options.verbose/options.quiet conditionals and the
    # `links_to_upload = []` initialization -- confirm against upstream.
    # use a checksum comparison for accurate comparisons on different
    # filesystems, for example, FAT has a low resolution timestamp
    rsyncargs = ['rsync', '--archive', '--delete-after', '--safe-links']
    if not options.no_checksum:
        rsyncargs.append('--checksum')
    rsyncargs += ['--verbose']
    rsyncargs += ['--quiet']
    # the command line -i flag takes precedence over identity_file in config
    if options.identity_file is not None:
        rsyncargs += ['-e', 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + options.identity_file]
    elif 'identity_file' in config:
        rsyncargs += ['-e', 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + config['identity_file']]
    indexxml = os.path.join(repo_section, 'index.xml')
    indexjar = os.path.join(repo_section, 'index.jar')
    indexv1jar = os.path.join(repo_section, 'index-v1.jar')
    # Upload the first time without the index files and delay the deletion as
    # much as possible, that keeps the repo functional while this update is
    # running. Then once it is complete, rerun the command again to upload
    # the index files. Always using the same target with rsync allows for
    # very strict settings on the receiving server, you can literally specify
    # the one rsync command that is allowed to run in ~/.ssh/authorized_keys.
    # (serverwebroot is guaranteed to have a trailing slash in common.py)
    logging.info('rsyncing ' + repo_section + ' to ' + serverwebroot)
    if subprocess.call(rsyncargs +
                       ['--exclude', indexxml, '--exclude', indexjar,
                        '--exclude', indexv1jar,
                        repo_section, serverwebroot]) != 0:
        raise FDroidException()
    if subprocess.call(rsyncargs + [repo_section, serverwebroot]) != 0:
        raise FDroidException()
    # upload "current version" symlinks if requested
    if config['make_current_version_link'] and repo_section == 'repo':
        for f in glob.glob('*.apk') \
                + glob.glob('*.apk.asc') + glob.glob('*.apk.sig'):
            if os.path.islink(f):
                links_to_upload.append(f)
        if len(links_to_upload) > 0:
            if subprocess.call(rsyncargs + links_to_upload + [serverwebroot]) != 0:
                raise FDroidException()
def _local_sync(fromdir, todir):
    """rsync fromdir to todir on the local filesystem.

    Raises FDroidException when the rsync subprocess exits non-zero.
    """
    rsyncargs = ['rsync', '--recursive', '--safe-links', '--times', '--perms',
                 '--one-file-system', '--delete', '--chmod=Da+rx,Fa-x,a+r,u+w']
    # use stricter rsync checking on all files since people using offline mode
    # are already prioritizing security above ease and speed
    if not options.no_checksum:
        rsyncargs.append('--checksum')
    # as visible, --verbose and --quiet were both appended unconditionally,
    # which is contradictory; gate them on the global options, matching the
    # pattern used right above for options.no_checksum (options.verbose and
    # options.quiet presumably come from common.setup_global_opts -- confirm)
    if options.verbose:
        rsyncargs += ['--verbose']
    if options.quiet:
        rsyncargs += ['--quiet']
    logging.debug(' '.join(rsyncargs + [fromdir, todir]))
    if subprocess.call(rsyncargs + [fromdir, todir]) != 0:
        raise FDroidException()
def sync_from_localcopy(repo_section, local_copy_dir):
    '''Syncs the repo from "local copy dir" filesystem to this box

    In setups that use offline signing, this is the last step that
    syncs the repo from the "local copy dir" e.g. a thumb drive to the
    repo on the local filesystem. That local repo is then used to
    push to all the servers that are configured.
    '''
    logging.info('Syncing from local_copy_dir to this repo.')
    # trailing slashes have a meaning in rsync which is not needed here, so
    # make sure both paths have exactly one trailing slash
    _local_sync(os.path.join(local_copy_dir, repo_section).rstrip('/') + '/',
                repo_section.rstrip('/') + '/')

    # also pull over the binary transparency log, if one exists on the drive
    offline_copy = os.path.join(local_copy_dir, BINARY_TRANSPARENCY_DIR)
    if os.path.exists(os.path.join(offline_copy, '.git')):
        online_copy = os.path.join(os.getcwd(), BINARY_TRANSPARENCY_DIR)
        push_binary_transparency(offline_copy, online_copy)
def update_localcopy(repo_section, local_copy_dir):
    '''copy data from offline to the "local copy dir" filesystem

    This updates the copy of this repo used to shuttle data from an
    offline signing machine to the online machine, e.g. on a thumb
    drive.
    '''
    # local_copy_dir is guaranteed to have a trailing slash in main() below
    _local_sync(repo_section, local_copy_dir)

    # also shuttle the binary transparency log, if this repo keeps one
    offline_copy = os.path.join(os.getcwd(), BINARY_TRANSPARENCY_DIR)
    if os.path.isdir(os.path.join(offline_copy, '.git')):
        online_copy = os.path.join(local_copy_dir, BINARY_TRANSPARENCY_DIR)
        push_binary_transparency(offline_copy, online_copy)
def _get_size(start_path='.'):
    '''get size of all files in a dir https://stackoverflow.com/a/1392549

    Recursively walks start_path and returns the total size in bytes of
    every regular file found.
    '''
    # as visible, the accumulator initialization, the inner loop over
    # filenames, and the return statement were missing, so the function
    # referenced an undefined `f` and returned None; restore them
    total_size = 0
    for dirpath, dirnames, filenames in os.walk(start_path):
        for f in filenames:
            fp = os.path.join(dirpath, f)
            total_size += os.path.getsize(fp)
    return total_size
def update_servergitmirrors(servergitmirrors, repo_section):
    '''update repo mirrors stored in git repos

    This is a hack to use public git repos as F-Droid repos. It
    recreates the git repo from scratch each time, so that there is no
    history. That keeps the size of the git repo small. Services
    like GitHub or GitLab have a size limit of something like 1 gig.
    This git repo is only a git repo for the purpose of being hosted.
    For history, there is the archive section, and there is the binary
    transparency log.
    '''
    # NOTE(review): this copy appears to have lines elided (e.g. an
    # `import git`, the early return on the offline branch, the progress
    # bar setup that defines `bar`, the else: of the remote-exists check,
    # and the body of the .gitlab-ci.yml string, which is unterminated
    # here) -- confirm against upstream.
    from clint.textui import progress
    # offline machines only sync to local_copy_dir; the mirrors get pushed
    # later by `fdroid server update` on the online machine
    if config.get('local_copy_dir') \
       and not config.get('sync_from_local_copy_dir'):
        logging.debug('Offline machine, skipping git mirror generation until `fdroid server update`')

    # right now we support only 'repo' git-mirroring
    if repo_section == 'repo':
        git_mirror_path = 'git-mirror'
        dotgit = os.path.join(git_mirror_path, '.git')
        git_repodir = os.path.join(git_mirror_path, 'fdroid', repo_section)
        if not os.path.isdir(git_repodir):
            os.makedirs(git_repodir)
        # start over when the mirror has grown past the hosting size limit
        if os.path.isdir(dotgit) and _get_size(git_mirror_path) > 1000000000:
            logging.warning('Deleting git-mirror history, repo is too big (1 gig max)')
            shutil.rmtree(dotgit)

        # rsync is very particular about trailing slashes
        _local_sync(repo_section.rstrip('/') + '/', git_repodir.rstrip('/') + '/')

        # use custom SSH command if identity_file specified
        ssh_cmd = 'ssh -oBatchMode=yes'
        if options.identity_file is not None:
            ssh_cmd += ' -oIdentitiesOnly=yes -i "%s"' % options.identity_file
        elif 'identity_file' in config:
            ssh_cmd += ' -oIdentitiesOnly=yes -i "%s"' % config['identity_file']

        repo = git.Repo.init(git_mirror_path)

        # one git remote per mirror URL, named after its hostname
        for remote_url in servergitmirrors:
            hostname = re.sub(r'\W*\w+\W+(\w+).*', r'\1', remote_url)
            r = git.remote.Remote(repo, hostname)
            if r in repo.remotes:
                r = repo.remote(hostname)
                if 'set_url' in dir(r):  # force remote URL if using GitPython 2.x
                    r.set_url(remote_url)
                repo.create_remote(hostname, remote_url)
            logging.info('Mirroring to: ' + remote_url)

        # sadly index.add don't allow the --all parameter
        logging.debug('Adding all files to git mirror')
        repo.git.add(all=True)
        logging.debug('Committing all files into git mirror')
        repo.index.commit("fdroidserver git-mirror")

        # reports push progress when the transport supplies a total
        class MyProgressPrinter(git.RemoteProgress):
            def update(self, op_code, current, maximum=None, message=None):
                if isinstance(maximum, float):
                    bar.show(current, maximum)
        progress = MyProgressPrinter()

        # push for every remote. This will overwrite the git history
        for remote in repo.remotes:
            if remote.name == 'gitlab':
                logging.debug('Writing .gitlab-ci.yml to deploy to GitLab Pages')
                with open(os.path.join(git_mirror_path, ".gitlab-ci.yml"), "wt") as out_file:
                    out_file.write("""pages:
                repo.git.add(all=True)
                repo.index.commit("fdroidserver git-mirror: Deploy to GitLab Pages")

            logging.debug('Pushing to ' + remote.url)
            with repo.git.custom_environment(GIT_SSH_COMMAND=ssh_cmd):
                pushinfos = remote.push('master', force=True, set_upstream=True, progress=progress)
                for pushinfo in pushinfos:
                    # fail hard if any mirror rejects or errors on the push
                    if pushinfo.flags & (git.remote.PushInfo.ERROR
                                         | git.remote.PushInfo.REJECTED
                                         | git.remote.PushInfo.REMOTE_FAILURE
                                         | git.remote.PushInfo.REMOTE_REJECTED):
                        raise FDroidException(remote.url + ' push failed: ' + str(pushinfo.flags)
                                              + ' ' + pushinfo.summary)
                    logging.debug(remote.url + ': ' + pushinfo.summary)
def upload_to_android_observatory(repo_section):
    """Upload each APK in repo_section to androidobservatory.org and log
    the resulting page URL."""
    # NOTE(review): lines appear elided here (e.g. `import requests`, the
    # fpath/response/page/message assignments, and the `for el in alert:`
    # loop header) -- confirm against upstream.
    # depend on requests and lxml only if users enable AO
    from lxml.html import fromstring

    if repo_section == 'repo':
        for f in glob.glob(os.path.join(repo_section, '*.apk')):
            fname = os.path.basename(f)
            logging.info('Uploading ' + fname + ' to androidobservatory.org')

            # upload the file with a post request
            r = requests.post('https://androidobservatory.org/upload', files={'apk': (fname, open(fpath, 'rb'))})

            # from now on XPath will be used to retrieve the message in the HTML
            # androidobservatory doesn't have a nice API to talk with
            # so we must scrape the page content
            tree = fromstring(response)
            alert = tree.xpath("//html/body/div[@class='container content-container']/div[@class='alert alert-info']")[0]

            # if the application was added successfully we retrive the url
            # if the application was already uploaded we use the redirect page url
            if el.attrib.get("href") is not None:
                appurl = page + el.attrib["href"][1:]
                message += el.text.replace(" here", "") + el.tail
            message = message.strip() + " " + appurl
            logging.info(message)
def upload_to_virustotal(repo_section, vt_apikey):
    """Check each APK in repo_section against the VirusTotal API, caching
    the JSON reports under virustotal/ and uploading files it has not seen."""
    # NOTE(review): lines appear elided here (e.g. the headers/params/files
    # dict headers, `response = r.json()`, and several else branches)
    # -- confirm against upstream.
    # these libraries are very chatty at INFO level
    logging.getLogger("urllib3").setLevel(logging.WARNING)
    logging.getLogger("requests").setLevel(logging.WARNING)

    if repo_section == 'repo':
        if not os.path.exists('virustotal'):
            os.mkdir('virustotal')
        with open(os.path.join(repo_section, 'index-v1.json')) as fp:
            index = json.load(fp)
        for packageName, packages in index['packages'].items():
            for package in packages:
                # one cached report file per (package, versionCode, hash)
                outputfilename = os.path.join('virustotal',
                                              packageName + '_' + str(package.get('versionCode'))
                                              + '_' + package['hash'] + '.json')
                if os.path.exists(outputfilename):
                    logging.debug(package['apkName'] + ' results are in ' + outputfilename)
                filename = package['apkName']
                repofilename = os.path.join(repo_section, filename)
                logging.info('Checking if ' + repofilename + ' is on virustotal')
                    "User-Agent": "F-Droid"
                    'resource': package['hash'],
                needs_file_upload = False
                # first query for an existing report by the APK's hash
                r = requests.post('https://www.virustotal.com/vtapi/v2/file/report',
                                  params=params, headers=headers)
                if r.status_code == 200:
                    # response_code 0 means virustotal has never seen this file
                    if response['response_code'] == 0:
                        needs_file_upload = True
                        response['filename'] = filename
                        response['packageName'] = packageName
                        response['versionCode'] = package.get('versionCode')
                        response['versionName'] = package.get('versionName')
                        with open(outputfilename, 'w') as fp:
                            json.dump(response, fp, indent=2, sort_keys=True)

                    if response.get('positives') > 0:
                        logging.warning(repofilename + ' has been flagged by virustotal '
                                        + str(response['positives']) + ' times:'
                                        + '\n\t' + response['permalink'])
                elif r.status_code == 204:
                    time.sleep(10)  # wait for public API rate limiting

                if needs_file_upload:
                    logging.info('Uploading ' + repofilename + ' to virustotal')
                        'file': (filename, open(repofilename, 'rb'))
                    r = requests.post('https://www.virustotal.com/vtapi/v2/file/scan',
                                      params=params, headers=headers, files=files)
                    logging.info(response['verbose_msg'] + " " + response['permalink'])
def push_binary_transparency(git_repo_path, git_remote):
    '''push the binary transparency git repo to the specified remote.

    If the remote is a local directory, make sure it exists, and is a
    git repo. This is used to move this git repo from an offline
    machine onto a flash drive, then onto the online machine. Also,
    this pulls because pushing to a non-bare git repo is error prone.

    This is also used in offline signing setups, where it then also
    creates a "local copy dir" git repo that serves to shuttle the git
    data from the offline machine to the online machine. In that
    case, git_remote is a dir on the local file system, e.g. a thumb
    drive.
    '''
    # NOTE(review): lines appear elided here (e.g. an `import git` and the
    # else: branches of the directory/remote-exists checks) -- confirm
    # against upstream.
    logging.info('Pushing binary transparency log to ' + git_remote)

    if os.path.isdir(os.path.dirname(git_remote)):
        # from offline machine to thumbdrive
        remote_path = os.path.abspath(git_repo_path)
        if not os.path.isdir(os.path.join(git_remote, '.git')):
            os.makedirs(git_remote, exist_ok=True)
            thumbdriverepo = git.Repo.init(git_remote)
            local = thumbdriverepo.create_remote('local', remote_path)
            thumbdriverepo = git.Repo(git_remote)
            local = git.remote.Remote(thumbdriverepo, 'local')
            if local in thumbdriverepo.remotes:
                local = thumbdriverepo.remote('local')
                if 'set_url' in dir(local):  # force remote URL if using GitPython 2.x
                    local.set_url(remote_path)
                local = thumbdriverepo.create_remote('local', remote_path)
        # from online machine to remote on a server on the internet
        gitrepo = git.Repo(git_repo_path)
        origin = git.remote.Remote(gitrepo, 'origin')
        if origin in gitrepo.remotes:
            origin = gitrepo.remote('origin')
            if 'set_url' in dir(origin):  # added in GitPython 2.x
                origin.set_url(git_remote)
            origin = gitrepo.create_remote('origin', git_remote)
        origin.push('master')
# NOTE(review): this appears to be the body of main() with its `def main():`
# line and a number of statements (sys.exit() calls, try/except blocks,
# several else: branches) elided from this copy -- confirm against upstream.
global config, options

# Parse command line...
parser = ArgumentParser()
common.setup_global_opts(parser)
parser.add_argument("command", help="command to execute, either 'init' or 'update'")
parser.add_argument("-i", "--identity-file", default=None,
                    help="Specify an identity file to provide to SSH for rsyncing")
parser.add_argument("--local-copy-dir", default=None,
                    help="Specify a local folder to sync the repo to")
parser.add_argument("--no-checksum", action="store_true", default=False,
                    help="Don't use rsync checksums")
options = parser.parse_args()

config = common.read_config(options)

if options.command != 'init' and options.command != 'update':
    logging.critical("The only commands currently supported are 'init' and 'update'")

# a "standard" webroot is one whose path ends in fdroid/
if config.get('nonstandardwebroot') is True:
    standardwebroot = False
    standardwebroot = True

# sanity-check every configured serverwebroot target
for serverwebroot in config.get('serverwebroot', []):
    # this supports both an ssh host:path and just a path
    s = serverwebroot.rstrip('/').split(':')
        logging.error('Malformed serverwebroot line: ' + serverwebroot)
    repobase = os.path.basename(fdroiddir)
    if standardwebroot and repobase != 'fdroid':
        logging.error('serverwebroot path does not end with "fdroid", '
                      + 'perhaps you meant one of these:\n\t'
                      + serverwebroot.rstrip('/') + '/fdroid\n\t'
                      + serverwebroot.rstrip('/').rstrip(repobase) + 'fdroid')

# the --local-copy-dir flag overrides the config.py setting
if options.local_copy_dir is not None:
    local_copy_dir = options.local_copy_dir
elif config.get('local_copy_dir'):
    local_copy_dir = config['local_copy_dir']
    local_copy_dir = None
if local_copy_dir is not None:
    fdroiddir = local_copy_dir.rstrip('/')
    if os.path.exists(fdroiddir) and not os.path.isdir(fdroiddir):
        logging.error('local_copy_dir must be directory, not a file!')
    if not os.path.exists(os.path.dirname(fdroiddir)):
        logging.error('The root dir for local_copy_dir "'
                      + os.path.dirname(fdroiddir)
                      + '" does not exist!')
    if not os.path.isabs(fdroiddir):
        logging.error('local_copy_dir must be an absolute path!')
    repobase = os.path.basename(fdroiddir)
    if standardwebroot and repobase != 'fdroid':
        logging.error('local_copy_dir does not end with "fdroid", '
                      + 'perhaps you meant: ' + fdroiddir + '/fdroid')
    # normalize to exactly one trailing slash, as rsync requires
    if local_copy_dir[-1] != '/':
        local_copy_dir += '/'
    local_copy_dir = local_copy_dir.replace('//', '/')
    if not os.path.exists(fdroiddir):

# warn when there is no deployment destination configured at all
if not config.get('awsbucket') \
        and not config.get('serverwebroot') \
        and not config.get('servergitmirrors') \
        and not config.get('androidobservatory') \
        and not config.get('binary_transparency_remote') \
        and not config.get('virustotal_apikey') \
        and local_copy_dir is None:
    logging.warn('No option set! Edit your config.py to set at least one among:\n'
                 + 'serverwebroot, servergitmirrors, local_copy_dir, awsbucket, virustotal_apikey, androidobservatory, or binary_transparency_remote')

# deploy 'repo' always, 'archive' when enabled, plus any per-app repos
repo_sections = ['repo']
if config['archive_older'] != 0:
    repo_sections.append('archive')
    if not os.path.exists('archive'):
if config['per_app_repos']:
    repo_sections += common.get_per_app_repos()

if options.command == 'init':
    # create the remote directory layout over SFTP
    ssh = paramiko.SSHClient()
    ssh.load_system_host_keys()
    for serverwebroot in config.get('serverwebroot', []):
        sshstr, remotepath = serverwebroot.rstrip('/').split(':')
        if sshstr.find('@') >= 0:
            username, hostname = sshstr.split('@')
            username = pwd.getpwuid(os.getuid())[0]  # get effective uid
        ssh.connect(hostname, username=username)
        sftp = ssh.open_sftp()
        if os.path.basename(remotepath) \
                not in sftp.listdir(os.path.dirname(remotepath)):
            sftp.mkdir(remotepath, mode=0o755)
        for repo_section in repo_sections:
            repo_path = os.path.join(remotepath, repo_section)
            if os.path.basename(repo_path) \
                    not in sftp.listdir(remotepath):
                sftp.mkdir(repo_path, mode=0o755)
elif options.command == 'update':
    # push each repo section out to every configured destination
    for repo_section in repo_sections:
        if local_copy_dir is not None:
            if config['sync_from_local_copy_dir']:
                sync_from_localcopy(repo_section, local_copy_dir)
                update_localcopy(repo_section, local_copy_dir)
        for serverwebroot in config.get('serverwebroot', []):
            update_serverwebroot(serverwebroot, repo_section)
        if config.get('servergitmirrors', []):
            # update_servergitmirrors will take care of multiple mirrors so don't need a foreach
            servergitmirrors = config.get('servergitmirrors', [])
            update_servergitmirrors(servergitmirrors, repo_section)
        if config.get('awsbucket'):
            update_awsbucket(repo_section)
        if config.get('androidobservatory'):
            upload_to_android_observatory(repo_section)
        if config.get('virustotal_apikey'):
            upload_to_virustotal(repo_section, config.get('virustotal_apikey'))

    # finally, publish the binary transparency log if a remote is configured
    binary_transparency_remote = config.get('binary_transparency_remote')
    if binary_transparency_remote:
        push_binary_transparency(BINARY_TRANSPARENCY_DIR,
                                 binary_transparency_remote)
717 if __name__ == "__main__":