3 # server.py - part of the FDroid server tools
4 # Copyright (C) 2010-15, Ciaran Gultnieks, ciaran@ciarang.com
6 # This program is free software: you can redistribute it and/or modify
7 # it under the terms of the GNU Affero General Public License as published by
8 # the Free Software Foundation, either version 3 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU Affero General Public License for more details.
16 # You should have received a copy of the GNU Affero General Public License
17 # along with this program. If not, see <http://www.gnu.org/licenses/>.
import glob
import hashlib
import logging
import os
import subprocess
from argparse import ArgumentParser

from .exception import FDroidException
# name of the local git repo dir that holds the binary transparency log,
# used both in the current working dir and inside local_copy_dir
BINARY_TRANSPARENCY_DIR = 'binary_transparency'
def update_awsbucket(repo_section):
    '''
    Upload the contents of the directory `repo_section` (including
    subdirectories) to the AWS S3 "bucket". The contents of that subdir of the
    bucket will first be deleted.

    Requires AWS credentials set in config.py: awsaccesskeyid, awssecretkey
    '''
    logging.debug('Syncing "' + repo_section + '" to Amazon S3 bucket "'
                  + config['awsbucket'] + '"')

    # prefer the s3cmd CLI tool since it does rsync-style multi-pass syncs;
    # otherwise fall back to the Apache libcloud Python library
    if common.set_command_in_config('s3cmd'):
        update_awsbucket_s3cmd(repo_section)
    else:
        update_awsbucket_libcloud(repo_section)
def update_awsbucket_s3cmd(repo_section):
    '''upload using the CLI tool s3cmd, which provides rsync-like sync

    The upload is done in multiple passes to reduce the chance of
    interfering with an existing client-server interaction. In the
    first pass, only new files are uploaded. In the second pass,
    changed files are uploaded, overwriting what is on the server. On
    the third/last pass, the indexes are uploaded, and any removed
    files are deleted from the server. The last pass is the only pass
    to use a full MD5 checksum of all files to detect changes.

    :raises FDroidException: if any s3cmd invocation exits non-zero
    '''
    logging.debug('using s3cmd to sync with ' + config['awsbucket'])

    # write the AWS credentials to a private (0600) s3cmd config file
    configfilename = '.s3cfg'
    fd = os.open(configfilename, os.O_CREAT | os.O_TRUNC | os.O_WRONLY, 0o600)
    os.write(fd, '[default]\n'.encode('utf-8'))
    os.write(fd, ('access_key = ' + config['awsaccesskeyid'] + '\n').encode('utf-8'))
    os.write(fd, ('secret_key = ' + config['awssecretkey'] + '\n').encode('utf-8'))
    os.close(fd)  # close the raw fd so the bytes are flushed and not leaked

    s3url = 's3://' + config['awsbucket'] + '/fdroid/'
    s3cmdargs = [
        's3cmd',
        'sync',
        '--config=' + configfilename,
        '--acl-public',
    ]
    if options.verbose:
        s3cmdargs += ['--verbose']
    elif options.quiet:
        s3cmdargs += ['--quiet']
    indexxml = os.path.join(repo_section, 'index.xml')
    indexjar = os.path.join(repo_section, 'index.jar')
    indexv1jar = os.path.join(repo_section, 'index-v1.jar')

    # pass 1: only new files, indexes excluded, no MD5 checking (fast)
    logging.debug('s3cmd sync new files in ' + repo_section + ' to ' + s3url)
    if subprocess.call(s3cmdargs +
                       ['--no-check-md5', '--skip-existing',
                        '--exclude', indexxml,
                        '--exclude', indexjar,
                        '--exclude', indexv1jar,
                        repo_section, s3url]) != 0:
        raise FDroidException()
    # pass 2: changed files too, indexes still excluded
    logging.debug('s3cmd sync all files in ' + repo_section + ' to ' + s3url)
    if subprocess.call(s3cmdargs +
                       ['--no-check-md5',
                        '--exclude', indexxml,
                        '--exclude', indexjar,
                        '--exclude', indexv1jar,
                        repo_section, s3url]) != 0:
        raise FDroidException()

    # pass 3: upload the indexes last and delete removed files, using full
    # MD5 checksums (unless --no-checksum was given)
    logging.debug('s3cmd sync indexes ' + repo_section + ' to ' + s3url + ' and delete')
    s3cmdargs.append('--delete-removed')
    s3cmdargs.append('--delete-after')
    if options.no_checksum:
        s3cmdargs.append('--no-check-md5')
    else:
        s3cmdargs.append('--check-md5')
    if subprocess.call(s3cmdargs + [repo_section, s3url]) != 0:
        raise FDroidException()
def update_awsbucket_libcloud(repo_section):
    '''
    Upload the contents of the directory `repo_section` (including
    subdirectories) to the AWS S3 "bucket". The contents of that subdir of the
    bucket will first be deleted.

    Requires AWS credentials set in config.py: awsaccesskeyid, awssecretkey

    :raises FDroidException: if the AWS credentials are not configured
    '''
    logging.debug('using Apache libcloud to sync with ' + config['awsbucket'])

    import libcloud.security
    libcloud.security.VERIFY_SSL_CERT = True
    from libcloud.storage.types import Provider, ContainerDoesNotExistError
    from libcloud.storage.providers import get_driver

    if not config.get('awsaccesskeyid') or not config.get('awssecretkey'):
        raise FDroidException(
            'To use awsbucket, you must set awssecretkey and awsaccesskeyid in config.py!')
    awsbucket = config['awsbucket']

    cls = get_driver(Provider.S3)
    driver = cls(config['awsaccesskeyid'], config['awssecretkey'])
    try:
        container = driver.get_container(container_name=awsbucket)
    except ContainerDoesNotExistError:
        # first run against this bucket: create it
        container = driver.create_container(container_name=awsbucket)
        logging.info('Created new container "' + container.name + '"')

    upload_dir = 'fdroid/' + repo_section
    # index what is already in the bucket so only changed files are uploaded;
    # entries are popped as they are matched, leaving only remnants to delete
    objs = dict()
    for obj in container.list_objects():
        if obj.name.startswith(upload_dir + '/'):
            objs[obj.name] = obj

    for root, dirs, files in os.walk(os.path.join(os.getcwd(), repo_section)):
        for name in files:
            upload = False
            file_to_upload = os.path.join(root, name)
            object_name = 'fdroid/' + os.path.relpath(file_to_upload, os.getcwd())
            if object_name not in objs:
                upload = True
            else:
                obj = objs.pop(object_name)
                if obj.size != os.path.getsize(file_to_upload):
                    upload = True
                else:
                    # if the sizes match, then compare by MD5
                    md5 = hashlib.md5()
                    with open(file_to_upload, 'rb') as f:
                        while True:
                            data = f.read(8192)
                            if not data:
                                break
                            md5.update(data)
                    if obj.hash != md5.hexdigest():
                        s3url = 's3://' + awsbucket + '/' + obj.name
                        logging.info(' deleting ' + s3url)
                        if not driver.delete_object(obj):
                            logging.warn('Could not delete ' + s3url)
                        upload = True

            if upload:
                logging.debug(' uploading "' + file_to_upload + '"...')
                extra = {'acl': 'public-read'}
                if file_to_upload.endswith('.sig'):
                    extra['content_type'] = 'application/pgp-signature'
                elif file_to_upload.endswith('.asc'):
                    extra['content_type'] = 'application/pgp-signature'
                logging.info(' uploading ' + os.path.relpath(file_to_upload)
                             + ' to s3://' + awsbucket + '/' + object_name)
                with open(file_to_upload, 'rb') as iterator:
                    obj = driver.upload_object_via_stream(iterator=iterator,
                                                          container=container,
                                                          object_name=object_name,
                                                          extra=extra)
    # delete the remnants in the bucket, they do not exist locally
    while objs:
        object_name, obj = objs.popitem()
        s3url = 's3://' + awsbucket + '/' + object_name
        if object_name.startswith(upload_dir):
            logging.warn(' deleting ' + s3url)
            driver.delete_object(obj)
        else:
            logging.info(' skipping ' + s3url)
def update_serverwebroot(serverwebroot, repo_section):
    '''rsync a repo section to a serverwebroot target (ssh host:path or path)

    :param serverwebroot: rsync destination; guaranteed to have a trailing
                          slash in common.py
    :param repo_section: repo subdirectory to upload, e.g. 'repo' or 'archive'
    :raises FDroidException: if any rsync invocation exits non-zero
    '''
    # use a checksum comparison for accurate comparisons on different
    # filesystems, for example, FAT has a low resolution timestamp
    rsyncargs = ['rsync', '--archive', '--delete-after', '--safe-links']
    if not options.no_checksum:
        rsyncargs.append('--checksum')
    if options.verbose:
        rsyncargs += ['--verbose']
    elif options.quiet:
        rsyncargs += ['--quiet']
    # the command line identity file wins over the config file entry
    if options.identity_file is not None:
        rsyncargs += ['-e', 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + options.identity_file]
    elif 'identity_file' in config:
        rsyncargs += ['-e', 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + config['identity_file']]
    indexxml = os.path.join(repo_section, 'index.xml')
    indexjar = os.path.join(repo_section, 'index.jar')
    indexv1jar = os.path.join(repo_section, 'index-v1.jar')
    # Upload the first time without the index files and delay the deletion as
    # much as possible, that keeps the repo functional while this update is
    # running. Then once it is complete, rerun the command again to upload
    # the index files. Always using the same target with rsync allows for
    # very strict settings on the receiving server, you can literally specify
    # the one rsync command that is allowed to run in ~/.ssh/authorized_keys.
    # (serverwebroot is guaranteed to have a trailing slash in common.py)
    logging.info('rsyncing ' + repo_section + ' to ' + serverwebroot)
    if subprocess.call(rsyncargs +
                       ['--exclude', indexxml, '--exclude', indexjar,
                        '--exclude', indexv1jar,
                        repo_section, serverwebroot]) != 0:
        raise FDroidException()
    if subprocess.call(rsyncargs + [repo_section, serverwebroot]) != 0:
        raise FDroidException()
    # upload "current version" symlinks if requested
    if config['make_current_version_link'] and repo_section == 'repo':
        links_to_upload = []
        for f in glob.glob('*.apk') \
                + glob.glob('*.apk.asc') + glob.glob('*.apk.sig'):
            if os.path.islink(f):
                links_to_upload.append(f)
        if len(links_to_upload) > 0:
            if subprocess.call(rsyncargs + links_to_upload + [serverwebroot]) != 0:
                raise FDroidException()
def _local_sync(fromdir, todir):
    '''rsync one local directory to another with strict checking

    :param fromdir: source path (trailing slash semantics are the caller's
                    responsibility, rsync treats them specially)
    :param todir: destination path
    :raises FDroidException: if rsync exits non-zero
    '''
    rsyncargs = ['rsync', '--recursive', '--safe-links', '--times', '--perms',
                 '--one-file-system', '--delete', '--chmod=Da+rx,Fa-x,a+r,u+w']
    # use stricter rsync checking on all files since people using offline mode
    # are already prioritizing security above ease and speed
    if not options.no_checksum:
        rsyncargs.append('--checksum')
    if options.verbose:
        rsyncargs += ['--verbose']
    elif options.quiet:
        rsyncargs += ['--quiet']
    logging.debug(' '.join(rsyncargs + [fromdir, todir]))
    if subprocess.call(rsyncargs + [fromdir, todir]) != 0:
        raise FDroidException()
def sync_from_localcopy(repo_section, local_copy_dir):
    '''Syncs the repo from "local copy dir" filesystem to this box

    In setups that use offline signing, this is the last step that
    syncs the repo from the "local copy dir" e.g. a thumb drive to the
    repo on the local filesystem. That local repo is then used to
    push to all the servers that are configured.
    '''
    logging.info('Syncing from local_copy_dir to this repo.')
    # trailing slashes have a meaning in rsync which is not needed here, so
    # make sure both paths have exactly one trailing slash
    _local_sync(os.path.join(local_copy_dir, repo_section).rstrip('/') + '/',
                repo_section.rstrip('/') + '/')

    # also pull over the binary transparency log, if one exists there
    offline_copy = os.path.join(local_copy_dir, BINARY_TRANSPARENCY_DIR)
    if os.path.exists(os.path.join(offline_copy, '.git')):
        online_copy = os.path.join(os.getcwd(), BINARY_TRANSPARENCY_DIR)
        push_binary_transparency(offline_copy, online_copy)
def update_localcopy(repo_section, local_copy_dir):
    '''copy data from offline to the "local copy dir" filesystem

    This updates the copy of this repo used to shuttle data from an
    offline signing machine to the online machine, e.g. on a thumb
    drive.
    '''
    # local_copy_dir is guaranteed to have a trailing slash in main() below
    _local_sync(repo_section, local_copy_dir)

    # shuttle the binary transparency log along, if there is one
    offline_copy = os.path.join(os.getcwd(), BINARY_TRANSPARENCY_DIR)
    if os.path.isdir(os.path.join(offline_copy, '.git')):
        online_copy = os.path.join(local_copy_dir, BINARY_TRANSPARENCY_DIR)
        push_binary_transparency(offline_copy, online_copy)
308 def _get_size(start_path='.'):
309 '''get size of all files in a dir https://stackoverflow.com/a/1392549'''
311 for root, dirs, files in os.walk(start_path):
313 fp = os.path.join(root, f)
314 total_size += os.path.getsize(fp)
def update_servergitmirrors(servergitmirrors, repo_section):
    '''update repo mirrors stored in git repos

    This is a hack to use public git repos as F-Droid repos. It
    recreates the git repo from scratch each time, so that there is no
    history. That keeps the size of the git repo small. Services
    like GitHub or GitLab have a size limit of something like 1 gig.
    This git repo is only a git repo for the purpose of being hosted.
    For history, there is the archive section, and there is the binary
    transparency log.
    '''
    # imported here lazily, only this deploy method needs clint
    from clint.textui import progress
    if config.get('local_copy_dir') \
            and not config.get('sync_from_local_copy_dir'):
        logging.debug('Offline machine, skipping git mirror generation until `fdroid server update`')

    # right now we support only 'repo' git-mirroring
    if repo_section == 'repo':
        git_mirror_path = 'git-mirror'
        dotgit = os.path.join(git_mirror_path, '.git')
        # the mirrored repo lives at git-mirror/fdroid/repo inside the git repo
        git_repodir = os.path.join(git_mirror_path, 'fdroid', repo_section)
        if not os.path.isdir(git_repodir):
            os.makedirs(git_repodir)
        # hosting services reject repos around 1 GB, so wipe the history
        # (the .git dir) once the mirror grows past that
        if os.path.isdir(dotgit) and _get_size(git_mirror_path) > 1000000000:
            logging.warning('Deleting git-mirror history, repo is too big (1 gig max)')
            shutil.rmtree(dotgit)

        # rsync is very particular about trailing slashes
        _local_sync(repo_section.rstrip('/') + '/', git_repodir.rstrip('/') + '/')

        # use custom SSH command if identity_file specified
        ssh_cmd = 'ssh -oBatchMode=yes'
        if options.identity_file is not None:
            ssh_cmd += ' -oIdentitiesOnly=yes -i "%s"' % options.identity_file
        elif 'identity_file' in config:
            ssh_cmd += ' -oIdentitiesOnly=yes -i "%s"' % config['identity_file']

        repo = git.Repo.init(git_mirror_path)

        # one git remote per mirror URL, named after its hostname
        for remote_url in servergitmirrors:
            hostname = re.sub(r'\W*\w+\W+(\w+).*', r'\1', remote_url)
            r = git.remote.Remote(repo, hostname)
            if r in repo.remotes:
                r = repo.remote(hostname)
                if 'set_url' in dir(r):  # force remote URL if using GitPython 2.x
                    r.set_url(remote_url)
                repo.create_remote(hostname, remote_url)
            logging.info('Mirroring to: ' + remote_url)

        # sadly index.add don't allow the --all parameter
        logging.debug('Adding all files to git mirror')
        repo.git.add(all=True)
        logging.debug('Committing all files into git mirror')
        repo.index.commit("fdroidserver git-mirror")

        # NOTE(review): lines appear to be missing in this chunk — e.g. the
        # block that defines `bar` (presumably `progress.Bar()` behind an
        # `if options.verbose:` guard) before this class — confirm against
        # upstream fdroidserver before relying on this section.
        class MyProgressPrinter(git.RemoteProgress):
            def update(self, op_code, current, maximum=None, message=None):
                # only a float maximum indicates a known total to display
                if isinstance(maximum, float):
                    bar.show(current, maximum)
        progress = MyProgressPrinter()

        # push for every remote. This will overwrite the git history
        for remote in repo.remotes:
            if remote.name == 'gitlab':
                logging.debug('Writing .gitlab-ci.yml to deploy to GitLab Pages')
                with open(os.path.join(git_mirror_path, ".gitlab-ci.yml"), "wt") as out_file:
                    out_file.write("""pages:
                repo.git.add(all=True)
                repo.index.commit("fdroidserver git-mirror: Deploy to GitLab Pages")

            logging.debug('Pushing to ' + remote.url)
            with repo.git.custom_environment(GIT_SSH_COMMAND=ssh_cmd):
                pushinfos = remote.push('master', force=True, set_upstream=True, progress=progress)
                for pushinfo in pushinfos:
                    if pushinfo.flags & (git.remote.PushInfo.ERROR
                                         | git.remote.PushInfo.REJECTED
                                         | git.remote.PushInfo.REMOTE_FAILURE
                                         | git.remote.PushInfo.REMOTE_REJECTED):
                        raise FDroidException(remote.url + ' push failed: ' + str(pushinfo.flags)
                                              + ' ' + pushinfo.summary)
                        logging.debug(remote.url + ': ' + pushinfo.summary)
def upload_to_android_observatory(repo_section):
    '''upload each APK in the repo to androidobservatory.org'''
    # depend on requests and lxml only if users enable AO
    from lxml.html import fromstring

    # only the main 'repo' section is uploaded, not the archive
    if repo_section == 'repo':
        for f in glob.glob(os.path.join(repo_section, '*.apk')):
            fname = os.path.basename(f)
            logging.info('Uploading ' + fname + ' to androidobservatory.org')

            # upload the file with a post request
            # NOTE(review): `fpath`, `requests`, `response` and `page` are
            # used below but their definitions are not visible in this chunk
            # — confirm against upstream fdroidserver
            r = requests.post('https://androidobservatory.org/upload', files={'apk': (fname, open(fpath, 'rb'))})

            # from now on XPath will be used to retrieve the message in the HTML
            # androidobservatory doesn't have a nice API to talk with
            # so we must scrape the page content
            tree = fromstring(response)
            alert = tree.xpath("//html/body/div[@class='container content-container']/div[@class='alert alert-info']")[0]

                # if the application was added successfully we retrive the url
                # if the application was already uploaded we use the redirect page url
                if el.attrib.get("href") is not None:
                    appurl = page + el.attrib["href"][1:]
                    message += el.text.replace(" here", "") + el.tail

            message = message.strip() + " " + appurl
            logging.info(message)
def upload_to_virustotal(repo_section, vt_apikey):
    '''check each APK in the repo against the virustotal.com API

    Results are written as JSON files into a local 'virustotal/' dir, one
    per packageName/versionCode/hash, so each APK is only processed once.
    '''
    # quiet the HTTP libraries, they log every request otherwise
    logging.getLogger("urllib3").setLevel(logging.WARNING)
    logging.getLogger("requests").setLevel(logging.WARNING)

    # only the main 'repo' section is checked, not the archive
    if repo_section == 'repo':
        if not os.path.exists('virustotal'):
            os.mkdir('virustotal')
        with open(os.path.join(repo_section, 'index-v1.json')) as fp:
            index = json.load(fp)
        for packageName, packages in index['packages'].items():
            for package in packages:
                # cached result file keyed on packageName, versionCode, hash
                outputfilename = os.path.join('virustotal',
                                              packageName + '_' + str(package.get('versionCode'))
                                              + '_' + package['hash'] + '.json')
                if os.path.exists(outputfilename):
                    logging.debug(package['apkName'] + ' results are in ' + outputfilename)
                filename = package['apkName']
                repofilename = os.path.join(repo_section, filename)
                logging.info('Checking if ' + repofilename + ' is on virustotal')

                # NOTE(review): the surrounding definitions of `headers`,
                # `params`, `files`, `response` and the loop around the POST
                # are not visible in this chunk — confirm against upstream
                    "User-Agent": "F-Droid"
                    'resource': package['hash'],
                needs_file_upload = False
                    r = requests.post('https://www.virustotal.com/vtapi/v2/file/report',
                                      params=params, headers=headers)
                    if r.status_code == 200:
                        if response['response_code'] == 0:
                            # unknown to virustotal: the APK itself must be sent
                            needs_file_upload = True
                            response['filename'] = filename
                            response['packageName'] = packageName
                            response['versionCode'] = package.get('versionCode')
                            response['versionName'] = package.get('versionName')
                            with open(outputfilename, 'w') as fp:
                                json.dump(response, fp, indent=2, sort_keys=True)

                            if response.get('positives') > 0:
                                logging.warning(repofilename + ' has been flagged by virustotal '
                                                + str(response['positives']) + ' times:'
                                                + '\n\t' + response['permalink'])
                    elif r.status_code == 204:
                        time.sleep(10)  # wait for public API rate limiting

                if needs_file_upload:
                    logging.info('Uploading ' + repofilename + ' to virustotal')
                        'file': (filename, open(repofilename, 'rb'))
                    r = requests.post('https://www.virustotal.com/vtapi/v2/file/scan',
                                      params=params, headers=headers, files=files)

                    logging.info(response['verbose_msg'] + " " + response['permalink'])
def push_binary_transparency(git_repo_path, git_remote):
    '''push the binary transparency git repo to the specifed remote.

    If the remote is a local directory, make sure it exists, and is a
    git repo. This is used to move this git repo from an offline
    machine onto a flash drive, then onto the online machine. Also,
    this pulls because pushing to a non-bare git repo is error prone.

    This is also used in offline signing setups, where it then also
    creates a "local copy dir" git repo that serves to shuttle the git
    data from the offline machine to the online machine. In that
    case, git_remote is a dir on the local file system, e.g. a thumb
    drive.
    '''
    logging.info('Pushing binary transparency log to ' + git_remote)

    if os.path.isdir(os.path.dirname(git_remote)):
        # from offline machine to thumbdrive
        remote_path = os.path.abspath(git_repo_path)
        if not os.path.isdir(os.path.join(git_remote, '.git')):
            # first use of this thumbdrive: init a git repo there and
            # point a 'local' remote back at the source repo
            os.makedirs(git_remote, exist_ok=True)
            thumbdriverepo = git.Repo.init(git_remote)
            local = thumbdriverepo.create_remote('local', remote_path)
        else:
            thumbdriverepo = git.Repo(git_remote)
            local = git.remote.Remote(thumbdriverepo, 'local')
            if local in thumbdriverepo.remotes:
                local = thumbdriverepo.remote('local')
                if 'set_url' in dir(local):  # force remote URL if using GitPython 2.x
                    local.set_url(remote_path)
            else:
                local = thumbdriverepo.create_remote('local', remote_path)
        # pull instead of push, since pushing into a non-bare repo is error prone
        local.pull('master')
    else:
        # from online machine to remote on a server on the internet
        gitrepo = git.Repo(git_repo_path)
        origin = git.remote.Remote(gitrepo, 'origin')
        if origin in gitrepo.remotes:
            origin = gitrepo.remote('origin')
            if 'set_url' in dir(origin):  # added in GitPython 2.x
                origin.set_url(git_remote)
        else:
            origin = gitrepo.create_remote('origin', git_remote)
        origin.push('master')
    # NOTE(review): the `def main():` header and a number of interior lines
    # (else branches, sys.exit calls, mkdir bodies) appear to be elided from
    # this chunk — confirm against upstream fdroidserver
    global config, options

    # Parse command line...
    parser = ArgumentParser()
    common.setup_global_opts(parser)
    parser.add_argument("command", help=_("command to execute, either 'init' or 'update'"))
    parser.add_argument("-i", "--identity-file", default=None,
                        help=_("Specify an identity file to provide to SSH for rsyncing"))
    parser.add_argument("--local-copy-dir", default=None,
                        help=_("Specify a local folder to sync the repo to"))
    parser.add_argument("--no-checksum", action="store_true", default=False,
                        help=_("Don't use rsync checksums"))
    options = parser.parse_args()

    config = common.read_config(options)

    if options.command != 'init' and options.command != 'update':
        logging.critical(_("The only commands currently supported are 'init' and 'update'"))

    # a standard webroot serves the repo from under a top-level 'fdroid/' dir
    if config.get('nonstandardwebroot') is True:
        standardwebroot = False
        standardwebroot = True

    # sanity-check each serverwebroot entry before doing any uploads
    for serverwebroot in config.get('serverwebroot', []):
        # this supports both an ssh host:path and just a path
        s = serverwebroot.rstrip('/').split(':')
            logging.error(_('Malformed serverwebroot line:') + ' ' + serverwebroot)
        repobase = os.path.basename(fdroiddir)
        if standardwebroot and repobase != 'fdroid':
            logging.error('serverwebroot path does not end with "fdroid", '
                          + 'perhaps you meant one of these:\n\t'
                          + serverwebroot.rstrip('/') + '/fdroid\n\t'
                          + serverwebroot.rstrip('/').rstrip(repobase) + 'fdroid')

    # the command line flag wins over the config file entry
    if options.local_copy_dir is not None:
        local_copy_dir = options.local_copy_dir
    elif config.get('local_copy_dir'):
        local_copy_dir = config['local_copy_dir']
        local_copy_dir = None
    if local_copy_dir is not None:
        fdroiddir = local_copy_dir.rstrip('/')
        if os.path.exists(fdroiddir) and not os.path.isdir(fdroiddir):
            logging.error(_('local_copy_dir must be directory, not a file!'))
        if not os.path.exists(os.path.dirname(fdroiddir)):
            logging.error('The root dir for local_copy_dir "'
                          + os.path.dirname(fdroiddir)
                          + '" does not exist!')
        if not os.path.isabs(fdroiddir):
            logging.error(_('local_copy_dir must be an absolute path!'))
        repobase = os.path.basename(fdroiddir)
        if standardwebroot and repobase != 'fdroid':
            logging.error('local_copy_dir does not end with "fdroid", '
                          + 'perhaps you meant: ' + fdroiddir + '/fdroid')
        # normalize to exactly one trailing slash for rsync
        if local_copy_dir[-1] != '/':
            local_copy_dir += '/'
        local_copy_dir = local_copy_dir.replace('//', '/')
        if not os.path.exists(fdroiddir):

    # warn when there is no deployment target configured at all
    if not config.get('awsbucket') \
            and not config.get('serverwebroot') \
            and not config.get('servergitmirrors') \
            and not config.get('androidobservatory') \
            and not config.get('binary_transparency_remote') \
            and not config.get('virustotal_apikey') \
            and local_copy_dir is None:
        logging.warn(_('No option set! Edit your config.py to set at least one of these:')
                     + '\nserverwebroot, servergitmirrors, local_copy_dir, awsbucket, virustotal_apikey, androidobservatory, or binary_transparency_remote')

    # which repo subdirectories to deploy
    repo_sections = ['repo']
    if config['archive_older'] != 0:
        repo_sections.append('archive')
        if not os.path.exists('archive'):
    if config['per_app_repos']:
        repo_sections += common.get_per_app_repos()

    if options.command == 'init':
        # 'init' creates the remote dirs over SFTP
        ssh = paramiko.SSHClient()
        ssh.load_system_host_keys()
        for serverwebroot in config.get('serverwebroot', []):
            sshstr, remotepath = serverwebroot.rstrip('/').split(':')
            if sshstr.find('@') >= 0:
                username, hostname = sshstr.split('@')
                username = pwd.getpwuid(os.getuid())[0]  # get effective uid
            ssh.connect(hostname, username=username)
            sftp = ssh.open_sftp()
            if os.path.basename(remotepath) \
                    not in sftp.listdir(os.path.dirname(remotepath)):
                sftp.mkdir(remotepath, mode=0o755)
            for repo_section in repo_sections:
                repo_path = os.path.join(remotepath, repo_section)
                if os.path.basename(repo_path) \
                        not in sftp.listdir(remotepath):
                    sftp.mkdir(repo_path, mode=0o755)
    elif options.command == 'update':
        # 'update' deploys each repo section to every configured target
        for repo_section in repo_sections:
            if local_copy_dir is not None:
                if config['sync_from_local_copy_dir']:
                    sync_from_localcopy(repo_section, local_copy_dir)
                    update_localcopy(repo_section, local_copy_dir)
            for serverwebroot in config.get('serverwebroot', []):
                update_serverwebroot(serverwebroot, repo_section)
            if config.get('servergitmirrors', []):
                # update_servergitmirrors will take care of multiple mirrors so don't need a foreach
                servergitmirrors = config.get('servergitmirrors', [])
                update_servergitmirrors(servergitmirrors, repo_section)
            if config.get('awsbucket'):
                update_awsbucket(repo_section)
            if config.get('androidobservatory'):
                upload_to_android_observatory(repo_section)
            if config.get('virustotal_apikey'):
                upload_to_virustotal(repo_section, config.get('virustotal_apikey'))

    # the binary transparency log is pushed regardless of command
    binary_transparency_remote = config.get('binary_transparency_remote')
    if binary_transparency_remote:
        push_binary_transparency(BINARY_TRANSPARENCY_DIR,
                                 binary_transparency_remote)
718 if __name__ == "__main__":