3 # server.py - part of the FDroid server tools
4 # Copyright (C) 2010-15, Ciaran Gultnieks, ciaran@ciarang.com
6 # This program is free software: you can redistribute it and/or modify
7 # it under the terms of the GNU Affero General Public License as published by
8 # the Free Software Foundation, either version 3 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU Affero General Public License for more details.
16 # You should have received a copy of the GNU Affero General Public License
17 # along with this program. If not, see <http://www.gnu.org/licenses/>.
28 from argparse import ArgumentParser
33 from .exception import FDroidException
# Directory name (relative to the working dir) that holds the binary
# transparency git log; referenced by the localcopy sync helpers and main().
38 BINARY_TRANSPARENCY_DIR = 'binary_transparency'
41 def update_awsbucket(repo_section):
# NOTE(review): this chunk is garbled — original line numbers are fused into
# each line and several lines are missing (docstring quotes, the `else:` that
# should precede the libcloud fallback).  Code left byte-identical.
# Purpose: sync one repo section to the configured S3 bucket, using the s3cmd
# CLI when common.set_command_in_config finds it, otherwise Apache libcloud.
43 Upload the contents of the directory `repo_section` (including
44 subdirectories) to the AWS S3 "bucket". The contents of that subdir of the
45 bucket will first be deleted.
47 Requires AWS credentials set in config.py: awsaccesskeyid, awssecretkey
50 logging.debug('Syncing "' + repo_section + '" to Amazon S3 bucket "'
51 + config['awsbucket'] + '"')
53 if common.set_command_in_config('s3cmd'):
54 update_awsbucket_s3cmd(repo_section)
# NOTE(review): the dropped line ~55 presumably held the `else:` branch.
56 update_awsbucket_libcloud(repo_section)
59 def update_awsbucket_s3cmd(repo_section):
# Purpose: three-pass s3cmd sync of `repo_section` to s3://<awsbucket>/fdroid/:
# pass 1 uploads only new files, pass 2 uploads changed files, pass 3 uploads
# the index files and deletes removed files (MD5-checked unless --no-checksum).
# NOTE(review): interior lines are missing from this view (e.g. the os.close
# of fd, the s3cmdargs list initializer, the verbose/quiet conditionals).
60 '''upload using the CLI tool s3cmd, which provides rsync-like sync
62 The upload is done in multiple passes to reduce the chance of
63 interfering with an existing client-server interaction. In the
64 first pass, only new files are uploaded. In the second pass,
65 changed files are uploaded, overwriting what is on the server. On
66 the third/last pass, the indexes are uploaded, and any removed
67 files are deleted from the server. The last pass is the only pass
68 to use a full MD5 checksum of all files to detect changes.
71 logging.debug('using s3cmd to sync with ' + config['awsbucket'])
# Credentials file is created with mode 0o600 so the AWS secret key is not
# world-readable.
73 configfilename = '.s3cfg'
74 fd = os.open(configfilename, os.O_CREAT | os.O_TRUNC | os.O_WRONLY, 0o600)
75 os.write(fd, '[default]\n'.encode('utf-8'))
76 os.write(fd, ('access_key = ' + config['awsaccesskeyid'] + '\n').encode('utf-8'))
77 os.write(fd, ('secret_key = ' + config['awssecretkey'] + '\n').encode('utf-8'))
80 s3url = 's3://' + config['awsbucket'] + '/fdroid/'
# NOTE(review): the surrounding s3cmdargs = [...] initializer is missing here.
84 '--config=' + configfilename,
88 s3cmdargs += ['--verbose']
90 s3cmdargs += ['--quiet']
# The index files are excluded from the first two passes so clients never see
# an index that references files not yet uploaded.
91 indexxml = os.path.join(repo_section, 'index.xml')
92 indexjar = os.path.join(repo_section, 'index.jar')
93 indexv1jar = os.path.join(repo_section, 'index-v1.jar')
94 logging.debug('s3cmd sync new files in ' + repo_section + ' to ' + s3url)
95 if subprocess.call(s3cmdargs +
96 ['--no-check-md5', '--skip-existing',
97 '--exclude', indexxml,
98 '--exclude', indexjar,
99 '--exclude', indexv1jar,
100 repo_section, s3url]) != 0:
101 raise FDroidException()
102 logging.debug('s3cmd sync all files in ' + repo_section + ' to ' + s3url)
103 if subprocess.call(s3cmdargs +
105 '--exclude', indexxml,
106 '--exclude', indexjar,
107 '--exclude', indexv1jar,
108 repo_section, s3url]) != 0:
109 raise FDroidException()
# Final pass: include indexes, delete server-side leftovers after upload, and
# optionally verify with full MD5 checksums.
111 logging.debug('s3cmd sync indexes ' + repo_section + ' to ' + s3url + ' and delete')
112 s3cmdargs.append('--delete-removed')
113 s3cmdargs.append('--delete-after')
114 if options.no_checksum:
115 s3cmdargs.append('--no-check-md5')
117 s3cmdargs.append('--check-md5')
118 if subprocess.call(s3cmdargs + [repo_section, s3url]) != 0:
119 raise FDroidException()
122 def update_awsbucket_libcloud(repo_section):
# Purpose: sync `repo_section` into the 'fdroid/<section>' prefix of the S3
# bucket via Apache libcloud: upload new/changed files, then delete remote
# objects under that prefix that no longer exist locally.
# NOTE(review): interior lines are missing (the `try:` before get_container,
# the dict that collects existing objects — referenced below as `objs` —
# the md5-hashing loop body, and the upload_object_via_stream kwargs).
124 Upload the contents of the directory `repo_section` (including
125 subdirectories) to the AWS S3 "bucket". The contents of that subdir of the
126 bucket will first be deleted.
128 Requires AWS credentials set in config.py: awsaccesskeyid, awssecretkey
131 logging.debug('using Apache libcloud to sync with ' + config['awsbucket'])
# Local imports keep libcloud optional for users who don't enable awsbucket.
133 import libcloud.security
134 libcloud.security.VERIFY_SSL_CERT = True
135 from libcloud.storage.types import Provider, ContainerDoesNotExistError
136 from libcloud.storage.providers import get_driver
138 if not config.get('awsaccesskeyid') or not config.get('awssecretkey'):
139 raise FDroidException(
140 'To use awsbucket, you must set awssecretkey and awsaccesskeyid in config.py!')
141 awsbucket = config['awsbucket']
143 cls = get_driver(Provider.S3)
144 driver = cls(config['awsaccesskeyid'], config['awssecretkey'])
# Create the bucket on first use if it does not exist yet.
146 container = driver.get_container(container_name=awsbucket)
147 except ContainerDoesNotExistError:
148 container = driver.create_container(container_name=awsbucket)
149 logging.info('Created new container "' + container.name + '"')
151 upload_dir = 'fdroid/' + repo_section
# presumably the dropped lines build `objs` mapping object name -> object for
# everything already under upload_dir — TODO confirm against upstream.
153 for obj in container.list_objects():
154 if obj.name.startswith(upload_dir + '/'):
157 for root, _, files in os.walk(os.path.join(os.getcwd(), repo_section)):
160 file_to_upload = os.path.join(root, name)
161 object_name = 'fdroid/' + os.path.relpath(file_to_upload, os.getcwd())
162 if object_name not in objs:
# Existing object: upload again only if size differs, or sizes match but the
# MD5 of the local file differs from the stored hash.
165 obj = objs.pop(object_name)
166 if obj.size != os.path.getsize(file_to_upload):
169 # if the sizes match, then compare by MD5
171 with open(file_to_upload, 'rb') as f:
177 if obj.hash != md5.hexdigest():
178 s3url = 's3://' + awsbucket + '/' + obj.name
179 logging.info(' deleting ' + s3url)
180 if not driver.delete_object(obj):
181 logging.warn('Could not delete ' + s3url)
185 logging.debug(' uploading "' + file_to_upload + '"...')
186 extra = {'acl': 'public-read'}
187 if file_to_upload.endswith('.sig'):
188 extra['content_type'] = 'application/pgp-signature'
189 elif file_to_upload.endswith('.asc'):
190 extra['content_type'] = 'application/pgp-signature'
191 logging.info(' uploading ' + os.path.relpath(file_to_upload)
192 + ' to s3://' + awsbucket + '/' + object_name)
193 with open(file_to_upload, 'rb') as iterator:
194 obj = driver.upload_object_via_stream(iterator=iterator,
196 object_name=object_name,
# Anything left in objs exists remotely but not locally: delete it if it is
# under this section's prefix, otherwise leave it alone.
198 # delete the remnants in the bucket, they do not exist locally
200 object_name, obj = objs.popitem()
201 s3url = 's3://' + awsbucket + '/' + object_name
202 if object_name.startswith(upload_dir):
203 logging.warn(' deleting ' + s3url)
204 driver.delete_object(obj)
206 logging.info(' skipping ' + s3url)
209 def update_serverwebroot(serverwebroot, repo_section):
# Purpose: rsync `repo_section` to one serverwebroot target in two passes
# (first without the index files, then everything), then optionally upload
# the "current version" symlinks.
# NOTE(review): the verbose/quiet conditionals and the links_to_upload list
# initializer are missing from this view; code left byte-identical.
210 # use a checksum comparison for accurate comparisons on different
211 # filesystems, for example, FAT has a low resolution timestamp
212 rsyncargs = ['rsync', '--archive', '--delete-after', '--safe-links']
213 if not options.no_checksum:
214 rsyncargs.append('--checksum')
216 rsyncargs += ['--verbose']
218 rsyncargs += ['--quiet']
# CLI -i flag wins over the config.py identity_file setting.
219 if options.identity_file is not None:
220 rsyncargs += ['-e', 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + options.identity_file]
221 elif 'identity_file' in config:
222 rsyncargs += ['-e', 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + config['identity_file']]
223 indexxml = os.path.join(repo_section, 'index.xml')
224 indexjar = os.path.join(repo_section, 'index.jar')
225 indexv1jar = os.path.join(repo_section, 'index-v1.jar')
226 # Upload the first time without the index files and delay the deletion as
227 # much as possible, that keeps the repo functional while this update is
228 # running. Then once it is complete, rerun the command again to upload
229 # the index files. Always using the same target with rsync allows for
230 # very strict settings on the receiving server, you can literally specify
231 # the one rsync command that is allowed to run in ~/.ssh/authorized_keys.
232 # (serverwebroot is guaranteed to have a trailing slash in common.py)
233 logging.info('rsyncing ' + repo_section + ' to ' + serverwebroot)
234 if subprocess.call(rsyncargs +
235 ['--exclude', indexxml, '--exclude', indexjar,
236 '--exclude', indexv1jar,
237 repo_section, serverwebroot]) != 0:
238 raise FDroidException()
239 if subprocess.call(rsyncargs + [repo_section, serverwebroot]) != 0:
240 raise FDroidException()
241 # upload "current version" symlinks if requested
242 if config['make_current_version_link'] and repo_section == 'repo':
244 for f in glob.glob('*.apk') \
245 + glob.glob('*.apk.asc') + glob.glob('*.apk.sig'):
246 if os.path.islink(f):
247 links_to_upload.append(f)
248 if len(links_to_upload) > 0:
249 if subprocess.call(rsyncargs + links_to_upload + [serverwebroot]) != 0:
250 raise FDroidException()
253 def _local_sync(fromdir, todir):
# Purpose: rsync fromdir to todir on the local filesystem with strict,
# checksum-based comparison (unless --no-checksum) and normalized permissions.
# Raises FDroidException on any non-zero rsync exit.
# NOTE(review): the verbose/quiet `if` lines are missing from this view.
254 rsyncargs = ['rsync', '--recursive', '--safe-links', '--times', '--perms',
255 '--one-file-system', '--delete', '--chmod=Da+rx,Fa-x,a+r,u+w']
256 # use stricter rsync checking on all files since people using offline mode
257 # are already prioritizing security above ease and speed
258 if not options.no_checksum:
259 rsyncargs.append('--checksum')
261 rsyncargs += ['--verbose']
263 rsyncargs += ['--quiet']
264 logging.debug(' '.join(rsyncargs + [fromdir, todir]))
265 if subprocess.call(rsyncargs + [fromdir, todir]) != 0:
266 raise FDroidException()
269 def sync_from_localcopy(repo_section, local_copy_dir):
270 '''Syncs the repo from "local copy dir" filesystem to this box
272 In setups that use offline signing, this is the last step that
273 syncs the repo from the "local copy dir" e.g. a thumb drive to the
274 repo on the local filesystem. That local repo is then used to
275 push to all the servers that are configured.
278 logging.info('Syncing from local_copy_dir to this repo.')
279 # trailing slashes have a meaning in rsync which is not needed here, so
280 # make sure both paths have exactly one trailing slash
281 _local_sync(os.path.join(local_copy_dir, repo_section).rstrip('/') + '/',
282 repo_section.rstrip('/') + '/')
# Also pull the binary transparency log off the thumb drive, but only if it
# is actually a git repo (a .git dir exists there).
284 offline_copy = os.path.join(local_copy_dir, BINARY_TRANSPARENCY_DIR)
285 if os.path.exists(os.path.join(offline_copy, '.git')):
286 online_copy = os.path.join(os.getcwd(), BINARY_TRANSPARENCY_DIR)
287 push_binary_transparency(offline_copy, online_copy)
290 def update_localcopy(repo_section, local_copy_dir):
291 '''copy data from offline to the "local copy dir" filesystem
293 This updates the copy of this repo used to shuttle data from an
294 offline signing machine to the online machine, e.g. on a thumb
# NOTE(review): the rest of this docstring (original ~295-297) is missing.
298 # local_copy_dir is guaranteed to have a trailing slash in main() below
299 _local_sync(repo_section, local_copy_dir)
# Mirror the local binary transparency git repo onto the shuttle medium too,
# if one exists in the current working dir.
301 offline_copy = os.path.join(os.getcwd(), BINARY_TRANSPARENCY_DIR)
302 if os.path.isdir(os.path.join(offline_copy, '.git')):
303 online_copy = os.path.join(local_copy_dir, BINARY_TRANSPARENCY_DIR)
304 push_binary_transparency(offline_copy, online_copy)
307 def _get_size(start_path='.'):
308 '''get size of all files in a dir https://stackoverflow.com/a/1392549'''
# NOTE(review): the total_size initializer, the inner `for f in filenames:`
# line, and the return statement are missing from this view.
310 for dirpath, dirnames, filenames in os.walk(start_path):
312 fp = os.path.join(dirpath, f)
313 total_size += os.path.getsize(fp)
317 def update_servergitmirrors(servergitmirrors, repo_section):
# Purpose: publish the 'repo' section as a from-scratch git repo and force-push
# it to each configured mirror URL, with special .gitlab-ci.yml handling for
# GitLab Pages deployment.
# NOTE(review): several interior lines are missing from this view (the early
# return after the offline-machine debug message, the `branch` assignment,
# the progress-bar setup, and the .gitlab-ci.yml heredoc body).
318 '''update repo mirrors stored in git repos
320 This is a hack to use public git repos as F-Droid repos. It
321 recreates the git repo from scratch each time, so that there is no
322 history. That keeps the size of the git repo small. Services
323 like GitHub or GitLab have a size limit of something like 1 gig.
324 This git repo is only a git repo for the purpose of being hosted.
325 For history, there is the archive section, and there is the binary
330 from clint.textui import progress
331 if config.get('local_copy_dir') \
332 and not config.get('sync_from_local_copy_dir'):
333 logging.debug('Offline machine, skipping git mirror generation until `fdroid server update`')
336 # right now we support only 'repo' git-mirroring
337 if repo_section == 'repo':
338 git_mirror_path = 'git-mirror'
339 dotgit = os.path.join(git_mirror_path, '.git')
340 git_repodir = os.path.join(git_mirror_path, 'fdroid', repo_section)
341 if not os.path.isdir(git_repodir):
342 os.makedirs(git_repodir)
# Hosting services cap repo size around 1 GB, so drop all history (the .git
# dir) once the mirror grows past 1000000000 bytes.
343 if os.path.isdir(dotgit) and _get_size(git_mirror_path) > 1000000000:
344 logging.warning('Deleting git-mirror history, repo is too big (1 gig max)')
345 shutil.rmtree(dotgit)
347 # rsync is very particular about trailing slashes
348 _local_sync(repo_section.rstrip('/') + '/', git_repodir.rstrip('/') + '/')
350 # use custom SSH command if identity_file specified
351 ssh_cmd = 'ssh -oBatchMode=yes'
352 if options.identity_file is not None:
353 ssh_cmd += ' -oIdentitiesOnly=yes -i "%s"' % options.identity_file
354 elif 'identity_file' in config:
355 ssh_cmd += ' -oIdentitiesOnly=yes -i "%s"' % config['identity_file']
356 repo = git.Repo.init(git_mirror_path)
# One remote per mirror URL, named by a hostname-ish token extracted with a
# regex; reuse and re-point the remote if it already exists.
359 for remote_url in servergitmirrors:
360 hostname = re.sub(r'\W*\w+\W+(\w+).*', r'\1', remote_url)
361 r = git.remote.Remote(repo, hostname)
362 if r in repo.remotes:
363 r = repo.remote(hostname)
364 if 'set_url' in dir(r): # force remote URL if using GitPython 2.x
365 r.set_url(remote_url)
367 repo.create_remote(hostname, remote_url)
368 logging.info('Mirroring to: ' + remote_url)
370 # sadly index.add don't allow the --all parameter
371 logging.debug('Adding all files to git mirror')
372 repo.git.add(all=True)
373 logging.debug('Committing all files into git mirror')
374 repo.index.commit("fdroidserver git-mirror")
# Progress reporting for the pushes; `bar` is presumably a clint progress bar
# created on a dropped line — TODO confirm.
379 class MyProgressPrinter(git.RemoteProgress):
380 def update(self, op_code, current, maximum=None, message=None):
381 if isinstance(maximum, float):
382 bar.show(current, maximum)
383 progress = MyProgressPrinter()
387 # push for every remote. This will overwrite the git history
388 for remote in repo.remotes:
# GitLab-only: commit a .gitlab-ci.yml so the push also deploys to Pages,
# then undo that commit after the push so other remotes don't get it.
390 if remote.name == 'gitlab':
391 logging.debug('Writing .gitlab-ci.yml to deploy to GitLab Pages')
392 with open(os.path.join(git_mirror_path, ".gitlab-ci.yml"), "wt") as out_file:
393 out_file.write("""pages:
403 repo.git.add(all=True)
404 repo.index.commit("fdroidserver git-mirror: Deploy to GitLab Pages")
406 logging.debug('Pushing to ' + remote.url)
407 with repo.git.custom_environment(GIT_SSH_COMMAND=ssh_cmd):
408 remote.push(branch, force=True, set_upstream=True, progress=progress)
410 # Reset the gitlab specific stuff before the next remote.
411 if remote.name == 'gitlab':
412 logging.debug('Removing .gitlab-ci.yml now that it has successfully deployed')
413 repo.index.reset('HEAD^')
414 repo.index.checkout(force=True)
420 def upload_to_android_observatory(repo_section):
# Purpose: POST every APK in the 'repo' section to androidobservatory.org and
# scrape the returned HTML for the status message and app URL.
# NOTE(review): assignments for `fpath`, `response`, `page`, `el` and the
# initial `message` are on lines missing from this view.
421 # depend on requests and lxml only if users enable AO
423 from lxml.html import fromstring
425 if repo_section == 'repo':
426 for f in glob.glob(os.path.join(repo_section, '*.apk')):
428 fname = os.path.basename(f)
429 logging.info('Uploading ' + fname + ' to androidobservatory.org')
431 # upload the file with a post request
432 r = requests.post('https://androidobservatory.org/upload', files={'apk': (fname, open(fpath, 'rb'))})
436 # from now on XPath will be used to retrieve the message in the HTML
437 # androidobservatory doesn't have a nice API to talk with
438 # so we must scrape the page content
439 tree = fromstring(response)
440 alert = tree.xpath("//html/body/div[@class='container content-container']/div[@class='alert alert-info']")[0]
445 # if the application was added successfully we retrive the url
446 # if the application was already uploaded we use the redirect page url
447 if el.attrib.get("href") is not None:
448 appurl = page + el.attrib["href"][1:]
449 message += el.text.replace(" here", "") + el.tail
452 message = message.strip() + " " + appurl
453 logging.info(message)
456 def upload_to_virustotal(repo_section, vt_apikey):
# Purpose: for each package in the 'repo' index-v1.json, query the VirusTotal
# v2 file/report API by hash, cache the JSON result under ./virustotal/, warn
# when an APK has positive detections, and upload files VT has never seen.
# NOTE(review): the bodies of the `headers`, `params` and `files` dicts and
# the `response = r.json()` lines are missing from this view.
460 logging.getLogger("urllib3").setLevel(logging.WARNING)
461 logging.getLogger("requests").setLevel(logging.WARNING)
463 if repo_section == 'repo':
464 if not os.path.exists('virustotal'):
465 os.mkdir('virustotal')
466 with open(os.path.join(repo_section, 'index-v1.json')) as fp:
467 index = json.load(fp)
468 for packageName, packages in index['packages'].items():
469 for package in packages:
# Cache file name encodes package, versionCode and hash so a result is only
# fetched once per exact APK.
470 outputfilename = os.path.join('virustotal',
471 packageName + '_' + str(package.get('versionCode'))
472 + '_' + package['hash'] + '.json')
473 if os.path.exists(outputfilename):
474 logging.debug(package['apkName'] + ' results are in ' + outputfilename)
476 filename = package['apkName']
477 repofilename = os.path.join(repo_section, filename)
478 logging.info('Checking if ' + repofilename + ' is on virustotal')
481 "User-Agent": "F-Droid"
485 'resource': package['hash'],
487 needs_file_upload = False
489 r = requests.post('https://www.virustotal.com/vtapi/v2/file/report',
490 params=params, headers=headers)
491 if r.status_code == 200:
# response_code 0 means VT has never seen this hash, so the APK itself must
# be uploaded below.
493 if response['response_code'] == 0:
494 needs_file_upload = True
496 response['filename'] = filename
497 response['packageName'] = packageName
498 response['versionCode'] = package.get('versionCode')
499 response['versionName'] = package.get('versionName')
500 with open(outputfilename, 'w') as fp:
501 json.dump(response, fp, indent=2, sort_keys=True)
503 if response.get('positives') > 0:
504 logging.warning(repofilename + ' has been flagged by virustotal '
505 + str(response['positives']) + ' times:'
506 + '\n\t' + response['permalink'])
# HTTP 204 is the VT public-API rate limit; back off before continuing.
508 elif r.status_code == 204:
509 time.sleep(10) # wait for public API rate limiting
511 if needs_file_upload:
512 logging.info('Uploading ' + repofilename + ' to virustotal')
514 'file': (filename, open(repofilename, 'rb'))
516 r = requests.post('https://www.virustotal.com/vtapi/v2/file/scan',
517 params=params, headers=headers, files=files)
520 logging.info(response['verbose_msg'] + " " + response['permalink'])
523 def push_binary_transparency(git_repo_path, git_remote):
# NOTE(review): interior lines are missing from this view (the branch checked
# by os.path.isdir on the remote path, the pull/push on the thumbdrive repo,
# and the else: before the online branch).  Code left byte-identical.
524 '''push the binary transparency git repo to the specifed remote.
526 If the remote is a local directory, make sure it exists, and is a
527 git repo. This is used to move this git repo from an offline
528 machine onto a flash drive, then onto the online machine. Also,
529 this pulls because pushing to a non-bare git repo is error prone.
531 This is also used in offline signing setups, where it then also
532 creates a "local copy dir" git repo that serves to shuttle the git
533 data from the offline machine to the online machine. In that
534 case, git_remote is a dir on the local file system, e.g. a thumb
540 logging.info('Pushing binary transparency log to ' + git_remote)
# Branch 1: git_remote is a local directory (thumb drive) — init it as a git
# repo if needed and wire a 'local' remote pointing back at git_repo_path.
542 if os.path.isdir(os.path.dirname(git_remote)):
543 # from offline machine to thumbdrive
544 remote_path = os.path.abspath(git_repo_path)
545 if not os.path.isdir(os.path.join(git_remote, '.git')):
546 os.makedirs(git_remote, exist_ok=True)
547 thumbdriverepo = git.Repo.init(git_remote)
548 local = thumbdriverepo.create_remote('local', remote_path)
550 thumbdriverepo = git.Repo(git_remote)
551 local = git.remote.Remote(thumbdriverepo, 'local')
552 if local in thumbdriverepo.remotes:
553 local = thumbdriverepo.remote('local')
554 if 'set_url' in dir(local): # force remote URL if using GitPython 2.x
555 local.set_url(remote_path)
557 local = thumbdriverepo.create_remote('local', remote_path)
# Branch 2: git_remote is a real URL — push master to 'origin', creating or
# re-pointing the remote first.
560 # from online machine to remote on a server on the internet
561 gitrepo = git.Repo(git_repo_path)
562 origin = git.remote.Remote(gitrepo, 'origin')
563 if origin in gitrepo.remotes:
564 origin = gitrepo.remote('origin')
565 if 'set_url' in dir(origin): # added in GitPython 2.x
566 origin.set_url(git_remote)
568 origin = gitrepo.create_remote('origin', git_remote)
569 origin.push('master')
# NOTE(review): this is the body of main() — the `def main():` line itself is
# one of the many lines missing from this garbled view, along with the sys.exit
# calls after each logging.error/critical and several else: lines.
573 global config, options
575 # Parse command line...
576 parser = ArgumentParser()
577 common.setup_global_opts(parser)
578 parser.add_argument("command", help="command to execute, either 'init' or 'update'")
579 parser.add_argument("-i", "--identity-file", default=None,
580 help="Specify an identity file to provide to SSH for rsyncing")
581 parser.add_argument("--local-copy-dir", default=None,
582 help="Specify a local folder to sync the repo to")
583 parser.add_argument("--no-checksum", action="store_true", default=False,
584 help="Don't use rsync checksums")
585 options = parser.parse_args()
587 config = common.read_config(options)
589 if options.command != 'init' and options.command != 'update':
590 logging.critical("The only commands currently supported are 'init' and 'update'")
# standardwebroot enforces that every sync target ends in 'fdroid', unless
# the user opts out with nonstandardwebroot in config.py.
593 if config.get('nonstandardwebroot') is True:
594 standardwebroot = False
596 standardwebroot = True
598 for serverwebroot in config.get('serverwebroot', []):
599 # this supports both an ssh host:path and just a path
600 s = serverwebroot.rstrip('/').split(':')
606 logging.error('Malformed serverwebroot line: ' + serverwebroot)
608 repobase = os.path.basename(fdroiddir)
609 if standardwebroot and repobase != 'fdroid':
610 logging.error('serverwebroot path does not end with "fdroid", '
611 + 'perhaps you meant one of these:\n\t'
612 + serverwebroot.rstrip('/') + '/fdroid\n\t'
613 + serverwebroot.rstrip('/').rstrip(repobase) + 'fdroid')
# Resolve local_copy_dir: CLI flag beats config.py; then validate it is an
# absolute, existing directory ending in 'fdroid' (when standardwebroot).
616 if options.local_copy_dir is not None:
617 local_copy_dir = options.local_copy_dir
618 elif config.get('local_copy_dir'):
619 local_copy_dir = config['local_copy_dir']
621 local_copy_dir = None
622 if local_copy_dir is not None:
623 fdroiddir = local_copy_dir.rstrip('/')
624 if os.path.exists(fdroiddir) and not os.path.isdir(fdroiddir):
625 logging.error('local_copy_dir must be directory, not a file!')
627 if not os.path.exists(os.path.dirname(fdroiddir)):
628 logging.error('The root dir for local_copy_dir "'
629 + os.path.dirname(fdroiddir)
630 + '" does not exist!')
632 if not os.path.isabs(fdroiddir):
633 logging.error('local_copy_dir must be an absolute path!')
635 repobase = os.path.basename(fdroiddir)
636 if standardwebroot and repobase != 'fdroid':
637 logging.error('local_copy_dir does not end with "fdroid", '
638 + 'perhaps you meant: ' + fdroiddir + '/fdroid')
# Normalize to exactly one trailing slash (update_localcopy relies on this).
640 if local_copy_dir[-1] != '/':
641 local_copy_dir += '/'
642 local_copy_dir = local_copy_dir.replace('//', '/')
643 if not os.path.exists(fdroiddir):
# Warn when no sync/upload destination at all is configured.
646 if not config.get('awsbucket') \
647 and not config.get('serverwebroot') \
648 and not config.get('servergitmirrors') \
649 and not config.get('androidobservatory') \
650 and not config.get('binary_transparency_remote') \
651 and not config.get('virustotal_apikey') \
652 and local_copy_dir is None:
653 logging.warn('No option set! Edit your config.py to set at least one among:\n'
654 + 'serverwebroot, servergitmirrors, local_copy_dir, awsbucket, virustotal_apikey, androidobservatory, or binary_transparency_remote')
657 repo_sections = ['repo']
658 if config['archive_older'] != 0:
659 repo_sections.append('archive')
660 if not os.path.exists('archive'):
662 if config['per_app_repos']:
663 repo_sections += common.get_per_app_repos()
# 'init' creates the remote directory layout over SFTP; 'update' pushes the
# repo sections to every configured destination.
665 if options.command == 'init':
666 ssh = paramiko.SSHClient()
667 ssh.load_system_host_keys()
668 for serverwebroot in config.get('serverwebroot', []):
669 sshstr, remotepath = serverwebroot.rstrip('/').split(':')
670 if sshstr.find('@') >= 0:
671 username, hostname = sshstr.split('@')
673 username = pwd.getpwuid(os.getuid())[0] # get effective uid
675 ssh.connect(hostname, username=username)
676 sftp = ssh.open_sftp()
677 if os.path.basename(remotepath) \
678 not in sftp.listdir(os.path.dirname(remotepath)):
679 sftp.mkdir(remotepath, mode=0o755)
680 for repo_section in repo_sections:
681 repo_path = os.path.join(remotepath, repo_section)
682 if os.path.basename(repo_path) \
683 not in sftp.listdir(remotepath):
684 sftp.mkdir(repo_path, mode=0o755)
687 elif options.command == 'update':
688 for repo_section in repo_sections:
689 if local_copy_dir is not None:
690 if config['sync_from_local_copy_dir']:
691 sync_from_localcopy(repo_section, local_copy_dir)
693 update_localcopy(repo_section, local_copy_dir)
694 for serverwebroot in config.get('serverwebroot', []):
695 update_serverwebroot(serverwebroot, repo_section)
696 if config.get('servergitmirrors', []):
697 # update_servergitmirrors will take care of multiple mirrors so don't need a foreach
698 servergitmirrors = config.get('servergitmirrors', [])
699 update_servergitmirrors(servergitmirrors, repo_section)
700 if config.get('awsbucket'):
701 update_awsbucket(repo_section)
702 if config.get('androidobservatory'):
703 upload_to_android_observatory(repo_section)
704 if config.get('virustotal_apikey'):
705 upload_to_virustotal(repo_section, config.get('virustotal_apikey'))
707 binary_transparency_remote = config.get('binary_transparency_remote')
708 if binary_transparency_remote:
709 push_binary_transparency(BINARY_TRANSPARENCY_DIR,
710 binary_transparency_remote)
715 if __name__ == "__main__":