3 # server.py - part of the FDroid server tools
4 # Copyright (C) 2010-15, Ciaran Gultnieks, ciaran@ciarang.com
6 # This program is free software: you can redistribute it and/or modify
7 # it under the terms of the GNU Affero General Public License as published by
8 # the Free Software Foundation, either version 3 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU Affero General Public License for more details.
16 # You should have received a copy of the GNU Affero General Public License
17 # along with this program. If not, see <http://www.gnu.org/licenses/>.
import glob
import hashlib
import json
import logging
import os
import pwd
import re
import shutil
import subprocess
import sys
import time
from argparse import ArgumentParser

from . import _
from . import common
from .exception import FDroidException
# Timestamp captured at process start; used to name the per-run wiki log page
# (see the wiki-publishing code below, which builds 'deploy_' + strftime of this).
start_timestamp = time.gmtime()

# Name of the subdirectory holding the binary transparency git repo.
BINARY_TRANSPARENCY_DIR = 'binary_transparency'

# Path of the s3cmd config file that is auto-generated from the AWS
# credentials in config.py when no hand-written config is present.
AUTO_S3CFG = '.fdroid-server-update-s3cfg'
def update_awsbucket(repo_section):
    '''
    Upload the contents of the directory `repo_section` (including
    subdirectories) to the AWS S3 "bucket". The contents of that subdir of the
    bucket will first be deleted.

    Requires AWS credentials set in config.py: awsaccesskeyid, awssecretkey
    '''

    logging.debug('Syncing "' + repo_section + '" to Amazon S3 bucket "'
                  + config['awsbucket'] + '"')

    # Prefer the s3cmd CLI when available since it provides rsync-like
    # multi-pass sync; otherwise fall back to Apache libcloud.  The missing
    # else branch previously made the libcloud call unconditional.
    if common.set_command_in_config('s3cmd'):
        update_awsbucket_s3cmd(repo_section)
    else:
        update_awsbucket_libcloud(repo_section)
def update_awsbucket_s3cmd(repo_section):
    '''upload using the CLI tool s3cmd, which provides rsync-like sync

    The upload is done in multiple passes to reduce the chance of
    interfering with an existing client-server interaction. In the
    first pass, only new files are uploaded. In the second pass,
    changed files are uploaded, overwriting what is on the server. On
    the third/last pass, the indexes are uploaded, and any removed
    files are deleted from the server. The last pass is the only pass
    to use a full MD5 checksum of all files to detect changes.
    '''

    logging.debug(_('Using s3cmd to sync with: {url}')
                  .format(url=config['awsbucket']))

    if os.path.exists(USER_S3CFG):
        # a hand-written s3cmd config takes precedence over the generated one
        logging.info(_('Using "{path}" for configuring s3cmd.').format(path=USER_S3CFG))
        configfilename = USER_S3CFG
    else:
        # generate a config from config.py credentials; created 0o600 since
        # it contains the AWS secret key
        fd = os.open(AUTO_S3CFG, os.O_CREAT | os.O_TRUNC | os.O_WRONLY, 0o600)
        logging.debug(_('Creating "{path}" for configuring s3cmd.').format(path=AUTO_S3CFG))
        os.write(fd, '[default]\n'.encode('utf-8'))
        os.write(fd, ('access_key = ' + config['awsaccesskeyid'] + '\n').encode('utf-8'))
        os.write(fd, ('secret_key = ' + config['awssecretkey'] + '\n').encode('utf-8'))
        os.close(fd)  # the descriptor was previously leaked
        configfilename = AUTO_S3CFG

    s3bucketurl = 's3://' + config['awsbucket']
    s3cmd = [config['s3cmd'], '--config=' + configfilename]
    if subprocess.call(s3cmd + ['info', s3bucketurl]) != 0:
        # bucket missing or inaccessible: try to create it
        logging.warning(_('Creating new S3 bucket: {url}')
                        .format(url=s3bucketurl))
        if subprocess.call(s3cmd + ['mb', s3bucketurl]) != 0:
            logging.error(_('Failed to create S3 bucket: {url}')
                          .format(url=s3bucketurl))
            raise FDroidException()

    s3cmd_sync = s3cmd + ['sync', '--acl-public']
    if options.verbose:
        s3cmd_sync += ['--verbose']
    if options.quiet:
        s3cmd_sync += ['--quiet']
    # index files are excluded from the early passes and uploaded last, so
    # clients never fetch an index that references not-yet-uploaded files
    indexxml = os.path.join(repo_section, 'index.xml')
    indexjar = os.path.join(repo_section, 'index.jar')
    indexv1jar = os.path.join(repo_section, 'index-v1.jar')

    s3url = s3bucketurl + '/fdroid/'
    logging.debug('s3cmd sync new files in ' + repo_section + ' to ' + s3url)
    logging.debug(_('Running first pass with MD5 checking disabled'))
    if subprocess.call(s3cmd_sync +
                       ['--no-check-md5', '--skip-existing',
                        '--exclude', indexxml,
                        '--exclude', indexjar,
                        '--exclude', indexv1jar,
                        repo_section, s3url]) != 0:
        raise FDroidException()
    logging.debug('s3cmd sync all files in ' + repo_section + ' to ' + s3url)
    if subprocess.call(s3cmd_sync +
                       ['--no-check-md5',
                        '--exclude', indexxml,
                        '--exclude', indexjar,
                        '--exclude', indexv1jar,
                        repo_section, s3url]) != 0:
        raise FDroidException()

    logging.debug(_('s3cmd sync indexes {path} to {url} and delete')
                  .format(path=repo_section, url=s3url))
    s3cmd_sync.append('--delete-removed')
    s3cmd_sync.append('--delete-after')
    if options.no_checksum:
        s3cmd_sync.append('--no-check-md5')
    else:
        # without the else, both flags were appended, defeating the option
        s3cmd_sync.append('--check-md5')
    if subprocess.call(s3cmd_sync + [repo_section, s3url]) != 0:
        raise FDroidException()
def update_awsbucket_libcloud(repo_section):
    '''
    Upload the contents of the directory `repo_section` (including
    subdirectories) to the AWS S3 "bucket". The contents of that subdir of the
    bucket will first be deleted.

    Requires AWS credentials set in config.py: awsaccesskeyid, awssecretkey
    '''

    logging.debug(_('using Apache libcloud to sync with {url}')
                  .format(url=config['awsbucket']))

    import libcloud.security
    libcloud.security.VERIFY_SSL_CERT = True
    from libcloud.storage.types import Provider, ContainerDoesNotExistError
    from libcloud.storage.providers import get_driver

    if not config.get('awsaccesskeyid') or not config.get('awssecretkey'):
        raise FDroidException(
            _('To use awsbucket, awssecretkey and awsaccesskeyid must also be set in config.py!'))
    awsbucket = config['awsbucket']

    if os.path.exists(USER_S3CFG):
        # a user s3cmd config exists but only libcloud is available: refuse
        # rather than silently ignoring the user's configuration
        raise FDroidException(_('"{path}" exists but s3cmd is not installed!')
                              .format(path=USER_S3CFG))

    cls = get_driver(Provider.S3)
    driver = cls(config['awsaccesskeyid'], config['awssecretkey'])
    try:
        container = driver.get_container(container_name=awsbucket)
    except ContainerDoesNotExistError:
        container = driver.create_container(container_name=awsbucket)
        logging.info(_('Created new container "{name}"')
                     .format(name=container.name))

    upload_dir = 'fdroid/' + repo_section
    # index of existing remote objects under this repo section, by name
    objs = dict()
    for obj in container.list_objects():
        if obj.name.startswith(upload_dir + '/'):
            objs[obj.name] = obj

    for root, dirs, files in os.walk(os.path.join(os.getcwd(), repo_section)):
        for name in files:
            upload = False
            file_to_upload = os.path.join(root, name)
            object_name = 'fdroid/' + os.path.relpath(file_to_upload, os.getcwd())
            if object_name not in objs:
                upload = True
            else:
                obj = objs.pop(object_name)
                if obj.size != os.path.getsize(file_to_upload):
                    upload = True
                else:
                    # if the sizes match, then compare by MD5
                    md5 = hashlib.md5()
                    with open(file_to_upload, 'rb') as f:
                        while True:
                            data = f.read(8192)
                            if not data:
                                break
                            md5.update(data)
                    if obj.hash != md5.hexdigest():
                        # changed in place: delete the stale remote copy first
                        s3url = 's3://' + awsbucket + '/' + obj.name
                        logging.info(' deleting ' + s3url)
                        if not driver.delete_object(obj):
                            logging.warn('Could not delete ' + s3url)
                        upload = True

            if upload:
                logging.debug(' uploading "' + file_to_upload + '"...')
                extra = {'acl': 'public-read'}
                if file_to_upload.endswith('.sig'):
                    extra['content_type'] = 'application/pgp-signature'
                elif file_to_upload.endswith('.asc'):
                    extra['content_type'] = 'application/pgp-signature'
                logging.info(' uploading ' + os.path.relpath(file_to_upload)
                             + ' to s3://' + awsbucket + '/' + object_name)
                with open(file_to_upload, 'rb') as iterator:
                    obj = driver.upload_object_via_stream(iterator=iterator,
                                                          container=container,
                                                          object_name=object_name,
                                                          extra=extra)
    # delete the remnants in the bucket, they do not exist locally
    while objs:
        object_name, obj = objs.popitem()
        s3url = 's3://' + awsbucket + '/' + object_name
        if object_name.startswith(upload_dir):
            logging.warn(' deleting ' + s3url)
            driver.delete_object(obj)
        else:
            logging.info(' skipping ' + s3url)
def update_serverwebroot(serverwebroot, repo_section):
    '''rsync the repo_section to an ssh host:path or local path, indexes last'''
    # use a checksum comparison for accurate comparisons on different
    # filesystems, for example, FAT has a low resolution timestamp
    rsyncargs = ['rsync', '--archive', '--delete-after', '--safe-links']
    if not options.no_checksum:
        rsyncargs.append('--checksum')
    if options.verbose:
        rsyncargs += ['--verbose']
    if options.quiet:
        rsyncargs += ['--quiet']
    if options.identity_file is not None:
        rsyncargs += ['-e', 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + options.identity_file]
    elif 'identity_file' in config:
        rsyncargs += ['-e', 'ssh -oBatchMode=yes -oIdentitiesOnly=yes -i ' + config['identity_file']]
    indexxml = os.path.join(repo_section, 'index.xml')
    indexjar = os.path.join(repo_section, 'index.jar')
    indexv1jar = os.path.join(repo_section, 'index-v1.jar')
    # Upload the first time without the index files and delay the deletion as
    # much as possible, that keeps the repo functional while this update is
    # running. Then once it is complete, rerun the command again to upload
    # the index files. Always using the same target with rsync allows for
    # very strict settings on the receiving server, you can literally specify
    # the one rsync command that is allowed to run in ~/.ssh/authorized_keys.
    # (serverwebroot is guaranteed to have a trailing slash in common.py)
    logging.info('rsyncing ' + repo_section + ' to ' + serverwebroot)
    if subprocess.call(rsyncargs +
                       ['--exclude', indexxml, '--exclude', indexjar,
                        '--exclude', indexv1jar,
                        repo_section, serverwebroot]) != 0:
        raise FDroidException()
    if subprocess.call(rsyncargs + [repo_section, serverwebroot]) != 0:
        raise FDroidException()
    # upload "current version" symlinks if requested
    if config['make_current_version_link'] and repo_section == 'repo':
        links_to_upload = []  # was missing, causing a NameError below
        for f in glob.glob('*.apk') \
                + glob.glob('*.apk.asc') + glob.glob('*.apk.sig'):
            if os.path.islink(f):
                links_to_upload.append(f)
        if len(links_to_upload) > 0:
            if subprocess.call(rsyncargs + links_to_upload + [serverwebroot]) != 0:
                raise FDroidException()
def sync_from_localcopy(repo_section, local_copy_dir):
    '''Syncs the repo from "local copy dir" filesystem to this box

    In setups that use offline signing, this is the last step that
    syncs the repo from the "local copy dir" e.g. a thumb drive to the
    repo on the local filesystem.  That local repo is then used to
    push to all the servers that are configured.
    '''
    logging.info('Syncing from local_copy_dir to this repo.')
    # trailing slashes have a meaning in rsync which is not needed here, so
    # make sure both paths have exactly one trailing slash
    common.local_rsync(options,
                       os.path.join(local_copy_dir, repo_section).rstrip('/') + '/',
                       repo_section.rstrip('/') + '/')

    # if the offline copy carries a binary transparency log, mirror it here
    offline_copy = os.path.join(local_copy_dir, BINARY_TRANSPARENCY_DIR)
    if os.path.exists(os.path.join(offline_copy, '.git')):
        online_copy = os.path.join(os.getcwd(), BINARY_TRANSPARENCY_DIR)
        push_binary_transparency(offline_copy, online_copy)
def update_localcopy(repo_section, local_copy_dir):
    '''copy data from offline to the "local copy dir" filesystem

    This updates the copy of this repo used to shuttle data from an
    offline signing machine to the online machine, e.g. on a thumb
    drive.
    '''
    # local_copy_dir is guaranteed to have a trailing slash in main() below
    common.local_rsync(options, repo_section, local_copy_dir)

    # also shuttle the binary transparency log, if one exists
    offline_copy = os.path.join(os.getcwd(), BINARY_TRANSPARENCY_DIR)
    if os.path.isdir(os.path.join(offline_copy, '.git')):
        online_copy = os.path.join(local_copy_dir, BINARY_TRANSPARENCY_DIR)
        push_binary_transparency(offline_copy, online_copy)
317 def _get_size(start_path='.'):
318 '''get size of all files in a dir https://stackoverflow.com/a/1392549'''
320 for root, dirs, files in os.walk(start_path):
322 fp = os.path.join(root, f)
323 total_size += os.path.getsize(fp)
def update_servergitmirrors(servergitmirrors, repo_section):
    '''update repo mirrors stored in git repos

    This is a hack to use public git repos as F-Droid repos. It
    recreates the git repo from scratch each time, so that there is no
    history. That keeps the size of the git repo small. Services
    like GitHub or GitLab have a size limit of something like 1 gig.
    This git repo is only a git repo for the purpose of being hosted.
    For history, there is the archive section, and there is the binary

    NOTE(review): several source lines are missing from this chunk (early
    return, progress-bar setup, the .gitlab-ci.yml content, else branches);
    the code below is annotated but left exactly as found.
    '''
    from clint.textui import progress
    if config.get('local_copy_dir') \
            and not config.get('sync_from_local_copy_dir'):
        # on the offline signing machine, mirroring happens later via
        # `fdroid server update` — a `return` appears to be missing here
        logging.debug('Offline machine, skipping git mirror generation until `fdroid server update`')

    # right now we support only 'repo' git-mirroring
    if repo_section == 'repo':
        git_mirror_path = 'git-mirror'
        dotgit = os.path.join(git_mirror_path, '.git')
        git_repodir = os.path.join(git_mirror_path, 'fdroid', repo_section)
        if not os.path.isdir(git_repodir):
            os.makedirs(git_repodir)
        # hosting services limit repo size, so throw away the history when
        # the mirror grows past ~1GB and start fresh
        if os.path.isdir(dotgit) and _get_size(git_mirror_path) > 1000000000:
            logging.warning('Deleting git-mirror history, repo is too big (1 gig max)')
            shutil.rmtree(dotgit)

        # rsync is very particular about trailing slashes
        common.local_rsync(options,
                           repo_section.rstrip('/') + '/',
                           git_repodir.rstrip('/') + '/')

        # use custom SSH command if identity_file specified
        ssh_cmd = 'ssh -oBatchMode=yes'
        if options.identity_file is not None:
            ssh_cmd += ' -oIdentitiesOnly=yes -i "%s"' % options.identity_file
        elif 'identity_file' in config:
            ssh_cmd += ' -oIdentitiesOnly=yes -i "%s"' % config['identity_file']

        repo = git.Repo.init(git_mirror_path)

        # one git remote per configured mirror URL, named after its hostname
        for remote_url in servergitmirrors:
            hostname = re.sub(r'\W*\w+\W+(\w+).*', r'\1', remote_url)
            r = git.remote.Remote(repo, hostname)
            if r in repo.remotes:
                r = repo.remote(hostname)
                if 'set_url' in dir(r):  # force remote URL if using GitPython 2.x
                    r.set_url(remote_url)
            # NOTE(review): an `else:` appears to be missing before this line
            repo.create_remote(hostname, remote_url)
            logging.info('Mirroring to: ' + remote_url)

        # sadly index.add don't allow the --all parameter
        logging.debug('Adding all files to git mirror')
        repo.git.add(all=True)
        logging.debug('Committing all files into git mirror')
        repo.index.commit("fdroidserver git-mirror")

        # NOTE(review): the creation of the `bar` progress bar used below
        # appears to be missing from this chunk
        class MyProgressPrinter(git.RemoteProgress):
            def update(self, op_code, current, maximum=None, message=None):
                if isinstance(maximum, float):
                    bar.show(current, maximum)
        progress = MyProgressPrinter()

        # push for every remote. This will overwrite the git history
        for remote in repo.remotes:
            if remote.name == 'gitlab':
                logging.debug('Writing .gitlab-ci.yml to deploy to GitLab Pages')
                # NOTE(review): the body of this triple-quoted YAML string and
                # its closing delimiter appear to be missing from this chunk
                with open(os.path.join(git_mirror_path, ".gitlab-ci.yml"), "wt") as out_file:
                    out_file.write("""pages:
                repo.git.add(all=True)
                repo.index.commit("fdroidserver git-mirror: Deploy to GitLab Pages")

            logging.debug(_('Pushing to {url}').format(url=remote.url))
            with repo.git.custom_environment(GIT_SSH_COMMAND=ssh_cmd):
                pushinfos = remote.push('master', force=True, set_upstream=True, progress=progress)
                for pushinfo in pushinfos:
                    # any of these flags means the push did not land
                    if pushinfo.flags & (git.remote.PushInfo.ERROR
                                         | git.remote.PushInfo.REJECTED
                                         | git.remote.PushInfo.REMOTE_FAILURE
                                         | git.remote.PushInfo.REMOTE_REJECTED):
                        raise FDroidException(remote.url + ' push failed: ' + str(pushinfo.flags)
                                              + ' ' + pushinfo.summary)
                    # NOTE(review): an `else:` appears to be missing here
                    logging.debug(remote.url + ': ' + pushinfo.summary)
def upload_to_android_observatory(repo_section):
    '''Upload each APK in the repo section to androidobservatory.org.

    NOTE(review): several lines are missing from this chunk (the `import
    requests`, and the assignments of fpath/response/page/el/message used
    below); the code is annotated but left exactly as found.
    '''
    # depend on requests and lxml only if users enable AO
    from lxml.html import fromstring

    # only the main repo is uploaded, not the archive
    if repo_section == 'repo':
        for f in glob.glob(os.path.join(repo_section, '*.apk')):
            fname = os.path.basename(f)
            logging.info('Uploading ' + fname + ' to androidobservatory.org')

            # upload the file with a post request
            r = requests.post('https://androidobservatory.org/upload', files={'apk': (fname, open(fpath, 'rb'))})

            # from now on XPath will be used to retrieve the message in the HTML
            # androidobservatory doesn't have a nice API to talk with
            # so we must scrape the page content
            tree = fromstring(response)
            alert = tree.xpath("//html/body/div[@class='container content-container']/div[@class='alert alert-info']")[0]

            # if the application was added successfully we retrive the url
            # if the application was already uploaded we use the redirect page url
            if el.attrib.get("href") is not None:
                appurl = page + el.attrib["href"][1:]
                message += el.text.replace(" here", "") + el.tail
            message = message.strip() + " " + appurl
            logging.info(message)
def upload_to_virustotal(repo_section, vt_apikey):
    '''Submit each APK listed in index-v1.json to the VirusTotal API.

    Results are cached as JSON files in a local 'virustotal/' dir so each
    APK+hash is only checked once.  NOTE(review): several lines are missing
    from this chunk (the requests import, dict literal delimiters, the
    `response = r.json()` assignment, and a `continue`); the code is
    annotated but left exactly as found.
    '''
    # quiet the HTTP libraries' chatter
    logging.getLogger("urllib3").setLevel(logging.WARNING)
    logging.getLogger("requests").setLevel(logging.WARNING)

    # only the main repo is scanned, not the archive
    if repo_section == 'repo':
        if not os.path.exists('virustotal'):
            os.mkdir('virustotal')
        with open(os.path.join(repo_section, 'index-v1.json')) as fp:
            index = json.load(fp)
        for packageName, packages in index['packages'].items():
            for package in packages:
                # cache file name encodes package, versionCode, and hash
                outputfilename = os.path.join('virustotal',
                                              packageName + '_' + str(package.get('versionCode'))
                                              + '_' + package['hash'] + '.json')
                if os.path.exists(outputfilename):
                    # NOTE(review): a `continue` appears to be missing here
                    logging.debug(package['apkName'] + ' results are in ' + outputfilename)
                filename = package['apkName']
                repofilename = os.path.join(repo_section, filename)
                logging.info('Checking if ' + repofilename + ' is on virustotal')
                # NOTE(review): the enclosing `headers = {` / `params = {`
                # delimiters appear to be missing around the next lines
                    "User-Agent": "F-Droid"
                    'resource': package['hash'],
                needs_file_upload = False
                # first ask for an existing report by file hash
                r = requests.post('https://www.virustotal.com/vtapi/v2/file/report',
                                  params=params, headers=headers)
                if r.status_code == 200:
                    # NOTE(review): `response = r.json()` appears to be missing
                    if response['response_code'] == 0:
                        # unknown to virustotal: the file itself must be sent
                        needs_file_upload = True
                    response['filename'] = filename
                    response['packageName'] = packageName
                    response['versionCode'] = package.get('versionCode')
                    response['versionName'] = package.get('versionName')
                    with open(outputfilename, 'w') as fp:
                        json.dump(response, fp, indent=2, sort_keys=True)

                    if response.get('positives', 0) > 0:
                        logging.warning(repofilename + ' has been flagged by virustotal '
                                        + str(response['positives']) + ' times:'
                                        + '\n\t' + response['permalink'])
                elif r.status_code == 204:
                    time.sleep(10)  # wait for public API rate limiting

                if needs_file_upload:
                    logging.info('Uploading ' + repofilename + ' to virustotal')
                    # NOTE(review): the enclosing `files = {` appears missing
                        'file': (filename, open(repofilename, 'rb'))
                    r = requests.post('https://www.virustotal.com/vtapi/v2/file/scan',
                                      params=params, headers=headers, files=files)
                    logging.info(response['verbose_msg'] + " " + response['permalink'])
def push_binary_transparency(git_repo_path, git_remote):
    '''push the binary transparency git repo to the specified remote.

    If the remote is a local directory, make sure it exists, and is a
    git repo.  This is used to move this git repo from an offline
    machine onto a flash drive, then onto the online machine.  Also,
    this pulls because pushing to a non-bare git repo is error prone.

    This is also used in offline signing setups, where it then also
    creates a "local copy dir" git repo that serves to shuttle the git
    data from the offline machine to the online machine.  In that
    case, git_remote is a dir on the local file system, e.g. a thumb
    drive.
    '''
    logging.info(_('Pushing binary transparency log to {url}')
                 .format(url=git_remote))

    if os.path.isdir(os.path.dirname(git_remote)):
        # from offline machine to thumbdrive
        remote_path = os.path.abspath(git_repo_path)
        if not os.path.isdir(os.path.join(git_remote, '.git')):
            os.makedirs(git_remote, exist_ok=True)
            thumbdriverepo = git.Repo.init(git_remote)
            local = thumbdriverepo.create_remote('local', remote_path)
        else:
            thumbdriverepo = git.Repo(git_remote)
            local = git.remote.Remote(thumbdriverepo, 'local')
            if local in thumbdriverepo.remotes:
                local = thumbdriverepo.remote('local')
                if 'set_url' in dir(local):  # force remote URL if using GitPython 2.x
                    local.set_url(remote_path)
            else:
                local = thumbdriverepo.create_remote('local', remote_path)
        # pull into the thumb drive repo since pushing to a non-bare repo
        # is error prone (see docstring)
        local.pull('master')
    else:
        # from online machine to remote on a server on the internet
        gitrepo = git.Repo(git_repo_path)
        origin = git.remote.Remote(gitrepo, 'origin')
        if origin in gitrepo.remotes:
            origin = gitrepo.remote('origin')
            if 'set_url' in dir(origin):  # added in GitPython 2.x
                origin.set_url(git_remote)
        else:
            origin = gitrepo.create_remote('origin', git_remote)
        origin.push('master')
        # NOTE(review): this is the interior of the wiki-publishing function;
        # its `def` header, the enclosing `try:`, the `import mwclient`, and
        # the initial `txt` assignment are not visible in this chunk.
        site = mwclient.Site((config['wiki_protocol'], config['wiki_server']),
                             path=config['wiki_path'])
        site.login(config['wiki_user'], config['wiki_password'])

        # Write a page with the last build log for this version code
        wiki_page_path = 'deploy_' + time.strftime('%s', start_timestamp)
        newpage = site.Pages[wiki_page_path]
        txt += "* command line: <code>" + ' '.join(sys.argv) + "</code>\n"
        txt += "* started at " + common.get_wiki_timestamp(start_timestamp) + '\n'
        txt += "* completed at " + common.get_wiki_timestamp() + '\n'
        newpage.save(txt, summary='Run log')
        # point the stable 'deploy' page at the newest run log
        newpage = site.Pages['deploy']
        newpage.save('#REDIRECT [[' + wiki_page_path + ']]', summary='Update redirect')
    except Exception as e:
        # wiki publishing is best-effort: log the failure, don't crash the run
        logging.error(_('Error while attempting to publish log: %s') % e)
    # NOTE(review): this is the interior of main(); its `def` header and a
    # number of interior lines (sys.exit calls, else branches, mkdir calls)
    # are not visible in this chunk.  Annotated but left exactly as found.
    global config, options

    # Parse command line...
    parser = ArgumentParser()
    common.setup_global_opts(parser)
    parser.add_argument("command", help=_("command to execute, either 'init' or 'update'"))
    parser.add_argument("-i", "--identity-file", default=None,
                        help=_("Specify an identity file to provide to SSH for rsyncing"))
    parser.add_argument("--local-copy-dir", default=None,
                        help=_("Specify a local folder to sync the repo to"))
    parser.add_argument("--no-checksum", action="store_true", default=False,
                        help=_("Don't use rsync checksums"))
    options = parser.parse_args()

    config = common.read_config(options)

    if options.command != 'init' and options.command != 'update':
        # NOTE(review): a sys.exit() appears to be missing after this
        logging.critical(_("The only commands currently supported are 'init' and 'update'"))

    if config.get('nonstandardwebroot') is True:
        standardwebroot = False
    # NOTE(review): an `else:` appears to be missing before this line
    standardwebroot = True

    for serverwebroot in config.get('serverwebroot', []):
        # this supports both an ssh host:path and just a path
        s = serverwebroot.rstrip('/').split(':')
        # NOTE(review): the length checks assigning fdroiddir appear missing
        logging.error(_('Malformed serverwebroot line:') + ' ' + serverwebroot)
        repobase = os.path.basename(fdroiddir)
        if standardwebroot and repobase != 'fdroid':
            logging.error('serverwebroot path does not end with "fdroid", '
                          + 'perhaps you meant one of these:\n\t'
                          + serverwebroot.rstrip('/') + '/fdroid\n\t'
                          + serverwebroot.rstrip('/').rstrip(repobase) + 'fdroid')

    # command-line option wins over config.py for the local copy dir
    if options.local_copy_dir is not None:
        local_copy_dir = options.local_copy_dir
    elif config.get('local_copy_dir'):
        local_copy_dir = config['local_copy_dir']
        # NOTE(review): an `else:` appears to be missing before this line
        local_copy_dir = None
    if local_copy_dir is not None:
        fdroiddir = local_copy_dir.rstrip('/')
        if os.path.exists(fdroiddir) and not os.path.isdir(fdroiddir):
            logging.error(_('local_copy_dir must be directory, not a file!'))
        if not os.path.exists(os.path.dirname(fdroiddir)):
            logging.error(_('The root dir for local_copy_dir "{path}" does not exist!')
                          .format(path=os.path.dirname(fdroiddir)))
        if not os.path.isabs(fdroiddir):
            logging.error(_('local_copy_dir must be an absolute path!'))
        repobase = os.path.basename(fdroiddir)
        if standardwebroot and repobase != 'fdroid':
            logging.error(_('local_copy_dir does not end with "fdroid", '
                            + 'perhaps you meant: "{path}"')
                          .format(path=fdroiddir + '/fdroid'))
        # normalize to exactly one trailing slash for rsync
        if local_copy_dir[-1] != '/':
            local_copy_dir += '/'
        local_copy_dir = local_copy_dir.replace('//', '/')
        # NOTE(review): the mkdir for a missing fdroiddir appears truncated
        if not os.path.exists(fdroiddir):

    # warn when no deployment target at all is configured
    if not config.get('awsbucket') \
            and not config.get('serverwebroot') \
            and not config.get('servergitmirrors') \
            and not config.get('androidobservatory') \
            and not config.get('binary_transparency_remote') \
            and not config.get('virustotal_apikey') \
            and local_copy_dir is None:
        logging.warn(_('No option set! Edit your config.py to set at least one of these:')
                     + '\nserverwebroot, servergitmirrors, local_copy_dir, awsbucket, virustotal_apikey, androidobservatory, or binary_transparency_remote')

    repo_sections = ['repo']
    if config['archive_older'] != 0:
        repo_sections.append('archive')
        # NOTE(review): the mkdir for a missing 'archive' dir appears truncated
        if not os.path.exists('archive'):
    if config['per_app_repos']:
        repo_sections += common.get_per_app_repos()

    if options.command == 'init':
        # create the remote directory layout over SFTP
        ssh = paramiko.SSHClient()
        ssh.load_system_host_keys()
        for serverwebroot in config.get('serverwebroot', []):
            sshstr, remotepath = serverwebroot.rstrip('/').split(':')
            if sshstr.find('@') >= 0:
                username, hostname = sshstr.split('@')
                # NOTE(review): an `else:` appears to be missing here
                username = pwd.getpwuid(os.getuid())[0]  # get effective uid
            ssh.connect(hostname, username=username)
            sftp = ssh.open_sftp()
            if os.path.basename(remotepath) \
                    not in sftp.listdir(os.path.dirname(remotepath)):
                sftp.mkdir(remotepath, mode=0o755)
            for repo_section in repo_sections:
                repo_path = os.path.join(remotepath, repo_section)
                if os.path.basename(repo_path) \
                        not in sftp.listdir(remotepath):
                    sftp.mkdir(repo_path, mode=0o755)
    elif options.command == 'update':
        for repo_section in repo_sections:
            if local_copy_dir is not None:
                if config['sync_from_local_copy_dir']:
                    sync_from_localcopy(repo_section, local_copy_dir)
                    # NOTE(review): an `else:` appears to be missing here
                    update_localcopy(repo_section, local_copy_dir)
            for serverwebroot in config.get('serverwebroot', []):
                update_serverwebroot(serverwebroot, repo_section)
            if config.get('servergitmirrors', []):
                # update_servergitmirrors will take care of multiple mirrors so don't need a foreach
                servergitmirrors = config.get('servergitmirrors', [])
                update_servergitmirrors(servergitmirrors, repo_section)
            if config.get('awsbucket'):
                update_awsbucket(repo_section)
            if config.get('androidobservatory'):
                upload_to_android_observatory(repo_section)
            if config.get('virustotal_apikey'):
                upload_to_virustotal(repo_section, config.get('virustotal_apikey'))

        binary_transparency_remote = config.get('binary_transparency_remote')
        if binary_transparency_remote:
            push_binary_transparency(BINARY_TRANSPARENCY_DIR,
                                     binary_transparency_remote)

    # NOTE(review): the wiki-update call under this guard appears truncated
    if config.get('wiki_server') and config.get('wiki_path'):
755 if __name__ == "__main__":