2 # -*- coding: utf-8 -*-
4 # server.py - part of the FDroid server tools
5 # Copyright (C) 2010-15, Ciaran Gultnieks, ciaran@ciarang.com
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU Affero General Public License as published by
9 # the Free Software Foundation, either version 3 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU Affero General Public License for more details.
17 # You should have received a copy of the GNU Affero General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
import glob
import hashlib
import logging
import os
import pwd
import subprocess
import sys
from argparse import ArgumentParser
def update_awsbucket(repo_section):
    '''
    Upload the contents of the directory `repo_section` (including
    subdirectories) to the AWS S3 "bucket". The contents of that subdir of the
    bucket will first be deleted.

    Requires AWS credentials set in config.py: awsaccesskeyid, awssecretkey
    '''

    logging.debug('Syncing "' + repo_section + '" to Amazon S3 bucket "'
                  + config['awsbucket'] + '"')

    # imported lazily so libcloud is only required when awsbucket is used
    import libcloud.security
    libcloud.security.VERIFY_SSL_CERT = True
    from libcloud.storage.types import Provider, ContainerDoesNotExistError
    from libcloud.storage.providers import get_driver

    if not config.get('awsaccesskeyid') or not config.get('awssecretkey'):
        logging.error('To use awsbucket, you must set awssecretkey and awsaccesskeyid in config.py!')
        sys.exit(1)
    awsbucket = config['awsbucket']

    cls = get_driver(Provider.S3)
    driver = cls(config['awsaccesskeyid'], config['awssecretkey'])
    try:
        container = driver.get_container(container_name=awsbucket)
    except ContainerDoesNotExistError:
        container = driver.create_container(container_name=awsbucket)
        logging.info('Created new container "' + container.name + '"')

    upload_dir = 'fdroid/' + repo_section
    # index the existing remote objects under this section so each local
    # file can be compared against them; whatever is left over afterwards
    # no longer exists locally and gets deleted from the bucket
    objs = {}
    for obj in container.list_objects():
        if obj.name.startswith(upload_dir + '/'):
            objs[obj.name] = obj

    for root, _, files in os.walk(os.path.join(os.getcwd(), repo_section)):
        for name in files:
            upload = False
            file_to_upload = os.path.join(root, name)
            object_name = 'fdroid/' + os.path.relpath(file_to_upload, os.getcwd())
            if object_name not in objs:
                upload = True
            else:
                obj = objs.pop(object_name)
                if obj.size != os.path.getsize(file_to_upload):
                    upload = True
                else:
                    # if the sizes match, then compare by MD5
                    md5 = hashlib.md5()
                    with open(file_to_upload, 'rb') as f:
                        while True:
                            data = f.read(8192)
                            if not data:
                                break
                            md5.update(data)
                    if obj.hash != md5.hexdigest():
                        # contents differ: remove the stale remote copy
                        # before re-uploading
                        s3url = 's3://' + awsbucket + '/' + obj.name
                        logging.info(' deleting ' + s3url)
                        if not driver.delete_object(obj):
                            logging.warn('Could not delete ' + s3url)
                        upload = True

            if upload:
                logging.debug(' uploading "' + file_to_upload + '"...')
                extra = {'acl': 'public-read'}
                # serve detached OpenPGP signatures with their proper type
                if file_to_upload.endswith(('.sig', '.asc')):
                    extra['content_type'] = 'application/pgp-signature'
                logging.info(' uploading ' + os.path.relpath(file_to_upload)
                             + ' to s3://' + awsbucket + '/' + object_name)
                with open(file_to_upload, 'rb') as iterator:
                    obj = driver.upload_object_via_stream(iterator=iterator,
                                                          container=container,
                                                          object_name=object_name,
                                                          extra=extra)

    # delete the remnants in the bucket, they do not exist locally
    while objs:
        object_name, obj = objs.popitem()
        s3url = 's3://' + awsbucket + '/' + object_name
        if object_name.startswith(upload_dir):
            logging.warn(' deleting ' + s3url)
            driver.delete_object(obj)
        else:
            logging.info(' skipping ' + s3url)
def update_serverwebroot(serverwebroot, repo_section):
    '''rsync the given repo section to one serverwebroot destination.

    serverwebroot may be an ssh host:path or a plain local path; it is
    guaranteed to have a trailing slash (see common.py).  Exits the whole
    process with status 1 if any rsync invocation fails.
    '''
    # use a checksum comparison for accurate comparisons on different
    # filesystems, for example, FAT has a low resolution timestamp
    rsyncargs = ['rsync', '--archive', '--delete-after', '--safe-links']
    if not options.no_checksum:
        rsyncargs.append('--checksum')
    if options.verbose:
        rsyncargs += ['--verbose']
    if options.quiet:
        rsyncargs += ['--quiet']
    # an identity file given on the command line takes precedence over the
    # one in config.py (rsync would otherwise honor the last -e given)
    if options.identity_file is not None:
        rsyncargs += ['-e', 'ssh -i ' + options.identity_file]
    elif 'identity_file' in config:
        rsyncargs += ['-e', 'ssh -i ' + config['identity_file']]
    indexxml = os.path.join(repo_section, 'index.xml')
    indexjar = os.path.join(repo_section, 'index.jar')
    # Upload the first time without the index files and delay the deletion as
    # much as possible, that keeps the repo functional while this update is
    # running. Then once it is complete, rerun the command again to upload
    # the index files. Always using the same target with rsync allows for
    # very strict settings on the receiving server, you can literally specify
    # the one rsync command that is allowed to run in ~/.ssh/authorized_keys.
    # (serverwebroot is guaranteed to have a trailing slash in common.py)
    logging.info('rsyncing ' + repo_section + ' to ' + serverwebroot)
    if subprocess.call(rsyncargs +
                       ['--exclude', indexxml, '--exclude', indexjar,
                        repo_section, serverwebroot]) != 0:
        sys.exit(1)
    if subprocess.call(rsyncargs + [repo_section, serverwebroot]) != 0:
        sys.exit(1)
    # upload "current version" symlinks if requested
    if config['make_current_version_link'] and repo_section == 'repo':
        links_to_upload = []
        for f in glob.glob('*.apk') \
                + glob.glob('*.apk.asc') + glob.glob('*.apk.sig'):
            if os.path.islink(f):
                links_to_upload.append(f)
        if len(links_to_upload) > 0:
            if subprocess.call(rsyncargs + links_to_upload + [serverwebroot]) != 0:
                sys.exit(1)
def _local_sync(fromdir, todir):
    '''rsync fromdir into todir, exiting the process with status 1 on failure.'''
    rsyncargs = ['rsync', '--recursive', '--safe-links', '--times', '--perms',
                 '--one-file-system', '--delete', '--chmod=Da+rx,Fa-x,a+r,u+w']
    # use stricter rsync checking on all files since people using offline mode
    # are already prioritizing security above ease and speed
    if not options.no_checksum:
        rsyncargs.append('--checksum')
    if options.verbose:
        rsyncargs += ['--verbose']
    if options.quiet:
        rsyncargs += ['--quiet']
    logging.debug(' '.join(rsyncargs + [fromdir, todir]))
    if subprocess.call(rsyncargs + [fromdir, todir]) != 0:
        sys.exit(1)
def sync_from_localcopy(repo_section, local_copy_dir):
    '''Pull the given section of local_copy_dir back into this repo.'''
    logging.info('Syncing from local_copy_dir to this repo.')
    # rsync assigns special meaning to trailing slashes, which is unwanted
    # here, so normalize both endpoints to exactly one trailing slash
    source = os.path.join(local_copy_dir, repo_section).rstrip('/') + '/'
    destination = repo_section.rstrip('/') + '/'
    _local_sync(source, destination)
def update_localcopy(repo_section, local_copy_dir):
    '''Push this repo section out to the local copy directory.'''
    # main() below guarantees local_copy_dir carries a trailing slash
    _local_sync(repo_section, local_copy_dir)
def main():
    '''Entry point: parse arguments, validate config, then run init/update.

    Sets the module globals `config` and `options` that the helper
    functions above read.  Exits non-zero on any validation failure.
    '''
    global config, options

    # Parse command line...
    parser = ArgumentParser()
    parser.add_argument("command", help="command to execute, either 'init' or 'update'")
    parser.add_argument("-i", "--identity-file", default=None,
                        help="Specify an identity file to provide to SSH for rsyncing")
    parser.add_argument("--local-copy-dir", default=None,
                        help="Specify a local folder to sync the repo to")
    parser.add_argument("--sync-from-local-copy-dir", action="store_true", default=False,
                        help="Before uploading to servers, sync from local copy dir")
    parser.add_argument("-v", "--verbose", action="store_true", default=False,
                        help="Spew out even more information than normal")
    parser.add_argument("-q", "--quiet", action="store_true", default=False,
                        help="Restrict output to warnings and errors")
    parser.add_argument("--no-checksum", action="store_true", default=False,
                        help="Don't use rsync checksums")
    options = parser.parse_args()

    config = common.read_config(options)

    if options.command != 'init' and options.command != 'update':
        logging.critical("The only commands currently supported are 'init' and 'update'")
        sys.exit(1)

    # repos are expected to live in a dir called "fdroid" unless the user
    # explicitly opted out of that convention
    if config.get('nonstandardwebroot') is True:
        standardwebroot = False
    else:
        standardwebroot = True

    for serverwebroot in config.get('serverwebroot', []):
        # this supports both an ssh host:path and just a path
        s = serverwebroot.rstrip('/').split(':')
        if len(s) == 1:
            fdroiddir = s[0]
        elif len(s) == 2:
            fdroiddir = s[1]
        else:
            logging.error('Malformed serverwebroot line: ' + serverwebroot)
            sys.exit(1)
        repobase = os.path.basename(fdroiddir)
        if standardwebroot and repobase != 'fdroid':
            logging.error('serverwebroot path does not end with "fdroid", '
                          + 'perhaps you meant one of these:\n\t'
                          + serverwebroot.rstrip('/') + '/fdroid\n\t'
                          + serverwebroot.rstrip('/').rstrip(repobase) + 'fdroid')
            sys.exit(1)

    # the command line flag overrides the config file setting
    if options.local_copy_dir is not None:
        local_copy_dir = options.local_copy_dir
    elif config.get('local_copy_dir'):
        local_copy_dir = config['local_copy_dir']
    else:
        local_copy_dir = None
    if local_copy_dir is not None:
        fdroiddir = local_copy_dir.rstrip('/')
        if os.path.exists(fdroiddir) and not os.path.isdir(fdroiddir):
            logging.error('local_copy_dir must be directory, not a file!')
            sys.exit(1)
        if not os.path.exists(os.path.dirname(fdroiddir)):
            logging.error('The root dir for local_copy_dir "'
                          + os.path.dirname(fdroiddir)
                          + '" does not exist!')
            sys.exit(1)
        if not os.path.isabs(fdroiddir):
            logging.error('local_copy_dir must be an absolute path!')
            sys.exit(1)
        repobase = os.path.basename(fdroiddir)
        if standardwebroot and repobase != 'fdroid':
            logging.error('local_copy_dir does not end with "fdroid", '
                          + 'perhaps you meant: ' + fdroiddir + '/fdroid')
            sys.exit(1)
        # normalize to exactly one trailing slash, as _local_sync expects
        if local_copy_dir[-1] != '/':
            local_copy_dir += '/'
        local_copy_dir = local_copy_dir.replace('//', '/')
        if not os.path.exists(fdroiddir):
            os.mkdir(fdroiddir)

    if not config.get('awsbucket') \
            and not config.get('serverwebroot') \
            and local_copy_dir is None:
        logging.warn('No serverwebroot, local_copy_dir, or awsbucket set! '
                     + 'Edit your config.py to set at least one.')
        sys.exit(1)

    repo_sections = ['repo']
    if config['archive_older'] != 0:
        repo_sections.append('archive')
        if not os.path.exists('archive'):
            os.mkdir('archive')
    if config['per_app_repos']:
        repo_sections += common.get_per_app_repos()

    if options.command == 'init':
        # create the remote directory layout over SFTP
        ssh = paramiko.SSHClient()
        ssh.load_system_host_keys()
        for serverwebroot in config.get('serverwebroot', []):
            sshstr, remotepath = serverwebroot.rstrip('/').split(':')
            if sshstr.find('@') >= 0:
                username, hostname = sshstr.split('@')
            else:
                username = pwd.getpwuid(os.getuid())[0]  # get effective uid
                hostname = sshstr
            ssh.connect(hostname, username=username)
            sftp = ssh.open_sftp()
            if os.path.basename(remotepath) \
                    not in sftp.listdir(os.path.dirname(remotepath)):
                sftp.mkdir(remotepath, mode=0o755)
            for repo_section in repo_sections:
                repo_path = os.path.join(remotepath, repo_section)
                if os.path.basename(repo_path) \
                        not in sftp.listdir(remotepath):
                    sftp.mkdir(repo_path, mode=0o755)
            sftp.close()
            ssh.close()
    elif options.command == 'update':
        for repo_section in repo_sections:
            if local_copy_dir is not None:
                if config['sync_from_local_copy_dir'] and os.path.exists(repo_section):
                    sync_from_localcopy(repo_section, local_copy_dir)
                else:
                    update_localcopy(repo_section, local_copy_dir)
            for serverwebroot in config.get('serverwebroot', []):
                update_serverwebroot(serverwebroot, repo_section)
            if config.get('awsbucket'):
                update_awsbucket(repo_section)

    sys.exit(0)
if __name__ == "__main__":
    main()