2 # -*- coding: utf-8 -*-
4 # server.py - part of the FDroid server tools
5 # Copyright (C) 2010-13, Ciaran Gultnieks, ciaran@ciarang.com
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU Affero General Public License as published by
9 # the Free Software Foundation, either version 3 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU Affero General Public License for more details.
17 # You should have received a copy of the GNU Affero General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
26 from optparse import OptionParser
def update_awsbucket(repo_section):
    '''
    Upload the contents of the directory `repo_section` (including
    subdirectories) to the AWS S3 "bucket". Stale or changed objects under
    that subdir of the bucket are deleted and replaced.

    Requires AWS credentials set in config.py: awsaccesskeyid, awssecretkey
    '''

    # libcloud is only needed for this upload method, so import lazily
    import libcloud.security
    libcloud.security.VERIFY_SSL_CERT = True
    from libcloud.storage.types import Provider, ContainerDoesNotExistError
    from libcloud.storage.providers import get_driver

    if not config.get('awsaccesskeyid') or not config.get('awssecretkey'):
        logging.error('To use awsbucket, you must set awssecretkey and awsaccesskeyid in config.py!')
        sys.exit(1)
    awsbucket = config['awsbucket']

    cls = get_driver(Provider.S3)
    driver = cls(config['awsaccesskeyid'], config['awssecretkey'])
    try:
        container = driver.get_container(container_name=awsbucket)
    except ContainerDoesNotExistError:
        # bucket does not exist yet, create it on the fly
        container = driver.create_container(container_name=awsbucket)
        logging.info('Created new container "' + container.name + '"')

    upload_dir = 'fdroid/' + repo_section
    # index the existing remote objects so local files can be compared to them
    objs = dict()
    for obj in container.list_objects():
        if obj.name.startswith(upload_dir + '/'):
            objs[obj.name] = obj

    for root, _, files in os.walk(os.path.join(os.getcwd(), repo_section)):
        for name in files:
            upload = False
            file_to_upload = os.path.join(root, name)
            object_name = 'fdroid/' + os.path.relpath(file_to_upload, os.getcwd())
            if object_name not in objs:
                upload = True
            else:
                obj = objs.pop(object_name)
                if obj.size != os.path.getsize(file_to_upload):
                    upload = True
                else:
                    # if the sizes match, then compare by MD5
                    md5 = hashlib.md5()
                    with open(file_to_upload, 'rb') as f:
                        # read in chunks so large APKs don't fill memory
                        while True:
                            data = f.read(8192)
                            if not data:
                                break
                            md5.update(data)
                    if obj.hash != md5.hexdigest():
                        # remote copy differs: delete it, then re-upload below
                        s3url = 's3://' + awsbucket + '/' + obj.name
                        logging.info(' deleting ' + s3url)
                        if not driver.delete_object(obj):
                            logging.warn('Could not delete ' + s3url)
                        upload = True

            if upload:
                logging.debug(' uploading "' + file_to_upload + '"...')
                extra = {'acl': 'public-read'}
                if file_to_upload.endswith('.sig'):
                    extra['content_type'] = 'application/pgp-signature'
                elif file_to_upload.endswith('.asc'):
                    extra['content_type'] = 'application/pgp-signature'
                logging.info(' uploading ' + os.path.relpath(file_to_upload)
                             + ' to s3://' + awsbucket + '/' + object_name)
                obj = driver.upload_object(file_path=file_to_upload,
                                           container=container,
                                           object_name=object_name,
                                           extra=extra)
    # delete the remnants in the bucket, they do not exist locally
    while objs:
        object_name, obj = objs.popitem()
        s3url = 's3://' + awsbucket + '/' + object_name
        if object_name.startswith(upload_dir):
            logging.warn(' deleting ' + s3url)
            driver.delete_object(obj)
        else:
            logging.info(' skipping ' + s3url)
def update_serverwebroot(serverwebroot, repo_section):
    '''Push the local `repo_section` to a remote webroot via rsync-over-ssh.

    Exits the process with status 1 if any rsync invocation fails.
    '''
    rsyncargs = ['rsync', '--archive', '--delete']
    if options.verbose:
        rsyncargs += ['--verbose']
    if options.quiet:
        rsyncargs += ['--quiet']
    if options.identity_file is not None:
        rsyncargs += ['-e', 'ssh -i ' + options.identity_file]
    if 'identity_file' in config:
        rsyncargs += ['-e', 'ssh -i ' + config['identity_file']]
    indexxml = os.path.join(repo_section, 'index.xml')
    indexjar = os.path.join(repo_section, 'index.jar')
    # serverwebroot is guaranteed to have a trailing slash in common.py
    # sync everything but the index files first, so the index never points
    # at APKs that have not arrived yet
    if subprocess.call(rsyncargs +
                       ['--exclude', indexxml, '--exclude', indexjar,
                        repo_section, serverwebroot]) != 0:
        sys.exit(1)
    # use stricter checking on the indexes since they provide the signature
    rsyncargs += ['--checksum']
    sectionpath = serverwebroot + repo_section
    if subprocess.call(rsyncargs + [indexxml, sectionpath]) != 0:
        sys.exit(1)
    if subprocess.call(rsyncargs + [indexjar, sectionpath]) != 0:
        sys.exit(1)
def _local_sync(fromdir, todir):
    '''rsync `fromdir` to `todir` on the local filesystem.

    Exits the process with status 1 if rsync fails.
    '''
    rsyncargs = ['rsync', '--recursive', '--links', '--times', '--perms',
                 '--one-file-system', '--delete', '--chmod=Da+rx,Fa-x,a+r,u+w']
    # use stricter rsync checking on all files since people using offline mode
    # are already prioritizing security above ease and speed
    rsyncargs += ['--checksum']
    if options.verbose:
        rsyncargs += ['--verbose']
    if options.quiet:
        rsyncargs += ['--quiet']
    logging.debug(' '.join(rsyncargs + [fromdir, todir]))
    if subprocess.call(rsyncargs + [fromdir, todir]) != 0:
        sys.exit(1)
def sync_from_localcopy(repo_section, local_copy_dir):
    '''Pull `repo_section` from local_copy_dir back into this repo.'''
    logging.info('Syncing from local_copy_dir to this repo.')
    # rsync gives trailing slashes a special meaning, which is not wanted
    # here, so normalize both paths to end in exactly one slash
    source = os.path.join(local_copy_dir, repo_section).rstrip('/') + '/'
    target = repo_section.rstrip('/') + '/'
    _local_sync(source, target)
def update_localcopy(repo_section, local_copy_dir):
    '''Push `repo_section` out to the configured local copy directory.'''
    # local_copy_dir is guaranteed to have a trailing slash in main() below,
    # so no path normalization is needed here
    _local_sync(repo_section, local_copy_dir)
def main():
    '''Entry point: parse options, validate config, then run `init` or
    `update` against every configured destination (serverwebroot,
    local_copy_dir, awsbucket).'''
    global config, options

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-i", "--identity-file", default=None,
                      help="Specify an identity file to provide to SSH for rsyncing")
    parser.add_option("--local-copy-dir", default=None,
                      help="Specify a local folder to sync the repo to")
    parser.add_option("--sync-from-local-copy-dir", action="store_true", default=False,
                      help="Before uploading to servers, sync from local copy dir")
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q", "--quiet", action="store_true", default=False,
                      help="Restrict output to warnings and errors")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    if len(args) != 1:
        logging.critical("Specify a single command")
        sys.exit(1)

    if args[0] != 'init' and args[0] != 'update':
        logging.critical("The only commands currently supported are 'init' and 'update'")
        sys.exit(1)

    # a "standard" webroot ends in /fdroid; warn about likely misconfiguration
    if config.get('nonstandardwebroot') is True:
        standardwebroot = False
    else:
        standardwebroot = True

    for serverwebroot in config.get('serverwebroot', []):
        host, fdroiddir = serverwebroot.rstrip('/').split(':')
        repobase = os.path.basename(fdroiddir)
        if standardwebroot and repobase != 'fdroid':
            logging.error('serverwebroot path does not end with "fdroid", '
                          + 'perhaps you meant one of these:\n\t'
                          + serverwebroot.rstrip('/') + '/fdroid\n\t'
                          + serverwebroot.rstrip('/').rstrip(repobase) + 'fdroid')
            sys.exit(1)

    # command-line option wins over config.py
    if options.local_copy_dir is not None:
        local_copy_dir = options.local_copy_dir
    elif config.get('local_copy_dir'):
        local_copy_dir = config['local_copy_dir']
    else:
        local_copy_dir = None
    if local_copy_dir is not None:
        fdroiddir = local_copy_dir.rstrip('/')
        if os.path.exists(fdroiddir) and not os.path.isdir(fdroiddir):
            logging.error('local_copy_dir must be directory, not a file!')
            sys.exit(1)
        if not os.path.exists(os.path.dirname(fdroiddir)):
            logging.error('The root dir for local_copy_dir "'
                          + os.path.dirname(fdroiddir)
                          + '" does not exist!')
            sys.exit(1)
        if not os.path.isabs(fdroiddir):
            logging.error('local_copy_dir must be an absolute path!')
            sys.exit(1)
        repobase = os.path.basename(fdroiddir)
        if standardwebroot and repobase != 'fdroid':
            logging.error('local_copy_dir does not end with "fdroid", '
                          + 'perhaps you meant: ' + fdroiddir + '/fdroid')
            sys.exit(1)
        # guarantee exactly one trailing slash, which update_localcopy()
        # relies on for rsync semantics
        if local_copy_dir[-1] != '/':
            local_copy_dir += '/'
        local_copy_dir = local_copy_dir.replace('//', '/')
        if not os.path.exists(fdroiddir):
            os.mkdir(fdroiddir)

    if not config.get('awsbucket') \
            and not config.get('serverwebroot') \
            and local_copy_dir is None:
        logging.warn('No serverwebroot, local_copy_dir, or awsbucket set! '
                     + 'Edit your config.py to set at least one.')
        sys.exit(1)

    repo_sections = ['repo']
    if config['archive_older'] != 0:
        repo_sections.append('archive')
        if not os.path.exists('archive'):
            os.mkdir('archive')

    if args[0] == 'init':
        # create the remote repo dirs over SFTP so later rsyncs succeed
        ssh = paramiko.SSHClient()
        ssh.load_system_host_keys()
        for serverwebroot in config.get('serverwebroot', []):
            sshstr, remotepath = serverwebroot.rstrip('/').split(':')
            if sshstr.find('@') >= 0:
                username, hostname = sshstr.split('@')
            else:
                username = pwd.getpwuid(os.getuid())[0]  # get effective uid
                hostname = sshstr
            ssh.connect(hostname, username=username)
            sftp = ssh.open_sftp()
            if os.path.basename(remotepath) \
                    not in sftp.listdir(os.path.dirname(remotepath)):
                sftp.mkdir(remotepath, mode=0o755)
            for repo_section in repo_sections:
                repo_path = os.path.join(remotepath, repo_section)
                if os.path.basename(repo_path) \
                        not in sftp.listdir(remotepath):
                    sftp.mkdir(repo_path, mode=0o755)
            sftp.close()
            ssh.close()
    elif args[0] == 'update':
        for repo_section in repo_sections:
            if local_copy_dir is not None:
                if config['sync_from_local_copy_dir'] and os.path.exists(repo_section):
                    sync_from_localcopy(repo_section, local_copy_dir)
                else:
                    update_localcopy(repo_section, local_copy_dir)
            for serverwebroot in config.get('serverwebroot', []):
                update_serverwebroot(serverwebroot, repo_section)
            if config.get('awsbucket'):
                update_awsbucket(repo_section)

    sys.exit(0)

if __name__ == "__main__":
    main()