Source Code for Package zeroinstall.zerostore

"""
Code for managing the implementation cache.
"""

# Copyright (C) 2009, Thomas Leonard
# See the README file for details, or visit http://0install.net.

from __future__ import print_function

from zeroinstall import _, logger
import os

from zeroinstall.support import basedir
from zeroinstall import SafeException, support

class BadDigest(SafeException):
	"""Thrown if a digest is invalid (either syntactically or cryptographically)."""
	detail = None

class NotStored(SafeException):
	"""Thrown if a requested implementation isn't in the cache."""

class NonwritableStore(SafeException):
	"""Attempt to add to a non-writable store directory."""

def _copytree2(src, dst):
	"""@type src: str
	@type dst: str"""
	import shutil
	names = os.listdir(src)
	assert os.path.isdir(dst)
	for name in names:
		srcname = os.path.join(src, name)
		dstname = os.path.join(dst, name)
		if os.path.islink(srcname):
			linkto = os.readlink(srcname)
			os.symlink(linkto, dstname)
		elif os.path.isdir(srcname):
			os.mkdir(dstname)
			mtime = int(os.lstat(srcname).st_mtime)
			_copytree2(srcname, dstname)
			os.utime(dstname, (mtime, mtime))
		else:
			shutil.copy2(srcname, dstname)

def _validate_pair(value):
	"""@type value: str"""
	if '/' in value or \
	   '\\' in value or \
	   value.startswith('.'):
		raise BadDigest("Invalid digest '{value}'".format(value = value))

def parse_algorithm_digest_pair(src):
	"""Break apart an algorithm/digest string into a tuple.
	Old algorithms use '=' as the separator, while newer ones use '_'.
	@param src: the combined string
	@type src: str
	@return: the parsed values
	@rtype: (str, str)
	@raise BadDigest: if it can't be parsed
	@since: 1.10"""
	_validate_pair(src)
	if src.startswith('sha1=') or src.startswith('sha1new=') or src.startswith('sha256='):
		return src.split('=', 1)
	result = src.split('_', 1)
	if len(result) != 2:
		if '=' in src:
			raise BadDigest("Use '_' not '=' for new algorithms, in {src}".format(src = src))
		raise BadDigest("Can't parse digest {src}".format(src = src))
	return result

def format_algorithm_digest_pair(alg, digest):
	"""The opposite of L{parse_algorithm_digest_pair}.
	The result is suitable for use as a directory name (it does not contain '/' characters).
	@type alg: str
	@type digest: str
	@rtype: str
	@raise BadDigest: if the result is invalid
	@since: 1.10"""
	if alg in ('sha1', 'sha1new', 'sha256'):
		result = alg + '=' + digest
	else:
		result = alg + '_' + digest
	_validate_pair(result)
	return result

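# Illustrative sketch (not part of the original module): how the two helpers
# above split and join digest strings. The digest values are invented
# placeholders, not real hashes.
#
#	parse_algorithm_digest_pair('sha1new=0a1b2c')        -> ['sha1new', '0a1b2c']
#	parse_algorithm_digest_pair('sha256new_0A1B2C')      -> ['sha256new', '0A1B2C']
#	parse_algorithm_digest_pair('sha256new=0A1B2C')      -> raises BadDigest (new algorithms use '_')
#	format_algorithm_digest_pair('sha1new', '0a1b2c')    -> 'sha1new=0a1b2c'
#	format_algorithm_digest_pair('sha256new', '0A1B2C')  -> 'sha256new_0A1B2C'
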
class Store(object):
	"""A directory for storing implementations."""

	def __init__(self, dir, public = False):
		"""Create a new Store.
		@param dir: directory to contain the implementations
		@type dir: str
		@param public: deprecated
		@type public: bool"""
		self.dir = dir
		self.dry_run_names = set()

	def __str__(self):
		return _("Store '%s'") % self.dir

	def lookup(self, digest):
		"""@type digest: str
		@rtype: str"""
		alg, value = parse_algorithm_digest_pair(digest)	# syntax check only; raises BadDigest if invalid
		dir = os.path.join(self.dir, digest)
		if os.path.isdir(dir) or digest in self.dry_run_names:
			return dir
		return None

	def get_tmp_dir_for(self, required_digest):
		"""Create a temporary directory in the directory where we would store an implementation
		with the given digest. This is used to set up a new implementation, which is then renamed
		into place if it turns out OK.
		@type required_digest: str
		@rtype: str
		@raise NonwritableStore: if we can't create it"""
		try:
			if not os.path.isdir(self.dir):
				os.makedirs(self.dir)
			from tempfile import mkdtemp
			tmp = mkdtemp(dir = self.dir, prefix = 'tmp-')
			os.chmod(tmp, 0o755)	# r-x for all; needed by 0store-helper
			return tmp
		except OSError as ex:
			raise NonwritableStore(str(ex))

	def add_archive_to_cache(self, required_digest, data, url, extract = None, type = None, start_offset = 0, try_helper = False, dry_run = False):
		"""@type required_digest: str
		@type data: file
		@type url: str
		@type extract: str | None
		@type type: str | None
		@type start_offset: int
		@type try_helper: bool
		@type dry_run: bool"""
		from . import unpack

		if self.lookup(required_digest):
			logger.info(_("Not adding %s as it already exists!"), required_digest)
			return

		tmp = self.get_tmp_dir_for(required_digest)
		try:
			unpack.unpack_archive(url, data, tmp, extract, type = type, start_offset = start_offset)
		except:
			import shutil
			shutil.rmtree(tmp)
			raise

		try:
			self.check_manifest_and_rename(required_digest, tmp, extract, try_helper = try_helper, dry_run = dry_run)
		except Exception:
			#warn(_("Leaving extracted directory as %s"), tmp)
			support.ro_rmtree(tmp)
			raise

	def add_dir_to_cache(self, required_digest, path, try_helper = False, dry_run = False):
		"""Copy the contents of path to the cache.
		@param required_digest: the expected digest
		@type required_digest: str
		@param path: the root of the tree to copy
		@type path: str
		@param try_helper: attempt to use privileged helper before user cache (since 0.26)
		@type try_helper: bool
		@type dry_run: bool
		@raise BadDigest: if the contents don't match the given digest."""
		if self.lookup(required_digest):
			logger.info(_("Not adding %s as it already exists!"), required_digest)
			return

		tmp = self.get_tmp_dir_for(required_digest)
		try:
			_copytree2(path, tmp)
			self.check_manifest_and_rename(required_digest, tmp, try_helper = try_helper, dry_run = dry_run)
		except:
			logger.warning(_("Error importing directory."))
			logger.warning(_("Deleting %s"), tmp)
			support.ro_rmtree(tmp)
			raise

	def _add_with_helper(self, required_digest, path, dry_run):
		"""Use 0store-secure-add to copy 'path' to the system store.
		@param required_digest: the digest for path
		@type required_digest: str
		@param path: root of implementation directory structure
		@type path: str
		@return: True iff the directory was copied into the system cache successfully"""
		if required_digest.startswith('sha1='):
			return False		# Old digest alg not supported
		if os.environ.get('ZEROINSTALL_PORTABLE_BASE'):
			return False		# Can't use helper with portable mode
		helper = support.find_in_path('0store-secure-add-helper')
		if not helper:
			logger.info(_("'0store-secure-add-helper' command not found. Not adding to system cache."))
			return False
		if dry_run:
			print(_("[dry-run] would use {helper} to store {required_digest} in system store").format(
				helper = helper,
				required_digest = required_digest))
			self.dry_run_names.add(required_digest)
			return True
		import subprocess
		env = os.environ.copy()
		env['ENV_NOT_CLEARED'] = 'Unclean'	# (warn about insecure configurations)
		env['HOME'] = 'Unclean'			# (warn about insecure configurations)
		dev_null = os.open(os.devnull, os.O_RDONLY)
		try:
			logger.info(_("Trying to add to system cache using %s"), helper)
			child = subprocess.Popen([helper, required_digest],
						stdin = dev_null,
						cwd = path,
						env = env)
			exit_code = child.wait()
		finally:
			os.close(dev_null)

		if exit_code:
			logger.warning(_("0store-secure-add-helper failed."))
			return False

		logger.info(_("Added successfully."))
		return True

	def check_manifest_and_rename(self, required_digest, tmp, extract = None, try_helper = False, dry_run = False):
		"""Check that tmp[/extract] has the required_digest.
		On success, rename the checked directory to the digest, and
		make the whole tree read-only.
		@type required_digest: str
		@type tmp: str
		@type extract: str | None
		@param try_helper: attempt to use privileged helper to import to system cache first (since 0.26)
		@type try_helper: bool
		@param dry_run: just print what we would do to stdout (and delete tmp)
		@type dry_run: bool
		@raise BadDigest: if the input directory doesn't match the given digest"""
		if extract:
			extracted = os.path.join(tmp, extract)
			if not os.path.isdir(extracted):
				raise Exception(_('Directory %s not found in archive') % extract)
		else:
			extracted = tmp

		from . import manifest

		manifest.fixup_permissions(extracted)

		alg, required_value = manifest.splitID(required_digest)
		actual_digest = alg.getID(manifest.add_manifest_file(extracted, alg))
		if actual_digest != required_digest:
			raise BadDigest(_('Incorrect manifest -- archive is corrupted.\n'
					'Required digest: %(required_digest)s\n'
					'Actual digest: %(actual_digest)s\n') %
					{'required_digest': required_digest, 'actual_digest': actual_digest})

		if try_helper:
			if self._add_with_helper(required_digest, extracted, dry_run = dry_run):
				support.ro_rmtree(tmp)
				return
			logger.info(_("Can't add to system store. Trying user store instead."))

		logger.info(_("Caching new implementation (digest %s) in %s"), required_digest, self.dir)

		final_name = os.path.join(self.dir, required_digest)
		if os.path.isdir(final_name):
			logger.warning(_("Item %s already stored.") % final_name)	# not really an error
			return

		if dry_run:
			print(_("[dry-run] would store implementation as {path}").format(path = final_name))
			self.dry_run_names.add(required_digest)
			support.ro_rmtree(tmp)
			return
		else:
			# If we just want a subdirectory then the rename will change
			# extracted/.. and so we'll need write permission on 'extracted'

			os.chmod(extracted, 0o755)
			os.rename(extracted, final_name)
			os.chmod(final_name, 0o555)

			if extract:
				os.rmdir(tmp)

	def __repr__(self):
		return "<store: %s>" % self.dir

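# Minimal usage sketch (an assumption, not from the original source): a Store
# maps a full "algorithm_digest" string onto a sub-directory of the same name,
# so a successful lookup simply returns that path. The directory and digest
# below are hypothetical placeholders.
#
#	store = Store('/tmp/example-store')
#	path = store.lookup('sha256new_PLACEHOLDER')
#	# path == '/tmp/example-store/sha256new_PLACEHOLDER' if cached, else None
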
class Stores(object):
	"""A list of L{Store}s. All stores are searched when looking for an implementation.
	When storing, we use the first of the system caches (if writable), or the user's
	cache otherwise."""
	__slots__ = ['stores']

	def __init__(self):
		# Always add the user cache to have a reliable fallback location for storage
		user_store = os.path.join(basedir.xdg_cache_home, '0install.net', 'implementations')
		self.stores = [Store(user_store)]

		# Add custom cache locations
		dirs = []
		for impl_dirs in basedir.load_config_paths('0install.net', 'injector', 'implementation-dirs'):
			with open(impl_dirs, 'rt') as stream:
				dirs.extend(stream.readlines())
		for directory in dirs:
			directory = directory.strip()
			if directory and not directory.startswith('#'):
				logger.debug(_("Added system store '%s'"), directory)
				self.stores.append(Store(directory))

		# Add the system cache when not in portable mode
		if not os.environ.get('ZEROINSTALL_PORTABLE_BASE'):
			if os.name == "nt":
				from win32com.shell import shell, shellcon
				commonAppData = shell.SHGetFolderPath(0, shellcon.CSIDL_COMMON_APPDATA, 0, 0)
				systemCachePath = os.path.join(commonAppData, "0install.net", "implementations")
				# Only use shared cache location on Windows if it was explicitly created
				if os.path.isdir(systemCachePath):
					self.stores.append(Store(systemCachePath))
			else:
				self.stores.append(Store('/var/cache/0install.net/implementations'))

	def lookup(self, digest):
		"""@type digest: str
		@rtype: str
		@deprecated: use lookup_any instead"""
		return self.lookup_any([digest])

	def lookup_any(self, digests):
		"""Search for digest in all stores.
		@type digests: [str]
		@rtype: str
		@raises NotStored: if not found"""
		path = self.lookup_maybe(digests)
		if path:
			return path
		raise NotStored(_("Item with digests '%(digests)s' not found in stores. Searched:\n- %(stores)s") %
				{'digests': digests, 'stores': '\n- '.join([s.dir for s in self.stores])})

	def lookup_maybe(self, digests):
		"""Like lookup_any, but return None if it isn't found.
		@type digests: [str]
		@rtype: str | None
		@since: 0.53"""
		assert digests
		for digest in digests:
			assert digest
			_validate_pair(digest)
			for store in self.stores:
				path = store.lookup(digest)
				if path:
					return path
		return None

	def add_dir_to_cache(self, required_digest, dir, dry_run = False):
		"""Add to the best writable cache.
		@type required_digest: str
		@type dir: str
		@type dry_run: bool
		@see: L{Store.add_dir_to_cache}"""
		self._write_store(lambda store, **kwargs: store.add_dir_to_cache(required_digest, dir, dry_run = dry_run, **kwargs))

	def add_archive_to_cache(self, required_digest, data, url, extract = None, type = None, start_offset = 0, dry_run = False):
		"""Add to the best writable cache.
		@type required_digest: str
		@type data: file
		@type url: str
		@type extract: str | None
		@type type: str | None
		@type start_offset: int
		@type dry_run: bool
		@see: L{Store.add_archive_to_cache}"""
		self._write_store(lambda store, **kwargs: store.add_archive_to_cache(required_digest,
				data, url, extract, type = type, start_offset = start_offset, dry_run = dry_run, **kwargs))

	def _write_store(self, fn):
		"""Call fn(first_system_store). If it's read-only, try again with the user store."""
		if len(self.stores) > 1:
			try:
				fn(self.get_first_system_store())
				return
			except NonwritableStore:
				logger.debug(_("%s not-writable. Trying helper instead."), self.get_first_system_store())
		fn(self.stores[0], try_helper = True)

	def get_first_system_store(self):
		"""The first system store is the one we try writing to first.
		@rtype: L{Store}
		@since: 0.30"""
		try:
			return self.stores[1]
		except IndexError:
			raise SafeException(_("No system stores have been configured"))
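
# Minimal usage sketch (not part of the original source) for the Stores
# front-end: search every configured cache, then add a new implementation,
# which goes to the first writable system store or falls back to the user
# cache. The digest and path are hypothetical placeholders.
#
#	stores = Stores()
#	path = stores.lookup_maybe(['sha256new_PLACEHOLDER'])	# None if not cached
#	if path is None:
#		# verifies the digest, then copies the tree into the chosen cache
#		stores.add_dir_to_cache('sha256new_PLACEHOLDER', '/path/to/unpacked/tree')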