
Source Code for Module zeroinstall.injector.fetch

"""
Downloads feeds, keys, packages and icons.
"""

# Copyright (C) 2009, Thomas Leonard
# See the README file for details, or visit http://0install.net.

from zeroinstall import _, logger
import os, sys

from zeroinstall import support
from zeroinstall.support import tasks, basedir, portable_rename
from zeroinstall.injector.namespaces import XMLNS_IFACE, config_site
from zeroinstall.injector import model
from zeroinstall.injector.model import DownloadSource, Recipe, SafeException, escape, DistributionSource
from zeroinstall.injector.iface_cache import PendingFeed, ReplayAttack
from zeroinstall.injector.handler import NoTrustedKeys
from zeroinstall.injector import download

def _escape_slashes(path):
    """@type path: str
    @rtype: str"""
    return path.replace('/', '%23')

def _get_feed_dir(feed):
    """The algorithm from 0mirror.
    @type feed: str
    @rtype: str"""
    if '#' in feed:
        raise SafeException(_("Invalid URL '%s'") % feed)
    scheme, rest = feed.split('://', 1)
    assert '/' in rest, "Missing / in %s" % feed
    domain, rest = rest.split('/', 1)
    for x in [scheme, domain, rest]:
        if not x or x.startswith('.'):
            raise SafeException(_("Invalid URL '%s'") % feed)
    return '/'.join(['feeds', scheme, domain, _escape_slashes(rest)])

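For illustration, here is a minimal sketch (an editorial addition, not part of the module) of the 0mirror directory layout that _get_feed_dir produces; the feed URL below is hypothetical:

    # Editorial example: the mirror directory layout for a hypothetical feed.
    from zeroinstall.injector.fetch import _get_feed_dir

    print(_get_feed_dir('http://example.com/prog/feed.xml'))
    # -> 'feeds/http/example.com/prog%23feed.xml'
    # (the scheme and domain become directories; slashes in the remaining
    #  path are escaped as '%23' by _escape_slashes)
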
class KeyInfoFetcher(object):
    """Fetches information about a GPG key from a key-info server.
    See L{Fetcher.fetch_key_info} for details.
    @since: 0.42

    Example:

    >>> kf = KeyInfoFetcher(fetcher, 'https://server', fingerprint)
    >>> while True:
    ...     print(kf.info)
    ...     if kf.blocker is None: break
    ...     print(kf.status)
    ...     yield kf.blocker
    """
    def __init__(self, fetcher, server, fingerprint):
        """@type fetcher: L{Fetcher}
        @type server: str
        @type fingerprint: str"""
        self.fingerprint = fingerprint
        self.info = []
        self.blocker = None

        if server is None: return

        self.status = _('Fetching key information from %s...') % server

        dl = fetcher.download_url(server + '/key/' + fingerprint)

        from xml.dom import minidom

        @tasks.async
        def fetch_key_info():
            tempfile = dl.tempfile
            try:
                yield dl.downloaded
                self.blocker = None
                tasks.check(dl.downloaded)
                tempfile.seek(0)
                doc = minidom.parse(tempfile)
                if doc.documentElement.localName != 'key-lookup':
                    raise SafeException(_('Expected <key-lookup>, not <%s>') % doc.documentElement.localName)
                self.info += doc.documentElement.childNodes
            except Exception as ex:
                doc = minidom.parseString('<item vote="bad"/>')
                root = doc.documentElement
                root.appendChild(doc.createTextNode(_('Error getting key information: %s') % ex))
                self.info.append(root)
            finally:
                tempfile.close()

        self.blocker = fetch_key_info()

class Fetcher(object):
    """Downloads and stores various things.
    @ivar config: used to get handler, iface_cache and stores
    @type config: L{config.Config}
    @ivar key_info: caches information about GPG keys
    @type key_info: {str: L{KeyInfoFetcher}}
    """
    __slots__ = ['config', 'key_info', '_scheduler', 'external_store']

    def __init__(self, config):
        """@type config: L{zeroinstall.injector.config.Config}"""
        assert config.handler, "API change!"
        self.config = config
        self.key_info = {}
        self._scheduler = None
        self.external_store = os.environ.get('ZEROINSTALL_EXTERNAL_STORE')

    @property
    def handler(self):
        return self.config.handler

    @property
    def scheduler(self):
        if self._scheduler is None:
            from . import scheduler
            self._scheduler = scheduler.DownloadScheduler()
        return self._scheduler

    # (force is deprecated and ignored)
    @tasks.async
    def cook(self, required_digest, recipe, stores, force = False, impl_hint = None, dry_run = False, may_use_mirror = True):
        """Follow a Recipe.
        @type required_digest: str
        @type recipe: L{Recipe}
        @type stores: L{zeroinstall.zerostore.Stores}
        @type force: bool
        @param impl_hint: the Implementation this is for (if any) as a hint for the GUI
        @type dry_run: bool
        @type may_use_mirror: bool
        @see: L{download_impl} uses this method when appropriate"""
        # Maybe we're taking this metaphor too far?

        # Start a download for each ingredient
        blockers = []
        steps = []
        try:
            for stepdata in recipe.steps:
                cls = StepRunner.class_for(stepdata)
                step = cls(stepdata, impl_hint = impl_hint, may_use_mirror = may_use_mirror)
                step.prepare(self, blockers)
                steps.append(step)

            while blockers:
                yield blockers
                tasks.check(blockers)
                blockers = [b for b in blockers if not b.happened]

            if self.external_store:
                # Note: external_store will not yet work with non-<archive> steps.
                streams = [step.stream for step in steps]
                self._add_to_external_store(required_digest, recipe.steps, streams)
            else:
                # Create an empty directory for the new implementation
                store = stores.stores[0]
                tmpdir = store.get_tmp_dir_for(required_digest)
                try:
                    # Unpack each of the downloaded archives into it in turn
                    for step in steps:
                        step.apply(tmpdir)
                    # Check that the result is correct and store it in the cache
                    store.check_manifest_and_rename(required_digest, tmpdir, dry_run=dry_run)
                    tmpdir = None
                finally:
                    # If unpacking fails, remove the temporary directory
                    if tmpdir is not None:
                        support.ro_rmtree(tmpdir)
        finally:
            for step in steps:
                try:
                    step.close()
                except IOError as ex:
                    # Can get "close() called during concurrent operation on
                    # the same file object." if we're unlucky (Python problem).
                    logger.info("Failed to close: %s", ex)

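As a rough usage sketch (editorial, not from the original source): cook() returns a blocker that a caller normally drives with the tasks module. The names config, recipe, required_digest and impl are assumed to be set up elsewhere.

    # Editorial sketch: driving cook() from synchronous code.
    from zeroinstall.support import tasks

    fetcher = config.fetcher                 # 'config' assumed to be a loaded Config
    blocker = fetcher.cook(required_digest, recipe, config.stores, impl_hint = impl)
    tasks.wait_for_blocker(blocker)          # raises SafeException if any step fails
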
    def _get_mirror_url(self, feed_url, resource):
        """Return the URL of a mirror for this feed.
        @type feed_url: str
        @type resource: str
        @rtype: str"""
        if self.config.mirror is None:
            return None
        if feed_url.startswith('http://') or feed_url.startswith('https://'):
            if support.urlparse(feed_url).hostname == 'localhost':
                return None
            return '%s/%s/%s' % (self.config.mirror, _get_feed_dir(feed_url), resource)
        return None

    def get_feed_mirror(self, url):
        """Return the URL of a mirror for this feed.
        @type url: str
        @rtype: str"""
        return self._get_mirror_url(url, 'latest.xml')

    def _get_archive_mirror(self, source):
        """@type source: L{DownloadSource}
        @rtype: str"""
        if self.config.mirror is None:
            return None
        if support.urlparse(source.url).hostname == 'localhost':
            return None
        if sys.version_info[0] > 2:
            from urllib.parse import quote
        else:
            from urllib import quote
        return '{mirror}/archive/{archive}'.format(
            mirror = self.config.mirror,
            archive = quote(source.url.replace('/', '#'), safe = ''))

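For illustration (editorial; the archive URL below is made up), the archive-mirror naming scheme replaces slashes with '#' and then percent-encodes the whole string:

    # Editorial example of the archive-mirror encoding used above.
    try:
        from urllib.parse import quote      # Python 3
    except ImportError:
        from urllib import quote            # Python 2

    url = 'http://example.com/releases/prog-1.0.tar.gz'
    print(quote(url.replace('/', '#'), safe = ''))
    # -> 'http%3A%23%23example.com%23releases%23prog-1.0.tar.gz'
    # so the mirror request becomes <mirror>/archive/http%3A%23%23example.com%23releases%23prog-1.0.tar.gz
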
    def _get_impl_mirror(self, impl):
        """@type impl: L{zeroinstall.injector.model.ZeroInstallImplementation}
        @rtype: str"""
        return self._get_mirror_url(impl.feed.url, 'impl/' + _escape_slashes(impl.id))

    @tasks.async
    def get_packagekit_feed(self, feed_url):
        """Send a query to PackageKit (if available) for information about this package.
        On success, the result is added to iface_cache.
        @type feed_url: str"""
        assert feed_url.startswith('distribution:'), feed_url
        master_feed = self.config.iface_cache.get_feed(feed_url.split(':', 1)[1])
        if master_feed:
            fetch = self.config.iface_cache.distro.fetch_candidates(master_feed)
            if fetch:
                yield fetch
                tasks.check(fetch)

            # Force feed to be regenerated with the new information
            self.config.iface_cache.get_feed(feed_url, force = True)

    def download_and_import_feed(self, feed_url, iface_cache = None):
        """Download the feed, download any required keys, confirm trust if needed and import.
        @param feed_url: the feed to be downloaded
        @type feed_url: str
        @param iface_cache: (deprecated)
        @type iface_cache: L{zeroinstall.injector.iface_cache.IfaceCache} | None
        @rtype: L{zeroinstall.support.tasks.Blocker}"""
        from .download import DownloadAborted

        assert iface_cache is None or iface_cache is self.config.iface_cache

        if not self.config.handler.dry_run:
            try:
                self.config.iface_cache.mark_as_checking(feed_url)
            except OSError as ex:
                retval = tasks.Blocker("mark_as_checking")
                retval.trigger(exception = (ex, None))
                return retval

        logger.debug(_("download_and_import_feed %(url)s"), {'url': feed_url})
        assert not os.path.isabs(feed_url)

        if feed_url.startswith('distribution:'):
            return self.get_packagekit_feed(feed_url)

        primary = self._download_and_import_feed(feed_url, use_mirror = False)

        @tasks.named_async("monitor feed downloads for " + feed_url)
        def wait_for_downloads(primary):
            # Download just the upstream feed, unless it takes too long...
            timeout = tasks.TimeoutBlocker(5, 'Mirror timeout')  # 5 seconds

            yield primary, timeout
            tasks.check(timeout)

            try:
                tasks.check(primary)
                if primary.happened:
                    return  # OK, primary succeeded!
                # OK, maybe it's just being slow...
                logger.info("Feed download from %s is taking a long time.", feed_url)
                primary_ex = None
            except NoTrustedKeys as ex:
                raise  # Don't bother trying the mirror if we have a trust problem
            except ReplayAttack as ex:
                raise  # Don't bother trying the mirror if we have a replay attack
            except DownloadAborted as ex:
                raise  # Don't bother trying the mirror if the user cancelled
            except SafeException as ex:
                # Primary failed
                primary = None
                primary_ex = ex
                logger.warning(_("Feed download from %(url)s failed: %(exception)s"), {'url': feed_url, 'exception': ex})

            # Start downloading from mirror...
            mirror = self._download_and_import_feed(feed_url, use_mirror = True)

            # Wait until both mirror and primary tasks are complete...
            while True:
                blockers = list(filter(None, [primary, mirror]))
                if not blockers:
                    break
                yield blockers

                if primary:
                    try:
                        tasks.check(primary)
                        if primary.happened:
                            primary = None
                            # No point carrying on with the mirror once the primary has succeeded
                            if mirror:
                                logger.info(_("Primary feed download succeeded; aborting mirror download for %s") % feed_url)
                                mirror.dl.abort()
                    except SafeException as ex:
                        primary = None
                        primary_ex = ex
                        logger.info(_("Feed download from %(url)s failed; still trying mirror: %(exception)s"), {'url': feed_url, 'exception': ex})

                if mirror:
                    try:
                        tasks.check(mirror)
                        if mirror.happened:
                            mirror = None
                            if primary_ex:
                                # We already warned; no need to raise an exception too,
                                # as the mirror download succeeded.
                                primary_ex = None
                    except ReplayAttack as ex:
                        logger.info(_("Version from mirror is older than cached version; ignoring it: %s"), ex)
                        mirror = None
                        primary_ex = None
                    except SafeException as ex:
                        logger.info(_("Mirror download failed: %s"), ex)
                        mirror = None

            if primary_ex:
                raise primary_ex

        return wait_for_downloads(primary)

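A minimal usage sketch (editorial; the feed URL is hypothetical and the loaded Config is assumed to provide a fetcher):

    # Editorial sketch: fetching and importing a feed, then reading it from the cache.
    from zeroinstall.injector.config import load_config
    from zeroinstall.support import tasks

    config = load_config()
    feed_url = 'http://example.com/prog.xml'

    tasks.wait_for_blocker(config.fetcher.download_and_import_feed(feed_url))
    feed = config.iface_cache.get_feed(feed_url)
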
    def _download_and_import_feed(self, feed_url, use_mirror):
        """Download and import a feed.
        @type feed_url: str
        @param use_mirror: False to use primary location; True to use mirror.
        @type use_mirror: bool
        @rtype: L{zeroinstall.support.tasks.Blocker}"""
        if use_mirror:
            url = self.get_feed_mirror(feed_url)
            if url is None: return None
            logger.info(_("Trying mirror server for feed %s") % feed_url)
        else:
            url = feed_url

        if self.config.handler.dry_run:
            print(_("[dry-run] downloading feed {url}").format(url = url))
        dl = self.download_url(url, hint = feed_url)
        stream = dl.tempfile

        @tasks.named_async("fetch_feed " + url)
        def fetch_feed():
            try:
                yield dl.downloaded
                tasks.check(dl.downloaded)

                pending = PendingFeed(feed_url, stream)

                if use_mirror:
                    # If we got the feed from a mirror, get the key from there too
                    key_mirror = self.config.mirror + '/keys/'
                else:
                    key_mirror = None

                keys_downloaded = tasks.Task(pending.download_keys(self, feed_hint = feed_url, key_mirror = key_mirror), _("download keys for %s") % feed_url)
                yield keys_downloaded.finished
                tasks.check(keys_downloaded.finished)

                dry_run = self.handler.dry_run
                if not self.config.iface_cache.update_feed_if_trusted(pending.url, pending.sigs, pending.new_xml, dry_run = dry_run):
                    blocker = self.config.trust_mgr.confirm_keys(pending)
                    if blocker:
                        yield blocker
                        tasks.check(blocker)
                    if not self.config.iface_cache.update_feed_if_trusted(pending.url, pending.sigs, pending.new_xml, dry_run = dry_run):
                        raise NoTrustedKeys(_("No signing keys trusted; not importing"))
            finally:
                stream.close()

        task = fetch_feed()
        task.dl = dl
        return task

    def fetch_key_info(self, fingerprint):
        """@type fingerprint: str
        @rtype: L{KeyInfoFetcher}"""
        try:
            return self.key_info[fingerprint]
        except KeyError:
            if self.config.handler.dry_run:
                print(_("[dry-run] asking {url} about key {key}").format(
                    url = self.config.key_info_server,
                    key = fingerprint))
            self.key_info[fingerprint] = key_info = KeyInfoFetcher(self,
                self.config.key_info_server, fingerprint)
            return key_info

    # (force is deprecated and ignored)
    def download_impl(self, impl, retrieval_method, stores, force = False):
        """Download an implementation.
        @param impl: the selected implementation
        @type impl: L{model.ZeroInstallImplementation}
        @param retrieval_method: a way of getting the implementation (e.g. an Archive or a Recipe)
        @type retrieval_method: L{model.RetrievalMethod}
        @param stores: where to store the downloaded implementation
        @type stores: L{zerostore.Stores}
        @type force: bool
        @rtype: L{tasks.Blocker}"""
        assert impl
        assert retrieval_method

        if isinstance(retrieval_method, DistributionSource):
            return retrieval_method.install(self.handler)

        from zeroinstall.zerostore import manifest, parse_algorithm_digest_pair
        best = None
        for digest in impl.digests:
            alg_name, digest_value = parse_algorithm_digest_pair(digest)
            alg = manifest.algorithms.get(alg_name, None)
            if alg and (best is None or best.rating < alg.rating):
                best = alg
                required_digest = digest

        if best is None:
            if not impl.digests:
                raise SafeException(_("No <manifest-digest> given for '%(implementation)s' version %(version)s") %
                        {'implementation': impl.feed.get_name(), 'version': impl.get_version()})
            raise SafeException(_("Unknown digest algorithms '%(algorithms)s' for '%(implementation)s' version %(version)s") %
                    {'algorithms': impl.digests, 'implementation': impl.feed.get_name(), 'version': impl.get_version()})

        @tasks.async
        def download_impl(method):
            original_exception = None
            while True:
                if not isinstance(method, Recipe):
                    # turn an individual method into a single-step Recipe
                    step = method
                    method = Recipe()
                    method.steps.append(step)

                try:
                    blocker = self.cook(required_digest, method, stores,
                            impl_hint = impl,
                            dry_run = self.handler.dry_run,
                            may_use_mirror = original_exception is None)
                    yield blocker
                    tasks.check(blocker)
                except download.DownloadError as ex:
                    if original_exception:
                        logger.info("Error from mirror: %s", ex)
                        raise original_exception
                    else:
                        original_exception = ex
                        mirror_url = self._get_impl_mirror(impl)
                        if mirror_url is not None:
                            logger.info("%s: trying implementation mirror at %s", ex, mirror_url)
                            method = model.DownloadSource(impl, mirror_url,
                                    None, None, type = 'application/x-bzip-compressed-tar')
                            continue  # Retry
                    raise
                except SafeException as ex:
                    raise SafeException("Error fetching {url} {version}: {ex}".format(
                            url = impl.feed.url,
                            version = impl.get_version(),
                            ex = ex))
                break

            self.handler.impl_added_to_store(impl)

        return download_impl(retrieval_method)

    def _add_to_external_store(self, required_digest, steps, streams):
        """@type required_digest: str"""
        from zeroinstall.zerostore.unpack import type_from_url

        # combine archive path, extract directory and MIME type arguments in an alternating fashion
        # (list() so that len() also works under Python 3, where map() returns an iterator)
        paths = list(map(lambda stream: stream.name, streams))
        extracts = list(map(lambda step: step.extract or "", steps))
        types = list(map(lambda step: step.type or type_from_url(step.url), steps))
        args = [None] * (len(paths) + len(extracts) + len(types))
        args[::3] = paths
        args[1::3] = extracts
        args[2::3] = types

        # close file handles to allow external processes access
        for stream in streams:
            stream.close()

        # delegate extracting archives to external tool
        import subprocess
        subprocess.call([self.external_store, "add", required_digest] + args)

        # delete temp files
        for path in paths:
            os.remove(path)
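The alternating-argument layout above can be hard to read at a glance; this small self-contained demonstration (editorial, using plain lists) shows the interleaving:

    # Editorial demonstration of the alternating argument layout used above.
    paths    = ['/tmp/a.tgz', '/tmp/b.zip']
    extracts = ['', 'subdir']
    types    = ['application/x-compressed-tar', 'application/zip']

    args = [None] * (len(paths) + len(extracts) + len(types))
    args[::3]  = paths
    args[1::3] = extracts
    args[2::3] = types

    print(args)
    # -> ['/tmp/a.tgz', '', 'application/x-compressed-tar',
    #     '/tmp/b.zip', 'subdir', 'application/zip']
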

    # (force is deprecated and ignored)
    def download_archive(self, download_source, force = False, impl_hint = None, may_use_mirror = False):
        """Fetch an archive. You should normally call L{download_impl}
        instead, since it handles other kinds of retrieval method too.
        It is the caller's responsibility to ensure that the returned stream is closed.
        @type download_source: L{DownloadSource}
        @type force: bool
        @type may_use_mirror: bool
        @rtype: (L{Blocker}, file)"""
        from zeroinstall.zerostore import unpack

        mime_type = download_source.type
        if not mime_type:
            mime_type = unpack.type_from_url(download_source.url)
        if not mime_type:
            raise SafeException(_("No 'type' attribute on archive, and I can't guess from the name (%s)") % download_source.url)
        if not self.external_store:
            unpack.check_type_ok(mime_type)

        if may_use_mirror:
            mirror = self._get_archive_mirror(download_source)
        else:
            mirror = None

        if self.config.handler.dry_run:
            print(_("[dry-run] downloading archive {url}").format(url = download_source.url))
        dl = self.download_url(download_source.url, hint = impl_hint, mirror_url = mirror)
        if download_source.size is not None:
            dl.expected_size = download_source.size + (download_source.start_offset or 0)
        # (else don't know sizes for mirrored archives)
        return (dl.downloaded, dl.tempfile)

    def download_file(self, download_source, impl_hint=None):
        """Fetch a single file. You should normally call L{download_impl}
        instead, since it handles other kinds of retrieval method too.
        It is the caller's responsibility to ensure that the returned stream is closed.
        @type download_source: L{zeroinstall.injector.model.FileSource}
        @type impl_hint: L{zeroinstall.injector.model.ZeroInstallImplementation} | None
        @rtype: tuple"""
        if self.config.handler.dry_run:
            print(_("[dry-run] downloading file {url}").format(url = download_source.url))

        dl = self.download_url(download_source.url, hint = impl_hint)
        dl.expected_size = download_source.size
        return (dl.downloaded, dl.tempfile)

    # (force is deprecated and ignored)
    def download_icon(self, interface, force = False):
        """Download an icon for this interface and add it to the
        icon cache. If the interface has no icon do nothing.
        @type interface: L{zeroinstall.injector.model.Interface}
        @type force: bool
        @return: the task doing the import, or None
        @rtype: L{tasks.Task}"""
        logger.debug("download_icon %(interface)s", {'interface': interface})

        modification_time = None
        existing_icon = self.config.iface_cache.get_icon_path(interface)
        if existing_icon:
            file_mtime = os.stat(existing_icon).st_mtime
            from email.utils import formatdate
            modification_time = formatdate(timeval = file_mtime, localtime = False, usegmt = True)

        feed = self.config.iface_cache.get_feed(interface.uri)
        if feed is None:
            return None

        # Find a suitable icon to download
        for icon in feed.get_metadata(XMLNS_IFACE, 'icon'):
            type = icon.getAttribute('type')
            if type != 'image/png':
                logger.debug(_('Skipping non-PNG icon'))
                continue
            source = icon.getAttribute('href')
            if source:
                break
            logger.warning(_('Missing "href" attribute on <icon> in %s'), interface)
        else:
            logger.info(_('No PNG icons found in %s'), interface)
            return

        dl = self.download_url(source, hint = interface, modification_time = modification_time)

        @tasks.async
        def download_and_add_icon():
            stream = dl.tempfile
            try:
                yield dl.downloaded
                tasks.check(dl.downloaded)
                if dl.unmodified: return
                stream.seek(0)

                import shutil, tempfile
                icons_cache = basedir.save_cache_path(config_site, 'interface_icons')

                tmp_file = tempfile.NamedTemporaryFile(dir = icons_cache, delete = False)
                shutil.copyfileobj(stream, tmp_file)
                tmp_file.close()

                icon_file = os.path.join(icons_cache, escape(interface.uri))
                portable_rename(tmp_file.name, icon_file)
            finally:
                stream.close()

        return download_and_add_icon()

    def download_impls(self, implementations, stores):
        """Download the given implementations, choosing a suitable retrieval method for each.
        If any of the retrieval methods are DistributionSources and
        need confirmation, handler.confirm is called to check that the
        installation should proceed.
        @type implementations: [L{zeroinstall.injector.model.ZeroInstallImplementation}]
        @type stores: L{zeroinstall.zerostore.Stores}
        @rtype: L{zeroinstall.support.tasks.Blocker}"""
        unsafe_impls = []

        to_download = []
        for impl in implementations:
            logger.debug(_("start_downloading_impls: for %(feed)s get %(implementation)s"), {'feed': impl.feed, 'implementation': impl})
            source = self.get_best_source(impl)
            if not source:
                raise SafeException(_("Implementation %(implementation_id)s of interface %(interface)s"
                    " cannot be downloaded (no download locations given in "
                    "interface!)") % {'implementation_id': impl.id, 'interface': impl.feed.get_name()})
            to_download.append((impl, source))

            if isinstance(source, DistributionSource) and source.needs_confirmation:
                unsafe_impls.append(source.package_id)

        @tasks.async
        def download_impls():
            if unsafe_impls:
                confirm = self.handler.confirm_install(_('The following components need to be installed using native packages. '
                    'These come from your distribution, and should therefore be trustworthy, but they also '
                    'run with extra privileges. In particular, installing them may run extra services on your '
                    'computer or affect other users. You may be asked to enter a password to confirm. The '
                    'packages are:\n\n') + ('\n'.join('- ' + x for x in unsafe_impls)))
                yield confirm
                tasks.check(confirm)

            blockers = []

            for impl, source in to_download:
                blockers.append(self.download_impl(impl, source, stores))

            # Record the first error; log the rest
            error = []
            def dl_error(ex, tb = None):
                if error:
                    self.handler.report_error(ex)
                else:
                    error.append((ex, tb))
            while blockers:
                yield blockers
                tasks.check(blockers, dl_error)

                blockers = [b for b in blockers if not b.happened]
            if error:
                from zeroinstall import support
                support.raise_with_traceback(*error[0])

        if not to_download:
            return None

        return download_impls()

    def get_best_source(self, impl):
        """Return the best download source for this implementation.
        @type impl: L{zeroinstall.injector.model.ZeroInstallImplementation}
        @rtype: L{model.RetrievalMethod}"""
        if impl.download_sources:
            return impl.download_sources[0]
        return None

    def download_url(self, url, hint = None, modification_time = None, expected_size = None, mirror_url = None):
        """The most low-level method here; just download a raw URL.
        It is the caller's responsibility to ensure that dl.stream is closed.
        @param url: the location to download from
        @type url: str
        @param hint: user-defined data to store on the Download (e.g. used by the GUI)
        @param modification_time: don't download unless newer than this
        @param mirror_url: an alternative URL to try if this one fails
        @type mirror_url: str
        @rtype: L{download.Download}
        @since: 1.5"""
        if not (url.startswith('http:') or url.startswith('https:') or url.startswith('ftp:')):
            raise SafeException(_("Unknown scheme in download URL '%s'") % url)
        dl = download.Download(url, hint = hint, modification_time = modification_time, expected_size = expected_size, auto_delete = not self.external_store)
        dl.mirror = mirror_url
        self.handler.monitor_download(dl)
        dl.downloaded = self.scheduler.download(dl)
        return dl

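A low-level usage sketch (editorial; the URL is hypothetical and a ready-made fetcher is assumed) showing how the returned Download object is driven to completion:

    # Editorial sketch: a raw download driven to completion.
    from zeroinstall.support import tasks

    dl = fetcher.download_url('http://example.com/archive.tgz')   # 'fetcher' assumed to exist
    tasks.wait_for_blocker(dl.downloaded)    # raises on failure
    dl.tempfile.seek(0)
    data = dl.tempfile.read()
    dl.tempfile.close()                      # the caller must close the stream
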
class StepRunner(object):
    """The base class of all step runners.
    @since: 1.10"""

    def __init__(self, stepdata, impl_hint, may_use_mirror = True):
        """@type stepdata: L{zeroinstall.injector.model.RetrievalMethod}
        @type may_use_mirror: bool"""
        self.stepdata = stepdata
        self.impl_hint = impl_hint
        self.may_use_mirror = may_use_mirror

    def prepare(self, fetcher, blockers):
        """@type fetcher: L{Fetcher}
        @type blockers: [L{zeroinstall.support.tasks.Blocker}]"""
        pass

    @classmethod
    def class_for(cls, model):
        """@type model: L{zeroinstall.injector.model.RetrievalMethod}"""
        for subcls in cls.__subclasses__():
            if subcls.model_type == type(model):
                return subcls
        raise Exception(_("Unknown download type for '%s'") % model)

    def close(self):
        """Release any resources (called on success or failure)."""
        pass

class RenameStepRunner(StepRunner):
    """A step runner for the <rename> step.
    @since: 1.10"""

    model_type = model.RenameStep

    def apply(self, basedir):
        """@type basedir: str"""
        source = native_path_within_base(basedir, self.stepdata.source)
        dest = native_path_within_base(basedir, self.stepdata.dest)
        _ensure_dir_exists(os.path.dirname(dest))
        os.rename(source, dest)

class RemoveStepRunner(StepRunner):
    """A step runner for the <remove> step."""

    model_type = model.RemoveStep

    def apply(self, basedir):
        """@type basedir: str"""
        path = native_path_within_base(basedir, self.stepdata.path)
        support.ro_rmtree(path)

class DownloadStepRunner(StepRunner):
    """A step runner for the <archive> step.
    @since: 1.10"""

    model_type = model.DownloadSource

    def prepare(self, fetcher, blockers):
        """@type fetcher: L{Fetcher}
        @type blockers: [L{zeroinstall.support.tasks.Blocker}]"""
        self.blocker, self.stream = fetcher.download_archive(self.stepdata, impl_hint = self.impl_hint, may_use_mirror = self.may_use_mirror)
        assert self.stream
        blockers.append(self.blocker)

    def apply(self, basedir):
        """@type basedir: str"""
        from zeroinstall.zerostore import unpack
        assert self.blocker.happened
        if self.stepdata.dest is not None:
            basedir = native_path_within_base(basedir, self.stepdata.dest)
            _ensure_dir_exists(basedir)
        unpack.unpack_archive_over(self.stepdata.url, self.stream, basedir,
                extract = self.stepdata.extract,
                type = self.stepdata.type,
                start_offset = self.stepdata.start_offset or 0)

    def close(self):
        self.stream.close()

class FileStepRunner(StepRunner):
    """A step runner for the <file> step."""

    model_type = model.FileSource

    def prepare(self, fetcher, blockers):
        """@type fetcher: L{Fetcher}
        @type blockers: [L{zeroinstall.support.tasks.Blocker}]"""
        self.blocker, self.stream = fetcher.download_file(self.stepdata,
                impl_hint = self.impl_hint)
        assert self.stream
        blockers.append(self.blocker)

    def apply(self, basedir):
        """@type basedir: str"""
        import shutil
        assert self.blocker.happened
        dest = native_path_within_base(basedir, self.stepdata.dest)
        _ensure_dir_exists(os.path.dirname(dest))

        with open(dest, 'wb') as output:
            shutil.copyfileobj(self.stream, output)
        os.utime(dest, (0, 0))

    def close(self):
        self.stream.close()

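To illustrate the StepRunner dispatch (an editorial sketch): class_for() scans the subclasses defined above and matches each recipe step to its runner via model_type. The RenameStep constructor arguments shown here are an assumption for illustration only:

    # Editorial example of the class_for() dispatch.
    from zeroinstall.injector import model
    from zeroinstall.injector.fetch import StepRunner, RenameStepRunner

    step = model.RenameStep(source = 'old-name', dest = 'new-name')   # constructor signature assumed
    assert StepRunner.class_for(step) is RenameStepRunner
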
def native_path_within_base(base, crossplatform_path):
    """Takes a cross-platform relative path (i.e. using forward slashes, even on Windows)
    and returns the absolute, platform-native version of the path.
    If the path does not resolve to a location within `base`, a SafeException is raised.
    @type base: str
    @type crossplatform_path: str
    @rtype: str
    @since: 1.10"""
    assert os.path.isabs(base)
    if crossplatform_path.startswith("/"):
        raise SafeException("path %r is not within the base directory" % (crossplatform_path,))
    native_path = os.path.join(*crossplatform_path.split("/"))
    fullpath = os.path.realpath(os.path.join(base, native_path))
    base = os.path.realpath(base)
    if not fullpath.startswith(base + os.path.sep):
        raise SafeException("path %r is not within the base directory" % (crossplatform_path,))
    return fullpath

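A short worked example (editorial) of the path-containment check; the base directory is hypothetical:

    # Editorial example: paths that escape the base directory are rejected.
    from zeroinstall.injector.fetch import native_path_within_base
    from zeroinstall import SafeException

    base = '/tmp/0install-unpack'            # hypothetical absolute base directory
    print(native_path_within_base(base, 'sub/dir/file'))
    # -> /tmp/0install-unpack/sub/dir/file (on a POSIX system)
    try:
        native_path_within_base(base, '../escape')
    except SafeException as ex:
        print(ex)                            # "path '../escape' is not within the base directory"
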
def _ensure_dir_exists(dest):
    """@type dest: str"""
    if not os.path.isdir(dest):
        os.makedirs(dest)