def _get_sigs_from_gpg_status_stream(status_r, child, errors):
	"""Read GnuPG status messages from status_r and collect the signatures
	they describe. When done, reap 'child'.
	If no signatures were found, raise SafeException (quoting GPG's stderr
	output when it is non-empty)."""
	collected = []

	# Map status keywords to the signature class each one produces.
	sig_classes = {'VALIDSIG': ValidSig, 'BADSIG': BadSig, 'ERRSIG': ErrSig}

	# Note: we don't currently error out on bad signatures when a good
	# signature is present too.

	for raw in status_r:
		assert raw.endswith('\n')
		if not raw.startswith('[GNUPG:] '):
			# The docs say every line starts with this, but if auto-key-retrieve
			# is on then they might not. See bug #3420548
			warn("Invalid output from GnuPG: %r", raw)
			continue
		fields = raw[9:-1].split(' ')
		make_sig = sig_classes.get(fields[0])
		if make_sig is not None:
			collected.append(make_sig(fields[1:]))

	child.wait()	# (ignore exit status)

	errors.seek(0)
	error_messages = errors.read().strip()
	errors.close()

	if not collected:
		if error_messages:
			raise SafeException(_("No signatures found. Errors from GPG:\n%s") % error_messages)
		raise SafeException(_("No signatures found. No error messages from GPG."))

	if error_messages:
		# Attach the warnings to all the signatures, in case they're useful.
		for sig in collected:
			sig.messages = error_messages

	return collected
def native_path_within_base(base, crossplatform_path):
	"""Takes a cross-platform relative path (i.e using forward slashes, even on windows)
	and returns the absolute, platform-native version of the path.
	If the path does not resolve to a location within `base`, a SafeError is raised.
	@since: 1.10
	"""
	assert os.path.isabs(base)
	# An absolute cross-platform path can never be "within" the base directory.
	if crossplatform_path.startswith("/"):
		raise SafeException("path %r is not within the base directory" % (crossplatform_path,))
	# Re-join on the native separator, then resolve symlinks and ".." segments
	# before checking containment.
	segments = crossplatform_path.split("/")
	resolved = os.path.realpath(os.path.join(base, os.path.join(*segments)))
	real_base = os.path.realpath(base)
	if not resolved.startswith(real_base + os.path.sep):
		raise SafeException("path %r is not within the base directory" % (crossplatform_path,))
	return resolved
def download_archive(self, download_source, force = False, impl_hint = None, may_use_mirror = False):
	"""Fetch an archive. You should normally call L{download_impl}
	instead, since it handles other kinds of retrieval method too.
	It is the caller's responsibility to ensure that the returned stream is closed.
	If impl_hint is from a local feed and the url is relative, just opens the
	existing file for reading.
	@type download_source: L{model.DownloadSource}
	@type force: bool
	@type may_use_mirror: bool
	@rtype: (L{Blocker} | None, file)"""
	from zeroinstall.zerostore import unpack

	# MIME type: explicit 'type' attribute first, otherwise guess from the URL.
	mime_type = download_source.type or unpack.type_from_url(download_source.url)
	if not mime_type:
		raise SafeException(_("No 'type' attribute on archive, and I can't guess from the name (%s)") % download_source.url)
	if not self.external_store:
		unpack.check_type_ok(mime_type)

	# A URL without a scheme is a file relative to a local feed.
	if '://' not in download_source.url:
		return self._download_local_file(download_source, impl_hint)

	mirror = self._get_archive_mirror(download_source) if may_use_mirror else None

	if self.config.handler.dry_run:
		print(_("[dry-run] downloading archive {url}").format(url = download_source.url))
	dl = self.download_url(download_source.url, hint = impl_hint, mirror_url = mirror)
	if download_source.size is not None:
		dl.expected_size = download_source.size + (download_source.start_offset or 0)
	# (else don't know sizes for mirrored archives)
	return (dl.downloaded, dl.tempfile)
def download_url(self, url, hint = None, modification_time = None, expected_size = None, mirror_url = None, timeout = None):
	"""The most low-level method here; just download a raw URL.
	It is the caller's responsibility to ensure that dl.stream is closed.
	@param url: the location to download from
	@type url: str
	@param hint: user-defined data to store on the Download (e.g. used by the GUI)
	@param modification_time: don't download unless newer than this
	@param mirror_url: an alternative URL to try if this one fails
	@type mirror_url: str
	@param timeout: create a blocker which triggers if a download hangs for this long
	@type timeout: float | None
	@rtype: L{download.Download}
	@since: 1.5"""
	# Only plain HTTP(S)/FTP URLs are supported.
	if not url.startswith(('http:', 'https:', 'ftp:')):
		raise SafeException(_("Unknown scheme in download URL '%s'") % url)

	dl = download.Download(url,
			hint = hint,
			modification_time = modification_time,
			expected_size = expected_size,
			auto_delete = not self.external_store)
	dl.mirror = mirror_url
	self.handler.monitor_download(dl)
	if timeout is not None:
		dl.timeout = tasks.Blocker('Download timeout')
	dl.downloaded = self.scheduler.download(dl, timeout = timeout)
	return dl
def _add_to_external_store(self, required_digest, steps, streams):
	"""Hand the downloaded archives to the external store helper
	(self.external_store) for extraction and registration.
	@param required_digest: digest the extracted tree must have
	@type required_digest: str
	@param steps: the recipe steps corresponding to each stream
	@param streams: open file objects holding the downloaded archives"""
	from zeroinstall.zerostore.unpack import type_from_url

	# Materialise as lists: on Python 3 map() returns a one-shot iterator,
	# which would make the len() calls below fail and would leave 'paths'
	# exhausted (empty) for the clean-up loop at the end.
	paths = [stream.name for stream in streams]
	extracts = [step.extract or "" for step in steps]
	types = [step.type or type_from_url(step.url) for step in steps]

	# Combine archive path, extract directory and MIME type arguments in an
	# alternating fashion: path1, extract1, type1, path2, ...
	args = [None] * (len(paths) + len(extracts) + len(types))
	args[::3] = paths
	args[1::3] = extracts
	args[2::3] = types

	# Close file handles to allow external processes access.
	for stream in streams:
		stream.close()

	# Delegate extracting the archives to the external tool.
	import subprocess
	retval = subprocess.call([self.external_store, "add", required_digest] + args)

	# Delete the temporary files.
	for path in paths:
		os.remove(path)

	if retval != 0:
		raise SafeException(_("Extracting with external store failed"))
def do_exec_binding(self, binding, iface):
	"""Set up an <executable> binding: create (if needed) a launcher in the
	cache directory for 'name', expose it via $PATH or an environment
	variable, and record the command's argv in the environment for the
	launcher to pick up."""
	assert iface is not None
	name = binding.name
	# Reject names that could escape the cache directory or break quoting.
	if '/' in name or name.startswith('.') or "'" in name:
		raise SafeException("Invalid <executable> name '%s'" % name)
	cache_dir = basedir.save_cache_path(namespaces.config_site, namespaces.config_prog, 'executables', name)
	if os.name == "nt":
		launcher = os.path.join(cache_dir, name + ".exe")
	else:
		launcher = os.path.join(cache_dir, name)

	if not self._checked_runenv:
		self._check_runenv()

	if not os.path.exists(launcher):
		if os.name == "nt":
			# Copy runenv.cli.template to ~/.cache/0install.net/injector/executables/$name/$name
			import shutil
			shutil.copyfile(os.path.join(os.path.dirname(__file__), "runenv.cli.template"), launcher)
		else:
			# Symlink ~/.cache/0install.net/injector/executables/$name/$name to runenv.py
			os.symlink('../../runenv.py', launcher)
		os.chmod(cache_dir, 0o500)

	if binding.in_path:
		path = os.environ["PATH"] = cache_dir + os.pathsep + os.environ["PATH"]
		logger.info("PATH=%s", path)
	else:
		os.environ[name] = launcher
		logger.info("%s=%s", name, launcher)

	args = self.build_command(iface, binding.command)
	if os.name == "nt":
		os.environ["0install-runenv-file-" + name + ".exe"] = args[0]
		os.environ["0install-runenv-args-" + name + ".exe"] = support.windows_args_escape(args[1:])
	else:
		import json
		os.environ["0install-runenv-" + name] = json.dumps(args)
def do_exec_binding(self, binding, iface):
	"""Set up an <executable> binding: ensure a launcher symlink exists in the
	cache for 'name', publish it via $PATH or an environment variable, and
	store the command's argv as JSON in the environment."""
	assert iface is not None
	name = binding.name
	# Reject names that could escape the cache directory or break quoting.
	if '/' in name or name.startswith('.') or "'" in name:
		raise SafeException("Invalid <executable> name '%s'" % name)
	cache_dir = basedir.save_cache_path(namespaces.config_site, namespaces.config_prog, 'executables', name)
	launcher = os.path.join(cache_dir, name)

	if not self._checked_runenv:
		self._check_runenv()

	if not os.path.exists(launcher):
		# Symlink ~/.cache/0install.net/injector/executables/$name/$name to runenv.py
		os.symlink('../../runenv.py', launcher)
		os.chmod(cache_dir, 0o500)

	if binding.in_path:
		path = os.environ["PATH"] = cache_dir + os.pathsep + os.environ["PATH"]
		info("PATH=%s", path)
	else:
		os.environ[name] = launcher
		info("%s=%s", name, launcher)

	import json
	args = self.build_command(iface, binding.command)
	os.environ["0install-runenv-" + name] = json.dumps(args)
def download_impl(self, impl, retrieval_method, stores, force=False):
	"""Download an implementation.
	@param impl: the selected implementation
	@type impl: L{model.ZeroInstallImplementation}
	@param retrieval_method: a way of getting the implementation (e.g. an Archive or a Recipe)
	@type retrieval_method: L{model.RetrievalMethod}
	@param stores: where to store the downloaded implementation
	@type stores: L{zerostore.Stores}
	@param force: whether to abort and restart an existing download
	@rtype: L{tasks.Blocker}"""
	assert impl
	assert retrieval_method

	if not impl.id.startswith('package:'):
		from zeroinstall.zerostore import manifest
		alg = impl.id.split('=', 1)[0]
		if alg not in manifest.algorithms:
			raise SafeException(_("Unknown digest algorithm '%(algorithm)s' for '%(implementation)s' version %(version)s") %
					{'algorithm': alg, 'implementation': impl.feed.get_name(), 'version': impl.get_version()})

	def download_impl():
		if isinstance(retrieval_method, DownloadSource):
			blocker, stream = self.download_archive(retrieval_method, force=force, impl_hint=impl)
			yield blocker
			tasks.check(blocker)

			stream.seek(0)
			self._add_to_cache(stores, retrieval_method, stream)
		elif isinstance(retrieval_method, DistroKitSource):
			dl = self.handler.get_download(retrieval_method.id, force=force, hint=impl, factory=distrokit.Download)
			yield dl.downloaded
			tasks.check(dl.downloaded)
		elif isinstance(retrieval_method, Recipe):
			blocker = self.cook(impl.id, retrieval_method, stores, force, impl_hint=impl)
			yield blocker
			tasks.check(blocker)
		else:
			raise Exception(_("Unknown download type for '%s'") % retrieval_method)

		self.handler.impl_added_to_store(impl)
	# Bug fix: the decorator was written "@tasks. async def", which does not
	# parse. Since "async" is a reserved word in modern Python, apply the
	# tasks.async decorator through getattr instead of attribute syntax.
	download_impl = getattr(tasks, 'async')(download_impl)
	return download_impl()
def _download_local_file(self, download_source, impl_hint): # Relative path if impl_hint is None or not impl_hint.feed.local_path: raise SafeException(_("Relative URL '{url}' in non-local feed '{feed}'").format( url = download_source.url, feed = impl_hint.feed)) local_file = os.path.join(os.path.dirname(impl_hint.feed.local_path), download_source.url) try: size = os.path.getsize(local_file) if size != download_source.size: raise SafeException(_("Wrong size for {path}: feed says {expected}, but actually {actual} bytes").format( path = local_file, expected = download_source.size, actual = size)) return (None, open(local_file, 'rb')) except OSError as ex: raise SafeException(str(ex)) # (error already includes path)
def download_impls(self, implementations, stores): """Download the given implementations, choosing a suitable retrieval method for each. If any of the retrieval methods are DistributionSources and need confirmation, handler.confirm is called to check that the installation should proceed. """ unsafe_impls = [] to_download = [] for impl in implementations: logger.debug(_("start_downloading_impls: for %(feed)s get %(implementation)s"), {'feed': impl.feed, 'implementation': impl}) source = self.get_best_source(impl) if not source: raise SafeException(_("Implementation %(implementation_id)s of interface %(interface)s" " cannot be downloaded (no download locations given in " "interface!)") % {'implementation_id': impl.id, 'interface': impl.feed.get_name()}) to_download.append((impl, source)) if isinstance(source, DistributionSource) and source.needs_confirmation: unsafe_impls.append(source.package_id) @tasks.async def download_impls(): if unsafe_impls: confirm = self.handler.confirm_install(_('The following components need to be installed using native packages. ' 'These come from your distribution, and should therefore be trustworthy, but they also ' 'run with extra privileges. In particular, installing them may run extra services on your ' 'computer or affect other users. You may be asked to enter a password to confirm. The ' 'packages are:\n\n') + ('\n'.join('- ' + x for x in unsafe_impls))) yield confirm tasks.check(confirm) blockers = [] for impl, source in to_download: blockers.append(self.download_impl(impl, source, stores)) # Record the first error log the rest error = [] def dl_error(ex, tb = None): if error: self.handler.report_error(ex) else: error.append((ex, tb)) while blockers: yield blockers tasks.check(blockers, dl_error) blockers = [b for b in blockers if not b.happened] if error: from zeroinstall import support support.raise_with_traceback(*error[0]) if not to_download: return None return download_impls()
def check_stream(stream):
	"""Verify the GPG signature at the end of stream.
	stream must be seekable.
	@type stream: file
	@return: (stream, [Signatures])
	@rtype: (file, [L{Signature}])"""
	stream.seek(0)
	magic = stream.read(6)
	stream.seek(0)

	if magic == b"<?xml ":
		# XML feed: the signature lives in a comment at the end.
		return _check_xml_stream(stream)
	if magic == b'-----B':
		raise SafeException(_("Plain GPG-signed feeds no longer supported"))
	raise SafeException(_("This is not a Zero Install feed! It should be an XML document, but it starts:\n%s") % repr(stream.read(120)))
def check_stream(stream):
	"""Pass stream through gpg --decrypt to get the data, the error text,
	and a list of signatures (good or bad). If stream starts with "<?xml "
	then get the signature from a comment at the end instead (and the returned
	data is the original stream). stream must be seekable.
	@note: Stream returned may or may not be the one passed in. Be careful!
	@return: (data_stream, [Signatures])"""
	stream.seek(0)
	leading_bytes = stream.read(6)
	stream.seek(0)

	if leading_bytes == b"<?xml ":
		return _check_xml_stream(stream)
	elif leading_bytes == b'-----B':
		# Old-style armoured feeds are rejected outright.
		raise SafeException(_("Plain GPG-signed feeds no longer supported"))
	else:
		raise SafeException(_("This is not a Zero Install feed! It should be an XML document, but it starts:\n%s") % repr(stream.read(120)))
def import_key(stream):
	"""Run C{gpg --import} with this stream as stdin.
	@param stream: the key data to import
	@raise SafeException: if gpg exits with a non-zero status"""
	with tempfile.TemporaryFile(mode='w+t') as errors:
		child = _run_gpg(['--quiet', '--import', '--batch'],
				stdin=stream, stderr=errors)

		status = child.wait()

		errors.seek(0)
		error_messages = errors.read().strip()

	if status != 0:
		if error_messages:
			raise SafeException(_("Errors from 'gpg --import':\n%s") % error_messages)
		else:
			raise SafeException(_("Non-zero exit code %d from 'gpg --import'") % status)
	elif error_messages:
		# logger.warn is a deprecated alias for warning(); use the real name.
		logger.warning(_("Warnings from 'gpg --import':\n%s") % error_messages)
def _execute(root_impl, prog_args, dry_run, main, wrapper): assert root_impl is not None if root_impl.id.startswith('package:'): main = main or root_impl.main prog_path = main else: if main is None: main = root_impl.main elif main.startswith('/'): main = main[1:] elif root_impl.main: main = os.path.join(os.path.dirname(root_impl.main), main) if main is not None: prog_path = os.path.join(_get_implementation_path(root_impl.id), main) if main is None: raise SafeException(_("Implementation '%s' cannot be executed directly; it is just a library " "to be used by other programs (or missing 'main' attribute)") % root_impl) if not os.path.exists(prog_path): raise SafeException(_("File '%(program_path)s' does not exist.\n" "(implementation '%(implementation_id)s' + program '%(main)s')") % {'program_path': prog_path, 'implementation_id': root_impl.id, 'main': main}) if wrapper: prog_args = ['-c', wrapper + ' "$@"', '-', prog_path] + list(prog_args) prog_path = '/bin/sh' if dry_run: print _("Would execute: %s") % ' '.join([prog_path] + prog_args) else: info(_("Executing: %s"), prog_path) sys.stdout.flush() sys.stderr.flush() try: os.execl(prog_path, prog_path, *prog_args) except OSError, ex: raise SafeException(_("Failed to run '%(program_path)s': %(exception)s") % {'program_path': prog_path, 'exception': str(ex)})
def download_archive(self, download_source, force=False, impl_hint=None):
	"""Fetch an archive. You should normally call L{download_impl}
	instead, since it handles other kinds of retrieval method too.
	@type download_source: L{model.DownloadSource}
	@raise SafeException: if the URL scheme is unknown or no MIME type can be determined"""
	from zeroinstall.zerostore import unpack

	url = download_source.url
	if not (url.startswith('http:') or url.startswith('https:') or url.startswith('ftp:')):
		raise SafeException(_("Unknown scheme in download URL '%s'") % url)

	mime_type = download_source.type
	if not mime_type:
		mime_type = unpack.type_from_url(download_source.url)
	if not mime_type:
		raise SafeException(_("No 'type' attribute on archive, and I can't guess from the name (%s)") % download_source.url)
	unpack.check_type_ok(mime_type)
	dl = self.download_url(download_source.url, hint=impl_hint)
	if download_source.size is not None:
		# Guard against feeds that give no size: None + int would raise
		# TypeError here (the other download_archive variants already guard).
		dl.expected_size = download_source.size + (download_source.start_offset or 0)
	return (dl.downloaded, dl.tempfile)
def download_archive(self, download_source, force=False, impl_hint=None, may_use_mirror=False):
	"""Fetch an archive. You should normally call L{download_impl}
	instead, since it handles other kinds of retrieval method too.
	It is the caller's responsibility to ensure that the returned stream is closed.
	"""
	from zeroinstall.zerostore import unpack

	url = download_source.url
	# Only plain HTTP(S)/FTP URLs are supported here.
	if not url.startswith(('http:', 'https:', 'ftp:')):
		raise SafeException(_("Unknown scheme in download URL '%s'") % url)

	# MIME type: explicit attribute first, then guess from the URL.
	mime_type = download_source.type or unpack.type_from_url(download_source.url)
	if not mime_type:
		raise SafeException(_("No 'type' attribute on archive, and I can't guess from the name (%s)") % download_source.url)
	if not self.external_store:
		unpack.check_type_ok(mime_type)

	mirror = self._get_archive_mirror(download_source) if may_use_mirror else None

	dl = self.download_url(download_source.url, hint=impl_hint, mirror_url=mirror)
	if download_source.size is not None:
		dl.expected_size = download_source.size + (download_source.start_offset or 0)
	# (else don't know sizes for mirrored archives)
	return (dl.downloaded, dl.tempfile)
def apply(self, basedir):
	"""Perform this <rename> step inside basedir.
	@type basedir: str"""
	source = native_path_within_base(basedir, self.stepdata.source)
	dest = native_path_within_base(basedir, self.stepdata.dest)
	_ensure_dir_exists(os.path.dirname(dest))
	try:
		os.rename(source, dest)
	except OSError:
		if os.path.exists(source):
			raise
		# Python by default reports the path of the destination in this case
		raise SafeException("<rename> source '{source}' does not exist".format(
			source = self.stepdata.source))
def _get_sigs_from_gpg_status_stream(status_r, child, errors):
	"""Read messages from status_r and collect signatures from it.
	When done, reap 'child'.
	If there are no signatures, throw SafeException (using errors
	for the error message if non-empty)."""
	import logging
	sigs = []

	# Should we error out on bad signatures, even if there's a good
	# signature too?

	for line in status_r:
		assert line.endswith('\n')
		if not line.startswith('[GNUPG:] '):
			# GnuPG documents that every status line starts with this prefix,
			# but with auto-key-retrieve enabled some lines don't — asserting
			# here would crash on valid runs, so skip such lines instead.
			logging.warning("Invalid output from GnuPG: %r", line)
			continue
		line = line[9:-1]
		split_line = line.split(' ')
		code = split_line[0]
		args = split_line[1:]
		if code == 'VALIDSIG':
			sigs.append(ValidSig(args))
		elif code == 'BADSIG':
			sigs.append(BadSig(args))
		elif code == 'ERRSIG':
			sigs.append(ErrSig(args))

	child.wait()	# (exit status not used)

	errors.seek(0)
	error_messages = errors.read().strip()
	errors.close()

	if not sigs:
		if error_messages:
			raise SafeException(_("No signatures found. Errors from GPG:\n%s") % error_messages)
		else:
			raise SafeException(_("No signatures found. No error messages from GPG."))

	return sigs
def download_impls(self, implementations, stores):
	"""Download the given implementations, choosing a suitable retrieval method for each.
	@return: a blocker that triggers when all downloads have finished, or None if there is nothing to download
	@rtype: L{tasks.Blocker} | None"""
	blockers = []

	to_download = []
	for impl in implementations:
		debug(_("start_downloading_impls: for %(feed)s get %(implementation)s"),
				{'feed': impl.feed, 'implementation': impl})
		source = self.get_best_source(impl)
		if not source:
			raise SafeException(_("Implementation %(implementation_id)s of interface %(interface)s"
					" cannot be downloaded (no download locations given in "
					"interface!)") % {'implementation_id': impl.id, 'interface': impl.feed.get_name()})
		to_download.append((impl, source))

	for impl, source in to_download:
		blockers.append(self.download_impl(impl, source, stores))

	if not blockers:
		return None

	def download_impls(blockers):
		# Record the first error; log the rest.
		error = []
		def dl_error(ex, tb=None):
			if error:
				self.handler.report_error(ex)
			else:
				error.append(ex)
		while blockers:
			yield blockers
			tasks.check(blockers, dl_error)
			blockers = [b for b in blockers if not b.happened]
		if error:
			raise error[0]
	# Bug fix: the decorator was written "@tasks. async def", which does not
	# parse. Since "async" is a reserved word in modern Python, apply the
	# tasks.async decorator through getattr instead of attribute syntax.
	download_impls = getattr(tasks, 'async')(download_impls)

	return download_impls(blockers)
def download_impl(method):
	"""Cook 'method' (normalised to a Recipe). On a download failure, retry
	once through the implementation mirror before re-raising the original
	error."""
	first_error = None
	while True:
		if not isinstance(method, Recipe):
			# Turn an individual retrieval method into a single-step Recipe.
			step = method
			method = Recipe()
			method.steps.append(step)

		try:
			blocker = self.cook(required_digest, method, stores,
					impl_hint=impl, dry_run=self.handler.dry_run,
					may_use_mirror=first_error is None)
			yield blocker
			tasks.check(blocker)
		except download.DownloadError as ex:
			if first_error:
				# Mirror failed too: report the original error.
				logger.info("Error from mirror: %s", ex)
				raise first_error
			first_error = ex
			mirror_url = self._get_impl_mirror(impl)
			if mirror_url is not None:
				logger.info("%s: trying implementation mirror at %s", ex, mirror_url)
				method = model.DownloadSource(impl, mirror_url, None, None,
						type='application/x-bzip-compressed-tar')
				continue		# Retry
			raise
		except SafeException as ex:
			raise SafeException("Error fetching {url} {version}: {ex}".format(
					url=impl.feed.url,
					version=impl.get_version(),
					ex=ex))
		break

	self.handler.impl_added_to_store(impl)
def fetch_key_info():
	"""Wait for the key-information download, parse the <key-lookup> reply
	and append its children to self.info; on any failure, record a 'bad'
	vote carrying the error message instead."""
	reply = dl.tempfile
	try:
		yield dl.downloaded
		self.blocker = None
		tasks.check(dl.downloaded)
		reply.seek(0)
		doc = minidom.parse(reply)
		if doc.documentElement.localName != 'key-lookup':
			raise SafeException(_('Expected <key-lookup>, not <%s>') % doc.documentElement.localName)
		self.info += doc.documentElement.childNodes
	except Exception as ex:
		# Synthesise a "bad" vote so the failure is visible to the user.
		doc = minidom.parseString('<item vote="bad"/>')
		root = doc.documentElement
		root.appendChild(doc.createTextNode(_('Error getting key information: %s') % ex))
		self.info.append(root)
	finally:
		reply.close()
def _download_with_external_fetcher(self, implementations):
	"""Delegate downloading to the external fetcher process, feeding it the
	implementation list as a feed-like XML document on stdin.
	@type implementations: [L{zeroinstall.injector.model.ZeroInstallImplementation}]
	@raise SafeException: if the external fetcher exits with a non-zero status"""
	# Serialize implementation list to XML
	from xml.dom import minidom, XMLNS_NAMESPACE
	from zeroinstall.injector.namespaces import XMLNS_IFACE
	from zeroinstall.injector.qdom import Prefixes
	doc = minidom.getDOMImplementation().createDocument(XMLNS_IFACE, "interface", None)
	root = doc.documentElement
	root.setAttributeNS(XMLNS_NAMESPACE, 'xmlns', XMLNS_IFACE)
	for impl in implementations:
		root.appendChild(impl._toxml(doc, Prefixes(XMLNS_IFACE)))

	# Pipe XML into external process
	import subprocess
	process = subprocess.Popen(self.external_fetcher, stdin=subprocess.PIPE)
	# The pipe carries bytes: encode explicitly so this works on Python 3
	# (communicate() rejects str there) and with non-ASCII feed content.
	process.communicate((doc.toxml() + "\n").encode('utf-8'))

	if process.returncode != 0:
		raise SafeException(_("Download with external fetcher failed"))
def check_stream(stream):
	"""Pass stream through gpg --decrypt to get the data, the error text,
	and a list of signatures (good or bad). If stream starts with "<?xml "
	then get the signature from a comment at the end instead (and the returned
	data is the original stream). stream must be seekable.
	@note: Stream returned may or may not be the one passed in. Be careful!
	@return: (data_stream, [Signatures])"""
	stream.seek(0)
	leading_bytes = stream.read(6)
	stream.seek(0)

	if leading_bytes == "<?xml ":
		return _check_xml_stream(stream)
	if leading_bytes == '-----B':
		import warnings
		warnings.warn(_("Plain GPG-signed feeds are deprecated!"), DeprecationWarning, stacklevel = 2)
		# Rewind the underlying file descriptor too, as gpg reads it directly.
		os.lseek(stream.fileno(), 0, 0)
		return _check_plain_stream(stream)
	raise SafeException(_("This is not a Zero Install feed! It should be an XML document, but it starts:\n%s") % repr(stream.read(120)))
def download_impl(self, impl, retrieval_method, stores, force=False):
	"""Download an implementation.
	@param impl: the selected implementation
	@type impl: L{model.ZeroInstallImplementation}
	@param retrieval_method: a way of getting the implementation (e.g. an Archive or a Recipe)
	@type retrieval_method: L{model.RetrievalMethod}
	@param stores: where to store the downloaded implementation
	@type stores: L{zerostore.Stores}
	@type force: bool
	@rtype: L{tasks.Blocker}"""
	assert impl
	assert retrieval_method

	if isinstance(retrieval_method, DistributionSource):
		return retrieval_method.install(self.handler)

	from zeroinstall.zerostore import manifest, parse_algorithm_digest_pair
	# Pick the strongest digest algorithm we know about.
	best = None
	for digest in impl.digests:
		alg_name, digest_value = parse_algorithm_digest_pair(digest)
		alg = manifest.algorithms.get(alg_name, None)
		if alg and (best is None or best.rating < alg.rating):
			best = alg
			required_digest = digest

	if best is None:
		if not impl.digests:
			raise SafeException(_("No <manifest-digest> given for '%(implementation)s' version %(version)s") %
					{'implementation': impl.feed.get_name(), 'version': impl.get_version()})
		raise SafeException(_("Unknown digest algorithms '%(algorithms)s' for '%(implementation)s' version %(version)s") %
				{'algorithms': impl.digests, 'implementation': impl.feed.get_name(), 'version': impl.get_version()})

	def download_impl(method):
		original_exception = None
		while True:
			if not isinstance(method, Recipe):
				# turn an individual method into a single-step Recipe
				step = method
				method = Recipe()
				method.steps.append(step)

			try:
				blocker = self.cook(required_digest, method, stores,
						impl_hint=impl, dry_run=self.handler.dry_run,
						may_use_mirror=original_exception is None)
				yield blocker
				tasks.check(blocker)
			except download.DownloadError as ex:
				if original_exception:
					logger.info("Error from mirror: %s", ex)
					raise original_exception
				else:
					original_exception = ex
				mirror_url = self._get_impl_mirror(impl)
				if mirror_url is not None:
					logger.info("%s: trying implementation mirror at %s", ex, mirror_url)
					method = model.DownloadSource(impl, mirror_url, None, None,
							type='application/x-bzip-compressed-tar')
					continue		# Retry
				raise
			except SafeException as ex:
				raise SafeException("Error fetching {url} {version}: {ex}".format(
						url=impl.feed.url,
						version=impl.get_version(),
						ex=ex))
			break

		self.handler.impl_added_to_store(impl)
	# Bug fix: the decorator was written "@tasks. async def", which does not
	# parse. Since "async" is a reserved word in modern Python, apply the
	# tasks.async decorator through getattr instead of attribute syntax.
	download_impl = getattr(tasks, 'async')(download_impl)

	return download_impl(retrieval_method)
def build_command(self, command_iface, command_name, user_command=None):
	"""Create a list of strings to be passed to exec to run the <command>s in the selections.
	@param command_iface: the interface of the program being run
	@type command_iface: str
	@param command_name: the name of the command being run
	@type command_name: str
	@param user_command: a custom command to use instead
	@type user_command: L{model.Command}
	@return: the argument list
	@rtype: [str]"""
	if not (command_name or user_command):
		raise SafeException(_("Can't run: no command specified!"))

	prog_args = []
	sels = self.selections.selections

	# A command may declare a <runner>, which is itself a command; walk that
	# chain, prepending each command's arguments to the final argv.
	while command_name or user_command:
		command_sel = sels[command_iface]

		if user_command is None:
			command = command_sel.get_command(command_name)
		else:
			command = user_command
			user_command = None

		arg_list = []

		# Add extra arguments for runner
		runner = command.get_runner()
		if runner:
			command_iface = runner.interface
			command_name = runner.command
			_process_args(arg_list, runner.qdom)
		else:
			command_iface = None
			command_name = None

		# Add main program path
		command_path = command.path
		if command_path is not None:
			if command_sel.id.startswith('package:'):
				prog_path = command_path
			else:
				if command_path.startswith('/'):
					raise SafeException(_("Command path must be relative, but '%s' starts with '/'!") % command_path)
				prog_path = os.path.join(command_sel.get_path(self.stores), command_path)

			assert prog_path is not None

			if not os.path.exists(prog_path):
				raise SafeException(_("File '%(program_path)s' does not exist.\n"
						"(implementation '%(implementation_id)s' + program '%(main)s')") %
						{'program_path': prog_path, 'implementation_id': command_sel.id,
						 'main': command_path})

			arg_list.append(prog_path)

		# Add extra arguments for program
		_process_args(arg_list, command.qdom)

		prog_args = arg_list + prog_args

	# Each command is run by the next, but the last one is run by exec, and we
	# need a path for that.
	if command.path is None:
		raise SafeException("Missing 'path' attribute on <command>")

	return prog_args
def execute_selections(selections, prog_args, dry_run=False, main=None, wrapper=None, stores=None):
	"""Execute program. On success, doesn't return. On failure, raises an Exception.
	Returns normally only for a successful dry run.
	@param selections: the selected versions
	@type selections: L{selections.Selections}
	@param prog_args: arguments to pass to the program
	@type prog_args: [str]
	@param dry_run: if True, just print a message about what would have happened
	@type dry_run: bool
	@param main: the name of the binary to run, or None to use the default
	@type main: str
	@param wrapper: a command to use to actually run the binary, or None to run the binary directly
	@type wrapper: str
	@since: 0.27
	@precondition: All implementations are in the cache.
	"""
	#assert stores is not None
	if stores is None:
		from zeroinstall import zerostore
		stores = zerostore.Stores()

	setup = Setup(stores, selections)

	commands = selections.commands
	if main is None:
		user_command = None
	else:
		# Replace the first command with the user's input.
		if main.startswith('/'):
			main = main[1:]			# User specified a path relative to the package root
		else:
			old_path = commands[0].path if commands else None
			if not old_path:
				raise SafeException(_("Can't use a relative replacement main when there is no original one!"))
			main = os.path.join(os.path.dirname(old_path), main)	# User main is relative to command's name
		# Copy all child nodes (e.g. <runner>) except for the arguments
		user_command_element = qdom.Element(namespaces.XMLNS_IFACE, 'command', {'path': main})
		if commands:
			for child in commands[0].qdom.childNodes:
				if child.uri == namespaces.XMLNS_IFACE and child.name == 'arg':
					continue
				user_command_element.childNodes.append(child)
		user_command = Command(user_command_element, None)

	setup.prepare_env()
	prog_args = setup.build_command(selections.interface, selections.command, user_command) + prog_args

	if wrapper:
		# Run through a shell so the wrapper command can wrap the real binary.
		prog_args = ['/bin/sh', '-c', wrapper + ' "$@"', '-'] + list(prog_args)

	if dry_run:
		print(_("Would execute: %s") % ' '.join(prog_args))
	else:
		logger.info(_("Executing: %s"), prog_args)
		sys.stdout.flush()
		sys.stderr.flush()
		try:
			env = os.environ.copy()
			# Don't leak our GPG settings into the child process.
			for x in ['0install-runenv-ZEROINSTALL_GPG', 'ZEROINSTALL_GPG']:
				if x in env:
					del env[x]

			os.execve(prog_args[0], prog_args, env)
		except OSError as ex:
			raise SafeException(_("Failed to run '%(program_path)s': %(exception)s") %
					{'program_path': prog_args[0], 'exception': str(ex)})
def _check_xml_stream(stream):
    """Verify the GPG signature embedded in an XML feed.

    The signature is stored base64-encoded inside a trailing XML comment
    beginning with '<!-- Base64 Signature'.  The data before that comment is
    written to a temporary file and fed to GnuPG for verification.

    @param stream: seekable binary stream containing the signed XML
    @return: (stream rewound to the start, list of signature objects)
    @raise SafeException: if the signature block is missing or malformed,
    or if GPG reports no signatures at all.
    """
    xml_comment_start = b'<!-- Base64 Signature'

    data_to_check = stream.read()

    # Find the LAST signature comment; anything before it is the signed data
    last_comment = data_to_check.rfind(b'\n' + xml_comment_start)
    if last_comment < 0:
        raise SafeException(_("No signature block in XML. Maybe this file isn't signed?"))
    last_comment += 1	# Include new-line in data

    # Copy the file to 'data', without the signature
    # Copy the signature to 'sig'
    with tempfile.TemporaryFile(mode='w+b') as data:
        data.write(data_to_check[:last_comment])
        data.flush()
        # Rewind at the OS level: the fd (not the Python object) is passed to GPG as stdin
        os.lseek(data.fileno(), 0, 0)

        with tempfile.TemporaryFile('w+t') as errors:
            sig_lines = data_to_check[last_comment:].split(b'\n')
            if sig_lines[0].strip() != xml_comment_start:
                raise SafeException(_('Bad signature block: extra data on comment line'))
            # Drop trailing blank lines before checking the closing comment marker
            while sig_lines and not sig_lines[-1].strip():
                del sig_lines[-1]
            if sig_lines[-1].strip() != b'-->':
                raise SafeException(_('Bad signature block: last line is not end-of-comment'))
            sig_data = b'\n'.join(sig_lines[1:-1])
            # Defence in depth: only base64 characters may appear in the block
            if re.match(b'^[ A-Za-z0-9+/=\n]+$', sig_data) is None:
                raise SafeException(_("Invalid characters found in base 64 encoded signature"))
            try:
                if hasattr(base64, 'decodebytes'):
                    sig_data = base64.decodebytes(sig_data)	# Python 3
                else:
                    sig_data = base64.decodestring(sig_data)	# Python 2
            except Exception as ex:
                raise SafeException(_("Invalid base 64 encoded signature: %s") % str(ex))

            # The detached signature must be a named file for GPG's command line;
            # delete=False because GPG opens it by name after we close it (Windows)
            with tempfile.NamedTemporaryFile(prefix='injector-sig-', mode='wb', delete=False) as sig_file:
                sig_file.write(sig_data)

            try:
                # Note: Should ideally close status_r in the child, but we want to support Windows too
                child = _run_gpg([
                    # Not all versions support this:
                    #'--max-output', str(1024 * 1024),
                    '--batch',
                    # Windows GPG can only cope with "1" here
                    '--status-fd', '1',
                    # Don't try to download missing keys; we'll do that
                    '--keyserver-options', 'no-auto-key-retrieve',
                    '--verify', sig_file.name, '-'],
                    stdin=data,
                    stdout=subprocess.PIPE, stderr=errors)

                try:
                    sigs = _get_sigs_from_gpg_status_stream(child.stdout, child, errors)
                finally:
                    # NOTE(review): _get_sigs_from_gpg_status_stream already closes
                    # 'errors' and waits for the child; repeating both here is
                    # harmless (close is idempotent, wait returns the cached code)
                    # and covers the early-exception path.
                    os.lseek(stream.fileno(), 0, 0)
                    errors.close()
                    child.stdout.close()
                    child.wait()
                    stream.seek(0)
            finally:
                os.unlink(sig_file.name)
    return (stream, sigs)
def build_command_args(self, selections, commands=None): """Create a list of strings to be passed to exec to run the <command>s in the selections. @param selections: the selections containing the commands @type selections: L{selections.Selections} @param commands: the commands to be used (taken from selections is None) @type commands: [L{model.Command}] @return: the argument list @rtype: [str]""" prog_args = [] commands = commands or selections.commands sels = selections.selections # Each command is run by the next, but the last one is run by exec, and we # need a path for that. if commands[-1].path is None: raise SafeException("Missing 'path' attribute on <command>") command_iface = selections.interface for command in commands: command_sel = sels[command_iface] command_args = [] # Add extra arguments for runner runner = command.get_runner() if runner: command_iface = runner.interface _process_args(command_args, runner.qdom) # Add main program path command_path = command.path if command_path is not None: if command_sel.id.startswith('package:'): prog_path = command_path else: if command_path.startswith('/'): raise SafeException( _("Command path must be relative, but '%s' starts with '/'!" ) % command_path) prog_path = os.path.join( self._get_implementation_path(command_sel), command_path) assert prog_path is not None if not os.path.exists(prog_path): raise SafeException( _("File '%(program_path)s' does not exist.\n" "(implementation '%(implementation_id)s' + program '%(main)s')" ) % { 'program_path': prog_path, 'implementation_id': command_sel.id, 'main': command_path }) command_args.append(prog_path) # Add extra arguments for program _process_args(command_args, command.qdom) prog_args = command_args + prog_args return prog_args
def download_archive(self, download_source, force=False, impl_hint=None, may_use_mirror=False):
    """Fetch an archive. You should normally call L{download_impl}
    instead, since it handles other kinds of retrieval method too.
    It is the caller's responsibility to ensure that the returned stream is closed.
    If impl_hint is from a local feed and the url is relative, just opens the existing file for reading.
    @type download_source: L{model.DownloadSource}
    @type force: bool
    @type may_use_mirror: bool
    @rtype: (L{Blocker} | None, file)"""
    from zeroinstall.zerostore import unpack

    mime_type = download_source.type
    if not mime_type:
        # Fall back to guessing the archive type from the URL's extension
        mime_type = unpack.type_from_url(download_source.url)
    if not mime_type:
        raise SafeException(_("No 'type' attribute on archive, and I can't guess from the name (%s)") % download_source.url)
    if not self.external_store:
        unpack.check_type_ok(mime_type)

    if '://' not in download_source.url:
        # Relative path: only valid for local feeds; open the file directly
        # instead of downloading (no blocker is returned in this case)
        if impl_hint is None or not impl_hint.feed.local_path:
            raise SafeException(_("Relative URL '{url}' in non-local feed '{feed}'").format(
                url=download_source.url,
                feed=impl_hint.feed))

        archive_file = os.path.join(os.path.dirname(impl_hint.feed.local_path), download_source.url)
        try:
            # Verify the on-disk size against the feed's declaration before use
            size = os.path.getsize(archive_file)
            if size != download_source.size:
                raise SafeException(_("Wrong size for {path}: feed says {expected}, but actually {actual} bytes").format(
                    path=archive_file,
                    expected=download_source.size,
                    actual=size))
            return (None, open(archive_file, 'rb'))
        except OSError as ex:
            raise SafeException(str(ex))	# (error already includes path)

    if may_use_mirror:
        mirror = self._get_archive_mirror(download_source)
    else:
        mirror = None

    if self.config.handler.dry_run:
        print(_("[dry-run] downloading archive {url}").format(url=download_source.url))
    dl = self.download_url(download_source.url, hint=impl_hint, mirror_url=mirror)
    if download_source.size is not None:
        # The feed's size excludes any leading padding that start_offset skips
        dl.expected_size = download_source.size + (download_source.start_offset or 0)
    # (else don't know sizes for mirrored archives)
    return (dl.downloaded, dl.tempfile)
def download_impl(self, impl, retrieval_method, stores, force=False):
    """Download an implementation.
    @param impl: the selected implementation
    @type impl: L{model.ZeroInstallImplementation}
    @param retrieval_method: a way of getting the implementation (e.g. an Archive or a Recipe)
    @type retrieval_method: L{model.RetrievalMethod}
    @param stores: where to store the downloaded implementation
    @type stores: L{zerostore.Stores}
    @rtype: L{tasks.Blocker}"""
    assert impl
    assert retrieval_method

    if isinstance(retrieval_method, DistributionSource):
        return retrieval_method.install(self.handler)

    from zeroinstall.zerostore import manifest, parse_algorithm_digest_pair
    # Pick the strongest digest algorithm we know how to verify
    best = None
    for digest in impl.digests:
        alg_name, digest_value = parse_algorithm_digest_pair(digest)
        alg = manifest.algorithms.get(alg_name, None)
        if alg and (best is None or best.rating < alg.rating):
            best = alg
            required_digest = digest

    if best is None:
        if not impl.digests:
            raise SafeException(_("No <manifest-digest> given for '%(implementation)s' version %(version)s") % {
                'implementation': impl.feed.get_name(),
                'version': impl.get_version()})
        raise SafeException(_("Unknown digest algorithms '%(algorithms)s' for '%(implementation)s' version %(version)s") % {
            'algorithms': impl.digests,
            'implementation': impl.feed.get_name(),
            'version': impl.get_version()})

    # NOTE(review): 'tasks.async' is the project's generator-coroutine
    # decorator (pre-dates the Python 3.7 'async' keyword) — confirm it is
    # still importable under this name on the supported Python versions.
    @tasks.async
    def download_impl(method):
        # Retry loop: on a download error, switch to the mirror once.
        # original_exception is None on the first (primary) attempt.
        original_exception = None
        while True:
            try:
                if isinstance(method, DownloadSource):
                    blocker, stream = self.download_archive(method, impl_hint=impl,
                        may_use_mirror=original_exception is None)
                    try:
                        yield blocker
                        tasks.check(blocker)

                        stream.seek(0)
                        if self.external_store:
                            self._add_to_external_store(required_digest, [method], [stream])
                        else:
                            self._add_to_cache(required_digest, stores, method, stream)
                    finally:
                        stream.close()
                elif isinstance(method, Recipe):
                    blocker = self.cook(required_digest, method, stores, impl_hint=impl)
                    yield blocker
                    tasks.check(blocker)
                else:
                    raise Exception(_("Unknown download type for '%s'") % method)
            except download.DownloadError as ex:
                if original_exception:
                    # Mirror failed too: report the primary site's error, not the mirror's
                    logger.info("Error from mirror: %s", ex)
                    raise original_exception
                else:
                    original_exception = ex
                mirror_url = self._get_impl_mirror(impl)
                if mirror_url is not None:
                    logger.info("%s: trying implementation mirror at %s", ex, mirror_url)
                    # Mirrors always serve implementations as .tar.bz2 archives
                    method = model.DownloadSource(impl, mirror_url,
                        None, None, type='application/x-bzip-compressed-tar')
                    continue		# Retry
                raise
            break

        self.handler.impl_added_to_store(impl)
    return download_impl(retrieval_method)