    def validate_system(self):
        """
        Make sure the system is running a supported version of Debian or Ubuntu.

        :raises: :exc:`UnsupportedSystemError` when validation fails.
        """
        # Make sure we're dealing with a Debian or Ubuntu system.
        logger.verbose("Validating operating system distribution ..")
        if self.distributor_id.lower() not in ('debian', 'ubuntu'):
            raise UnsupportedSystemError(
                compact("""
                According to the output of the 'lsb_release --id' command you
                are running an unsupported operating system distribution!
                (output: {output})
            """,
                        output=repr(self.distributor_id)))
        # Make sure we're dealing with a supported version of Debian or Ubuntu.
        base_url = format(
            'https://deb.nodesource.com/{version}/dists/{codename}/',
            version=self.nodejs_version,
            codename=self.distribution_codename.lower())
        logger.info("Validating repository availability (%s) ..", base_url)
        if not requests.get(base_url).ok:
            raise UnsupportedSystemError(
                compact("""
                Based on the output of the 'lsb_release --codename' command
                ({codename}) it seems that your version of {distro} isn't
                supported by NodeSource! (more specifically, it seems that
                {url} isn't available)
            """,
                        distro=self.distributor_id,
                        codename=self.distribution_codename,
                        url=base_url))
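Every snippet on this page leans on humanfriendly.text.compact(), which trims a triple-quoted message, collapses internal whitespace to single spaces and interpolates keyword arguments. A minimal standalone sketch of that behavior (the distributor value 'arch' is made up for illustration):

from humanfriendly.text import compact

message = compact("""
    According to the output of the 'lsb_release --id' command you
    are running an unsupported operating system distribution!
    (output: {output})
""", output=repr('arch'))

# The result is a single normalized line:
# "According to the output of the 'lsb_release --id' command you are running
#  an unsupported operating system distribution! (output: 'arch')"
print(message)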
Example #2
    def check_old_usage(self):
        """
        Raise an exception when only one of the public and secret key files has been provided.

        :raises: :exc:`~exceptions.TypeError` as described below.

        When GnuPG < 2.1 is installed :func:`check_old_usage()` is called
        to ensure that :attr:`public_key_file` and :attr:`secret_key_file`
        are either both provided or both omitted.
        """
        if self.secret_key_file and not self.public_key_file:
            raise TypeError(
                compact("""
                The secret key file {filename} was provided without a
                corresponding public key file! Please provide both or
                neither.
            """,
                        filename=self.secret_key_file))
        elif self.public_key_file and not self.secret_key_file:
            raise TypeError(
                compact("""
                The public key file {filename} was provided without a
                corresponding secret key file! Please provide both or
                neither.
            """,
                        filename=self.public_key_file))
Example #3
    def check_new_usage(self):
        """
        Raise an exception when detecting a backwards incompatibility.

        :raises: :exc:`~exceptions.TypeError` as described below.

        When GnuPG >= 2.1 is installed the :func:`check_new_usage()` method is
        called to make sure that the caller is aware of the changes in API
        contract that this implies. We do so by raising an exception when both
        of the following conditions hold:

        - The caller is using the old calling convention of setting
          :attr:`public_key_file` and :attr:`secret_key_file` (which
          confirms that the intention is to use an isolated GPG key).

        - The caller is not using the new calling convention of setting
          :attr:`directory` (even though this is required to use an isolated
          GPG key with GnuPG >= 2.1).
        """
        if self.old_usage and not self.new_usage:
            raise TypeError(
                compact("""
                You're running GnuPG >= 2.1 which requires changes to how
                deb_pkg_tools.gpg.GPGKey is used and unfortunately our
                caller hasn't been updated to support this. Please refer
                to the deb-pkg-tools 5.0 release notes for details.
            """))
def check_version_conflicts(dependency_set, cache=None):
    """
    Check for version conflicts in a dependency set.

    :param dependency_set: A list of filenames (strings) of ``*.deb`` files.
    :param cache: The :class:`.PackageCache` to use (defaults to :data:`None`).
    :raises: :exc:`VersionConflictFound` when one or more version
             conflicts are found.

    For each Debian binary package archive given, check if a newer version of
    the same package exists in the same repository (directory). This analysis
    can be very slow. To make it faster you can use the
    :class:`.PackageCache`.
    """
    timer = Timer()
    summary = []
    dependency_set = list(map(parse_filename, dependency_set))
    spinner = Spinner(total=len(dependency_set))
    logger.info("Checking for version conflicts in %i package(s) ..", len(dependency_set))
    for i, archive in enumerate(dependency_set, start=1):
        if archive.newer_versions:
            summary.append(compact("""
                    Dependency set includes {dependency} but newer version(s)
                    of that package also exist and will take precedence:
            """, dependency=format_path(archive.filename)))
            summary.append("\n".join(" - %s" % format_path(a.filename) for a in sorted(archive.newer_versions)))
        spinner.step(label="Checking for version conflicts", progress=i)
    spinner.clear()
    if summary:
        summary.insert(0, "One or more version conflicts found:")
        raise VersionConflictFound('\n\n'.join(summary))
    else:
        logger.info("No version conflicts found (took %s).", timer)
Example #5
    def test_sphinx_integration(self):
        """Tests for the :mod:`property_manager.sphinx` module."""
        class FakeApp(object):

            def __init__(self):
                self.callbacks = {}

            def connect(self, event, callback):
                self.callbacks.setdefault(event, []).append(callback)

        app = FakeApp()
        setup(app)
        assert append_property_docs in app.callbacks['autodoc-process-docstring']
        lines = ["Some boring description."]
        obj = TypeInspector
        append_property_docs(app=app, what=None, name=None, obj=obj, options=None, lines=lines)
        assert len(lines) > 0
        assert lines[0] == "Some boring description."
        assert not lines[1]
        assert lines[2] == "Here's an overview of the :class:`TypeInspector` class:"
        assert not lines[3]
        assert lines[-1] == compact("""
            When you initialize a :class:`TypeInspector` object you are
            required to provide a value for the :attr:`type` property. You can
            set the value of the :attr:`type` property by passing a keyword
            argument to the class initializer.
        """)
Example #6
 def python_callback(self, value):
     """Automatically coerce :attr:`python_callback` to a callable value."""
     if value:
         # Python callers get to pass a callable directly.
         if not callable(value):
             expression = value
             # Otherwise we expect a string to parse (from a command line
             # argument, environment variable or configuration file).
             callback_path, _, callback_name = expression.partition(':')
             if os.path.isfile(callback_path):
                 # Callback specified as Python script.
                 script_name = os.path.basename(callback_path)
                 if script_name.endswith('.py'):
                     script_name, _ = os.path.splitext(script_name)
                 environment = dict(__file__=callback_path, __name__=script_name)
                 logger.debug("Loading Python callback from pathname: %s", callback_path)
                 with open(callback_path) as handle:
                     exec(handle.read(), environment)
                 value = environment.get(callback_name)
             else:
                 # Callback specified as `dotted path'.
                 logger.debug("Loading Python callback from dotted path: %s", callback_path)
                 module = importlib.import_module(callback_path)
                 value = getattr(module, callback_name, None)
             if not callable(value):
                 raise ValueError(compact("""
                     The Python callback expression {expr} didn't result in
                     a valid callable! (result: {value})
                 """, expr=expression, value=value))
     else:
         value = None
     set_property(self, 'python_callback', value)
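The dotted-path branch above can be reproduced in isolation. A small self-contained sketch (the expression is arbitrary and taken from the standard library so that it actually resolves):

import importlib

expression = 'os.path:join'  # arbitrary 'module:callable' expression
callback_path, _, callback_name = expression.partition(':')
module = importlib.import_module(callback_path)
callback = getattr(module, callback_name, None)
assert callable(callback)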
Example #7
    def matches(self, name, version=None):
        """
        Check if the relationship matches a given package name and version.

        :param name: The name of a package (a string).
        :param version: The version number of a package (a string, optional).
        :returns: One of the values :data:`True`, :data:`False` or :data:`None`
                  meaning the following:

                  - :data:`True` if the name matches and the version
                    doesn't invalidate the match,

                  - :data:`False` if the name matches but the version
                    invalidates the match,

                  - :data:`None` if the name doesn't match at all.
        :raises: :exc:`~exceptions.NotImplementedError` when :attr:`architectures`
                 is not empty (because evaluation of architecture restrictions
                 hasn't been implemented).

        Uses the external command ``dpkg --compare-versions`` to ensure
        compatibility with Debian's package version comparison algorithm.
        """
        if self.name == name:
            if version:
                if self.architectures:
                    raise NotImplementedError(
                        compact(ARCHITECTURE_RESTRICTIONS_MESSAGE))
                return compare_versions(version, self.operator, self.version)
            else:
                return False
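A hedged sketch of how this method is typically exercised, assuming parse_relationship() from deb_pkg_tools.deps returns a versioned relationship backed by this matches() implementation (and that dpkg-style version comparison is available):

from deb_pkg_tools.deps import parse_relationship

relationship = parse_relationship('python (>= 2.6)')
print(relationship.matches('python', '2.7'))  # True: name matches, version satisfies >= 2.6
print(relationship.matches('python', '2.5'))  # False: name matches, version violates >= 2.6
print(relationship.matches('ruby', '2.7'))    # None: name doesn't match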
Example #8
    def matches(self, name, version=None):
        """
        Check if the relationship matches a given package name and version.

        :param name: The name of a package (a string).
        :param version: The version number of a package (a string, optional).
        :returns: One of the values :data:`True`, :data:`False` or :data:`None`
                  meaning the following:

                  - :data:`True` if the name matches and the version
                    doesn't invalidate the match,

                  - :data:`False` if the name matches but the version
                    invalidates the match,

                  - :data:`None` if the name doesn't match at all.
        :raises: :exc:`~exceptions.NotImplementedError` when :attr:`architectures`
                 is not empty (because evaluation of architecture restrictions
                 hasn't been implemented).

        Uses the external command ``dpkg --compare-versions`` to ensure
        compatibility with Debian's package version comparison algorithm.
        """
        if self.name == name:
            if version:
                if self.architectures:
                    raise NotImplementedError(compact(ARCHITECTURE_RESTRICTIONS_MESSAGE))
                return compare_versions(version, self.operator, self.version)
            else:
                return False
Example #9
 def check_key_id(self):
     """Raise :exc:`~exceptions.EnvironmentError` when a key ID has been specified but the key pair doesn't exist."""
     if self.key_id and not self.existing_files:
         raise EnvironmentError(
             compact(
                 "The key ID {key_id} was specified but the configured key pair doesn't exist!",
                 key_id=self.key_id,
             ))
Example #10
 def proxy_method(self, *args, **kw):
     if not hasattr(self, 'output'):
         raise TypeError(compact("""
             The old calling interface is only supported when
             merged=True and start_capture() has been called!
         """))
     real_method = getattr(self.output, name)
     return real_method(*args, **kw)
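The name variable in this excerpt is bound by an enclosing factory function. A hedged sketch of roughly how such a factory produces these proxies (simplified, not the actual implementation; compact() as imported elsewhere on this page):

def make_proxy_method(name):
    # Return an unbound method that forwards calls to self.output.<name>.
    def proxy_method(self, *args, **kw):
        if not hasattr(self, 'output'):
            raise TypeError(compact("""
                The old calling interface is only supported when
                merged=True and start_capture() has been called!
            """))
        real_method = getattr(self.output, name)
        return real_method(*args, **kw)
    return proxy_method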
Example #11
 def proxy_method(self, *args, **kw):
     if not hasattr(self, 'output'):
         raise TypeError(compact("""
             The old calling interface is only supported when
             merged=True and start_capture() has been called!
         """))
     real_method = getattr(self.output, name)
     return real_method(*args, **kw)
Example #12
    def __init__(self, text, *args, **kw):
        """
        Initialize a :class:`ProgramError` object.

        For argument handling see the :func:`~humanfriendly.text.compact()`
        function. The resulting string is used as the exception message.
        """
        message = compact(text, *args, **kw)
        super(ProgramError, self).__init__(message)
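A hedged usage sketch (the path and precondition are made up; ProgramError is the class defined above). Because the initializer forwards its arguments to compact(), multi-line messages collapse into a single readable line:

import os

config_file = '/etc/example.conf'  # hypothetical configuration file
if not os.access(config_file, os.R_OK):
    raise ProgramError("""
        The configuration file {path} isn't readable!
        Please check its permissions and try again.
    """, path=config_file)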
Example #13
def parse_relationship(expression):
    """
    Parse an expression containing a package name and version.

    :param expression: A relationship expression (a string).
    :returns: A :class:`Relationship` object.
    :raises: :exc:`~exceptions.ValueError` when parsing fails.

    This function parses relationship expressions containing a package name
    and (optionally) a version relation of the form ``python (>= 2.6)``.

    An example:

    >>> from deb_pkg_tools.deps import parse_relationship
    >>> parse_relationship('python')
    Relationship(name='python')
    >>> parse_relationship('python (<< 3)')
    VersionedRelationship(name='python', operator='<<', version='3')
    """
    tokens = [t.strip() for t in re.split('[()]', expression) if t and not t.isspace()]
    if len(tokens) == 1:
        # Just a package name (no version information).
        return Relationship(tokens[0])
    elif len(tokens) != 2:
        # Encountered something unexpected!
        raise ValueError(compact("""
            Corrupt package relationship expression: Splitting name from
            relationship resulted in more than two tokens!
            (expression: {e}, tokens: {t})
        """, e=expression, t=tokens))
    else:
        # Package name followed by relationship to specific version(s) of package.
        name, relationship = tokens
        tokens = [t.strip() for t in re.split('([<>=]+)', relationship) if t and not t.isspace()]
        if len(tokens) != 2:
            # Encountered something unexpected!
            raise ValueError(compact("""
                Corrupt package relationship expression: Splitting operator
                from version resulted in more than two tokens!
                (expression: {e}, tokens: {t})
            """, e=relationship, t=tokens))
        return VersionedRelationship(name, *tokens)
Example #14
 def get_push_command(self, remote=None, revision=None):
     """Get the command to push changes from the local repository to a remote repository."""
     if revision:
         raise NotImplementedError(compact("""
             Bazaar repository support doesn't include
             the ability to push specific revisions!
         """))
     command = ['bzr', 'push']
     if remote:
         command.append(remote)
     return command
Example #15
    def generate_key_pair(self):
        """
        Generate a missing GPG key pair on demand.

        :raises: :exc:`~exceptions.TypeError` when the GPG key pair needs to be
                 generated (because it doesn't exist yet) but no :attr:`name`
                 and :attr:`description` were provided.
        """
        logger.debug("Checking if GPG key pair exists ..")
        if self.existing_files:
            logger.debug(
                "Assuming key pair exists (found existing files: %s).",
                self.existing_files)
            return
        elif not (self.name and self.description):
            raise TypeError(
                "Can't generate GPG key pair without 'name' and 'description'!"
            )
        logger.info("Generating GPG key pair: %s (%s)", self.name,
                    self.description)
        # Make sure all of the required directories exist and have sane
        # permissions (to avoid GnuPG warnings).
        required_dirs = set([self.directory_default, self.directory_effective])
        if not have_updated_gnupg():
            required_dirs.update([
                os.path.dirname(self.public_key_file),
                os.path.dirname(self.secret_key_file),
            ])
        for directory in required_dirs:
            create_directory(directory)
        # Use a temporary file for the `gpg --batch --gen-key' batch instructions.
        fd, temporary_file = tempfile.mkstemp(suffix='.txt')
        try:
            with open(temporary_file, 'w') as handle:
                handle.write(self.batch_script)
                handle.write('\n')
            # Inform the operator that this may take a while.
            logger.info(
                compact("""
                Please note: Generating a GPG key pair can take a long time. If
                you are logged into a virtual machine or a remote server over
                SSH, now is a good time to familiarize yourself with the
                concept of entropy and how to make more of it :-)
            """))
            timer = Timer()
            with EntropyGenerator():
                gen_key_cmd = self.scoped_command
                gen_key_cmd += ['--batch', '--gen-key', temporary_file]
                execute(*gen_key_cmd, logger=logger)
            logger.info("Finished generating GPG key pair in %s.", timer)
        finally:
            os.unlink(temporary_file)
        # Reset cached properties after key generation.
        self.clear_cached_properties()
 def ensure_writable(self):
     """Make sure the directory exists and is writable."""
     self.ensure_exists()
     if not self.context.is_writable(self.directory):
         if self.context.have_superuser_privileges:
             msg = "The directory %s isn't writable!"
             raise ValueError(msg % self)
         else:
             raise ValueError(compact("""
                 The directory {location} isn't writable, most likely due
                 to permissions. Consider using the --use-sudo option.
             """, location=self))
 def ensure_writable(self):
     """Make sure the directory exists and is writable."""
     self.ensure_exists()
     if not self.context.is_writable(self.directory):
         if self.context.have_superuser_privileges:
             msg = "The directory %s isn't writable!"
             raise ValueError(msg % self)
         else:
             raise ValueError(compact("""
                 The directory {location} isn't writable, most likely due
                 to permissions. Consider using the --use-sudo option.
             """, location=self))
def render_error(filename, line_number, text, *args, **kw):
    """Render an error message including line number and optional filename."""
    message = []
    if filename and line_number:
        message.append(
            format("Control file parsing error in %s at line %i:", filename,
                   line_number))
    else:
        message.append(
            format("Failed to parse control field at line %i:", line_number))
    message.append(compact(text, *args, **kw))
    return u" ".join(message)
Example #19
def parse_relationship(expression):
    """
    Parse an expression containing a package name and optional version/architecture restrictions.

    :param expression: A relationship expression (a string).
    :returns: A :class:`Relationship` object.
    :raises: :exc:`~exceptions.ValueError` when parsing fails.

    This function parses relationship expressions containing a package name and
    (optionally) a version relation of the form ``python (>= 2.6)`` and/or an
    architecture restriction (refer to the Debian policy manual's documentation
    on the `syntax of relationship fields`_ for details). Here's an example:

    >>> from deb_pkg_tools.deps import parse_relationship
    >>> parse_relationship('python')
    Relationship(name='python')
    >>> parse_relationship('python (<< 3)')
    VersionedRelationship(name='python', operator='<<', version='3')

    .. _syntax of relationship fields: https://www.debian.org/doc/debian-policy/ch-relationships.html
    """
    logger.debug("Parsing relationship: %r", expression)
    match = EXPRESSION_PATTERN.match(expression)
    name = match.group('name')
    version = match.group('version')
    # Split the architecture restrictions into a tuple of strings.
    architectures = tuple((match.group('architectures') or '').split())
    if name and not version:
        # A package name (and optional architecture restrictions) without version relation.
        return Relationship(name=name, architectures=architectures)
    else:
        # A package name (and optional architecture restrictions) followed by a
        # relationship to specific version(s) of the package.
        tokens = [
            t.strip() for t in re.split('([<>=]+)', version)
            if t and not t.isspace()
        ]
        if len(tokens) != 2:
            # Encountered something unexpected!
            raise ValueError(
                compact("""
                Corrupt package relationship expression: Splitting operator
                from version resulted in more than two tokens!
                (expression: {e}, tokens: {t})
            """,
                        e=expression,
                        t=tokens))
        return VersionedRelationship(name=name,
                                     architectures=architectures,
                                     operator=tokens[0],
                                     version=tokens[1])
Example #20
def retry_limit():
    """
    Generator for interactive prompts that repeat on invalid input.

    :raises: :exc:`TooManyInvalidReplies` when an interactive prompt
             receives repeated invalid input (:data:`MAX_ATTEMPTS`).
    """
    for i in range(MAX_ATTEMPTS):
        yield i + 1
    logger.warning("Too many invalid replies on interactive prompt, aborting! (after %i attempts)", MAX_ATTEMPTS)
    raise TooManyInvalidReplies(compact("""
        Received too many invalid replies on interactive prompt,
        giving up! (tried %i times)
    """, MAX_ATTEMPTS))
Example #21
def collect_packages_worker(args):
    """Helper for :func:`collect_packages()` that enables concurrent collection."""
    try:
        return collect_related_packages(args[0], cache=args[1], interactive=False)
    except Exception:
        # Log a full traceback in the child process because the multiprocessing
        # module doesn't preserve the traceback when propagating the exception
        # to the parent process.
        logger.exception(compact("""
            Encountered unhandled exception during collection of related
            packages! (propagating exception to parent process)
        """))
        # Propagate the exception to the parent process.
        raise
Example #22
def retry_limit():
    """
    Generator for interactive prompts that repeat on invalid input.

    :raises: :exc:`TooManyInvalidReplies` when an interactive prompt
             receives repeated invalid input (:data:`MAX_ATTEMPTS`).
    """
    for i in range(MAX_ATTEMPTS):
        yield i + 1
    logger.warning("Too many invalid replies on interactive prompt, aborting! (after %i attempts)", MAX_ATTEMPTS)
    raise TooManyInvalidReplies(compact("""
        Received too many invalid replies on interactive prompt,
        giving up! (tried %i times)
    """, MAX_ATTEMPTS))
Example #23
 def required_hint(self):
     """A hint about required properties (a string or :data:`None`)."""
     names = sorted(name for name, value in self.custom_properties if value.required)
     if names:
         return compact(
             """
             When you initialize a :class:`{type}` object you are required
             to provide {values} for the {required} {properties}.
             """,
             type=self.type.__name__,
             required=self.format_properties(names),
             values=("a value" if len(names) == 1 else "values"),
             properties=("property" if len(names) == 1 else "properties"),
         )
Example #24
    def matches(self, name, version=None):
        """
        Check if the relationship matches a given package name.

        :param name: The name of a package (a string).
        :param version: The version number of a package (this parameter is ignored).
        :returns: :data:`True` if the name matches, :data:`None` otherwise.
        :raises: :exc:`~exceptions.NotImplementedError` when :attr:`architectures`
                 is not empty (because evaluation of architecture restrictions
                 hasn't been implemented).
        """
        if self.name == name:
            if self.architectures:
                raise NotImplementedError(compact(ARCHITECTURE_RESTRICTIONS_MESSAGE))
            return True
Example #25
    def skipTest(self, text, *args, **kw):
        """
        Enable backwards compatible "marking of tests to skip".

        By calling this method from a return statement in the test to be
        skipped, the test can be marked as skipped when possible, without
        breaking the test suite when unittest.TestCase.skipTest() isn't
        available.
        """
        reason = compact(text, *args, **kw)
        try:
            super(ProcTestCase, self).skipTest(reason)
        except AttributeError:
            # unittest.TestCase.skipTest() isn't available in Python 2.6.
            logger.warning("%s", reason)
def deprecation_note_callback(app, what, name, obj, options, lines):
    """
    Automatically document aliases defined using :func:`~humanfriendly.deprecation.define_aliases()`.

    Refer to :func:`enable_deprecation_notes()` to enable the use of this
    function (you probably don't want to call :func:`deprecation_note_callback()`
    directly).

    This function implements a callback for ``autodoc-process-docstring`` that
    reformats module docstrings to append an overview of aliases defined by the
    module.

    The parameters expected by this function are those defined for Sphinx event
    callback functions (i.e. I'm not going to document them here :-).
    """
    if isinstance(obj, types.ModuleType) and lines:
        aliases = get_aliases(obj.__name__)
        if aliases:
            # Convert the existing docstring to a string and remove leading
            # indentation from that string, otherwise our generated content
            # would have to match the existing indentation in order not to
            # break docstring parsing (because indentation is significant
            # in the reStructuredText format).
            blocks = [dedent("\n".join(lines))]
            # Use an admonition to group the deprecated aliases together and
            # to distinguish them from the autodoc entries that follow.
            blocks.append(".. note:: Deprecated names")
            indent = " " * 3
            if len(aliases) == 1:
                explanation = """
                    The following alias exists to preserve backwards compatibility,
                    however a :exc:`~exceptions.DeprecationWarning` is triggered
                    when it is accessed, because this alias will be removed
                    in a future release.
                """
            else:
                explanation = """
                    The following aliases exist to preserve backwards compatibility,
                    however a :exc:`~exceptions.DeprecationWarning` is triggered
                    when they are accessed, because these aliases will be
                    removed in a future release.
                """
            blocks.append(indent + compact(explanation))
            for name, target in aliases.items():
                blocks.append(format("%s.. data:: %s", indent, name))
                blocks.append(
                    format("%sAlias for :obj:`%s`.", indent * 2, target))
            update_lines(lines, "\n\n".join(blocks))
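If you prefer to wire this callback up manually instead of calling enable_deprecation_notes(), a hedged sketch of the standard Sphinx extension hookup in a conf.py or extension module (the event name is the one mentioned in the docstring):

def setup(app):
    # Register the callback so module docstrings get the generated
    # "Deprecated names" admonition appended by autodoc.
    app.connect('autodoc-process-docstring', deprecation_note_callback)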
Example #27
def check_mandatory_fields(control_fields):
    """
    Make sure mandatory binary control fields are defined.

    :param control_fields: A dictionary with control file fields.
    :raises: :exc:`~exceptions.ValueError` when a mandatory binary control
             field is not present in the provided control fields (see also
             :data:`MANDATORY_BINARY_CONTROL_FIELDS`).
    """
    missing_fields = [f for f in MANDATORY_BINARY_CONTROL_FIELDS if not control_fields.get(f)]
    if missing_fields:
        raise ValueError(compact(
            "Missing {fields}! ({details})",
            fields=pluralize(len(missing_fields), "mandatory binary package control field"),
            details=concatenate(sorted(missing_fields)),
        ))
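A hedged usage sketch; the field dictionary is made up and intentionally incomplete so that the error path is triggered (the exact wording depends on MANDATORY_BINARY_CONTROL_FIELDS):

control_fields = {'Package': 'example', 'Version': '1.0'}
try:
    check_mandatory_fields(control_fields)
except ValueError as error:
    print(error)  # names the missing mandatory fields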
 def ensure_exists(self):
     """Make sure the location exists."""
     if not self.context.is_directory(self.directory):
         # This can also happen when we don't have permission to one of the
         # parent directories so we'll point that out in the error message
         # when it seems applicable (so as not to confuse users).
         if self.context.have_superuser_privileges:
             msg = "The directory %s doesn't exist!"
             raise ValueError(msg % self)
         else:
             raise ValueError(compact("""
                 The directory {location} isn't accessible, most likely
                 because it doesn't exist or because of permissions. If
                 you're sure the directory exists you can use the
                 --use-sudo option.
             """, location=self))
Example #29
    def matches(self, name, version=None):
        """
        Check if the relationship matches a given package name.

        :param name: The name of a package (a string).
        :param version: The version number of a package (this parameter is ignored).
        :returns: :data:`True` if the name matches, :data:`None` otherwise.
        :raises: :exc:`~exceptions.NotImplementedError` when :attr:`architectures`
                 is not empty (because evaluation of architecture restrictions
                 hasn't been implemented).
        """
        if self.name == name:
            if self.architectures:
                raise NotImplementedError(
                    compact(ARCHITECTURE_RESTRICTIONS_MESSAGE))
            return True
 def ensure_exists(self):
     """Make sure the location exists."""
     if not self.context.is_directory(self.directory):
         # This can also happen when we don't have permission to one of the
         # parent directories so we'll point that out in the error message
         # when it seems applicable (so as not to confuse users).
         if self.context.have_superuser_privileges:
             msg = "The directory %s doesn't exist!"
             raise ValueError(msg % self)
         else:
             raise ValueError(compact("""
                 The directory {location} isn't accessible, most likely
                 because it doesn't exist or because of permissions. If
                 you're sure the directory exists you can use the
                 --use-sudo option.
             """, location=self))
Example #31
 def check_ssh_connection(self):
     """Verify SSH connectivity to the pre-boot environment."""
     if self.test_ssh_connection(self.pre_boot, self.pre_context):
         logger.info("Successfully connected and authenticated over SSH.")
     else:
         msg = format(
             "Failed to authenticate to %s:%i!",
             self.pre_boot.hostname,
             self.pre_boot.port_number,
         )
         if self.pre_boot.username == 'root':
             msg += " " + compact("""
                 Maybe you're accidentally connecting to the post-boot
                 environment and you don't have 'root' access there?
             """)
         raise SystemUnreachableError(msg)
Example #32
 def initializer_hint(self):
     """A hint that properties can be set using keyword arguments to the initializer (a string or :data:`None`)."""
     names = sorted(
         name for name, value in self.custom_properties
         if value.key or value.required or value.writable
     )
     if names:
         return compact(
             """
             You can set the {values} of the {names} {properties}
             by passing {arguments} to the class initializer.
             """,
             names=self.format_properties(names),
             values=("value" if len(names) == 1 else "values"),
             properties=("property" if len(names) == 1 else "properties"),
             arguments=("a keyword argument" if len(names) == 1 else "keyword arguments"),
         )
Example #33
def collect_packages_worker(args):
    """Helper for :func:`collect_packages()` that enables concurrent collection."""
    try:
        return collect_related_packages(args[0],
                                        cache=args[1],
                                        interactive=False)
    except Exception:
        # Log a full traceback in the child process because the multiprocessing
        # module doesn't preserve the traceback when propagating the exception
        # to the parent process.
        logger.exception(
            compact("""
            Encountered unhandled exception during collection of related
            packages! (propagating exception to parent process)
        """))
        # Propagate the exception to the parent process.
        raise
Example #34
    def skipTest(self, text, *args, **kw):
        """
        Enable skipping of tests.

        This method was added in humanfriendly 3.3 as a fallback for the
        :func:`unittest.TestCase.skipTest()` method that was added in Python
        2.7 and 3.1 (because humanfriendly also supports Python 2.6).

        Since then `humanfriendly` has gained a conditional dependency on
        unittest2_ which enables actual skipping of tests (instead of just
        mocking it) on Python 2.6.

        This method now remains for backwards compatibility (and just because
        it's a nice shortcut).

        .. _unittest2: https://pypi.python.org/pypi/unittest2
        """
        raise unittest.SkipTest(compact(text, *args, **kw))
Example #35
 def kill_interactive_prompt(self):
     """Kill the process responsible for the interactive prompt in the pre-boot environment."""
     logger.info("Looking for '%s' process ..", self.cryptroot_program)
     pid = self.find_process_id(self.cryptroot_program)
     if pid:
         logger.info("Killing interactive prompt with process id %i ..",
                     pid)
         if self.pre_context.execute('kill', '-9', str(pid), check=False):
             return
     # Warn the operator if we fail to identify and kill the interactive
     # prompt, but continue (as opposed to aborting) in the hope that
     # there is no interactive prompt or it's not blocking.
     logger.notice(
         compact("""
         Failed to identify and kill the process that's responsible
         for the interactive prompt! The remote system may block on
         the interactive prompt, in this case manual intervention
         will be required.
     """))
Example #36
    def check_old_files(self):
        """
        Raise an exception when we risk overwriting an existing public or secret key file.

        :returns: A list of filenames with existing files.
        :raises: :exc:`~exceptions.EnvironmentError` as described below.

        When GnuPG < 2.1 is installed :func:`check_old_files()` is called to
        ensure that when :attr:`public_key_file` and :attr:`secret_key_file`
        have been provided, either both of the files already exist or neither
        one exists. This avoids accidentally overwriting an existing file that
        wasn't generated by deb-pkg-tools and shouldn't be touched at all.
        """
        if len(self.existing_files) == 1:
            raise EnvironmentError(
                compact(
                    "Refusing to overwrite existing key file! ({filename})",
                    filename=self.existing_files[0],
                ))
Example #37
def parse_relationship(expression):
    """
    Parse an expression containing a package name and optional version/architecture restrictions.

    :param expression: A relationship expression (a string).
    :returns: A :class:`Relationship` object.
    :raises: :exc:`~exceptions.ValueError` when parsing fails.

    This function parses relationship expressions containing a package name and
    (optionally) a version relation of the form ``python (>= 2.6)`` and/or an
    architecture restriction (refer to the Debian policy manual's documentation
    on the `syntax of relationship fields`_ for details). Here's an example:

    >>> from deb_pkg_tools.deps import parse_relationship
    >>> parse_relationship('python')
    Relationship(name='python')
    >>> parse_relationship('python (<< 3)')
    VersionedRelationship(name='python', operator='<<', version='3')

    .. _syntax of relationship fields: https://www.debian.org/doc/debian-policy/ch-relationships.html
    """
    logger.debug("Parsing relationship: %r", expression)
    match = EXPRESSION_PATTERN.match(expression)
    name = match.group('name')
    version = match.group('version')
    # Split the architecture restrictions into a tuple of strings.
    architectures = tuple((match.group('architectures') or '').split())
    if name and not version:
        # A package name (and optional architecture restrictions) without version relation.
        return Relationship(name=name, architectures=architectures)
    else:
        # A package name (and optional architecture restrictions) followed by a
        # relationship to specific version(s) of the package.
        tokens = [t.strip() for t in re.split('([<>=]+)', version) if t and not t.isspace()]
        if len(tokens) != 2:
            # Encountered something unexpected!
            raise ValueError(compact("""
                Corrupt package relationship expression: Splitting operator
                from version resulted in more than two tokens!
                (expression: {e}, tokens: {t})
            """, e=expression, t=tokens))
        return VersionedRelationship(name=name, architectures=architectures, operator=tokens[0], version=tokens[1])
Example #38
def check_mandatory_fields(control_fields):
    """
    Make sure mandatory binary control fields are defined.

    :param control_fields: A dictionary with control file fields.
    :raises: :exc:`~exceptions.ValueError` when a mandatory binary control
             field is not present in the provided control fields (see also
             :data:`MANDATORY_BINARY_CONTROL_FIELDS`).
    """
    missing_fields = [
        f for f in MANDATORY_BINARY_CONTROL_FIELDS if not control_fields.get(f)
    ]
    if missing_fields:
        raise ValueError(
            compact(
                "Missing {fields}! ({details})",
                fields=pluralize(len(missing_fields),
                                 "mandatory binary package control field"),
                details=concatenate(sorted(missing_fields)),
            ))
Example #39
 def kill_emergency_shell(self):
     """Kill the emergency shell process to resume the boot process."""
     logger.info("Looking for emergency shell process (/bin/sh -i) ..")
     pid = self.find_process_id('/bin/sh', check=False)
     if pid:
         logger.info("Killing emergency shell with process id %i ..", pid)
         try:
             self.pre_context.execute('kill', '-9', str(pid))
         except ExternalCommandFailed as e:
             if isinstance(e, RemoteConnectFailed):
                 raise
     # Warn the operator if we fail to identify and kill the emergency
     # shell process, but continue (as opposed to aborting) in the hope
     # that there is no interactive prompt or it's not blocking.
     logger.notice(
         compact("""
         Failed to identify and kill the emergency shell process.
         Booting of the remote system may block until the emergency
         shell is terminated, in this case manual intervention will
         be required.
     """))
Example #40
def check_version_conflicts(dependency_set, cache=None):
    """
    Check for version conflicts in a dependency set.

    :param dependency_set: A list of filenames (strings) of ``*.deb`` files.
    :param cache: The :class:`.PackageCache` to use (defaults to :data:`None`).
    :raises: :exc:`VersionConflictFound` when one or more version
             conflicts are found.

    For each Debian binary package archive given, check if a newer version of
    the same package exists in the same repository (directory). This analysis
    can be very slow. To make it faster you can use the
    :class:`.PackageCache`.
    """
    timer = Timer()
    summary = []
    dependency_set = list(map(parse_filename, dependency_set))
    spinner = Spinner(total=len(dependency_set))
    logger.info("Checking for version conflicts in %i package(s) ..",
                len(dependency_set))
    for i, archive in enumerate(dependency_set, start=1):
        if archive.newer_versions:
            summary.append(
                compact("""
                    Dependency set includes {dependency} but newer version(s)
                    of that package also exist and will take precedence:
            """,
                        dependency=format_path(archive.filename)))
            summary.append("\n".join(" - %s" % format_path(a.filename)
                                     for a in sorted(archive.newer_versions)))
        spinner.step(label="Checking for version conflicts", progress=i)
    spinner.clear()
    if summary:
        summary.insert(0, "One or more version conflicts found:")
        raise VersionConflictFound('\n\n'.join(summary))
    else:
        logger.info("No version conflicts found (took %s).", timer)
Example #41
    def wait_for_pre_boot(self):
        """
        Wait for the pre-boot environment to become available.

        :returns: A :class:`ServerDetails` object.
        :raises: The following exceptions can be raised:

                 - :exc:`SystemUnreachableError` when :attr:`connect_timeout`
                   seconds have passed and we still haven't managed to query
                   the SSH server in the pre-boot environment.
                 - :exc:`UnlockAbortedError` when the post-boot environment is
                   detected and the operator aborts the unlock sequence.
        """
        method_timer = Timer()
        logger.info("Waiting for pre-boot environment to become available ..")
        with AutomaticSpinner("Waiting for pre-boot environment",
                              show_time=True):
            while True:
                iteration_timer = Timer()
                server = self.scan_ssh_server(self.pre_boot)
                known_keys = self.get_known_host_keys('pre-boot-host-keys')
                if server.host_keys and known_keys:
                    logger.verbose(
                        "Checking if SSH host keys match known keys ..")
                    if server.host_keys & known_keys:
                        logger.info(
                            "Matched known SSH host keys of pre-boot environment."
                        )
                        break
                    else:
                        logger.warning(
                            compact("""
                            Detected post-boot environment while waiting for
                            pre-boot environment to become available, will keep
                            retrying...
                        """))
                elif server.match_header('dropbear'):
                    logger.info(
                        "Detected Dropbear in pre-boot environment (as expected)."
                    )
                    break
                elif server.match_header('openssh'):
                    logger.warning(
                        compact("""
                        Detected OpenSSH server while connecting to pre-boot
                        environment where I was expecting Dropbear instead!
                        Could it be that you're accidentally connecting
                        to the post-boot environment?
                    """))
                    if self.interactive:
                        if prompt_for_confirmation(
                                "Continue connecting anyway?"):
                            logger.info(
                                "Continuing unlock sequence with operator consent .."
                            )
                        else:
                            raise UnlockAbortedError(
                                "Unlock sequence aborted by operator.")
                    break
                if method_timer.elapsed_time >= self.connect_timeout:
                    raise SystemUnreachableError(
                        format(
                            "Timed out waiting for pre-boot environment of %s to become available within %s!",
                            self.pre_context,
                            format_timespan(self.connect_timeout),
                        ))
                iteration_timer.sleep(self.retry_interval)
        logger.info("Waited %s for pre-boot environment.", method_timer)
        return server
Example #42
    def transform_version(self, package_to_convert, python_requirement_name, python_requirement_version):
        """
        Transform a Python requirement version to a Debian version number.

        :param package_to_convert: The :class:`.PackageToConvert` whose
                                   requirement is being transformed.
        :param python_requirement_name: The name of a Python package
                                        as found on PyPI (a string).
        :param python_requirement_version: The required version of the
                                           Python package (a string).
        :returns: The transformed version (a string).

        This method is a wrapper for :func:`.normalize_package_version()` that
        takes care of one additional quirk to ensure compatibility with
        :pypi:`pip`. Explaining this quirk requires a bit of context:

        - When package A requires package B (via ``install_requires``) and
          package A absolutely pins the required version of package B using one
          or more trailing zeros (e.g. ``B==1.0.0``) but the actual version
          number of package B (embedded in the metadata of package B) contains
          less trailing zeros (e.g. ``1.0``) then :pypi:`pip` will not complain
          but silently fetch version ``1.0`` of package B to satisfy the
          requirement.

        - However this doesn't change the absolutely pinned version in the
          ``install_requires`` metadata of package A.

        - When py2deb converts the resulting requirement set, the dependency of
          package A is converted as ``B (= 1.0.0)``. The resulting packages
          will not be installable because :man:`apt` considers ``1.0`` to be
          different from ``1.0.0``.

        This method analyzes the requirement set to identify occurrences of
        this quirk and strip trailing zeros in ``install_requires`` metadata
        that would otherwise result in converted packages that cannot be
        installed.
        """
        matching_packages = [
            pkg for pkg in self.packages_to_convert
            if package_names_match(pkg.python_name, python_requirement_name)
        ]
        if len(matching_packages) > 1:
            # My assumption while writing this code is that this should never
            # happen. This check is to make sure that if it does happen it will
            # be noticed because the last thing I want is for this `hack' to
            # result in packages that are silently wrongly converted.
            normalized_name = normalize_package_name(python_requirement_name)
            num_matches = len(matching_packages)
            raise Exception(compact("""
                Expected requirement set to contain exactly one Python package
                whose name can be normalized to {name} but encountered {count}
                packages instead! (matching packages: {matches})
            """, name=normalized_name, count=num_matches, matches=matching_packages))
        elif matching_packages:
            # Check whether the version number included in the requirement set
            # matches the version number in a package's requirements.
            requirement_to_convert = matching_packages[0]
            if python_requirement_version != requirement_to_convert.python_version:
                logger.debug("Checking whether to strip trailing zeros from required version ..")
                # Check whether the version numbers share the same prefix.
                required_version = tokenize_version(python_requirement_version)
                included_version = tokenize_version(requirement_to_convert.python_version)
                common_length = min(len(required_version), len(included_version))
                required_prefix = required_version[:common_length]
                included_prefix = included_version[:common_length]
                prefixes_match = (required_prefix == included_prefix)
                logger.debug("Prefix of required version: %s", required_prefix)
                logger.debug("Prefix of included version: %s", included_prefix)
                logger.debug("Prefixes match? %s", prefixes_match)
                # Check if 1) only the required version has a suffix and 2) this
                # suffix consists only of trailing zeros.
                required_suffix = required_version[common_length:]
                included_suffix = included_version[common_length:]
                logger.debug("Suffix of required version: %s", required_suffix)
                logger.debug("Suffix of included version: %s", included_suffix)
                if prefixes_match and required_suffix and not included_suffix:
                    # Check whether the suffix of the required version contains
                    # only zeros, i.e. pip considers the version numbers the same
                    # although apt would not agree.
                    if all(re.match('^0+$', t) for t in required_suffix if t.isdigit()):
                        modified_version = ''.join(required_prefix)
                        logger.warning("Stripping superfluous trailing zeros from required"
                                       " version of %s required by %s! (%s -> %s)",
                                       python_requirement_name, package_to_convert.python_name,
                                       python_requirement_version, modified_version)
                        python_requirement_version = modified_version
        return normalize_package_version(python_requirement_version, prerelease_workaround=self.prerelease_workaround)
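The trailing-zero quirk can be illustrated without py2deb's helpers. A self-contained sketch of the comparison the method performs (the version numbers are made up):

required = '1.0.0'  # absolutely pinned in install_requires
included = '1.0'    # actual version embedded in the package metadata

required_parts = required.split('.')
included_parts = included.split('.')
common = min(len(required_parts), len(included_parts))
prefixes_match = required_parts[:common] == included_parts[:common]
suffix = required_parts[common:]

# pip treats the two versions as equal but apt does not, so the superfluous
# trailing zeros are stripped from the requirement.
if prefixes_match and suffix and all(p.isdigit() and int(p) == 0 for p in suffix):
    required = '.'.join(required_parts[:common])
print(required)  # -> 1.0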
def check_duplicate_files(dependency_set, cache=None):
    """
    Check a collection of Debian package archives for conflicts.

    :param dependency_set: A list of filenames (strings) of ``*.deb`` files.
    :param cache: The :class:`.PackageCache` to use (defaults to :data:`None`).
    :raises: :exc:`~exceptions.ValueError` when fewer than two package
             archives are given (the duplicate check obviously only works if
             there are packages to compare :-).
    :raises: :exc:`DuplicateFilesFound` when duplicate files are found
             within a group of package archives.

    This check looks for duplicate files in package archives that concern
    different packages. Ignores groups of packages that have their 'Provides'
    and 'Conflicts' fields set to a common value. Other variants of 'Conflicts'
    are not supported yet.

    Because this analysis involves both the package control file fields and the
    pathnames of files installed by packages it can be really slow. To make it
    faster you can use the :class:`.PackageCache`.
    """
    timer = Timer()
    dependency_set = list(map(parse_filename, dependency_set))
    # Make sure we have something useful to work with.
    num_archives = len(dependency_set)
    if num_archives < 2:
        msg = "To check for duplicate files you need to provide two or more packages archives! (%i given)"
        raise ValueError(msg % num_archives)
    # Build up a global map of all files contained in the given package archives.
    global_contents = collections.defaultdict(set)
    global_fields = {}
    spinner = Spinner(total=num_archives)
    logger.info("Checking for duplicate files in %i package archives ..", num_archives)
    for i, archive in enumerate(optimize_order(dependency_set), start=1):
        spinner.step(label="Scanning %i package archives" % num_archives, progress=i)
        fields, contents = inspect_package(archive.filename, cache=cache)
        global_fields[archive.filename] = fields
        for pathname, stat in contents.items():
            if not stat.permissions.startswith('d'):
                global_contents[pathname].add(archive)
    spinner.clear()
    # Count the number of duplicate files between sets of conflicting packages
    # for more user friendly reporting.
    duplicate_files = collections.defaultdict(lambda: dict(count=0, filenames=[]))
    for pathname, packages in global_contents.items():
        if len(packages) > 1:
            # Override the sort key to be the filename because we don't need
            # to properly sort by version (which is slow on large collections).
            key = tuple(sorted(packages, key=lambda p: p.filename))
            duplicate_files[key]['count'] += 1
            duplicate_files[key]['filenames'].append(pathname)
    for packages, information in sorted(duplicate_files.items()):
        # Never report multiple versions of the same package.
        if len(set(package.name for package in packages)) == 1:
            duplicate_files.pop(packages)
            continue

        # We check for one common case where it's easy to guarantee that
        # we're not dealing with broken packages: All of the packages have
        # marked each other as conflicting via the combination of the
        # fields `Provides:' and `Conflicts:'.
        def find_virtual_name(field_name):
            package_names = set()
            for archive in packages:
                field = global_fields[archive.filename].get(field_name)
                if field:
                    package_names |= field.names
                else:
                    return
            if len(package_names) == 1:
                return list(package_names)[0]

        marked_conflicts = find_virtual_name('Conflicts')
        marked_provides = find_virtual_name('Provides')
        if marked_conflicts and marked_conflicts == marked_provides:
            duplicate_files.pop(packages)
    # Boring string formatting, trying to find a way to clearly present conflicts.
    summary = []
    for packages, information in sorted(duplicate_files.items()):
        block = []
        conflicts = pluralize(information['count'], 'conflict', 'conflicts')
        block.append("Found %s between %i packages:\n" % (conflicts, len(packages)))
        for i, package in enumerate(sorted(packages), start=1):
            block.append("  %i. %s\n" % (i, package.filename))
        block.append("These packages contain %s:\n" % conflicts)
        for i, filename in enumerate(sorted(information['filenames']), start=1):
            block.append("  %i. %s\n" % (i, filename))
        summary.append(''.join(block))
    if summary:
        archives_involved = set(itertools.chain.from_iterable(duplicate_files.keys()))
        files = pluralize(len(duplicate_files), 'duplicate file', 'duplicate files')
        archives = pluralize(len(archives_involved), 'package archive', 'package archives')
        summary.insert(0, "Found %s in %s!\n" % (files, archives))
        summary.append(compact("""
            Hint: If the package contents are correct you can resolve these
            conflicts by marking the packages as conflicting. You do this by
            adding the 'Conflicts' and 'Provides' fields and setting them to a
            common value. That should silence this message.
        """))
        delimiter = '%s\n' % ('-' * 79)
        raise DuplicateFilesFound(delimiter.join(summary))
    else:
        logger.info("No conflicting files found (took %s).", timer)