Example 1
 def test_boolean_coercion(self):
     """Test :func:`humanfriendly.coerce_boolean()`."""
     for value in [True, 'TRUE', 'True', 'true', 'on', 'yes', '1']:
         self.assertEqual(True, humanfriendly.coerce_boolean(value))
     for value in [False, 'FALSE', 'False', 'false', 'off', 'no', '0']:
         self.assertEqual(False, humanfriendly.coerce_boolean(value))
     self.assertRaises(ValueError, humanfriendly.coerce_boolean, 'not a boolean')
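A quick sketch of the behaviour these assertions exercise, using only the input values listed above; the import path matches the one used throughout these examples.

    from humanfriendly import coerce_boolean

    print(coerce_boolean('yes'))   # True (likewise for True, 'TRUE', 'True', 'true', 'on' and '1')
    print(coerce_boolean('off'))   # False (likewise for False, 'FALSE', 'False', 'false', 'no' and '0')
    try:
        coerce_boolean('not a boolean')
    except ValueError as error:
        print("Unrecognized input raises ValueError:", error)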
Example 2
def load_config_file(configuration_file=None):
    """
    Load a configuration file with backup directories and rotation schemes.

    :param configuration_file: Override the pathname of the configuration file
                               to load (a string or :data:`None`).
    :returns: A generator of tuples with three values each:

              1. A location object that combines an execution context created
                 using :mod:`executor.contexts` with the pathname of a
                 directory containing backups.
              2. A dictionary with the rotation scheme.
              3. A dictionary with additional options.
    :raises: :exc:`~exceptions.ValueError` when `configuration_file` is given
             but doesn't exist or can't be loaded.

    When `configuration_file` isn't given :data:`LOCAL_CONFIG_FILE` and
    :data:`GLOBAL_CONFIG_FILE` are checked and the first configuration file
    that exists is loaded. This function is used by :class:`RotateBackups` to
    discover user defined rotation schemes and by :mod:`rotate_backups.cli` to
    discover directories for which backup rotation is configured.
    """
    parser = configparser.RawConfigParser()
    if configuration_file:
        logger.verbose("Reading configuration file %s ..",
                       format_path(configuration_file))
        loaded_files = parser.read(configuration_file)
        if len(loaded_files) == 0:
            msg = "Failed to read configuration file! (%s)"
            raise ValueError(msg % configuration_file)
    else:
        for config_file in LOCAL_CONFIG_FILE, GLOBAL_CONFIG_FILE:
            pathname = parse_path(config_file)
            if parser.read(pathname):
                logger.verbose("Reading configuration file %s ..",
                               format_path(pathname))
                break
    for section in parser.sections():
        items = dict(parser.items(section))
        context_options = {}
        if coerce_boolean(items.get('use-sudo')):
            context_options['sudo'] = True
        if items.get('ssh-user'):
            context_options['ssh_user'] = items['ssh-user']
        location = coerce_location(section, **context_options)
        rotation_scheme = dict((name, coerce_retention_period(items[name]))
                               for name in SUPPORTED_FREQUENCIES
                               if name in items)
        options = dict(include_list=split(items.get('include-list', '')),
                       exclude_list=split(items.get('exclude-list', '')),
                       io_scheduling_class=items.get('ionice'),
                       strict=coerce_boolean(items.get('strict', 'yes')),
                       prefer_recent=coerce_boolean(
                           items.get('prefer-recent', 'no')))
        yield location, rotation_scheme, options
Example 3
def load_config_file(configuration_file=None):
    """
    Load a configuration file with backup directories and rotation schemes.

    :param configuration_file: Override the pathname of the configuration file
                               to load (a string or :data:`None`).
    :returns: A generator of tuples with three values each:

              1. A location object that combines an execution context created
                 using :mod:`executor.contexts` with the pathname of a
                 directory containing backups.
              2. A dictionary with the rotation scheme.
              3. A dictionary with additional options.
    :raises: :exc:`~exceptions.ValueError` when `configuration_file` is given
             but doesn't exist or can't be loaded.

    When `configuration_file` isn't given :data:`LOCAL_CONFIG_FILE` and
    :data:`GLOBAL_CONFIG_FILE` are checked and the first configuration file
    that exists is loaded. This function is used by :class:`RotateBackups` to
    discover user defined rotation schemes and by :mod:`rotate_backups.cli` to
    discover directories for which backup rotation is configured.
    """
    parser = configparser.RawConfigParser()
    if configuration_file:
        logger.verbose("Reading configuration file %s ..", format_path(configuration_file))
        loaded_files = parser.read(configuration_file)
        if len(loaded_files) == 0:
            msg = "Failed to read configuration file! (%s)"
            raise ValueError(msg % configuration_file)
    else:
        for config_file in LOCAL_CONFIG_FILE, GLOBAL_CONFIG_FILE:
            pathname = parse_path(config_file)
            if parser.read(pathname):
                logger.verbose("Reading configuration file %s ..", format_path(pathname))
                break
    for section in parser.sections():
        items = dict(parser.items(section))
        context_options = {}
        if coerce_boolean(items.get('use-sudo')):
            context_options['sudo'] = True
        if items.get('ssh-user'):
            context_options['ssh_user'] = items['ssh-user']
        location = coerce_location(section, **context_options)
        rotation_scheme = dict((name, coerce_retention_period(items[name]))
                               for name in SUPPORTED_FREQUENCIES
                               if name in items)
        options = dict(include_list=split(items.get('include-list', '')),
                       exclude_list=split(items.get('exclude-list', '')),
                       io_scheduling_class=items.get('ionice'),
                       strict=coerce_boolean(items.get('strict', 'yes')),
                       prefer_recent=coerce_boolean(items.get('prefer-recent', 'no')))
        yield location, rotation_scheme, options
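A minimal consumption sketch for the generator shown above. The import path is an assumption based on the rotate_backups package these excerpts come from; the three-value unpacking follows the yield statement in the code.

    from rotate_backups import load_config_file  # assumed import path

    # Each configuration file section yields one tuple: the backup location,
    # the rotation scheme and the dictionary of additional options.
    for location, rotation_scheme, options in load_config_file():
        print(location, rotation_scheme, options)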
Example 4
    def set_auto_install(self, enabled):
        """
        Enable or disable automatic installation of build time dependencies.

        :param enabled: Any value, evaluated using
                        :func:`~humanfriendly.coerce_boolean()`.
        """
        self.pip_accel.config.auto_install = coerce_boolean(enabled)
Example 5
    def set_auto_install(self, enabled):
        """
        Enable or disable automatic installation of build time dependencies.

        :param enabled: Any value, evaluated using
                        :py:func:`~humanfriendly.coerce_boolean()`.
        """
        self.pip_accel.config.auto_install = coerce_boolean(enabled)
Example 6
    def trust_mod_times(self):
        """
        Whether to trust file modification times for cache invalidation.

        - Environment variable: ``$PIP_ACCEL_TRUST_MOD_TIMES``
        - Configuration option: ``trust-mod-times``
        - Default: :data:`True` unless the AppVeyor_ continuous integration
                   environment is detected (see `issue 62`_).

        .. _AppVeyor: http://www.appveyor.com
        .. _issue 62: https://github.com/paylogic/pip-accel/issues/62
        """
        on_appveyor = coerce_boolean(os.environ.get('APPVEYOR', 'False'))
        return coerce_boolean(self.get(property_name='trust_mod_times',
                                       environment_variable='PIP_ACCEL_TRUST_MOD_TIMES',
                                       configuration_option='trust-mod-times',
                                       default=(not on_appveyor)))
Example 7
    def set_lintian_enabled(self, enabled):
        """
        Enable or disable automatic Lintian_ checks after package building.

        :param enabled: Any value, evaluated using
                        :py:func:`~humanfriendly.coerce_boolean()`.

        .. _Lintian: http://lintian.debian.org/
        """
        self.lintian_enabled = coerce_boolean(enabled)
Example 8
def main():
    from humanfriendly import coerce_boolean

    user_input = coerce_boolean(sys.argv[1]) if len(sys.argv) > 1 else True

    if user_input:
        print("Muting.")
    else:
        print("Unmuting.")

    mixer = Mixer()
    return mixer.mute(user_input)
Example 9
    def is_bare(self):
        """
        :data:`True` if the repository has no working tree, :data:`False` if it does.

        The value of this property is computed by running
        the ``git config --get core.bare`` command.
        """
        # Make sure the local repository exists.
        self.create()
        # Ask git whether this is a bare repository.
        return coerce_boolean(self.context.capture(
            'git', 'config', '--get', 'core.bare',
        ))
Example 10
    def s3_cache_create_bucket(self):
        """
        Whether to automatically create the Amazon S3 bucket when it's missing.

        - Environment variable: ``$PIP_ACCEL_S3_CREATE_BUCKET``
        - Configuration option: ``s3-create-bucket``
        - Default: ``False``

        For details please refer to the :py:mod:`pip_accel.caches.s3` module.
        """
        return coerce_boolean(self.get(property_name='s3_cache_create_bucket',
                                       environment_variable='PIP_ACCEL_S3_CREATE_BUCKET',
                                       configuration_option='s3-create-bucket',
                                       default=False))
Example 11
    def s3_cache_create_bucket(self):
        """
        Whether to automatically create the Amazon S3 bucket when it's missing.

        - Environment variable: ``$PIP_ACCEL_S3_CREATE_BUCKET``
        - Configuration option: ``s3-create-bucket``
        - Default: :data:`False`

        For details please refer to the :mod:`pip_accel.caches.s3` module.
        """
        return coerce_boolean(self.get(property_name='s3_cache_create_bucket',
                                       environment_variable='PIP_ACCEL_S3_CREATE_BUCKET',
                                       configuration_option='s3-create-bucket',
                                       default=False))
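A hedged sketch of how the environment variable override is meant to be used. The Config class, its import path, and the property-style attribute access are assumptions based on the pip_accel.config excerpts shown here; any value accepted by coerce_boolean() works.

    import os

    from pip_accel.config import Config  # assumed import path

    # The environment variable overrides the documented default of False.
    os.environ['PIP_ACCEL_S3_CREATE_BUCKET'] = 'yes'
    config = Config()
    print(config.s3_cache_create_bucket)  # True, because 'yes' coerces to True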
Example 12
def is_syslog_supported():
    """
    Determine whether system logging is supported.

    :returns:

        :data:`True` if system logging is supported and can be enabled,
        :data:`False` if system logging is not supported or there are good
        reasons for not enabling it.

    The decision making process here is as follows:

    Override
     If the environment variable ``$COLOREDLOGS_SYSLOG`` is set, it is evaluated
     using :func:`~humanfriendly.coerce_boolean()` and the resulting value
     overrides the platform detection discussed below. This allows users to
     override the decision making process if they disagree or know better.

    Linux / UNIX
     On systems that are not Windows or MacOS (see below) we assume UNIX which
     means either syslog is available or sending a bunch of UDP packets to
     nowhere won't hurt anyone...

    Microsoft Windows
     Over the years I've had multiple reports of :pypi:`coloredlogs` spewing
     extremely verbose errno 10057 warning messages to the console (once for
     each log message I suppose) so I now assume by default that
     "syslog-style system logging" is not generally available on Windows.

    Apple MacOS
     There's CPython issue `#38780`_, which seems to result in a fatal exception
     when the Python interpreter shuts down. This is (way) worse than not
     having system logging enabled. The error message mentioned in `#38780`_
     has actually been following me around for years now, see for example:

     - https://github.com/xolox/python-rotate-backups/issues/9 mentions Docker
       images implying Linux, so not strictly the same as `#38780`_.

     - https://github.com/xolox/python-npm-accel/issues/4 is definitely related
       to `#38780`_ and is what eventually prompted me to add the
       :func:`is_syslog_supported()` logic.

    .. _#38780: https://bugs.python.org/issue38780
    """
    override = os.environ.get("COLOREDLOGS_SYSLOG")
    if override is not None:
        return coerce_boolean(override)
    else:
        return not (on_windows() or on_macos())
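A small sketch of the override described in the docstring above. Only the environment variable name and the coerce_boolean() semantics are taken from the excerpt; the import path for is_syslog_supported() is an assumption.

    import os

    from coloredlogs.syslog import is_syslog_supported  # assumed import path

    # Any value accepted by coerce_boolean() works; the override takes
    # precedence over the platform detection described above.
    os.environ['COLOREDLOGS_SYSLOG'] = 'no'
    print(is_syslog_supported())  # False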
Example 13
    def test_system_package_dependency_installation(self):
        """
        Test the (automatic) installation of required system packages.

        This test installs lxml 3.2.1 to confirm that the system packages
        required by lxml are automatically installed by pip-accel to make the
        build of lxml succeed.

        .. warning:: This test forces the removal of the system package
                     ``libxslt1-dev`` before it tries to install lxml, because
                     without this nasty hack the test would only install
                     required system packages on the first run, because on
                     later runs the required system packages would already be
                     installed. Because of this very unconventional behavior
                     the test is skipped unless the environment variable
                     ``PIP_ACCEL_TEST_AUTO_INSTALL=yes`` is set (opt-in).
        """
        # Test system package dependency handling.
        if not coerce_boolean(os.environ.get('PIP_ACCEL_TEST_AUTO_INSTALL')):
            logger.warning("Skipping system package dependency installation test (set the environment variable"
                           " PIP_ACCEL_TEST_AUTO_INSTALL=true to allow the test suite to use `sudo').")
            return
        # Force the removal of a system package required by `lxml' without
        # removing any (reverse) dependencies (we don't actually want to
        # break the system, thank you very much :-). Disclaimer: you opt in
        # to this with $PIP_ACCEL_TEST_AUTO_INSTALL...
        os.system('sudo dpkg --remove --force-depends libxslt1-dev')
        # Make sure that when automatic installation is disabled the system
        # package manager refuses to install the missing dependency.
        accelerator = self.initialize_pip_accel(auto_install=False, data_directory=create_temporary_directory())
        self.assertRaises(DependencyInstallationRefused, accelerator.install_from_arguments, [
            '--ignore-installed', 'lxml==3.2.1'
        ])
        # Try to ask for permission but make the prompt fail because standard
        # input cannot be read (this test suite obviously needs to be
        # non-interactive) and make sure the system package manager refuses to
        # install the missing dependency.
        with PatchedAttribute(sys, 'stdin', open(os.devnull)):
            accelerator = self.initialize_pip_accel(auto_install=None, data_directory=create_temporary_directory())
            self.assertRaises(DependencyInstallationRefused, accelerator.install_from_arguments, [
                '--ignore-installed', 'lxml==3.2.1'
            ])
        # Install lxml while a system dependency is missing and automatic installation is allowed.
        accelerator = self.initialize_pip_accel(auto_install=True,
                                                data_directory=create_temporary_directory())
        num_installed = accelerator.install_from_arguments([
            '--ignore-installed', 'lxml==3.2.1'
        ])
        assert num_installed == 1, "Expected pip-accel to install exactly one package!"
Example 14
def auto_install():
    """
    Automatically call :func:`install()` when ``$COLOREDLOGS_AUTO_INSTALL`` is set.

    The `coloredlogs` package includes a `path configuration file`_ that
    automatically imports the :mod:`coloredlogs` module and calls
    :func:`auto_install()` when the environment variable
    ``$COLOREDLOGS_AUTO_INSTALL`` is set.

    This function uses :func:`~humanfriendly.coerce_boolean()` to check whether
    the value of ``$COLOREDLOGS_AUTO_INSTALL`` should be considered :data:`True`.

    .. _path configuration file: https://docs.python.org/2/library/site.html#module-site
    """
    if coerce_boolean(os.environ.get('COLOREDLOGS_AUTO_INSTALL', 'false')):
        install()
Example 15
    def auto_install(self):
        """
        ``True`` if automatic installation of missing system packages is
        enabled, ``False`` if it is disabled, ``None`` otherwise (in this case
        the user will be prompted at the appropriate time).

        - Environment variable: ``$PIP_ACCEL_AUTO_INSTALL`` (refer to
          :py:func:`~humanfriendly.coerce_boolean()` for details on how the
          value of the environment variable is interpreted)
        - Configuration option: ``auto-install`` (also parsed using
          :py:func:`~humanfriendly.coerce_boolean()`)
        - Default: ``None``
        """
        value = self.get(property_name='auto_install',
                         environment_variable='PIP_ACCEL_AUTO_INSTALL',
                         configuration_option='auto-install')
        if value is not None:
            return coerce_boolean(value)
Example 16
    def s3_cache_readonly(self):
        """
        If this is ``True`` then the Amazon S3 bucket will only be used for
        :py:class:`~pip_accel.caches.s3.S3CacheBackend.get()` operations (all
        :py:class:`~pip_accel.caches.s3.S3CacheBackend.put()` operations will
        be disabled).

        - Environment variable: ``$PIP_ACCEL_S3_READONLY`` (refer to
          :py:func:`~humanfriendly.coerce_boolean()` for details on how the
          value of the environment variable is interpreted)
        - Configuration option: ``s3-readonly`` (also parsed using
          :py:func:`~humanfriendly.coerce_boolean()`)
        - Default: ``False``

        For details please refer to the :py:mod:`pip_accel.caches.s3` module.
        """
        return coerce_boolean(self.get(property_name='s3_cache_readonly',
                                       environment_variable='PIP_ACCEL_S3_READONLY',
                                       configuration_option='s3-readonly',
                                       default=False))
Example 17
    def s3_cache_readonly(self):
        """
        If this is ``True`` then the Amazon S3 bucket will only be used for
        :py:class:`~pip_accel.caches.s3.S3CacheBackend.get()` operations (all
        :py:class:`~pip_accel.caches.s3.S3CacheBackend.put()` operations will
        be disabled).

        - Environment variable: ``$PIP_ACCEL_S3_READONLY`` (refer to
          :py:func:`~humanfriendly.coerce_boolean()` for details on how the
          value of the environment variable is interpreted)
        - Configuration option: ``s3-readonly`` (also parsed using
          :py:func:`~humanfriendly.coerce_boolean()`)
        - Default: ``False``

        For details please refer to the :py:mod:`pip_accel.caches.s3` module.
        """
        return coerce_boolean(
            self.get(property_name='s3_cache_readonly',
                     environment_variable='PIP_ACCEL_S3_READONLY',
                     configuration_option='s3-readonly',
                     default=False))
Example 18
# The `coloredlogs' package installs a logging handler on the root logger which
# means all loggers automatically write their log messages to the standard
# error stream. In the case of Boto this is a bit confusing because Boto logs
# messages with the ERROR severity even when nothing is wrong, because it
# tries to connect to the Amazon EC2 metadata service which is (obviously) not
# available outside of Amazon EC2:
#
#   boto[6851] DEBUG Retrieving credentials from metadata server.
#   boto[6851] ERROR Caught exception reading instance data
#
# To avoid confusing users of pip-accel (i.e. this is not an error because it's
# properly handled) we silence the Boto logger. To avoid annoying people who
# actually want to debug Boto we'll also provide an escape hatch in the form of
# an environment variable.
if coerce_boolean(os.environ.get('PIP_ACCEL_SILENCE_BOTO', 'true')):
    logging.getLogger('boto').setLevel(logging.FATAL)


class S3CacheBackend(AbstractCacheBackend):
    """The S3 cache backend stores distribution archives in a user defined Amazon S3 bucket."""

    PRIORITY = 20

    def get(self, filename):
        """
        Download a cached distribution archive from the configured Amazon S3
        bucket to the local cache.

        :param filename: The filename of the distribution archive (a string).
        :returns: The pathname of a distribution archive on the local file
Example 19
 def runTest(self):
     """
     A very basic test of the functions that make up the pip-accel command
     using the `virtualenv` package as a test case.
     """
     accelerator = PipAccelerator(Config(), validate=False)
     # We will test the downloading, conversion to binary distribution and
     # installation of the virtualenv package (we simply need a package we
     # know is available from PyPI).
     arguments = ['--ignore-installed', 'virtualenv==1.8.4']
     # First we do a simple sanity check that unpack_source_dists() does NOT
     # connect to PyPI when it's missing source distributions (it should
     # raise a DistributionNotFound exception instead).
     try:
         accelerator.unpack_source_dists(arguments)
         # This line should never be reached.
         self.assertTrue(False)
     except Exception as e:
         # We expect a `DistributionNotFound' exception.
         self.assertTrue(isinstance(e, DistributionNotFound))
     # Download the source distribution from PyPI.
     requirements = accelerator.download_source_dists(arguments)
     self.assertTrue(isinstance(requirements, list))
     self.assertEqual(len(requirements), 1)
     self.assertEqual(requirements[0].name, 'virtualenv')
     self.assertEqual(requirements[0].version, '1.8.4')
     self.assertTrue(os.path.isdir(requirements[0].source_directory))
     # Test the build and installation of the binary package. We have to
     # pass `prefix' explicitly here because the Python process running this
     # test is not inside the virtual environment created to run the
     # tests...
     accelerator.install_requirements(requirements,
                                      prefix=self.virtual_environment,
                                      python=os.path.join(
                                          self.virtual_environment, 'bin',
                                          'python'))
     # Validate that the `virtualenv' package was properly installed.
     logger.debug("Checking that `virtualenv' executable was installed ..")
     self.assertTrue(
         os.path.isfile(
             os.path.join(self.virtual_environment, 'bin', 'virtualenv')))
     logger.debug("Checking that `virtualenv' command works ..")
     command = '%s --help' % pipes.quote(
         os.path.join(self.virtual_environment, 'bin', 'virtualenv'))
     self.assertEqual(os.system(command), 0)
     # We now have a non-empty download cache and source index so this
     # should not raise an exception (it should use the source index).
     accelerator.unpack_source_dists(arguments)
     # Verify that pip-accel properly deals with broken symbolic links
     # pointing from the source index to the download cache.
     os.unlink(
         os.path.join(self.download_cache,
                      os.listdir(self.download_cache)[0]))
     accelerator = PipAccelerator(Config(), validate=False)
     accelerator.install_from_arguments(arguments)
     # Verify that pip-accel properly handles setup.py scripts that break
     # the `bdist_dumb' action but support the `bdist' action as a fall
     # back.
     accelerator = PipAccelerator(Config(), validate=False)
     accelerator.install_from_arguments(['paver==1.2.3'])
     # I'm not yet sure how to effectively test the command line interface,
     # because this test suite abuses validate=False which the command line
     # interface does not expose. That's why the following will report an
     # error message. For now at least we're running the code and making
     # sure there are no syntax errors / incompatibilities.
     try:
         sys.argv = ['pip-accel', 'install', 'virtualenv==1.8.4']
         main()
         # This should not be reached.
         self.assertTrue(False)
     except BaseException as e:
         # For now the main() function is expected to fail and exit with a
         # nonzero status code (explained above).
         self.assertTrue(isinstance(e, SystemExit))
     # Test system package dependency handling.
     if coerce_boolean(os.environ.get('PIP_ACCEL_TEST_AUTO_INSTALL')):
         # Force the removal of a system package required by `lxml' without
         # removing any (reverse) dependencies (we don't actually want to
         # break the system, thank you very much :-). Disclaimer: you opt in
         # to this with $PIP_ACCEL_TEST_AUTO_INSTALL...
         os.system('sudo dpkg --remove --force-depends libxslt1-dev')
         os.environ['PIP_ACCEL_AUTO_INSTALL'] = 'true'
         accelerator = PipAccelerator(Config(), validate=False)
         accelerator.install_from_arguments(
             arguments=['--ignore-installed', 'lxml==3.2.1'],
             prefix=self.virtual_environment,
             python=os.path.join(self.virtual_environment, 'bin', 'python'))
Example 20
 def __init__(self):
     self.enabled = coerce_boolean(os.environ.get('DPT_FORCE_ENTROPY', 'false'))
     if self.enabled:
         self.process = multiprocessing.Process(target=generate_entropy)
Example 21
 def __init__(self):
     """Initialize a :class:`EntropyGenerator` object."""
     self.enabled = coerce_boolean(
         os.environ.get('DPT_FORCE_ENTROPY', 'false'))
     if self.enabled:
         self.process = multiprocessing.Process(target=generate_entropy)
Example 22
 def __init__(self):
     self.enabled = coerce_boolean(
         os.environ.get('DPT_FORCE_ENTROPY', 'false'))
     if self.enabled:
         self.process = multiprocessing.Process(target=generate_entropy)
Example 23
# External dependencies.
from executor import execute, ExternalCommandFailed
from humanfriendly import coerce_boolean, concatenate, format_path, Spinner, Timer
from six.moves import configparser

# Modules included in our package.
from deb_pkg_tools import config
from deb_pkg_tools.control import unparse_control_fields
from deb_pkg_tools.gpg import GPGKey, initialize_gnupg
from deb_pkg_tools.package import find_package_archives, inspect_package_fields
from deb_pkg_tools.utils import atomic_lock, find_installed_version, optimize_order, sha1
from deb_pkg_tools.version import Version

# Enable power users to disable the use of `sudo' (because it
# may not be available in non-Debian build environments).
ALLOW_SUDO = coerce_boolean(os.environ.get('DPT_SUDO', 'true'))

# Initialize a logger.
logger = logging.getLogger(__name__)


def scan_packages(repository, packages_file=None, cache=None):
    """
    A reimplementation of the ``dpkg-scanpackages -m`` command in Python.

    Updates a ``Packages`` file based on the Debian package archive(s) found in
    the given directory. Uses :class:`.PackageCache` to (optionally) speed
    up the process significantly by caching package metadata and hashes on
    disk. This explains why this function can be much faster than
    ``dpkg-scanpackages -m``.
Example 24
 def lintian_enabled(self, value):
     """Automatically coerce :attr:`lintian_enabled` to a boolean value."""
     set_property(self, 'lintian_enabled', coerce_boolean(value))
Example 25
# The `coloredlogs' package installs a logging handler on the root logger which
# means all loggers automatically write their log messages to the standard
# error stream. In the case of Boto this is a bit confusing because Boto logs
# messages with the ERROR severity even when nothing is wrong, because it
# tries to connect to the Amazon EC2 metadata service which is (obviously) not
# available outside of Amazon EC2:
#
#   boto[6851] DEBUG Retrieving credentials from metadata server.
#   boto[6851] ERROR Caught exception reading instance data
#
# To avoid confusing users of pip-accel (i.e. this is not an error because it's
# properly handled) we silence the Boto logger. To avoid annoying people who
# actually want to debug Boto we'll also provide an escape hatch in the form of
# an environment variable.
if coerce_boolean(os.environ.get('PIP_ACCEL_SILENCE_BOTO', 'true')):
    logging.getLogger('boto').setLevel(logging.FATAL)


class S3CacheBackend(AbstractCacheBackend):

    """The S3 cache backend stores distribution archives in a user defined Amazon S3 bucket."""

    PRIORITY = 20

    def get(self, filename):
        """
        Download a distribution archive from the configured Amazon S3 bucket.

        :param filename: The filename of the distribution archive (a string).
        :returns: The pathname of a distribution archive on the local file
Example 26
    '*.pyo',  # Python optimized byte code files (http://lintian.debian.org/tags/package-installs-python-bytecode.html)
    '*~',  # Emacs/Vim backup files (http://lintian.debian.org/tags/backup-file-in-package.html)
    '.*.s??',  # Vim named swap files
    '.bzrignore',  # Bazaar ignore files (http://lintian.debian.org/tags/package-contains-vcs-control-file.html)
    '.DS_Store',  # Mac OS X custom folder attributes (http://lintian.debian.org/tags/macos-ds-store-file-in-package.html)
    '.DS_Store.gz',  # Mac OS X custom folder attributes (http://lintian.debian.org/tags/macos-ds-store-file-in-package.html)
    '._*',  # Mac OS X resource fork (http://lintian.debian.org/tags/macos-resource-fork-file-in-package.html)
    '.gitignore',  # Git ignore files (http://lintian.debian.org/tags/package-contains-vcs-control-file.html)
    '.hg_archival.txt',  # Artefact of `hg archive' (http://lintian.debian.org/tags/package-contains-vcs-control-file.html)
    '.hgignore',  # Mercurial ignore files (http://lintian.debian.org/tags/package-contains-vcs-control-file.html)
    '.hgtags',  # Mercurial ignore files (http://lintian.debian.org/tags/package-contains-vcs-control-file.html)
    '.s??')  # Vim anonymous swap files

# Enable power users to customize how packages are built, allowing limited
# use of deb-pkg-tools in non-Debian environments like Mac OS X.
ALLOW_CHOWN = coerce_boolean(os.environ.get('DPT_CHOWN_FILES', 'true'))
ALLOW_FAKEROOT_OR_SUDO = coerce_boolean(
    os.environ.get('DPT_ALLOW_FAKEROOT_OR_SUDO', 'true'))
ALLOW_HARD_LINKS = coerce_boolean(os.environ.get('DPT_HARD_LINKS', 'true'))
ALLOW_RESET_SETGID = coerce_boolean(os.environ.get('DPT_RESET_SETGID', 'true'))


def parse_filename(filename):
    """
    Parse the filename of a Debian binary package archive into three fields:
    the name of the package, its version and its architecture. Raises
    :py:exc:`ValueError` when the given filename cannot be parsed. See also
    :py:func:`determine_package_archive()`.

    Here's an example:
Example 27
    def test_system_package_dependency_installation(self):
        """
        Test the (automatic) installation of required system packages.

        This test installs lxml 3.2.1 to confirm that the system packages
        required by lxml are automatically installed by pip-accel to make the
        build of lxml succeed.

        .. warning:: This test forces the removal of the system package
                     ``libxslt1-dev`` before it tries to install lxml, because
                     without this nasty hack the test would only install
                     required system packages on the first run, because on
                     later runs the required system packages would already be
                     installed. Because of this very unconventional behavior
                     the test is skipped unless the environment variable
                     ``PIP_ACCEL_TEST_AUTO_INSTALL=yes`` is set (opt-in).
        """
        if WINDOWS:
            return self.skipTest("""
                Skipping system package dependency installation
                test (not supported on Windows).
            """)
        elif not coerce_boolean(os.environ.get('PIP_ACCEL_TEST_AUTO_INSTALL')):
            return self.skipTest("""
                Skipping system package dependency installation test because
                you need to set $PIP_ACCEL_TEST_AUTO_INSTALL=true to allow the
                test suite to use `sudo'.
            """)
        # Force the removal of a system package required by `lxml' without
        # removing any (reverse) dependencies (we don't actually want to
        # break the system, thank you very much :-). Disclaimer: you opt in
        # to this with $PIP_ACCEL_TEST_AUTO_INSTALL...
        lxml_dependency = 'libxslt1-dev'
        subprocess.call([
            'sudo', '-p', "\n Please provide sudo access to (temporarily) remove %s: " % lxml_dependency,
            'dpkg', '--remove', '--force-depends', lxml_dependency,
        ])
        # Make sure that when automatic installation is disabled the system
        # package manager refuses to install the missing dependency.
        accelerator = self.initialize_pip_accel(auto_install=False, data_directory=create_temporary_directory())
        self.assertRaises(DependencyInstallationRefused, accelerator.install_from_arguments, [
            '--ignore-installed', 'lxml==3.2.1'
        ])

        # A file-like object that always says no :-).
        class FakeStandardInput(object):
            def readline(self):
                return 'no\n'

        # Try to ask for permission but refuse to give it.
        with PatchedAttribute(sys, 'stdin', FakeStandardInput()):
            accelerator = self.initialize_pip_accel(auto_install=None, data_directory=create_temporary_directory())
            self.assertRaises(DependencyInstallationRefused, accelerator.install_from_arguments, [
                '--ignore-installed', 'lxml==3.2.1'
            ])
        # Install lxml while a system dependency is missing and automatic installation is allowed.
        accelerator = self.initialize_pip_accel(auto_install=True,
                                                data_directory=create_temporary_directory())
        num_installed = accelerator.install_from_arguments([
            '--ignore-installed', 'lxml==3.2.1'
        ])
        assert num_installed == 1, "Expected pip-accel to install exactly one package!"
Example 28
def load_config_file(configuration_file=None, expand=True):
    """
    Load a configuration file with backup directories and rotation schemes.

    :param configuration_file: Override the pathname of the configuration file
                               to load (a string or :data:`None`).
    :param expand: :data:`True` to expand filename patterns to their matches,
                   :data:`False` otherwise.
    :returns: A generator of tuples with three values each:

              1. A location object that combines an execution context created
                 using :mod:`executor.contexts` with the pathname of a
                 directory containing backups.
              2. A dictionary with the rotation scheme.
              3. A dictionary with additional options.
    :raises: :exc:`~exceptions.ValueError` when `configuration_file` is given
             but doesn't exist or can't be loaded.

    This function is used by :class:`RotateBackups` to discover user defined
    rotation schemes and by :mod:`rotate_backups.cli` to discover directories
    for which backup rotation is configured. When `configuration_file` isn't
    given :class:`~update_dotdee.ConfigLoader` is used to search for
    configuration files in the following locations:

    - ``/etc/rotate-backups.ini`` and ``/etc/rotate-backups.d/*.ini``
    - ``~/.rotate-backups.ini`` and ``~/.rotate-backups.d/*.ini``
    - ``~/.config/rotate-backups.ini`` and ``~/.config/rotate-backups.d/*.ini``

    All of the available configuration files are loaded in the order given
    above, so that sections in user-specific configuration files override
    sections by the same name in system-wide configuration files.
    """
    expand_notice_given = False
    if configuration_file:
        loader = ConfigLoader(available_files=[configuration_file], strict=True)
    else:
        loader = ConfigLoader(program_name='rotate-backups', strict=False)
    for section in loader.section_names:
        items = dict(loader.get_options(section))
        context_options = {}
        if coerce_boolean(items.get('use-sudo')):
            context_options['sudo'] = True
        if items.get('ssh-user'):
            context_options['ssh_user'] = items['ssh-user']
        location = coerce_location(section, **context_options)
        rotation_scheme = dict((name, coerce_retention_period(items[name]))
                               for name in SUPPORTED_FREQUENCIES
                               if name in items)
        options = dict(include_list=split(items.get('include-list', '')),
                       exclude_list=split(items.get('exclude-list', '')),
                       io_scheduling_class=items.get('ionice'),
                       timestamp=items.get('timestamp'),
                       strict=coerce_boolean(items.get('strict', 'yes')),
                       prefer_recent=coerce_boolean(items.get('prefer-recent', 'no')))
        # Don't override the value of the 'removal_command' property unless the
        # 'removal-command' configuration file option has a value set.
        if items.get('removal-command'):
            options['removal_command'] = shlex.split(items['removal-command'])
        # Expand filename patterns?
        if expand and location.have_wildcards:
            logger.verbose("Expanding filename pattern %s on %s ..", location.directory, location.context)
            if location.is_remote and not expand_notice_given:
                logger.notice("Expanding remote filename patterns (may be slow) ..")
                expand_notice_given = True
            for match in sorted(location.context.glob(location.directory)):
                if location.context.is_directory(match):
                    logger.verbose("Matched directory: %s", match)
                    expanded = Location(context=location.context, directory=match)
                    yield expanded, rotation_scheme, options
                else:
                    logger.verbose("Ignoring match (not a directory): %s", match)
        else:
            yield location, rotation_scheme, options
Example 29
def main():
    """Command line interface for the ``rotate-backups`` program."""
    coloredlogs.install()
    # Command line option defaults.
    rotation_scheme = {}
    kw = dict(include_list=[], exclude_list=[])
    parallel = False
    use_sudo = False
    use_syslog = (not on_windows())
    # Internal state.
    selected_locations = []
    # Parse the command line arguments.
    try:
        options, arguments = getopt.getopt(
            sys.argv[1:], 'M:H:d:w:m:y:t:I:x:jpri:c:C:uS:fnvqh', [
                'minutely=',
                'hourly=',
                'daily=',
                'weekly=',
                'monthly=',
                'yearly=',
                'timestamp-pattern=',
                'include=',
                'exclude=',
                'parallel',
                'prefer-recent',
                'relaxed',
                'ionice=',
                'config=',
                'removal-command=',
                'use-sudo',
                'syslog=',
                'force',
                'dry-run',
                'verbose',
                'quiet',
                'help',
            ])
        for option, value in options:
            if option in ('-M', '--minutely'):
                rotation_scheme['minutely'] = coerce_retention_period(value)
            elif option in ('-H', '--hourly'):
                rotation_scheme['hourly'] = coerce_retention_period(value)
            elif option in ('-d', '--daily'):
                rotation_scheme['daily'] = coerce_retention_period(value)
            elif option in ('-w', '--weekly'):
                rotation_scheme['weekly'] = coerce_retention_period(value)
            elif option in ('-m', '--monthly'):
                rotation_scheme['monthly'] = coerce_retention_period(value)
            elif option in ('-y', '--yearly'):
                rotation_scheme['yearly'] = coerce_retention_period(value)
            elif option in ('-t', '--timestamp-pattern'):
                kw['timestamp_pattern'] = value
            elif option in ('-I', '--include'):
                kw['include_list'].append(value)
            elif option in ('-x', '--exclude'):
                kw['exclude_list'].append(value)
            elif option in ('-j', '--parallel'):
                parallel = True
            elif option in ('-p', '--prefer-recent'):
                kw['prefer_recent'] = True
            elif option in ('-r', '--relaxed'):
                kw['strict'] = False
            elif option in ('-i', '--ionice'):
                value = validate_ionice_class(value.lower().strip())
                kw['io_scheduling_class'] = value
            elif option in ('-c', '--config'):
                kw['config_file'] = parse_path(value)
            elif option in ('-C', '--removal-command'):
                removal_command = shlex.split(value)
                logger.info("Using custom removal command: %s",
                            removal_command)
                kw['removal_command'] = removal_command
            elif option in ('-u', '--use-sudo'):
                use_sudo = True
            elif option in ('-S', '--syslog'):
                use_syslog = coerce_boolean(value)
            elif option in ('-f', '--force'):
                kw['force'] = True
            elif option in ('-n', '--dry-run'):
                logger.info("Performing a dry run (because of %s option) ..",
                            option)
                kw['dry_run'] = True
            elif option in ('-v', '--verbose'):
                coloredlogs.increase_verbosity()
            elif option in ('-q', '--quiet'):
                coloredlogs.decrease_verbosity()
            elif option in ('-h', '--help'):
                usage(__doc__)
                return
            else:
                assert False, "Unhandled option! (programming error)"
        if use_syslog:
            enable_system_logging()
        if rotation_scheme:
            logger.verbose("Rotation scheme defined on command line: %s",
                           rotation_scheme)
        if arguments:
            # Rotation of the locations given on the command line.
            location_source = 'command line arguments'
            selected_locations.extend(
                coerce_location(value, sudo=use_sudo) for value in arguments)
        else:
            # Rotation of all configured locations.
            location_source = 'configuration file'
            selected_locations.extend(
                location
                for location, rotation_scheme, options in load_config_file(
                    configuration_file=kw.get('config_file'), expand=True))
        # Inform the user which location(s) will be rotated.
        if selected_locations:
            logger.verbose("Selected %s based on %s:",
                           pluralize(len(selected_locations), "location"),
                           location_source)
            for number, location in enumerate(selected_locations, start=1):
                logger.verbose(" %i. %s", number, location)
        else:
            # Show the usage message when no directories are given or configured.
            logger.verbose("No location(s) to rotate selected.")
            usage(__doc__)
            return
    except Exception as e:
        logger.error("%s", e)
        sys.exit(1)
    # Rotate the backups in the selected directories.
    program = RotateBackups(rotation_scheme, **kw)
    if parallel:
        program.rotate_concurrent(*selected_locations)
    else:
        for location in selected_locations:
            program.rotate_backups(location)
Example 30
# The `coloredlogs' package installs a logging handler on the root logger which
# means all loggers automatically write their log messages to the standard
# error stream. In the case of Boto this is a bit confusing because Boto logs
# messages with the ERROR severity even when nothing is wrong, because it
# tries to connect to the Amazon EC2 metadata service which is (obviously) not
# available outside of Amazon EC2:
#
#   boto[6851] DEBUG Retrieving credentials from metadata server.
#   boto[6851] ERROR Caught exception reading instance data
#
# To avoid confusing users of pip-accel (i.e. this is not an error because it's
# properly handled) we silence the Boto logger. To avoid annoying people who
# actually want to debug Boto we'll also provide an escape hatch in the form of
# an environment variable.
if coerce_boolean(os.environ.get("PIP_ACCEL_SILENCE_BOTO", "true")):
    logging.getLogger("boto").setLevel(logging.FATAL)


class S3CacheBackend(AbstractCacheBackend):

    """The S3 cache backend stores distribution archives in a user defined Amazon S3 bucket."""

    PRIORITY = 20

    def get(self, filename):
        """
        Download a distribution archive from the configured Amazon S3 bucket.

        :param filename: The filename of the distribution archive (a string).
        :returns: The pathname of a distribution archive on the local file
Example 31
from deb_pkg_tools.version.native import compare_version_objects

# Public identifiers that require documentation.
__all__ = (
    'DPKG_COMPARISON_CACHE',
    'NATIVE_COMPARISON_CACHE',
    'PREFER_DPKG',
    'Version',
    'coerce_version',
    'compare_versions',
    'compare_versions_native',
    'compare_versions_external',
    'logger',
)

PREFER_DPKG = coerce_boolean(os.environ.get('DPT_VERSION_COMPAT', 'false'))
"""
:data:`True` to prefer :func:`compare_versions_external()` over
:func:`compare_versions_native()`, :data:`False` otherwise (the
default is :data:`False`).

The environment variable ``$DPT_VERSION_COMPAT`` can be used to control the
value of this variable (see :func:`~humanfriendly.coerce_boolean()` for
acceptable values).

.. note:: This option was added in preparation for release 8.0, which
          replaces python-apt_ based version comparison with a pure Python
          implementation that, although tested, definitely has the potential to
          cause regressions. If regressions do surface, this option provides an
          easy to use "escape hatch" to restore compatibility.
Example 32
def load_config_file(configuration_file=None, expand=True):
    """
    Load a configuration file with backup directories and rotation schemes.

    :param configuration_file: Override the pathname of the configuration file
                               to load (a string or :data:`None`).
    :param expand: :data:`True` to expand filename patterns to their matches,
                   :data:`False` otherwise.
    :returns: A generator of tuples with three values each:

              1. A location object that combines an execution context created
                 using :mod:`executor.contexts` with the pathname of a
                 directory containing backups.
              2. A dictionary with the rotation scheme.
              3. A dictionary with additional options.
    :raises: :exc:`~exceptions.ValueError` when `configuration_file` is given
             but doesn't exist or can't be loaded.

    This function is used by :class:`RotateBackups` to discover user defined
    rotation schemes and by :mod:`rotate_backups.cli` to discover directories
    for which backup rotation is configured. When `configuration_file` isn't
    given :class:`~update_dotdee.ConfigLoader` is used to search for
    configuration files in the following locations:

    - ``/etc/rotate-backups.ini`` and ``/etc/rotate-backups.d/*.ini``
    - ``~/.rotate-backups.ini`` and ``~/.rotate-backups.d/*.ini``
    - ``~/.config/rotate-backups.ini`` and ``~/.config/rotate-backups.d/*.ini``

    All of the available configuration files are loaded in the order given
    above, so that sections in user-specific configuration files override
    sections by the same name in system-wide configuration files.
    """
    expand_notice_given = False
    if configuration_file:
        loader = ConfigLoader(available_files=[configuration_file], strict=True)
    else:
        loader = ConfigLoader(program_name='rotate-backups', strict=False)
    for section in loader.section_names:
        items = dict(loader.get_options(section))
        context_options = {}
        if coerce_boolean(items.get('use-sudo')):
            context_options['sudo'] = True
        if items.get('ssh-user'):
            context_options['ssh_user'] = items['ssh-user']
        location = coerce_location(section, **context_options)
        rotation_scheme = dict((name, coerce_retention_period(items[name]))
                               for name in SUPPORTED_FREQUENCIES
                               if name in items)
        options = dict(include_list=split(items.get('include-list', '')),
                       exclude_list=split(items.get('exclude-list', '')),
                       io_scheduling_class=items.get('ionice'),
                       strict=coerce_boolean(items.get('strict', 'yes')),
                       prefer_recent=coerce_boolean(items.get('prefer-recent', 'no')))
        # Don't override the value of the 'removal_command' property unless the
        # 'removal-command' configuration file option has a value set.
        if items.get('removal-command'):
            options['removal_command'] = shlex.split(items['removal-command'])
        # Expand filename patterns?
        if expand and location.have_wildcards:
            logger.verbose("Expanding filename pattern %s on %s ..", location.directory, location.context)
            if location.is_remote and not expand_notice_given:
                logger.notice("Expanding remote filename patterns (may be slow) ..")
                expand_notice_given = True
            for match in sorted(location.context.glob(location.directory)):
                if location.context.is_directory(match):
                    logger.verbose("Matched directory: %s", match)
                    expanded = Location(context=location.context, directory=match)
                    yield expanded, rotation_scheme, options
                else:
                    logger.verbose("Ignoring match (not a directory): %s", match)
        else:
            yield location, rotation_scheme, options
Example 33
__version__ = '2.1'
"""Semi-standard module versioning."""

SPHINX_ACTIVE = 'sphinx' in sys.modules
"""
:data:`True` when Sphinx_ is running, :data:`False` otherwise.

We detect whether Sphinx is running by checking for the presence of the
'sphinx' key in :data:`sys.modules`. The result determines the default
value of :data:`USAGE_NOTES_ENABLED`.
"""

USAGE_NOTES_VARIABLE = 'PROPERTY_MANAGER_USAGE_NOTES'
"""The name of the environment variable that controls whether usage notes are enabled (a string)."""

USAGE_NOTES_ENABLED = (coerce_boolean(os.environ[USAGE_NOTES_VARIABLE]) if
                       USAGE_NOTES_VARIABLE in os.environ else SPHINX_ACTIVE)
"""
:data:`True` if usage notes are enabled, :data:`False` otherwise.

This defaults to the environment variable :data:`USAGE_NOTES_VARIABLE` (coerced
using :func:`~humanfriendly.coerce_boolean()`) when available, otherwise
:data:`SPHINX_ACTIVE` determines the default value.

Usage notes are only injected when Sphinx is running, for performance reasons.
It's nothing critical of course, but modifying hundreds or thousands of
docstrings that no one is going to look at seems rather pointless :-).
"""

NOTHING = object()
"""A unique object instance used to detect missing attributes."""
Example 34
 def __init__(self):
     """Initialize a :class:`EntropyGenerator` object."""
     self.enabled = coerce_boolean(os.environ.get('DPT_FORCE_ENTROPY', 'false'))
     if self.enabled:
         self.process = multiprocessing.Process(target=generate_entropy)
Example 35
__all__ = (
    "EntropyGenerator",
    "FORCE_ENTROPY",
    "GPGKey",
    "GPG_AGENT_VARIABLE",
    "create_directory",
    "generate_entropy",
    "have_updated_gnupg",
    "initialize_gnupg",
    "logger",
)

# Initialize a logger.
logger = logging.getLogger(__name__)

FORCE_ENTROPY = coerce_boolean(os.environ.get('DPT_FORCE_ENTROPY', 'false'))
"""
:data:`True` to allow :func:`GPGKey.generate_key_pair()` to force the system to
generate entropy based on disk I/O, :data:`False` to disallow this behavior
(the default).

This was added to facilitate the deb-pkg-tools test suite running on Travis CI.
It is assumed that this rather obscure functionality will only ever be useful
in the same context: Running a test suite in a virtualization environment with
very low entropy.

The environment variable ``$DPT_FORCE_ENTROPY`` can be used to control the
value of this variable (see :func:`~humanfriendly.coerce_boolean()` for
acceptable values).
"""
Example 36
from executor import execute, ExternalCommandFailed
from humanfriendly import coerce_boolean, concatenate, format_path, Spinner, Timer
from humanfriendly.decorators import cached
from six.moves import configparser

# Modules included in our package.
from deb_pkg_tools import config
from deb_pkg_tools.control import unparse_control_fields
from deb_pkg_tools.gpg import GPGKey, initialize_gnupg
from deb_pkg_tools.package import find_package_archives, inspect_package_fields
from deb_pkg_tools.utils import atomic_lock, find_installed_version, optimize_order, sha1
from deb_pkg_tools.version import Version

# Enable power users to disable the use of `sudo' (because it
# may not be available in non-Debian build environments).
ALLOW_SUDO = coerce_boolean(os.environ.get('DPT_SUDO', 'true'))

# Initialize a logger.
logger = logging.getLogger(__name__)


def scan_packages(repository, packages_file=None, cache=None):
    """
    A reimplementation of the ``dpkg-scanpackages -m`` command in Python.

    Updates a ``Packages`` file based on the Debian package archive(s) found in
    the given directory. Uses :class:`.PackageCache` to (optionally) speed
    up the process significantly by caching package metadata and hashes on
    disk. This explains why this function can be much faster than
    ``dpkg-scanpackages -m``.
Example 37
 def runTest(self):
     """
     A very basic test of the functions that make up the pip-accel command
     using the `virtualenv` package as a test case.
     """
     accelerator = PipAccelerator(Config(), validate=False)
     # We will test the downloading, conversion to binary distribution and
     # installation of the virtualenv package (we simply need a package we
     # know is available from PyPI).
     arguments = ['--ignore-installed', 'virtualenv==1.8.4']
     # First we do a simple sanity check that unpack_source_dists() does NOT
     # connect to PyPI when it's missing source distributions (it should
     # raise a DistributionNotFound exception instead).
     try:
         accelerator.unpack_source_dists(arguments)
         # This line should never be reached.
         self.assertTrue(False)
     except Exception as e:
         # We expect a `DistributionNotFound' exception.
         self.assertTrue(isinstance(e, DistributionNotFound))
     # Download the source distribution from PyPI.
     requirements = accelerator.download_source_dists(arguments)
     self.assertTrue(isinstance(requirements, list))
     self.assertEqual(len(requirements), 1)
     self.assertEqual(requirements[0].name, 'virtualenv')
     self.assertEqual(requirements[0].version, '1.8.4')
     self.assertTrue(os.path.isdir(requirements[0].source_directory))
     # Make sure install_requirements() (really install_binary_dist())
     # validates its arguments.
     self.assertRaises(ValueError,
                       accelerator.install_requirements,
                       requirements=requirements,
                       prefix=self.virtual_environment,
                       python='/usr/bin/python')
     # Test the build and installation of the binary package. We have to
     # pass `prefix' explicitly here because the Python process running this
     # test is not inside the virtual environment created to run the
     # tests...
     accelerator.install_requirements(requirements,
                                      prefix=self.virtual_environment,
                                      python=os.path.join(self.virtual_environment, 'bin', 'python'))
     # Validate that the `virtualenv' package was properly installed.
     logger.debug("Checking that `virtualenv' executable was installed ..")
     self.assertTrue(os.path.isfile(os.path.join(self.virtual_environment, 'bin', 'virtualenv')))
     logger.debug("Checking that `virtualenv' command works ..")
     command = '%s --help' % pipes.quote(os.path.join(self.virtual_environment, 'bin', 'virtualenv'))
     self.assertEqual(os.system(command), 0)
     # We now have a non-empty download cache and source index so this
     # should not raise an exception (it should use the source index).
     accelerator.unpack_source_dists(arguments)
     # Verify that pip-accel properly deals with broken symbolic links
     # pointing from the source index to the download cache.
     os.unlink(os.path.join(self.download_cache, os.listdir(self.download_cache)[0]))
     accelerator = PipAccelerator(Config(), validate=False)
     accelerator.install_from_arguments(arguments)
     # I'm not yet sure how to effectively test the command line interface,
     # because this test suite abuses validate=False which the command line
     # interface does not expose. That's why the following will report an
     # error message. For now at least we're running the code and making
     # sure there are no syntax errors / incompatibilities.
     try:
         sys.argv = ['pip-accel', 'install', 'virtualenv==1.8.4']
         main()
         # This should not be reached.
         self.assertTrue(False)
     except BaseException as e:
         # For now the main() function is expected to fail and exit with a
         # nonzero status code (explained above).
         self.assertTrue(isinstance(e, SystemExit))
     # Test system package dependency handling.
     if coerce_boolean(os.environ.get('PIP_ACCEL_TEST_AUTO_INSTALL')):
         # Force the removal of a system package required by `lxml' without
         # removing any (reverse) dependencies (we don't actually want to
         # break the system, thank you very much :-). Disclaimer: you opt in
         # to this with $PIP_ACCEL_TEST_AUTO_INSTALL...
         os.system('sudo dpkg --remove --force-depends libxslt1-dev')
         os.environ['PIP_ACCEL_AUTO_INSTALL'] = 'true'
         accelerator = PipAccelerator(Config(), validate=False)
         accelerator.install_from_arguments(arguments=['--ignore-installed', 'lxml==3.2.1'],
                                            prefix=self.virtual_environment,
                                            python=os.path.join(self.virtual_environment, 'bin', 'python'))
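
The sanity checks above use the try/except plus assertTrue(False) idiom. As a side note (nothing pip-accel specific), the same intent can be expressed with assertRaises() used as a context manager, available since Python 2.7; a minimal self-contained demonstration:

import unittest


class AssertRaisesIdiom(unittest.TestCase):

    """Demonstrates assertRaises() as a context manager, an alternative to the
    try/except + assertTrue(False) pattern used in the test above."""

    def test_context_manager(self):
        with self.assertRaises(ZeroDivisionError):
            1 / 0


if __name__ == '__main__':
    unittest.main()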
Example 41
def main():
    """Command line interface for the ``chat-archive`` program."""
    # Enable logging to the terminal.
    coloredlogs.install()
    # Parse the command line options.
    program_opts = dict()
    command_name = None
    try:
        options, arguments = getopt.gnu_getopt(
            sys.argv[1:],
            "C:fl:c:p:vqh",
            [
                "context=",
                "force",
                "log-file=",
                "color=",
                "colour=",
                "profile=",
                "verbose",
                "quiet",
                "help",
            ],
        )
        for option, value in options:
            if option in ("-C", "--context"):
                program_opts["context"] = int(value)
            elif option in ("-f", "--force"):
                program_opts["force"] = True
            elif option in ("-l", "--log-file"):
                handler = logging.FileHandler(parse_path(value))
                handler.setFormatter(logging.Formatter(
                    fmt="%(asctime)s %(name)s[%(process)d] %(levelname)s %(message)s",
                    datefmt="%Y-%m-%d %H:%M:%S"))
                handler.setLevel(logging.DEBUG)
                logging.root.addHandler(handler)
                logging.root.setLevel(logging.NOTSET)
            elif option in ("-c", "--color", "--colour"):
                mapping = dict(always=True, never=False)
                program_opts["use_colors"] = mapping[
                    value] if value in mapping else coerce_boolean(value)
            elif option in ("-p", "--profile"):
                program_opts["profile_file"] = parse_path(value)
            elif option in ("-v", "--verbose"):
                coloredlogs.increase_verbosity()
            elif option in ("-q", "--quiet"):
                coloredlogs.decrease_verbosity()
            elif option in ("-h", "--help"):
                usage(__doc__)
                sys.exit(0)
            else:
                assert False, "Unhandled option!"
        # Make sure the operator provided a command.
        if not arguments:
            usage(__doc__)
            sys.exit(0)
    except Exception as e:
        warning("Failed to parse command line arguments: %s", e)
        sys.exit(1)
    try:
        # We extract any search keywords from the command line arguments before
        # initializing an instance of the UserInterface class, to enable
        # initialization of the KeywordHighlighter class.
        if arguments[0] == "search":
            program_opts["keywords"] = arguments[1:]
        # Initialize the chat archive.
        with UserInterface(**program_opts) as program:
            # Validate the requested command.
            command_name = arguments.pop(0)
            method_name = "%s_cmd" % command_name
            if not hasattr(program, method_name):
                warning("Error: Invalid command name '%s'!", command_name)
                sys.exit(1)
            # Execute the requested command.
            command_fn = getattr(program, method_name)
            command_fn(arguments)
    except KeyboardInterrupt:
        logger.notice("Interrupted by Control-C ..")
        sys.exit(1)
    except Exception:
        logger.exception("Aborting due to unexpected exception!")
        sys.exit(1)
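
A note on the --color handling above: "always" and "never" are not spellings that coerce_boolean() recognizes (it would presumably raise ValueError for them), which is why main() consults the explicit mapping first and only falls back to coercion for the boolean-like spellings. A small illustration:

from humanfriendly import coerce_boolean

mapping = dict(always=True, never=False)
for value in ('always', 'never', 'yes', 'no'):
    use_colors = mapping[value] if value in mapping else coerce_boolean(value)
    print("%s -> %r" % (value, use_colors))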
Example 42
                   '*.pyo',            # Python optimized byte code files (http://lintian.debian.org/tags/package-installs-python-bytecode.html)
                   '*~',               # Emacs/Vim backup files (http://lintian.debian.org/tags/backup-file-in-package.html)
                   '.*.s??',           # Vim named swap files
                   '.bzrignore',       # Bazaar ignore files (http://lintian.debian.org/tags/package-contains-vcs-control-file.html)
                   '.DS_Store',        # Mac OS X custom folder attributes (http://lintian.debian.org/tags/macos-ds-store-file-in-package.html)
                   '.DS_Store.gz',     # Mac OS X custom folder attributes (http://lintian.debian.org/tags/macos-ds-store-file-in-package.html)
                   '._*',              # Mac OS X resource fork (http://lintian.debian.org/tags/macos-resource-fork-file-in-package.html)
                   '.gitignore',       # Git ignore files (http://lintian.debian.org/tags/package-contains-vcs-control-file.html)
                   '.hg_archival.txt', # Artefact of `hg archive' (http://lintian.debian.org/tags/package-contains-vcs-control-file.html)
                   '.hgignore',        # Mercurial ignore files (http://lintian.debian.org/tags/package-contains-vcs-control-file.html)
                   '.hgtags',          # Mercurial tags file (http://lintian.debian.org/tags/package-contains-vcs-control-file.html)
                   '.s??')             # Vim anonymous swap files

# Enable power users to customize how packages are built in order to enable
# limited use of deb-pkg-tools in non-Debian environments like Mac OS X.
ALLOW_CHOWN = coerce_boolean(os.environ.get('DPT_CHOWN_FILES', 'true'))
ALLOW_FAKEROOT_OR_SUDO = coerce_boolean(os.environ.get('DPT_ALLOW_FAKEROOT_OR_SUDO', 'true'))
ALLOW_HARD_LINKS = coerce_boolean(os.environ.get('DPT_HARD_LINKS', 'true'))
ALLOW_RESET_SETGID = coerce_boolean(os.environ.get('DPT_RESET_SETGID', 'true'))
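
As an aside (a hypothetical illustration, not code from deb-pkg-tools): toggles like these are typically consulted at the point where the privileged operation would otherwise happen, for example:

import os

from humanfriendly import coerce_boolean

ALLOW_FAKEROOT_OR_SUDO = coerce_boolean(os.environ.get('DPT_ALLOW_FAKEROOT_OR_SUDO', 'true'))


def build_command(command):
    """Hypothetical helper: only prefix `fakeroot' when the toggle allows it."""
    return ['fakeroot'] + list(command) if ALLOW_FAKEROOT_OR_SUDO else list(command)


print(build_command(['dpkg-deb', '--build', 'example']))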

def parse_filename(filename):
    """
    Parse the filename of a Debian binary package archive into three fields:
    the name of the package, its version and its architecture. See also
    :py:func:`determine_package_archive()`.

    :param filename: The pathname of a ``*.deb`` archive (a string).
    :returns: A :py:class:`PackageFile` object.
    :raises: Raises :py:exc:`~exceptions.ValueError` when the given filename
             cannot be parsed.
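
The docstring describes the ``name_version_architecture.deb`` naming convention. A minimal sketch of that parsing (hypothetical; the real parse_filename() returns a PackageFile object and performs more validation):

import os


def parse_deb_filename_sketch(filename):
    """Hypothetical sketch: split `name_version_architecture.deb' into its three fields."""
    basename, extension = os.path.splitext(os.path.basename(filename))
    if extension != '.deb':
        raise ValueError("Refusing to parse filename without *.deb extension! (%s)" % filename)
    name, version, architecture = basename.split('_')
    return name, version, architecture


print(parse_deb_filename_sketch('/repo/deb-pkg-tools_1.0_all.deb'))
# Expected output: ('deb-pkg-tools', '1.0', 'all')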
Example 43
__version__ = '2.1'
"""Semi-standard module versioning."""

SPHINX_ACTIVE = 'sphinx' in sys.modules
"""
:data:`True` when Sphinx_ is running, :data:`False` otherwise.

We detect whether Sphinx is running by checking for the presence of the
'sphinx' key in :data:`sys.modules`. The result determines the default
value of :data:`USAGE_NOTES_ENABLED`.
"""

USAGE_NOTES_VARIABLE = 'PROPERTY_MANAGER_USAGE_NOTES'
"""The name of the environment variable that controls whether usage notes are enabled (a string)."""

USAGE_NOTES_ENABLED = (coerce_boolean(os.environ[USAGE_NOTES_VARIABLE])
                       if USAGE_NOTES_VARIABLE in os.environ
                       else SPHINX_ACTIVE)
"""
:data:`True` if usage notes are enabled, :data:`False` otherwise.

This defaults to the value of the environment variable named by
:data:`USAGE_NOTES_VARIABLE` (coerced using
:func:`~humanfriendly.coerce_boolean()`) when that variable is set, otherwise
:data:`SPHINX_ACTIVE` determines the default value.

Usage notes are only injected when Sphinx is running, for performance reasons.
It's nothing critical of course, but modifying hundreds or thousands of
docstrings that no one is going to look at seems rather pointless :-).
"""

NOTHING = object()