Exemplo n.º 1
0
 def test_package_building(self, repository=None, overrides=None, contents=None):
     """
     Test building of Debian binary packages.

     :param repository: Optional directory to move the built archive into
                        (when given the archive's new pathname is returned).
     :param overrides: Optional mapping of control fields that override
                       ``TEST_PACKAGE_FIELDS``.
     :param contents: Optional mapping of relative filenames to file data;
                      when empty a default template with conffiles is created.
     :returns: The pathname of the generated ``*.deb`` archive.
     """
     # Use None sentinels instead of mutable default arguments: a shared
     # default dict would leak state between calls (and between tests).
     overrides = {} if overrides is None else overrides
     contents = {} if contents is None else contents
     with Context() as finalizers:
         build_directory = finalizers.mkdtemp()
         control_fields = merge_control_fields(TEST_PACKAGE_FIELDS, overrides)
         # Create the package template.
         os.mkdir(os.path.join(build_directory, 'DEBIAN'))
         with open(os.path.join(build_directory, 'DEBIAN', 'control'), 'wb') as handle:
             control_fields.dump(handle)
         if contents:
             for filename, data in contents.items():
                 filename = os.path.join(build_directory, filename)
                 directory = os.path.dirname(filename)
                 makedirs(directory)
                 with open(filename, 'w') as handle:
                     handle.write(data)
         else:
             with open(os.path.join(build_directory, 'DEBIAN', 'conffiles'), 'wb') as handle:
                 handle.write(b'/etc/file1\n')
                 handle.write(b'/etc/file2\n')
             # Create the directory with configuration files.
             os.mkdir(os.path.join(build_directory, 'etc'))
             touch(os.path.join(build_directory, 'etc', 'file1'))
             touch(os.path.join(build_directory, 'etc', 'file3'))
             # Create a directory that should be cleaned up by clean_package_tree().
             makedirs(os.path.join(build_directory, 'tmp', '.git'))
             # Create a file that should be cleaned up by clean_package_tree().
             with open(os.path.join(build_directory, 'tmp', '.gitignore'), 'w') as handle:
                 handle.write('\n')
         # Build the package (without any contents :-).
         returncode, output = run_cli(main, '--build', build_directory)
         assert returncode == 0
         package_file = os.path.join(tempfile.gettempdir(),
                                     '%s_%s_%s.deb' % (control_fields['Package'],
                                                       control_fields['Version'],
                                                       control_fields['Architecture']))
         assert os.path.isfile(package_file)
         if repository:
             shutil.move(package_file, repository)
             return os.path.join(repository, os.path.basename(package_file))
         else:
             finalizers.register(os.unlink, package_file)
             # Verify the package metadata.
             fields, contents = inspect_package(package_file)
             for name in TEST_PACKAGE_FIELDS:
                 assert fields[name] == TEST_PACKAGE_FIELDS[name]
             # Verify that the package contains the `/' and `/tmp'
             # directories (since it doesn't contain any actual files).
             assert contents['/'].permissions[0] == 'd'
             assert contents['/'].permissions[1:] == 'rwxr-xr-x'
             assert contents['/'].owner == 'root'
             assert contents['/'].group == 'root'
             assert contents['/tmp/'].permissions[0] == 'd'
             assert contents['/tmp/'].owner == 'root'
             assert contents['/tmp/'].group == 'root'
             # Verify that clean_package_tree() cleaned up properly
             # (`/tmp/.git' and `/tmp/.gitignore' have been cleaned up).
             assert '/tmp/.git/' not in contents
             assert '/tmp/.gitignore' not in contents
             return package_file
Exemplo n.º 2
0
def show_package_metadata(archive):
    """
    Show the metadata and contents of a Debian archive on the terminal.

    :param archive: The pathname of an existing ``*.deb`` archive (a string).
    """
    control_fields, contents = inspect_package(archive)
    say(highlight("Package metadata from %s:"), format_path(archive))
    for name in sorted(control_fields):
        value = control_fields[name]
        # Render the installed size as a human friendly amount.
        if name == 'Installed-Size':
            value = format_size(int(value) * 1024)
        say(" - %s %s", highlight(name + ":"), value)
    say(highlight("Package contents from %s:"), format_path(archive))
    for pathname, entry in sorted(contents.items()):
        # Right align the size in a ten character wide column.
        size = format_size(entry.size, keep_width=True).rjust(10)
        # Symbolic links show their target after an arrow.
        display_name = pathname + ' -> ' + entry.target if entry.target else pathname
        say("{permissions} {owner} {group} {size} {modified} {pathname}",
            permissions=entry.permissions,
            owner=entry.owner,
            group=entry.group,
            size=size,
            modified=entry.modified,
            pathname=display_name)
Exemplo n.º 3
0
    def test_custom_conversion_command(self):
        """
        Convert a simple Python package that requires a custom conversion command.

        Converts Fabric and sanity checks the result. For details please refer
        to :func:`py2deb.converter.PackageConverter.set_conversion_command()`.
        """
        if sys.version_info[0] == 3:
            self.skipTest("Fabric is not Python 3.x compatible")
        with TemporaryDirectory() as directory:
            # Convert Fabric with a command that strips the bundled paramiko.
            converter = self.create_isolated_converter()
            converter.set_repository(directory)
            converter.set_conversion_command('Fabric', 'rm -Rf paramiko')
            converter.convert(['--no-deps', 'Fabric==0.9.0'])
            # Locate the *.deb archive that was generated.
            archives = glob.glob('%s/*.deb' % directory)
            logger.debug("Found generated archive(s): %s", archives)
            pathname = find_package_archive(archives, fix_name_prefix('python-fabric'))
            # Inspect the generated package with deb-pkg-tools.
            metadata, contents = inspect_package(pathname)
            # Installed files should mention fabric but never paramiko.
            for installed_path, entry in contents.items():
                is_directory = entry.permissions.startswith('d')
                if installed_path.startswith('/usr/lib') and not is_directory:
                    assert 'fabric' in installed_path.lower()
                    assert 'paramiko' not in installed_path.lower()
Exemplo n.º 4
0
 def test_package_building(self, repository=None, overrides=None, contents=None):
     """
     Test building of Debian binary packages.

     :param repository: Optional directory to move the built archive into
                        (when given the archive's new pathname is returned).
     :param overrides: Optional mapping of control fields that override
                       ``TEST_PACKAGE_FIELDS``.
     :param contents: Optional mapping of relative filenames to file data;
                      when empty a default template with conffiles is created.
     :returns: The pathname of the generated ``*.deb`` archive.
     """
     # None sentinels replace the mutable default arguments: a shared
     # default dict would leak state between calls (and between tests).
     overrides = {} if overrides is None else overrides
     contents = {} if contents is None else contents
     with Context() as finalizers:
         build_directory = finalizers.mkdtemp()
         control_fields = merge_control_fields(TEST_PACKAGE_FIELDS, overrides)
         # Create the package template.
         os.mkdir(os.path.join(build_directory, 'DEBIAN'))
         with open(os.path.join(build_directory, 'DEBIAN', 'control'), 'wb') as handle:
             control_fields.dump(handle)
         if contents:
             for filename, data in contents.items():
                 filename = os.path.join(build_directory, filename)
                 directory = os.path.dirname(filename)
                 makedirs(directory)
                 with open(filename, 'w') as handle:
                     handle.write(data)
         else:
             with open(os.path.join(build_directory, 'DEBIAN', 'conffiles'), 'wb') as handle:
                 handle.write(b'/etc/file1\n')
                 handle.write(b'/etc/file2\n')
             # Create the directory with configuration files.
             os.mkdir(os.path.join(build_directory, 'etc'))
             touch(os.path.join(build_directory, 'etc', 'file1'))
             touch(os.path.join(build_directory, 'etc', 'file3'))
             # Create a directory that should be cleaned up by clean_package_tree().
             makedirs(os.path.join(build_directory, 'tmp', '.git'))
             # Create a file that should be cleaned up by clean_package_tree().
             with open(os.path.join(build_directory, 'tmp', '.gitignore'), 'w') as handle:
                 handle.write('\n')
         # Build the package (without any contents :-).
         returncode, output = run_cli(main, '--build', build_directory)
         assert returncode == 0
         package_file = os.path.join(tempfile.gettempdir(),
                                     '%s_%s_%s.deb' % (control_fields['Package'],
                                                       control_fields['Version'],
                                                       control_fields['Architecture']))
         assert os.path.isfile(package_file)
         if repository:
             shutil.move(package_file, repository)
             return os.path.join(repository, os.path.basename(package_file))
         else:
             finalizers.register(os.unlink, package_file)
             # Verify the package metadata.
             fields, contents = inspect_package(package_file)
             for name in TEST_PACKAGE_FIELDS:
                 assert fields[name] == TEST_PACKAGE_FIELDS[name]
             # Verify that the package contains the `/' and `/tmp'
             # directories (since it doesn't contain any actual files).
             assert contents['/'].permissions[0] == 'd'
             assert contents['/'].permissions[1:] == 'rwxr-xr-x'
             assert contents['/'].owner == 'root'
             assert contents['/'].group == 'root'
             assert contents['/tmp/'].permissions[0] == 'd'
             assert contents['/tmp/'].owner == 'root'
             assert contents['/tmp/'].group == 'root'
             # Verify that clean_package_tree() cleaned up properly
             # (`/tmp/.git' and `/tmp/.gitignore' have been cleaned up).
             assert '/tmp/.git/' not in contents
             assert '/tmp/.gitignore' not in contents
             return package_file
Exemplo n.º 5
0
    def test_install_requires_version_munging(self):
        """
        Convert a package with a requirement whose version is "munged" by pip.

        Refer to :func:`py2deb.converter.PackageConverter.transform_version()`
        for details about the purpose of this test.
        """
        with TemporaryDirectory() as repository_directory:
            with TemporaryDirectory() as distribution_directory:
                # Generate a minimal Python source distribution on the fly.
                setup_script = os.path.join(distribution_directory, 'setup.py')
                with open(setup_script, 'w') as handle:
                    handle.write(dedent('''
                        from setuptools import setup
                        setup(
                            name='install-requires-munging-test',
                            version='1.0',
                            install_requires=['humanfriendly==1.30.0'],
                        )
                    '''))
                # Convert the trivial package defined above.
                converter = self.create_isolated_converter()
                converter.set_repository(repository_directory)
                archives, relationships = converter.convert([distribution_directory])
                # Locate the *.deb archive that was generated.
                pathname = find_package_archive(archives, 'python-install-requires-munging-test')
                # Inspect the package metadata using deb-pkg-tools.
                metadata, contents = inspect_package(pathname)
                logger.debug("Metadata of generated package: %s", dict(metadata))
                logger.debug("Contents of generated package: %s", dict(contents))
                # Only the munged version (1.30, not 1.30.0) should appear
                # in the converted dependency.
                assert metadata['Depends'].matches('python-humanfriendly', '1.30'), \
                    "py2deb failed to rewrite version of dependency!"
                assert not metadata['Depends'].matches('python-humanfriendly', '1.30.0'), \
                    "py2deb failed to rewrite version of dependency!"
Exemplo n.º 6
0
    def test_conversion_of_extras(self):
        """
        Convert a package with extras.

        Converts ``raven[flask]==3.6.0`` and sanity checks the result.
        """
        with TemporaryDirectory() as directory:
            # Run the conversion command.
            converter = self.create_isolated_converter()
            converter.set_repository(directory)
            requirements = [
                # Flask 1.0 drops Python 2.6 compatibility so we explicitly
                # include an older version to prevent raven[flask] from pulling
                # in the latest version of flask, causing this test to fail.
                'flask==0.12.4',
                'raven[flask]==3.6.0',
            ]
            archives, relationships = converter.convert(requirements)
            package_name = fix_name_prefix('python-raven-flask')
            # Check that a relationship with the extra in the package name was generated.
            assert '%s (= 3.6.0)' % package_name in relationships
            # Check that a package with the extra in the filename was generated.
            archive = find_package_archive(archives, package_name)
            assert archive
            # Use deb-pkg-tools to inspect the package metadata.
            metadata, contents = inspect_package(archive)
            logger.debug("Metadata of generated package: %s", dict(metadata))
            # Check that a "Provides" field was added.
            assert metadata['Provides'].matches(fix_name_prefix('python-raven'))
Exemplo n.º 7
0
    def test_conversion_of_binary_package_with_executable(self):
        """
        Convert a package that includes a binary executable file.

        Converts ``uwsgi==2.0.17.1`` and sanity checks the result. The goal of
        this test is to verify that py2deb preserves binary executables instead
        of truncating them as it did until `issue 9`_ was reported.

        .. _issue 9: https://github.com/paylogic/py2deb/issues/9
        """
        with TemporaryDirectory() as directory:
            # Convert uWSGI into an isolated installation prefix.
            converter = self.create_isolated_converter()
            converter.set_repository(directory)
            converter.set_install_prefix('/usr/lib/py2deb/uwsgi')
            archives, relationships = converter.convert(['uwsgi==2.0.17.1'])
            # Locate the *.deb archive that was generated.
            pathname = find_package_archive(archives, fix_name_prefix('python-uwsgi'))
            # Inspect the package contents using deb-pkg-tools.
            metadata, contents = inspect_package(pathname)
            logger.debug("Contents of generated package: %s", dict(contents))
            # A non-empty executable shows the binary wasn't truncated.
            executable = find_file(contents, '/usr/lib/py2deb/uwsgi/bin/uwsgi')
            assert executable.size > 0
Exemplo n.º 8
0
    def test_conversion_of_environment_markers(self):
        """
        Convert a package with installation requirements using environment markers.

        Converts ``weasyprint==0.42`` and sanity checks that the ``cairosvg``
        dependency is present.
        """
        with TemporaryDirectory() as directory:
            # Find our constraints file.
            module_directory = os.path.dirname(os.path.abspath(__file__))
            project_directory = os.path.dirname(module_directory)
            constraints_file = os.path.join(project_directory, 'constraints.txt')
            # Run the conversion command.
            converter = self.create_isolated_converter()
            converter.set_repository(directory)
            # Constrain tinycss2 to avoid Python 2 incompatibilities:
            # https://travis-ci.org/github/paylogic/py2deb/jobs/713388666
            command_line = ['--constraint=%s' % constraints_file, 'weasyprint==0.42']
            archives, relationships = converter.convert(command_line)
            # Check that the dependency is present.
            pathname = find_package_archive(archives, fix_name_prefix('python-weasyprint'))
            metadata, contents = inspect_package(pathname)
            # Make sure the dependency on cairosvg was added (this confirms
            # that environment markers have been evaluated).
            assert fix_name_prefix('python-cairosvg') in metadata['Depends'].names
Exemplo n.º 9
0
    def test_custom_conversion_command(self):
        """
        Convert a simple Python package that requires a custom conversion command.

        Converts Fabric and sanity checks the result. For details please refer
        to :py:func:`py2deb.converter.PackageConverter.set_conversion_command()`.
        """
        if sys.version_info[0] == 3:
            logger.warning("Skipping Fabric conversion test! (Fabric is not Python 3.x compatible)")
            return
        with TemporaryDirectory() as directory:
            # Convert Fabric with a command that removes the bundled paramiko.
            converter = self.create_isolated_converter()
            converter.set_repository(directory)
            converter.set_conversion_command('Fabric', 'rm -Rf paramiko')
            converter.convert(['Fabric==0.9.0'])
            # Locate the *.deb archives that were generated.
            archives = glob.glob('%s/*.deb' % directory)
            logger.debug("Found generated archive(s): %s", archives)
            pathname = find_package_archive(archives, 'python-fabric')
            # Inspect the generated package with deb-pkg-tools.
            metadata, contents = inspect_package(pathname)
            # Installed files should mention fabric but never paramiko.
            for installed_path, entry in contents.items():
                if installed_path.startswith('/usr/lib') and not entry.permissions.startswith('d'):
                    assert 'fabric' in installed_path.lower()
                    assert 'paramiko' not in installed_path.lower()
Exemplo n.º 10
0
 def test_conversion_with_system_package(self):
     """Convert a package and map one of its requirements to a system package."""
     with TemporaryDirectory() as repository_directory:
         with TemporaryDirectory() as distribution_directory:
             # Generate a minimal Python source distribution on the fly.
             setup_script = os.path.join(distribution_directory, 'setup.py')
             with open(setup_script, 'w') as handle:
                 handle.write(
                     dedent('''
                     from setuptools import setup
                     setup(
                         name='system-package-conversion-test',
                         version='1.0',
                         install_requires=['dbus-python'],
                     )
                 '''))
             # Convert the trivial package defined above, mapping its
             # requirement onto the Debian system package.
             converter = self.create_isolated_converter()
             converter.set_repository(repository_directory)
             converter.use_system_package('dbus-python', fix_name_prefix('python-dbus'))
             archives, relationships = converter.convert([distribution_directory])
             # Make sure only one archive was generated.
             assert len(archives) == 1
             # Use deb-pkg-tools to inspect the package metadata.
             metadata, contents = inspect_package(archives[0])
             logger.debug("Metadata of generated package: %s", dict(metadata))
             logger.debug("Contents of generated package: %s", dict(contents))
             # Inspect the converted package's dependency.
             assert metadata['Depends'].matches(fix_name_prefix('python-dbus')), \
                 "py2deb failed to rewrite dependency name!"
Exemplo n.º 11
0
    def test_install_requires_version_munging(self):
        """
        Convert a package with a requirement whose version is "munged" by pip.

        Refer to :func:`py2deb.converter.PackageConverter.transform_version()`
        for details about the purpose of this test.
        """
        with TemporaryDirectory() as repository_directory:
            with TemporaryDirectory() as distribution_directory:
                # Create a temporary (and rather trivial :-) Python package.
                with open(os.path.join(distribution_directory, 'setup.py'), 'w') as handle:
                    handle.write(dedent('''
                        from setuptools import setup
                        setup(
                            name='install-requires-munging-test',
                            version='1.0',
                            install_requires=['humanfriendly==1.30.0'],
                        )
                    '''))
                # Convert the package generated above.
                converter = self.create_isolated_converter()
                converter.set_repository(repository_directory)
                archives, relationships = converter.convert([distribution_directory])
                # Find the generated *.deb archive.
                archive = find_package_archive(archives, 'python-install-requires-munging-test')
                # Use deb-pkg-tools to inspect the package metadata.
                fields, entries = inspect_package(archive)
                logger.debug("Metadata of generated package: %s", dict(fields))
                logger.debug("Contents of generated package: %s", dict(entries))
                # pip munges 1.30.0 into 1.30 so only the munged version
                # should appear in the converted dependency.
                depends = fields['Depends']
                assert depends.matches('python-humanfriendly', '1.30'), \
                    "py2deb failed to rewrite version of dependency!"
                assert not depends.matches('python-humanfriendly', '1.30.0'), \
                    "py2deb failed to rewrite version of dependency!"
Exemplo n.º 12
0
    def test_conversion_of_package_with_dependencies(self):
        """
        Convert a non trivial Python package with several dependencies.

        Converts deb-pkg-tools_ to a Debian package archive and sanity checks the
        result. Performs static checks on the metadata (dependencies) of the
        resulting package archive.

        .. _deb-pkg-tools: https://pypi.python.org/pypi/deb-pkg-tools
        """
        # Use a temporary directory as py2deb's repository directory so that we
        # can easily find the *.deb archive generated by py2deb.
        with TemporaryDirectory() as directory:
            # Run the conversion command.
            py2deb('--repository=%s' % directory, 'deb-pkg-tools==1.22')
            # Find the generated Debian package archives.
            archives = glob.glob('%s/*.deb' % directory)
            logger.debug("Found generated archive(s): %s", archives)
            # Make sure the expected dependencies have been converted.
            converted_dependencies = {parse_filename(a).name for a in archives}
            expected_dependencies = {
                'python-cached-property',
                'python-chardet',
                'python-coloredlogs',
                'python-deb-pkg-tools',
                'python-debian',
                'python-executor',
                'python-humanfriendly',
                'python-six',
            }
            assert expected_dependencies.issubset(converted_dependencies)
            # Use deb-pkg-tools to inspect ... deb-pkg-tools :-)
            pathname = find_package_archive(archives, 'python-deb-pkg-tools')
            metadata, contents = inspect_package(pathname)
            logger.debug("Metadata of generated package: %s", dict(metadata))
            logger.debug("Contents of generated package: %s", dict(contents))
            # Make sure the dependencies defined in `stdeb.cfg' have been preserved.
            system_dependencies = ('apt', 'apt-utils', 'dpkg-dev', 'fakeroot', 'gnupg', 'lintian')
            for configured_dependency in system_dependencies:
                logger.debug("Checking configured dependency %s ..", configured_dependency)
                assert metadata['Depends'].matches(configured_dependency) is not None
            # Make sure the dependencies defined in `setup.py' have been preserved.
            python_dependencies = ('python-chardet', 'python-coloredlogs', 'python-debian',
                                   'python-executor', 'python-humanfriendly')
            for python_dependency in python_dependencies:
                logger.debug("Checking Python dependency %s ..", python_dependency)
                assert metadata['Depends'].matches(python_dependency) is not None
Exemplo n.º 13
0
 def test_package_cache_invalidation(self):
     """Exercise repeated package inspection through the package cache."""
     with Context() as finalizers:
         directory = finalizers.mkdtemp()
         overrides = dict(Package='deb-pkg-tools-package-1', Version='1')
         package_file = self.test_package_building(directory, overrides=overrides)
         # Alternate between updating the archive's timestamp and
         # reloading the cache from disk.
         for iteration in range(5):
             fields, contents = inspect_package(package_file, cache=self.package_cache)
             if iteration % 2 == 0:
                 os.utime(package_file, None)
             else:
                 self.load_package_cache()
Exemplo n.º 14
0
    def check_converted_pip_accel_packages(self, directory):
        """
        Check a group of packages converted with a custom name and installation prefix.

        Check the results of :func:`test_conversion_of_isolated_packages()` and
        :func:`test_conversion_with_configuration_file()`.
        """
        # Find the generated Debian package archives.
        archives = glob.glob('%s/*.deb' % directory)
        logger.debug("Found generated archive(s): %s", archives)
        # Make sure the expected dependencies have been converted.
        converted_dependencies = {parse_filename(a).name for a in archives}
        expected_dependencies = {
            'pip-accel',
            'pip-accel-coloredlogs-renamed',
            'pip-accel-humanfriendly',
            'pip-accel-pip',
        }
        assert expected_dependencies.issubset(converted_dependencies)
        # Use deb-pkg-tools to inspect pip-accel.
        pathname = find_package_archive(archives, 'pip-accel')
        metadata, contents = inspect_package(pathname)
        logger.debug("Metadata of generated package: %s", dict(metadata))
        logger.debug("Contents of generated package: %s", dict(contents))
        # Make sure the dependencies defined in `setup.py' have been
        # preserved while their names have been converted.
        depends = metadata['Depends']
        assert depends.matches('pip-accel-coloredlogs-renamed', '0.4.6')
        assert depends.matches('pip-accel-humanfriendly', '1.6')
        assert depends.matches('pip-accel-pip', '1.4')
        assert not depends.matches('pip-accel-pip', '1.3')
        assert not depends.matches('pip-accel-pip', '1.5')
        # Make sure the executable script has been installed and is marked as executable.
        pip_accel_executable = find_file(contents, '/usr/lib/pip-accel/bin/pip-accel')
        assert pip_accel_executable.permissions == '-rwxr-xr-x'
        # Verify the existence of some expected files (picked more or less at random).
        assert find_file(contents, '/usr/lib/pip-accel/lib/pip_accel/__init__.py')
        assert find_file(contents, '/usr/lib/pip-accel/lib/pip_accel/deps/debian.ini')
        assert find_file(contents, '/usr/lib/pip-accel/lib/pip_accel-0.12.6*.egg-info/PKG-INFO')
        # Verify that all files are installed in the custom installation
        # prefix. We have to ignore directories, otherwise we would start
        # complaining about the parent directories /, /usr, /usr/lib, etc.
        paths_to_ignore = ['/usr/share/lintian/overrides/pip-accel']
        for filename, properties in contents.items():
            if filename not in paths_to_ignore:
                is_directory = properties.permissions.startswith('d')
                in_isolated_directory = filename.startswith('/usr/lib/pip-accel/')
                assert is_directory or in_isolated_directory
Exemplo n.º 15
0
 def test_package_cache_invalidation(self):
     """Exercise repeated package inspection through the package cache."""
     with Context() as finalizers:
         directory = finalizers.mkdtemp()
         package_file = self.test_package_building(
             directory, overrides=dict(Package='deb-pkg-tools-package-1', Version='1'))
         for counter in range(5):
             fields, contents = inspect_package(
                 package_file, cache=self.package_cache)
             # Alternate between updating the archive's timestamp and
             # reloading the cache from disk.
             if counter % 2 == 0:
                 os.utime(package_file, None)
             else:
                 self.load_package_cache()
Exemplo n.º 16
0
    def test_conversion_of_package_with_dependencies(self):
        """
        Convert a non trivial Python package with several dependencies.

        Converts deb-pkg-tools_ to a Debian package archive and sanity checks the
        result. Performs static checks on the metadata (dependencies) of the
        resulting package archive.

        .. _deb-pkg-tools: https://pypi.python.org/pypi/deb-pkg-tools
        """
        # Use a temporary directory as py2deb's repository directory so that we
        # can easily find the *.deb archive generated by py2deb.
        with TemporaryDirectory() as directory:
            # Run the conversion command.
            py2deb('--repository=%s' % directory, 'deb-pkg-tools==1.22')
            # Find the generated Debian package archives.
            archives = glob.glob('%s/*.deb' % directory)
            logger.debug("Found generated archive(s): %s", archives)
            # Make sure the expected dependencies have been converted.
            converted_names = set()
            for archive in archives:
                converted_names.add(parse_filename(archive).name)
            required_names = set([
                'python-cached-property',
                'python-chardet',
                'python-coloredlogs',
                'python-deb-pkg-tools',
                'python-debian',
                'python-executor',
                'python-humanfriendly',
                'python-six',
            ])
            assert required_names.issubset(converted_names)
            # Use deb-pkg-tools to inspect ... deb-pkg-tools :-)
            pathname = find_package_archive(archives, 'python-deb-pkg-tools')
            metadata, contents = inspect_package(pathname)
            logger.debug("Metadata of generated package: %s", dict(metadata))
            logger.debug("Contents of generated package: %s", dict(contents))
            # Make sure the dependencies defined in `stdeb.cfg' have been preserved.
            for system_dependency in 'apt', 'apt-utils', 'dpkg-dev', 'fakeroot', 'gnupg', 'lintian':
                logger.debug("Checking configured dependency %s ..", system_dependency)
                assert metadata['Depends'].matches(system_dependency) is not None
            # Make sure the dependencies defined in `setup.py' have been preserved.
            for python_dependency in ('python-chardet', 'python-coloredlogs', 'python-debian',
                                      'python-executor', 'python-humanfriendly'):
                logger.debug("Checking Python dependency %s ..", python_dependency)
                assert metadata['Depends'].matches(python_dependency) is not None
Exemplo n.º 17
0
    def check_converted_pip_accel_packages(self, directory):
        """
        Verify packages converted with a custom name and installation prefix.

        Validates the output of :py:func:`test_conversion_of_isolated_packages()`
        and :py:func:`test_conversion_with_configuration_file()`.
        """
        # Collect the *.deb archives produced by the conversion.
        generated_archives = glob.glob('%s/*.deb' % directory)
        logger.debug("Found generated archive(s): %s", generated_archives)
        # Every expected package name should appear among the converted archives.
        converted_names = set(parse_filename(fn).name for fn in generated_archives)
        required_names = set([
            'pip-accel',
            'pip-accel-coloredlogs-renamed',
            'pip-accel-humanfriendly',
            'pip-accel-pip',
        ])
        assert required_names.issubset(converted_names)
        # Use deb-pkg-tools to inspect pip-accel.
        archive_pathname = find_package_archive(generated_archives, 'pip-accel')
        metadata, contents = inspect_package(archive_pathname)
        logger.debug("Metadata of generated package: %s", dict(metadata))
        logger.debug("Contents of generated package: %s", dict(contents))
        # The dependencies from `setup.py' must survive conversion with their
        # names rewritten to use the custom prefix.
        depends = metadata['Depends']
        assert depends.matches('pip-accel-coloredlogs-renamed', '0.4.6')
        assert depends.matches('pip-accel-humanfriendly', '1.6')
        assert depends.matches('pip-accel-pip', '1.4')
        assert not depends.matches('pip-accel-pip', '1.3')
        assert not depends.matches('pip-accel-pip', '1.5')
        # The executable script must be installed and marked executable.
        executable_entry = find_file(contents, '/usr/lib/pip-accel/bin/pip-accel')
        assert executable_entry.permissions == '-rwxr-xr-x'
        # Spot check some files expected in the package (picked more or less
        # at random).
        assert find_file(contents, '/usr/lib/pip-accel/lib/pip_accel/__init__.py')
        assert find_file(contents, '/usr/lib/pip-accel/lib/pip_accel/deps/debian.ini')
        assert find_file(contents, '/usr/lib/pip-accel/lib/pip_accel-0.12.6*.egg-info/PKG-INFO')
        # Apart from a whitelist of known exceptions, every file must live
        # inside the custom installation prefix. Directories don't count,
        # otherwise parents like /, /usr and /usr/lib would be flagged.
        whitelisted_paths = ['/usr/share/lintian/overrides/pip-accel']
        for pathname, entry in contents.items():
            if pathname in whitelisted_paths:
                continue
            is_directory = entry.permissions.startswith('d')
            in_isolated_directory = pathname.startswith('/usr/lib/pip-accel/')
            assert is_directory or in_isolated_directory
Exemplo n.º 18
0
    def test_conversion_of_binary_package(self):
        """
        Convert a package containing a compiled ``*.so`` shared object file.

        Converts ``setproctitle==1.1.8`` and sanity checks the result in
        order to verify that pydeb properly handles packages with binary
        components (including dpkg-shlibdeps_ magic). The setproctitle_
        package was picked because:

        1. It is known to require a compiled shared object file for proper
           functioning.

        2. Despite that it is fairly lightweight with few dependencies, so
           including this test in every run of the test suite won't slow
           things down enough to become annoying.

        3. It is documented to support Python 3.x as well, which means this
           test can run on all supported Python versions.

        .. _setproctitle: https://pypi.org/project/setproctitle/
        .. _dpkg-shlibdeps: https://manpages.debian.org/dpkg-shlibdeps
        """
        with TemporaryDirectory() as repository:
            # Perform the conversion inside an isolated repository.
            isolated_converter = self.create_isolated_converter()
            isolated_converter.set_repository(repository)
            archives, relationships = isolated_converter.convert(
                ['setproctitle==1.1.8'])
            # Locate the *.deb archive that was generated.
            archive = find_package_archive(
                archives, fix_name_prefix('python-setproctitle'))
            # Use deb-pkg-tools to inspect the package metadata.
            metadata, contents = inspect_package(archive)
            logger.debug("Metadata of generated package: %s", dict(metadata))
            logger.debug("Contents of generated package: %s", dict(contents))
            # A package with binary components is architecture dependent.
            assert metadata['Architecture'] != 'all'
            # The compiled shared object file must be part of the package.
            assert find_file(contents, '/usr/lib/*/setproctitle*.so')
            # A dependency on libc proves that dpkg-shlibdeps ran
            # successfully.
            assert 'libc6' in metadata['Depends'].names
Exemplo n.º 19
0
def show_package_metadata(archive):
    """Print the control fields and contents of a Debian archive."""
    control_fields, contents = inspect_package(archive)
    # Report the control fields in alphabetical order.
    print("Package metadata from %s:" % format_path(archive))
    for name in sorted(control_fields.keys()):
        value = control_fields[name]
        # The Installed-Size field is expressed in kilobytes; render it in
        # a human friendly format.
        value = format_size(int(value) * 1024) if name == 'Installed-Size' else value
        print(" - %s: %s" % (name, value))
    # Report the archive contents in sorted order.
    print("Package contents from %s:" % format_path(archive))
    for pathname, entry in sorted(contents.items()):
        # Right align the size in a ten character wide column.
        size = format_size(entry.size, keep_width=True).rjust(10)
        # Show the targets of symbolic links inline.
        pathname = pathname + ' -> ' + entry.target if entry.target else pathname
        print("{permissions} {owner} {group} {size} {modified} {pathname}".format(
            permissions=entry.permissions, owner=entry.owner,
            group=entry.group, size=size, modified=entry.modified,
            pathname=pathname))
Exemplo n.º 20
0
    def test_conversion_of_environment_markers(self):
        """
        Convert a package whose installation requirements use environment markers.

        Converts ``weasyprint==0.42`` and sanity checks that the
        ``cairosvg`` dependency is present.
        """
        # WeasyPrint 0.42 dropped support for Python 2.6.
        if sys.version_info[:2] == (2, 6):
            self.skipTest("WeasyPrint 0.42 is not Python 2.6 compatible")
        with TemporaryDirectory() as repository:
            # Perform the conversion inside an isolated repository.
            isolated_converter = self.create_isolated_converter()
            isolated_converter.set_repository(repository)
            archives, relationships = isolated_converter.convert(['weasyprint==0.42'])
            # Locate and inspect the generated package.
            archive = find_package_archive(archives, 'python-weasyprint')
            metadata, contents = inspect_package(archive)
            # The presence of the cairosvg dependency confirms that
            # environment markers have been evaluated.
            assert 'python-cairosvg' in metadata['Depends'].names
Exemplo n.º 21
0
 def check_python_callback(self, expression):
     """Verify that Python callback logic can manipulate the build of a package."""
     with TemporaryDirectory() as repository_directory:
         # Convert `naturalsort' with the callback installed.
         isolated_converter = self.create_isolated_converter()
         isolated_converter.set_repository(repository_directory)
         isolated_converter.set_python_callback(expression)
         isolated_converter.set_name_prefix('callback-test')
         archives, relationships = isolated_converter.convert(['naturalsort'])
         # Locate the generated *.deb archive and use deb-pkg-tools to
         # inspect its metadata.
         archive = find_package_archive(archives, 'callback-test-naturalsort')
         metadata, contents = inspect_package(archive)
         logger.debug("Metadata of generated package: %s", dict(metadata))
         logger.debug("Contents of generated package: %s", dict(contents))
         # The callback is expected to have modified the converted
         # package's relationship fields.
         assert metadata['Breaks'].matches('callback-test-natsort'), \
             "Result of Python callback not visible?!"
         assert metadata['Replaces'].matches('callback-test-natsort'), \
             "Result of Python callback not visible?!"
Exemplo n.º 22
0
 def check_python_callback(self, expression):
     """Test that a Python callback can manipulate the build of a package."""
     with TemporaryDirectory() as repository:
         # Configure and run the conversion command.
         converter = self.create_isolated_converter()
         converter.set_repository(repository)
         converter.set_python_callback(expression)
         converter.set_name_prefix('callback-test')
         archives, relationships = converter.convert(['naturalsort'])
         # Use deb-pkg-tools to inspect the generated *.deb archive.
         pathname = find_package_archive(archives, 'callback-test-naturalsort')
         metadata, contents = inspect_package(pathname)
         logger.debug("Metadata of generated package: %s", dict(metadata))
         logger.debug("Contents of generated package: %s", dict(contents))
         # Both relationship fields should reflect the callback's changes.
         for field in 'Breaks', 'Replaces':
             assert metadata[field].matches('callback-test-natsort'), \
                 "Result of Python callback not visible?!"
Exemplo n.º 23
0
    def test_conversion_of_binary_package(self):
        """
        Convert a package that ships a ``*.so`` shared object file.

        Converts ``setproctitle==1.1.8`` and sanity checks the result in
        order to verify that pydeb properly handles packages with binary
        components (including dpkg-shlibdeps_ magic). The setproctitle_
        package was chosen because:

        1. It is known to require a compiled shared object file for proper
           functioning.

        2. Despite that it is fairly lightweight and has few dependencies,
           so including this test in every run of the test suite won't slow
           things down enough to become annoying.

        3. It is documented to support Python 3.x as well, which means this
           test can run on all supported Python versions.

        .. _setproctitle: https://pypi.python.org/pypi/setproctitle/
        .. _dpkg-shlibdeps: https://manpages.debian.org/dpkg-shlibdeps
        """
        with TemporaryDirectory() as repository:
            # Perform the conversion inside an isolated repository.
            converter = self.create_isolated_converter()
            converter.set_repository(repository)
            archives, relationships = converter.convert(['setproctitle==1.1.8'])
            # Locate the generated *.deb archive and use deb-pkg-tools to
            # inspect its metadata.
            archive = find_package_archive(archives, 'python-setproctitle')
            metadata, contents = inspect_package(archive)
            logger.debug("Metadata of generated package: %s", dict(metadata))
            logger.debug("Contents of generated package: %s", dict(contents))
            # Binary packages are architecture dependent.
            assert metadata['Architecture'] != 'all'
            # The shared object file must have been included in the package.
            assert find_file(contents, '/usr/lib/*/setproctitle*.so')
            # A dependency on libc shows that dpkg-shlibdeps ran
            # successfully.
            assert 'libc6' in metadata['Depends'].names
Exemplo n.º 24
0
def show_package_metadata(archive):
    """Print the control fields and file listing of a Debian archive."""
    control_fields, contents = inspect_package(archive)
    print("Package metadata from %s:" % format_path(archive))
    # Report control fields alphabetically.
    for field_name in sorted(control_fields.keys()):
        field_value = control_fields[field_name]
        if field_name == 'Installed-Size':
            # The Installed-Size field is expressed in kilobytes.
            field_value = format_size(int(field_value) * 1024)
        print(" - %s: %s" % (field_name, field_value))
    print("Package contents from %s:" % format_path(archive))
    for pathname, entry in sorted(contents.items()):
        # Right align the size in a ten character wide column.
        size = format_size(entry.size, keep_width=True)
        size = size if len(size) >= 10 else ' ' * (10 - len(size)) + size
        # Append the targets of symbolic links.
        if entry.target:
            pathname += ' -> ' + entry.target
        print("{permissions} {owner} {group} {size} {modified} {pathname}".
              format(permissions=entry.permissions, owner=entry.owner,
                     group=entry.group, size=size, modified=entry.modified,
                     pathname=pathname))
Exemplo n.º 25
0
def show_package_metadata(archive):
    """
    Show the metadata and contents of a Debian archive on the terminal.

    :param archive: The pathname of an existing ``*.deb`` archive (a string).
    """
    control_fields, contents = inspect_package(archive)
    # Report the control fields in alphabetical order.
    say(highlight("Package metadata from %s:"), format_path(archive))
    for name in sorted(control_fields.keys()):
        value = control_fields[name]
        # The Installed-Size field is expressed in kilobytes; render it in
        # a human friendly format.
        value = format_size(int(value) * 1024) if name == 'Installed-Size' else value
        say(" - %s %s", highlight(name + ":"), value)
    # Report the archive contents in sorted order.
    say(highlight("Package contents from %s:"), format_path(archive))
    for pathname, entry in sorted(contents.items()):
        # Right align the size in a ten character wide column.
        size = format_size(entry.size, keep_width=True).rjust(10)
        # Show the targets of symbolic links inline.
        if entry.target:
            pathname += ' -> ' + entry.target
        say("{permissions} {owner} {group} {size} {modified} {pathname}",
            permissions=entry.permissions, owner=entry.owner,
            group=entry.group, size=size, modified=entry.modified,
            pathname=pathname)
Exemplo n.º 26
0
def check_duplicate_files(dependency_set, cache=None):
    """
    Check a collection of Debian package archives for conflicts.

    Looks for duplicate files in unrelated package archives. Ignores groups of
    packages that have their 'Provides' and 'Replaces' fields set to a common
    value. Other variants of 'Conflicts' are not supported yet.

    Because this analysis involves both the package control file fields and the
    pathnames of files installed by packages it can be slow. To make it faster
    you can use the :py:class:`.PackageCache`.

    :param dependency_set: A list of filenames (strings) of ``*.deb`` files.
    :param cache: The :py:class:`.PackageCache` to use (defaults to ``None``).
    :raises: :py:class:`exceptions.ValueError` when less than two package
             archives are given (the duplicate check obviously only works if
             there are packages to compare :-).
    :raises: :py:class:`DuplicateFilesFound` when duplicate files are found
             within a group of package archives.
    """
    timer = Timer()
    # Parse the *.deb filenames into structured package objects (the code
    # below reads their .name and .filename attributes).
    dependency_set = list(map(parse_filename, dependency_set))
    # Make sure we have something useful to work with.
    num_archives = len(dependency_set)
    if num_archives < 2:
        msg = "To check for duplicate files you need to provide two or more packages archives! (%i given)"
        raise ValueError(msg % num_archives)
    # Build up a global map of all files contained in the given package archives.
    # Maps each pathname to the set of archives that contain it.
    global_contents = collections.defaultdict(set)
    # Maps each archive's filename to its parsed control file fields.
    global_fields = {}
    spinner = Spinner(total=num_archives)
    logger.info("Checking for duplicate files in %i package archives ..",
                num_archives)
    for i, archive in enumerate(optimize_order(dependency_set), start=1):
        spinner.step(label="Scanning %i package archives" % num_archives,
                     progress=i)
        fields, contents = inspect_package(archive.filename, cache=cache)
        global_fields[archive.filename] = fields
        for pathname, stat in contents.items():
            # Directories are expected to be shared between packages, so
            # only non-directory entries can count as duplicates.
            if not stat.permissions.startswith('d'):
                global_contents[pathname].add(archive)
    spinner.clear()
    # Count the number of duplicate files between sets of conflicting packages
    # for more user friendly reporting.
    duplicate_files = collections.defaultdict(
        lambda: dict(count=0, filenames=[]))
    for pathname, packages in global_contents.items():
        if len(packages) > 1:
            # Override the sort key to be the filename because we don't need
            # to properly sort by version (which is slow on large collections).
            key = tuple(sorted(packages, key=lambda p: p.filename))
            duplicate_files[key]['count'] += 1
            duplicate_files[key]['filenames'].append(pathname)
    # NOTE: sorted() materializes a snapshot of the dictionary items, which
    # is what makes it safe to pop() entries from `duplicate_files' while
    # iterating below.
    for packages, information in sorted(duplicate_files.items()):
        # Never report multiple versions of the same package.
        if len(set(package.name for package in packages)) == 1:
            duplicate_files.pop(packages)
            continue
        # We check for one common case where it's easy to guarantee that
        # we're not dealing with broken packages: All of the packages have
        # marked each other as conflicting via the combination of the
        # fields `Provides:' and `Conflicts:'.
        # NOTE(review): the docstring above says 'Provides' and 'Replaces'
        # while the code checks 'Provides' and 'Conflicts' — presumably a
        # docstring typo, to be confirmed.
        def find_virtual_name(field_name):
            # Returns the single virtual package name shared by all packages
            # in the current group (closes over the loop variable
            # `packages'), or None when any package lacks the field or the
            # field values don't agree on exactly one name.
            package_names = set()
            for archive in packages:
                field = global_fields[archive.filename].get(field_name)
                if field:
                    package_names |= field.names
                else:
                    return
            if len(package_names) == 1:
                return list(package_names)[0]

        marked_conflicts = find_virtual_name('Conflicts')
        marked_provides = find_virtual_name('Provides')
        if marked_conflicts and marked_conflicts == marked_provides:
            duplicate_files.pop(packages)
    # Boring string formatting, trying to find a way to clearly present conflicts.
    summary = []
    for packages, information in sorted(duplicate_files.items()):
        block = []
        conflicts = pluralize(information['count'], 'conflict', 'conflicts')
        block.append("Found %s between %i packages:\n" %
                     (conflicts, len(packages)))
        for i, package in enumerate(sorted(packages), start=1):
            block.append("  %i. %s\n" % (i, package.filename))
        block.append("These packages contain %s:\n" % conflicts)
        for i, filename in enumerate(sorted(information['filenames']),
                                     start=1):
            block.append("  %i. %s\n" % (i, filename))
        summary.append(''.join(block))
    if summary:
        archives_involved = set(
            itertools.chain.from_iterable(duplicate_files.keys()))
        files = pluralize(len(duplicate_files), 'duplicate file',
                          'duplicate files')
        archives = pluralize(len(archives_involved), 'package archive',
                             'package archives')
        summary.insert(0, "Found %s in %s!\n" % (files, archives))
        summary.append(
            compact("""
            Hint: If the package contents are correct you can resolve these
            conflicts by marking the packages as conflicting. You do this by
            adding the 'Conflicts' and 'Provides' fields and setting them to a
            common value. That should silence this message.
        """))
        delimiter = '%s\n' % ('-' * 79)
        raise DuplicateFilesFound(delimiter.join(summary))
    else:
        logger.info("No conflicting files found (took %s).", timer)
Exemplo n.º 27
0
    def test_conversion_of_simple_package(self):
        """
        Convert a simple Python package without any dependencies.

        Converts coloredlogs_ and sanity checks the result, performing
        several static checks on the metadata and contents of the resulting
        package archive.

        .. _coloredlogs: https://pypi.python.org/pypi/coloredlogs
        """
        # A temporary directory serves as py2deb's repository directory so
        # that the generated *.deb archive is easy to locate.
        with TemporaryDirectory() as directory:
            # Converting twice checks that existing archives are preserved.
            last_modified_time = 0
            for iteration in range(2):
                # Seed a control file for py2deb to patch.
                control_file = os.path.join(directory, 'control')
                with open(control_file, 'w') as handle:
                    handle.write('Depends: vim\n')
                # Run the conversion command.
                py2deb('--verbose',
                       '--yes',
                       '--repository=%s' % directory,
                       '--report-dependencies=%s' % control_file,
                       'coloredlogs==0.5')
                # The control file should have been patched in place.
                control_fields = load_control_file(control_file)
                assert control_fields['Depends'].matches('vim')
                assert control_fields['Depends'].matches('python-coloredlogs', '0.5')
                # Exactly one Debian package archive should have been generated.
                archives = glob.glob('%s/*.deb' % directory)
                logger.debug("Found generated archive(s): %s", archives)
                assert len(archives) == 1
                if not last_modified_time:
                    # First iteration: remember when the archive was created.
                    last_modified_time = os.path.getmtime(archives[0])
                else:
                    # Second iteration: the archive must not have been rebuilt.
                    assert last_modified_time == os.path.getmtime(archives[0])
                # Use deb-pkg-tools to inspect the generated package.
                metadata, contents = inspect_package(archives[0])
                logger.debug("Metadata of generated package: %s", dict(metadata))
                logger.debug("Contents of generated package: %s", dict(contents))
                # Validate the package metadata.
                assert metadata['Package'] == 'python-coloredlogs'
                assert metadata['Version'].startswith('0.5')
                assert metadata['Architecture'] == 'all'
                # The only dependency should be some version of Python.
                assert metadata['Depends'].matches('python%i.%i' % sys.version_info[:2])
                # The exact maintainer format doesn't matter as long as the
                # essential information is retained.
                assert 'Peter Odding' in metadata['Maintainer']
                assert '*****@*****.**' in metadata['Maintainer']
                # Validate the package contents: the two *.py files below
                # should have been installed by the package.
                assert find_file(contents, '/usr/lib/python*/dist-packages/coloredlogs/__init__.py')
                assert find_file(contents, '/usr/lib/python*/dist-packages/coloredlogs/converter.py')
                # File ownership and permissions should be sane.
                archive_entry = find_file(contents, '/usr/lib/python*/dist-packages/coloredlogs/__init__.py')
                assert (archive_entry.owner, archive_entry.group) == ('root', 'root')
                assert archive_entry.permissions == '-rw-r--r--'
Exemplo n.º 28
0
    def test_conversion_of_simple_package(self):
        """
        Convert a simple Python package without any dependencies.

        Converts coloredlogs_ and sanity checks the result, performing
        several static checks on the metadata and contents of the resulting
        package archive.

        .. _coloredlogs: https://pypi.org/project/coloredlogs
        """
        # A temporary directory serves as py2deb's repository directory so
        # that the generated *.deb archive is easy to locate.
        with TemporaryDirectory() as directory:
            # Converting twice checks that existing archives are preserved.
            last_modified_time = 0
            for iteration in range(2):
                # Seed a control file for the conversion to patch.
                control_file = os.path.join(directory, 'control')
                with open(control_file, 'w') as handle:
                    handle.write('Depends: vim\n')
                # Run the conversion command.
                exit_code, output = run_cli(
                    main, '--verbose', '--yes',
                    '--repository=%s' % directory,
                    '--report-dependencies=%s' % control_file,
                    'coloredlogs==0.5',
                )
                assert exit_code == 0
                # The control file should have been patched in place.
                control_fields = load_control_file(control_file)
                assert control_fields['Depends'].matches('vim')
                assert control_fields['Depends'].matches(
                    fix_name_prefix('python-coloredlogs'), '0.5')
                # Exactly one Debian package archive should have been generated.
                archives = glob.glob('%s/*.deb' % directory)
                logger.debug("Found generated archive(s): %s", archives)
                assert len(archives) == 1
                if not last_modified_time:
                    # First iteration: remember when the archive was created.
                    last_modified_time = os.path.getmtime(archives[0])
                else:
                    # Second iteration: the archive must not have been rebuilt.
                    assert last_modified_time == os.path.getmtime(archives[0])
                # Use deb-pkg-tools to inspect the generated package.
                metadata, contents = inspect_package(archives[0])
                logger.debug("Metadata of generated package: %s",
                             dict(metadata))
                logger.debug("Contents of generated package: %s",
                             dict(contents))
                # Validate the package metadata.
                assert metadata['Package'] == fix_name_prefix('python-coloredlogs')
                assert metadata['Version'].startswith('0.5')
                assert metadata['Architecture'] == 'all'
                # The only dependency should be some version of Python.
                assert metadata['Depends'].matches(python_version())
                # The exact maintainer format doesn't matter as long as the
                # essential information is retained.
                assert 'Peter Odding' in metadata['Maintainer']
                assert '*****@*****.**' in metadata['Maintainer']
                # Validate the package contents: the two *.py files below
                # should have been installed by the package.
                assert find_file(
                    contents,
                    '/usr/lib/py*/dist-packages/coloredlogs/__init__.py')
                assert find_file(
                    contents,
                    '/usr/lib/py*/dist-packages/coloredlogs/converter.py')
                # File ownership and permissions should be sane.
                archive_entry = find_file(
                    contents,
                    '/usr/lib/py*/dist-packages/coloredlogs/__init__.py')
                assert (archive_entry.owner, archive_entry.group) == ('root', 'root')
                assert archive_entry.permissions == '-rw-r--r--'
Exemplo n.º 29
0
def check_duplicate_files(dependency_set, cache=None):
    """
    Check a collection of Debian package archives for conflicts.

    :param dependency_set: A list of filenames (strings) of ``*.deb`` files.
    :param cache: The :class:`.PackageCache` to use (defaults to :data:`None`).
    :raises: :exc:`exceptions.ValueError` when less than two package
             archives are given (the duplicate check obviously only works if
             there are packages to compare :-).
    :raises: :exc:`DuplicateFilesFound` when duplicate files are found
             within a group of package archives.

    This check looks for duplicate files in package archives that concern
    different packages. Ignores groups of packages that have their 'Conflicts'
    and 'Provides' fields set to a common value. Other variants of 'Conflicts'
    are not supported yet.

    Because this analysis involves both the package control file fields and the
    pathnames of files installed by packages it can be really slow. To make it
    faster you can use the :class:`.PackageCache`.
    """
    timer = Timer()
    dependency_set = list(map(parse_filename, dependency_set))
    # Make sure we have something useful to work with.
    num_archives = len(dependency_set)
    if num_archives < 2:
        msg = "To check for duplicate files you need to provide two or more package archives! (%i given)"
        raise ValueError(msg % num_archives)
    # Build up a global map of all files contained in the given package archives.
    global_contents = collections.defaultdict(set)
    global_fields = {}
    spinner = Spinner(total=num_archives)
    logger.info("Checking for duplicate files in %i package archives ..", num_archives)
    for i, archive in enumerate(optimize_order(dependency_set), start=1):
        spinner.step(label="Scanning %i package archives" % num_archives, progress=i)
        fields, contents = inspect_package(archive.filename, cache=cache)
        global_fields[archive.filename] = fields
        for pathname, stat in contents.items():
            # Directories can legitimately be shared between packages, so only
            # regular entries take part in the duplicate check.
            if not stat.permissions.startswith('d'):
                global_contents[pathname].add(archive)
    spinner.clear()
    # Count the number of duplicate files between sets of conflicting packages
    # for more user friendly reporting.
    duplicate_files = collections.defaultdict(lambda: dict(count=0, filenames=[]))
    for pathname, packages in global_contents.items():
        if len(packages) > 1:
            # Override the sort key to be the filename because we don't need
            # to properly sort by version (which is slow on large collections).
            key = tuple(sorted(packages, key=lambda p: p.filename))
            duplicate_files[key]['count'] += 1
            duplicate_files[key]['filenames'].append(pathname)

    def find_virtual_name(packages, field_name):
        """
        Find the name shared by the given relationship field of all packages
        in the group, or :data:`None` when any package lacks the field or the
        field values don't agree on a single name.
        """
        package_names = set()
        for archive in packages:
            field = global_fields[archive.filename].get(field_name)
            if not field:
                return None
            package_names |= field.names
        if len(package_names) == 1:
            return package_names.pop()
        return None

    # Filter out groups that don't represent genuine conflicts. We iterate
    # over a copy of the keys because the dictionary is mutated in the loop
    # (the order of removals doesn't matter, the report loop sorts anyway).
    for packages in list(duplicate_files):
        # Never report multiple versions of the same package.
        if len(set(package.name for package in packages)) == 1:
            duplicate_files.pop(packages)
            continue
        # We check for one common case where it's easy to guarantee that
        # we're not dealing with broken packages: All of the packages have
        # marked each other as conflicting via the combination of the
        # fields `Provides:' and `Conflicts:'.
        marked_conflicts = find_virtual_name(packages, 'Conflicts')
        marked_provides = find_virtual_name(packages, 'Provides')
        if marked_conflicts and marked_conflicts == marked_provides:
            duplicate_files.pop(packages)
    # Boring string formatting, trying to find a way to clearly present conflicts.
    summary = []
    for packages, information in sorted(duplicate_files.items()):
        block = []
        conflicts = pluralize(information['count'], 'conflict', 'conflicts')
        block.append("Found %s between %i packages:\n" % (conflicts, len(packages)))
        for i, package in enumerate(sorted(packages), start=1):
            block.append("  %i. %s\n" % (i, package.filename))
        block.append("These packages contain %s:\n" % conflicts)
        for i, filename in enumerate(sorted(information['filenames']), start=1):
            block.append("  %i. %s\n" % (i, filename))
        summary.append(''.join(block))
    if summary:
        archives_involved = set(itertools.chain.from_iterable(duplicate_files.keys()))
        files = pluralize(len(duplicate_files), 'duplicate file', 'duplicate files')
        archives = pluralize(len(archives_involved), 'package archive', 'package archives')
        summary.insert(0, "Found %s in %s!\n" % (files, archives))
        summary.append(compact("""
            Hint: If the package contents are correct you can resolve these
            conflicts by marking the packages as conflicting. You do this by
            adding the 'Conflicts' and 'Provides' fields and setting them to a
            common value. That should silence this message.
        """))
        delimiter = '%s\n' % ('-' * 79)
        raise DuplicateFilesFound(delimiter.join(summary))
    else:
        logger.info("No conflicting files found (took %s).", timer)