Example #1
0
 def test_duplicates_check(self):
     """Test static analysis of duplicate files."""
     with Context() as finalizers:
         # Check that duplicate files raise an exception.
         directory = finalizers.mkdtemp()
         # Build a package containing some files.
         self.test_package_building(directory, overrides=dict(Package='deb-pkg-tools-package-1', Version='1'))
         # Build an unrelated package containing the same files.
         self.test_package_building(directory, overrides=dict(Package='deb-pkg-tools-package-2'))
         # Build two versions of one package.
         duplicate_contents = {'foo/bar': 'some random file'}
         self.test_package_building(directory,
                                    overrides=dict(Package='deb-pkg-tools-package-3', Version='1'),
                                    contents=duplicate_contents)
         self.test_package_building(directory,
                                    overrides=dict(Package='deb-pkg-tools-package-3', Version='2'),
                                    contents=duplicate_contents)
         # Build two packages related by their `Conflicts' and `Provides' fields.
         virtual_package = 'deb-pkg-tools-virtual-package'
         duplicate_contents = {'foo/baz': 'another random file'}
         self.test_package_building(directory,
                                    overrides=dict(Package='deb-pkg-tools-package-4',
                                                   Conflicts=virtual_package,
                                                   Provides=virtual_package),
                                    contents=duplicate_contents)
         self.test_package_building(directory,
                                    overrides=dict(Package='deb-pkg-tools-package-5',
                                                   Conflicts=virtual_package,
                                                   Provides=virtual_package),
                                    contents=duplicate_contents)
         # Test the duplicate files check.
         package_archives = find_package_archives(directory)
         self.assertRaises(DuplicateFilesFound, check_duplicate_files, package_archives, cache=self.package_cache)
         # Verify that invalid arguments are checked.
         self.assertRaises(ValueError, check_duplicate_files, [])
Example #2
0
 def test_duplicates_check(self):
     """Test static analysis of duplicate files."""
     with Context() as finalizers:
         # Check that duplicate files raise an exception.
         directory = finalizers.mkdtemp()
         # Build a package containing some files.
         self.test_package_building(directory, overrides=dict(Package='deb-pkg-tools-package-1', Version='1'))
         # Build an unrelated package containing the same files.
         self.test_package_building(directory, overrides=dict(Package='deb-pkg-tools-package-2'))
         # Build two versions of one package.
         shared_contents = {'foo/bar': 'some random file'}
         for version_number in '1', '2':
             self.test_package_building(directory,
                                        overrides=dict(Package='deb-pkg-tools-package-3',
                                                       Version=version_number),
                                        contents=shared_contents)
         # Build two packages related by their `Conflicts' and `Provides' fields.
         virtual_package = 'deb-pkg-tools-virtual-package'
         shared_contents = {'foo/baz': 'another random file'}
         for conflicting_package in 'deb-pkg-tools-package-4', 'deb-pkg-tools-package-5':
             self.test_package_building(directory,
                                        overrides=dict(Package=conflicting_package,
                                                       Conflicts=virtual_package,
                                                       Provides=virtual_package),
                                        contents=shared_contents)
         # Test the duplicate files check.
         package_archives = find_package_archives(directory)
         self.assertRaises(DuplicateFilesFound, check_duplicate_files, package_archives, cache=self.package_cache)
         # Verify that invalid arguments are checked.
         self.assertRaises(ValueError, check_duplicate_files, [])
Example #3
0
 def test_find_package_archives(self):
     """Test searching for package archives."""
     with Context() as finalizers:
         directory = finalizers.mkdtemp()
         # Create two package archives and an unrelated file that the
         # search is expected to ignore.
         for entry in ('some-random-file',
                       'regular-package_1.0_all.deb',
                       'micro-package_1.5_all.udeb'):
             touch(os.path.join(directory, entry))
         found = find_package_archives(directory)
         assert len(found) == 2
         assert any(archive.name == 'regular-package' and
                    archive.version == '1.0' and
                    archive.architecture == 'all'
                    for archive in found)
         assert any(archive.name == 'micro-package' and
                    archive.version == '1.5' and
                    archive.architecture == 'all'
                    for archive in found)
Example #4
0
 def test_find_package_archives(self):
     """Test searching for package archives."""
     with Context() as finalizers:
         directory = finalizers.mkdtemp()
         for filename in 'some-random-file', 'regular-package_1.0_all.deb', 'micro-package_1.5_all.udeb':
             touch(os.path.join(directory, filename))
         matches = find_package_archives(directory)

         def was_found(name, version, architecture):
             # Check whether an archive with the given metadata was matched.
             return any(p.name == name and p.version == version and
                        p.architecture == architecture for p in matches)

         assert len(matches) == 2
         assert was_found('regular-package', '1.0', 'all')
         assert was_found('micro-package', '1.5', 'all')
Example #5
0
    def archives(self):
        """
        Find archive(s) in package repository / directory.

        :returns: A sorted list of package archives, same as the return value
                  of :py:func:`deb_pkg_tools.package.find_package_archives()`.

        An example:

        >>> from py2deb import PackageRepository
        >>> repo = PackageRepository('/tmp')
        >>> repo.archives
        [PackageFile(name='py2deb', version='0.1', architecture='all',
                     filename='/tmp/py2deb_0.1_all.deb'),
         PackageFile(name='py2deb-cached-property', version='0.1.5', architecture='all',
                     filename='/tmp/py2deb-cached-property_0.1.5_all.deb'),
         PackageFile(name='py2deb-chardet', version='2.2.1', architecture='all',
                     filename='/tmp/py2deb-chardet_2.2.1_all.deb'),
         PackageFile(name='py2deb-coloredlogs', version='0.5', architecture='all',
                     filename='/tmp/py2deb-coloredlogs_0.5_all.deb'),
         PackageFile(name='py2deb-deb-pkg-tools', version='1.20.4', architecture='all',
                     filename='/tmp/py2deb-deb-pkg-tools_1.20.4_all.deb'),
         PackageFile(name='py2deb-docutils', version='0.11', architecture='all',
                     filename='/tmp/py2deb-docutils_0.11_all.deb'),
         PackageFile(name='py2deb-executor', version='1.2', architecture='all',
                     filename='/tmp/py2deb-executor_1.2_all.deb'),
         PackageFile(name='py2deb-html2text', version='2014.4.5', architecture='all',
                     filename='/tmp/py2deb-html2text_2014.4.5_all.deb'),
         PackageFile(name='py2deb-humanfriendly', version='1.8.2', architecture='all',
                     filename='/tmp/py2deb-humanfriendly_1.8.2_all.deb'),
         PackageFile(name='py2deb-pkginfo', version='1.1', architecture='all',
                     filename='/tmp/py2deb-pkginfo_1.1_all.deb'),
         PackageFile(name='py2deb-python-debian', version='0.1.21-nmu2', architecture='all',
                     filename='/tmp/py2deb-python-debian_0.1.21-nmu2_all.deb'),
         PackageFile(name='py2deb-six', version='1.6.1', architecture='all',
                     filename='/tmp/py2deb-six_1.6.1_all.deb')]

        """
        # Thin delegation wrapper: the actual directory scan (and the
        # sorting promised above) is implemented by find_package_archives().
        return find_package_archives(self.directory)
Example #6
0
def update_repository(directory, release_fields=None, gpg_key=None, cache=None):
    """
    Create or update a `trivial repository`_.

    :param directory: The pathname of a directory with ``*.deb`` packages.
    :param release_fields: An optional dictionary with fields to set inside the
                           ``Release`` file (defaults to :data:`None`).
    :param gpg_key: The :class:`.GPGKey` object used to sign the repository.
                    Defaults to the result of :func:`select_gpg_key()`.
    :param cache: The :class:`.PackageCache` to use (defaults to :data:`None`).
    :raises: :exc:`.ResourceLockedException` when the given repository
             directory is being updated by another process.

    This function is based on the Debian commands ``dpkg-scanpackages``
    (reimplemented as :func:`scan_packages()`) and ``apt-ftparchive`` (also
    uses the external programs ``gpg`` and ``gzip``).
    """
    # Avoid the shared-mutable-default-argument pitfall: the default is None
    # and an empty dictionary is substituted here instead.
    release_fields = release_fields if release_fields is not None else {}
    with atomic_lock(directory):
        timer = Timer()
        gpg_key = gpg_key or select_gpg_key(directory)
        # Figure out when the repository contents were last updated.
        contents_last_updated = os.path.getmtime(directory)
        for archive in find_package_archives(directory):
            contents_last_updated = max(contents_last_updated,
                                        os.path.getmtime(archive.filename))
        # Figure out when the repository metadata was last updated.
        try:
            metadata_files = ['Packages', 'Packages.gz', 'Release']
            # XXX If 1) no GPG key was provided, 2) apt doesn't require the
            # repository to be signed and 3) `Release.gpg' doesn't exist, it should
            # not cause an unnecessary repository update. That would turn the
            # conditional update into an unconditional update, which is not the
            # intention here :-)
            if os.path.isfile(os.path.join(directory,
                                           'Release.gpg')) or gpg_key:
                metadata_files.append('Release.gpg')
            metadata_last_updated = max(
                os.path.getmtime(os.path.join(directory, fn))
                for fn in metadata_files)
        except Exception:
            # One or more metadata files are missing: force a (re)build of
            # the repository metadata below.
            metadata_last_updated = 0
        # If the repository doesn't actually need to be updated we'll skip the update.
        if metadata_last_updated >= contents_last_updated:
            logger.info(
                "Contents of repository %s didn't change, so no need to update it.",
                directory)
            return
        # The generated files `Packages', `Packages.gz', `Release' and `Release.gpg'
        # are created in a temporary directory. Only once all of the files have been
        # successfully generated they are moved to the repository directory. There
        # are two reasons for this:
        #
        # 1. If the repository directory is being served to apt-get clients we
        #    don't want them to catch us in the middle of updating the repository
        #    because it will be in an inconsistent state.
        #
        # 2. If we fail to generate one of the files it's better not to have
        #    changed any of them, for the same reason as point one :-)
        logger.info("%s trivial repository %s ..",
                    "Updating" if metadata_last_updated else "Creating",
                    directory)
        temporary_directory = tempfile.mkdtemp()
        try:
            # Generate the `Packages' file.
            logger.debug("Generating file: %s",
                         format_path(os.path.join(directory, 'Packages')))
            scan_packages(repository=directory,
                          packages_file=os.path.join(temporary_directory,
                                                     'Packages'),
                          cache=cache)
            # Generate the `Packages.gz' file by compressing the `Packages' file.
            logger.debug("Generating file: %s",
                         format_path(os.path.join(directory, 'Packages.gz')))
            execute("gzip < Packages > Packages.gz",
                    directory=temporary_directory,
                    logger=logger)
            # Generate the `Release' file.
            logger.debug("Generating file: %s",
                         format_path(os.path.join(directory, 'Release')))
            # Get APT::FTPArchive::Release::* options from configuration file.
            release_fields = dict(
                (k.lower(), v) for k, v in release_fields.items())
            for name, value in load_config(directory).items():
                if name.startswith('release-'):
                    name = re.sub('^release-', '', name)
                    if name not in release_fields:
                        release_fields[name] = value
            # Override APT::FTPArchive::Release::* options from configuration file
            # with options given to update_repository() explicitly by the caller.
            options = []
            for name, value in release_fields.items():
                name = 'APT::FTPArchive::Release::%s' % name.capitalize()
                options.append('-o %s' % pipes.quote('%s=%s' % (name, value)))
            command = "LANG= apt-ftparchive %s release ." % ' '.join(options)
            release_listing = execute(command,
                                      capture=True,
                                      directory=temporary_directory,
                                      logger=logger)
            with open(os.path.join(temporary_directory, 'Release'),
                      'w') as handle:
                handle.write(release_listing + '\n')
            # Generate the `Release.gpg' file by signing the `Release' file with GPG?
            gpg_key_file = os.path.join(directory, 'Release.gpg')
            if gpg_key:
                logger.debug("Generating file: %s", format_path(gpg_key_file))
                initialize_gnupg()
                command = "{gpg} --armor --sign --detach-sign --output Release.gpg Release"
                execute(command.format(gpg=gpg_key.gpg_command),
                        directory=temporary_directory,
                        logger=logger)
            elif os.path.isfile(gpg_key_file):
                # XXX If 1) no GPG key was provided, 2) apt doesn't require the
                # repository to be signed and 3) `Release.gpg' exists from a
                # previous run, this file should be removed so we don't create an
                # inconsistent repository index (when `Release' is updated but
                # `Release.gpg' is not updated the signature becomes invalid).
                os.unlink(gpg_key_file)
            # Move the generated files into the repository directory.
            for entry in os.listdir(temporary_directory):
                shutil.copy(os.path.join(temporary_directory, entry),
                            os.path.join(directory, entry))
            logger.info("Finished updating trivial repository in %s.", timer)
        finally:
            shutil.rmtree(temporary_directory)
Example #7
0
def update_repository(directory, release_fields=None, gpg_key=None, cache=None):
    """
    Create or update a `trivial repository`_ using the Debian commands
    ``dpkg-scanpackages`` (reimplemented as :py:func:`scan_packages()`) and
    ``apt-ftparchive`` (also uses the external programs ``gpg`` and ``gzip``).
    Raises :py:exc:`.ResourceLockedException` when the given repository
    directory is being updated by another process.

    :param directory: The pathname of a directory with ``*.deb`` packages.
    :param release_fields: An optional dictionary with fields to set inside the
                           ``Release`` file (defaults to ``None``).
    :param gpg_key: The :py:class:`.GPGKey` object used to sign the repository.
                    Defaults to the result of :py:func:`select_gpg_key()`.
    :param cache: The :py:class:`.PackageCache` to use (defaults to ``None``).
    """
    # Avoid the shared-mutable-default-argument pitfall: the default is None
    # and an empty dictionary is substituted here instead.
    release_fields = release_fields if release_fields is not None else {}
    with atomic_lock(directory):
        timer = Timer()
        gpg_key = gpg_key or select_gpg_key(directory)
        # Figure out when the repository contents were last updated.
        contents_last_updated = os.path.getmtime(directory)
        for archive in find_package_archives(directory):
            contents_last_updated = max(contents_last_updated, os.path.getmtime(archive.filename))
        # Figure out when the repository metadata was last updated.
        try:
            metadata_files = ['Packages', 'Packages.gz', 'Release']
            # XXX If 1) no GPG key was provided, 2) apt doesn't require the
            # repository to be signed and 3) `Release.gpg' doesn't exist, it should
            # not cause an unnecessary repository update. That would turn the
            # conditional update into an unconditional update, which is not the
            # intention here :-)
            if os.path.isfile(os.path.join(directory, 'Release.gpg')) or gpg_key:
                metadata_files.append('Release.gpg')
            metadata_last_updated = max(os.path.getmtime(os.path.join(directory, fn)) for fn in metadata_files)
        except Exception:
            # One or more metadata files are missing: force a (re)build of
            # the repository metadata below.
            metadata_last_updated = 0
        # If the repository doesn't actually need to be updated we'll skip the update.
        if metadata_last_updated >= contents_last_updated:
            logger.info("Contents of repository %s didn't change, so no need to update it.", directory)
            return
        # The generated files `Packages', `Packages.gz', `Release' and `Release.gpg'
        # are created in a temporary directory. Only once all of the files have been
        # successfully generated they are moved to the repository directory. There
        # are two reasons for this:
        #
        # 1. If the repository directory is being served to apt-get clients we
        #    don't want them to catch us in the middle of updating the repository
        #    because it will be in an inconsistent state.
        #
        # 2. If we fail to generate one of the files it's better not to have
        #    changed any of them, for the same reason as point one :-)
        logger.info("%s trivial repository %s ..", "Updating" if metadata_last_updated else "Creating", directory)
        temporary_directory = tempfile.mkdtemp()
        try:
            # Generate the `Packages' file.
            logger.debug("Generating file: %s", format_path(os.path.join(directory, 'Packages')))
            scan_packages(repository=directory,
                          packages_file=os.path.join(temporary_directory, 'Packages'),
                          cache=cache)
            # Generate the `Packages.gz' file by compressing the `Packages' file.
            logger.debug("Generating file: %s", format_path(os.path.join(directory, 'Packages.gz')))
            execute("gzip < Packages > Packages.gz", directory=temporary_directory, logger=logger)
            # Generate the `Release' file.
            logger.debug("Generating file: %s", format_path(os.path.join(directory, 'Release')))
            # Get APT::FTPArchive::Release::* options from configuration file.
            release_fields = dict((k.lower(), v) for k, v in release_fields.items())
            for name, value in load_config(directory).items():
                if name.startswith('release-'):
                    name = re.sub('^release-', '', name)
                    if name not in release_fields:
                        release_fields[name] = value
            # Override APT::FTPArchive::Release::* options from configuration file
            # with options given to update_repository() explicitly by the caller.
            options = []
            for name, value in release_fields.items():
                name = 'APT::FTPArchive::Release::%s' % name.capitalize()
                options.append('-o %s' % pipes.quote('%s=%s' % (name, value)))
            command = "LANG= apt-ftparchive %s release ." % ' '.join(options)
            release_listing = execute(command, capture=True, directory=temporary_directory, logger=logger)
            with open(os.path.join(temporary_directory, 'Release'), 'w') as handle:
                handle.write(release_listing + '\n')
            # Generate the `Release.gpg' file by signing the `Release' file with GPG?
            gpg_key_file = os.path.join(directory, 'Release.gpg')
            if gpg_key:
                logger.debug("Generating file: %s", format_path(gpg_key_file))
                initialize_gnupg()
                command = "{gpg} --armor --sign --detach-sign --output Release.gpg Release"
                execute(command.format(gpg=gpg_key.gpg_command), directory=temporary_directory, logger=logger)
            elif os.path.isfile(gpg_key_file):
                # XXX If 1) no GPG key was provided, 2) apt doesn't require the
                # repository to be signed and 3) `Release.gpg' exists from a
                # previous run, this file should be removed so we don't create an
                # inconsistent repository index (when `Release' is updated but
                # `Release.gpg' is not updated the signature becomes invalid).
                os.unlink(gpg_key_file)
            # Move the generated files into the repository directory.
            for entry in os.listdir(temporary_directory):
                shutil.copy(os.path.join(temporary_directory, entry), os.path.join(directory, entry))
            logger.info("Finished updating trivial repository in %s.", timer)
        finally:
            shutil.rmtree(temporary_directory)
Example #8
0
def update_repository(directory, release_fields=None, gpg_key=None, cache=None):
    """
    Create or update a `trivial repository`_.

    :param directory: The pathname of a directory with ``*.deb`` packages.
    :param release_fields: An optional dictionary with fields to set inside the
                           ``Release`` file (defaults to :data:`None`).
    :param gpg_key: The :class:`.GPGKey` object used to sign the repository.
                    Defaults to the result of :func:`select_gpg_key()`.
    :param cache: The :class:`.PackageCache` to use (defaults to :data:`None`).
    :raises: :exc:`.ResourceLockedException` when the given repository
             directory is being updated by another process.

    This function is based on the Debian programs dpkg-scanpackages_ and
    apt-ftparchive_ and also uses gpg_ and gzip_. The following files are
    generated:

    ===============  ==========================================================
    Filename         Description
    ===============  ==========================================================
    ``Packages``     Provides the metadata of all ``*.deb`` packages in the
                     `trivial repository`_ as a single text file. Generated
                     using :func:`scan_packages()` (as a faster alternative
                     to dpkg-scanpackages_).
    ``Packages.gz``  A compressed version of the package metadata generated
                     using gzip_.
    ``Release``      Metadata about the release and hashes of the ``Packages``
                     and ``Packages.gz`` files. Generated using
                     apt-ftparchive_.
    ``Release.gpg``  An ASCII-armored detached GPG signature of the ``Release``
                     file. Generated using ``gpg --armor --sign
                     --detach-sign``.
    ``InRelease``    The contents of the ``Release`` file and its GPG signature
                     combined into a single human readable file. Generated
                     using ``gpg --armor --sign --clearsign``.
    ===============  ==========================================================

    For more details about the ``Release.gpg`` and ``InRelease`` files please
    refer to the Debian wiki's section on secure-apt_.

    .. _apt-ftparchive: https://manpages.debian.org/apt-ftparchive
    .. _dpkg-scanpackages: https://manpages.debian.org/dpkg-scanpackages
    .. _gpg: https://manpages.debian.org/gpg
    .. _gzip: https://manpages.debian.org/gzip
    .. _secure-apt: https://wiki.debian.org/SecureApt
    """
    # Avoid the shared-mutable-default-argument pitfall: the default is None
    # and an empty dictionary is substituted here instead.
    release_fields = release_fields if release_fields is not None else {}
    with atomic_lock(directory):
        timer = Timer()
        gpg_key = gpg_key or select_gpg_key(directory)
        # Figure out when the repository contents were last updated.
        contents_last_updated = os.path.getmtime(directory)
        for archive in find_package_archives(directory):
            contents_last_updated = max(contents_last_updated, os.path.getmtime(archive.filename))
        # Figure out when the repository metadata was last updated.
        try:
            metadata_files = ['Packages', 'Packages.gz', 'Release']
            # XXX If 1) no GPG key was provided, 2) apt doesn't require the
            # repository to be signed and 3) `Release.gpg' doesn't exist, it should
            # not cause an unnecessary repository update. That would turn the
            # conditional update into an unconditional update, which is not the
            # intention here :-)
            for signed_file in 'Release.gpg', 'InRelease':
                if os.path.isfile(os.path.join(directory, signed_file)) or gpg_key:
                    metadata_files.append(signed_file)
            metadata_last_updated = max(os.path.getmtime(os.path.join(directory, fn)) for fn in metadata_files)
        except Exception:
            # One or more metadata files are missing: force a (re)build of
            # the repository metadata below.
            metadata_last_updated = 0
        # If the repository doesn't actually need to be updated we'll skip the update.
        if metadata_last_updated >= contents_last_updated:
            logger.info("Contents of repository %s didn't change, so no need to update it.", directory)
            return
        # The generated files `Packages', `Packages.gz', `Release' and `Release.gpg'
        # are created in a temporary directory. Only once all of the files have been
        # successfully generated they are moved to the repository directory. There
        # are two reasons for this:
        #
        # 1. If the repository directory is being served to apt-get clients we
        #    don't want them to catch us in the middle of updating the repository
        #    because it will be in an inconsistent state.
        #
        # 2. If we fail to generate one of the files it's better not to have
        #    changed any of them, for the same reason as point one :-)
        logger.info("%s trivial repository %s ..", "Updating" if metadata_last_updated else "Creating", directory)
        temporary_directory = tempfile.mkdtemp(prefix='deb-pkg-tools-', suffix='-update-repo-stage')
        logger.debug("Using temporary directory: %s", temporary_directory)
        try:
            # Generate the `Packages' file.
            logger.debug("Generating file: %s", format_path(os.path.join(directory, 'Packages')))
            scan_packages(repository=directory,
                          packages_file=os.path.join(temporary_directory, 'Packages'),
                          cache=cache)
            # Generate the `Packages.gz' file by compressing the `Packages' file.
            logger.debug("Generating file: %s", format_path(os.path.join(directory, 'Packages.gz')))
            execute("gzip < Packages > Packages.gz", directory=temporary_directory, logger=logger)
            # Generate the `Release' file.
            logger.debug("Generating file: %s", format_path(os.path.join(directory, 'Release')))
            # Get APT::FTPArchive::Release::* options from configuration file.
            release_fields = dict((k.lower(), v) for k, v in release_fields.items())
            for name, value in load_config(directory).items():
                if name.startswith('release-'):
                    name = re.sub('^release-', '', name)
                    if name not in release_fields:
                        release_fields[name] = value
            # Override APT::FTPArchive::Release::* options from configuration file
            # with options given to update_repository() explicitly by the caller.
            options = []
            for name, value in release_fields.items():
                name = 'APT::FTPArchive::Release::%s' % name.capitalize()
                options.append('-o %s' % pipes.quote('%s=%s' % (name, value)))
            command = "LANG= apt-ftparchive %s release ." % ' '.join(options)
            release_listing = execute(command, capture=True, directory=temporary_directory, logger=logger)
            with open(os.path.join(temporary_directory, 'Release'), 'w') as handle:
                handle.write(release_listing + '\n')
            # Generate the `Release.gpg' and `InRelease' files by signing the `Release' file with GPG?
            gpg_key_file = os.path.join(directory, 'Release.gpg')
            in_release_file = os.path.join(directory, 'InRelease')
            if gpg_key:
                initialize_gnupg()
                logger.debug("Generating file: %s", format_path(gpg_key_file))
                command = "{gpg} --armor --sign --detach-sign --output Release.gpg Release"
                execute(command.format(gpg=gpg_key.gpg_command), directory=temporary_directory, logger=logger)
                logger.debug("Generating file: %s", format_path(in_release_file))
                command = "{gpg} --armor --sign --clearsign --output InRelease Release"
                execute(command.format(gpg=gpg_key.gpg_command), directory=temporary_directory, logger=logger)
            else:
                # XXX If 1) no GPG key was provided, 2) apt doesn't require the
                # repository to be signed and 3) `Release.gpg' exists from a
                # previous run, this file should be removed so we don't create an
                # inconsistent repository index (when `Release' is updated but
                # `Release.gpg' is not updated the signature becomes invalid).
                for stale_file in gpg_key_file, in_release_file:
                    if os.path.isfile(stale_file):
                        os.unlink(stale_file)
            # Move the generated files into the repository directory.
            for entry in os.listdir(temporary_directory):
                shutil.copy(os.path.join(temporary_directory, entry), os.path.join(directory, entry))
            logger.info("Finished updating trivial repository in %s.", timer)
        finally:
            shutil.rmtree(temporary_directory)