Example #1
def initialize(self):
    """
    Initialize (create and/or upgrade) the package cache database.
    """
    if self.db is None:
        # Create any missing directories.
        makedirs(os.path.dirname(self.filename))
        with atomic_lock(self.filename):
            # Open the SQLite database connection, enable autocommit.
            self.db = sqlite3.connect(database=self.filename, isolation_level=None)
            # Initialize the database schema.
            self.upgrade_schema(1, '''
                create table package_cache (
                    pathname text primary key,
                    timestamp real not null,
                    control_fields blob null,
                    package_fields blob null,
                    contents blob null
                );
            ''')
        # Enable 8-bit bytestrings so we can store binary data.
        try:
            self.db.text_factory = bytes
        except NameError:
            self.db.text_factory = str
        # Use a custom row factory to implement lazy evaluation.
        self.db.row_factory = functools.partial(CachedPackage, cache=self)
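The ``upgrade_schema()`` helper is not part of this excerpt. A minimal sketch of how such a method could work, assuming it tracks the schema version through SQLite's ``user_version`` pragma; it is shown here as a standalone function taking the connection, and the body is illustrative rather than the actual deb-pkg-tools implementation:

import sqlite3

def upgrade_schema(db, version, script):
    # SQLite stores an application-defined schema version in `user_version'
    # (it defaults to zero for freshly created databases).
    current_version = db.execute('pragma user_version;').fetchone()[0]
    if current_version < version:
        db.executescript(script)
        # Pragma statements don't support parameter binding; `version' is a
        # trusted integer, so the interpolation below is safe.
        db.execute('pragma user_version = %i;' % version)

Because each numbered upgrade runs at most once per database, later examples can append destructive migrations (like ``delete from package_cache;``) without wiping the cache on every initialization.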
Example #2
    def initialize(self):
        """
        Initialize (create and/or upgrade) the package cache database.
        """
        if self.db is None:
            # Create any missing directories.
            makedirs(os.path.dirname(self.filename))
            with atomic_lock(self.filename):
                # Open the SQLite database connection, enable autocommit.
                self.db = sqlite3.connect(database=self.filename,
                                          isolation_level=None)
                # Initialize the database schema.
                self.upgrade_schema(
                    1, '''
                    create table package_cache (
                        pathname text primary key,
                        timestamp real not null,
                        control_fields blob null,
                        package_fields blob null,
                        contents blob null
                    );
                ''')
                # In deb-pkg-tools 1.32.1 the parsing of the `Pre-Depends'
                # field was changed. Because of this change data cached by
                # older versions of deb-pkg-tools cannot be used by newer
                # versions of deb-pkg-tools.
                self.upgrade_schema(2, 'delete from package_cache;')
                # In deb-pkg-tools 1.35 the parsing of the `Breaks' field was
                # changed. Because of this change data cached by older versions
                # of deb-pkg-tools cannot be used by newer versions of
                # deb-pkg-tools.
                self.upgrade_schema(3, 'delete from package_cache;')
            # Enable 8-bit bytestrings so we can store binary data.
            try:
                # Python 3.x.
                self.db.text_factory = bytes
            except NameError:
                # Python 2.x.
                self.db.text_factory = str
            # Use a custom row factory to implement lazy evaluation. Previously
            # this used functools.partial() to inject self (a PackageCache
            # object) into the CachedPackage constructor, however as of Python
            # 3.4.2 this causes the following error to be raised:
            #
            #   TypeError: Row() does not take keyword arguments
            #   https://travis-ci.org/xolox/python-deb-pkg-tools/jobs/44186883#L746
            #
            # Looks like this was caused by the changes referenced in
            # http://bugs.python.org/issue21975.
            class CachedPackagePartial(CachedPackage):
                cache = self

            self.db.row_factory = CachedPackagePartial
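Why the subclassing trick works where ``functools.partial()`` broke: sqlite3 invokes the row factory as ``factory(cursor, row)`` with positional arguments only, so any extra context has to live on the callable itself rather than arrive as a keyword argument. A self-contained demonstration under made-up names (none of these are part of deb-pkg-tools):

import sqlite3

class ContextRow:
    # Placeholder for context injected by subclassing at runtime,
    # analogous to the `cache' attribute above.
    context = None

    def __init__(self, cursor, row):
        self.cursor = cursor
        self.row = row

class BoundRow(ContextRow):
    context = 'whatever the rows need access to'

connection = sqlite3.connect(':memory:')
connection.row_factory = BoundRow
result = connection.execute('select 1, 2;').fetchone()
print(result.row, result.context)  # -> (1, 2) whatever the rows need access to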
Example #3
def initialize(self):
    """
    Initialize (create and/or upgrade) the package cache database.
    """
    if self.db is None:
        # Create any missing directories.
        makedirs(os.path.dirname(self.filename))
        with atomic_lock(self.filename):
            # Open the SQLite database connection, enable autocommit.
            self.db = sqlite3.connect(database=self.filename, isolation_level=None)
            # Initialize the database schema.
            self.upgrade_schema(1, '''
                create table package_cache (
                    pathname text primary key,
                    timestamp real not null,
                    control_fields blob null,
                    package_fields blob null,
                    contents blob null
                );
            ''')
            # In deb-pkg-tools 1.32.1 the parsing of the `Pre-Depends'
            # field was changed. Because of this change data cached by
            # older versions of deb-pkg-tools cannot be used by newer
            # versions of deb-pkg-tools.
            self.upgrade_schema(2, 'delete from package_cache;')
            # In deb-pkg-tools 1.35 the parsing of the `Breaks' field was
            # changed. Because of this change data cached by older versions
            # of deb-pkg-tools cannot be used by newer versions of
            # deb-pkg-tools.
            self.upgrade_schema(3, 'delete from package_cache;')
        # Enable 8-bit bytestrings so we can store binary data.
        try:
            # Python 3.x.
            self.db.text_factory = bytes
        except NameError:
            # Python 2.x.
            self.db.text_factory = str
        # Use a custom row factory to implement lazy evaluation. Previously
        # this used functools.partial() to inject self (a PackageCache
        # object) into the CachedPackage constructor, however as of Python
        # 3.4.2 this causes the following error to be raised:
        #
        #   TypeError: Row() does not take keyword arguments
        #   https://travis-ci.org/xolox/python-deb-pkg-tools/jobs/44186883#L746
        #
        # Looks like this was caused by the changes referenced in
        # http://bugs.python.org/issue21975.
        class CachedPackagePartial(CachedPackage):
            cache = self
        self.db.row_factory = CachedPackagePartial
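The ``atomic_lock()`` context manager is also not shown in these excerpts. A rough sketch of the concept, assuming an advisory lock on a sidecar file (the actual deb-pkg-tools implementation may differ, for example in raising its own ``ResourceLockedException`` instead of a generic error):

import contextlib
import fcntl

@contextlib.contextmanager
def atomic_lock(pathname):
    # Serialize access to `pathname' with an exclusive, non-blocking lock
    # on a sidecar file; a competing process fails immediately instead of
    # waiting for the lock to be released.
    with open(pathname + '.lock', 'w') as handle:
        try:
            fcntl.flock(handle, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except BlockingIOError:
            raise RuntimeError("%s is locked by another process" % pathname)
        try:
            yield
        finally:
            fcntl.flock(handle, fcntl.LOCK_UN)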
Example #4
def update_repository(directory, release_fields={}, gpg_key=None, cache=None):
    """
    Create or update a `trivial repository`_.

    :param directory: The pathname of a directory with ``*.deb`` packages.
    :param release_fields: An optional dictionary with fields to set inside the
                           ``Release`` file.
    :param gpg_key: The :class:`.GPGKey` object used to sign the repository.
                    Defaults to the result of :func:`select_gpg_key()`.
    :param cache: The :class:`.PackageCache` to use (defaults to :data:`None`).
    :raises: :exc:`.ResourceLockedException` when the given repository
             directory is being updated by another process.

    This function is based on the Debian commands ``dpkg-scanpackages``
    (reimplemented as :func:`scan_packages()`) and ``apt-ftparchive``; it also
    uses the external programs ``gpg`` and ``gzip``.
    """
    with atomic_lock(directory):
        timer = Timer()
        gpg_key = gpg_key or select_gpg_key(directory)
        # Figure out when the repository contents were last updated.
        contents_last_updated = os.path.getmtime(directory)
        for archive in find_package_archives(directory):
            contents_last_updated = max(contents_last_updated,
                                        os.path.getmtime(archive.filename))
        # Figure out when the repository metadata was last updated.
        try:
            metadata_files = ['Packages', 'Packages.gz', 'Release']
            # XXX If 1) no GPG key was provided, 2) apt doesn't require the
            # repository to be signed and 3) `Release.gpg' doesn't exist, it should
            # not cause an unnecessary repository update. That would turn the
            # conditional update into an unconditional update, which is not the
            # intention here :-)
            if os.path.isfile(os.path.join(directory,
                                           'Release.gpg')) or gpg_key:
                metadata_files.append('Release.gpg')
            metadata_last_updated = max(
                os.path.getmtime(os.path.join(directory, fn))
                for fn in metadata_files)
        except Exception:
            metadata_last_updated = 0
        # If the repository doesn't actually need to be updated we'll skip the update.
        if metadata_last_updated >= contents_last_updated:
            logger.info(
                "Contents of repository %s didn't change, so no need to update it.",
                directory)
            return
        # The generated files `Packages', `Packages.gz', `Release' and `Release.gpg'
        # are created in a temporary directory. Only after all of the files have
        # been successfully generated are they moved to the repository directory.
        # There are two reasons for this:
        #
        # 1. If the repository directory is being served to apt-get clients we
        #    don't want them to catch us in the middle of updating the repository
        #    because it will be in an inconsistent state.
        #
        # 2. If we fail to generate one of the files it's better not to have
        #    changed any of them, for the same reason as point one :-)
        logger.info("%s trivial repository %s ..",
                    "Updating" if metadata_last_updated else "Creating",
                    directory)
        temporary_directory = tempfile.mkdtemp()
        try:
            # Generate the `Packages' file.
            logger.debug("Generating file: %s",
                         format_path(os.path.join(directory, 'Packages')))
            scan_packages(repository=directory,
                          packages_file=os.path.join(temporary_directory,
                                                     'Packages'),
                          cache=cache)
            # Generate the `Packages.gz' file by compressing the `Packages' file.
            logger.debug("Generating file: %s",
                         format_path(os.path.join(directory, 'Packages.gz')))
            execute("gzip < Packages > Packages.gz",
                    directory=temporary_directory,
                    logger=logger)
            # Generate the `Release' file.
            logger.debug("Generating file: %s",
                         format_path(os.path.join(directory, 'Release')))
            # Get APT::FTPArchive::Release::* options from configuration file.
            release_fields = dict(
                (k.lower(), v) for k, v in release_fields.items())
            for name, value in load_config(directory).items():
                if name.startswith('release-'):
                    name = re.sub('^release-', '', name)
                    if name not in release_fields:
                        release_fields[name] = value
            # Override APT::FTPArchive::Release::* options from configuration file
            # with options given to update_repository() explicitly by the caller.
            options = []
            for name, value in release_fields.items():
                name = 'APT::FTPArchive::Release::%s' % name.capitalize()
                options.append('-o %s' % pipes.quote('%s=%s' % (name, value)))
            command = "LANG= apt-ftparchive %s release ." % ' '.join(options)
            release_listing = execute(command,
                                      capture=True,
                                      directory=temporary_directory,
                                      logger=logger)
            with open(os.path.join(temporary_directory, 'Release'),
                      'w') as handle:
                handle.write(release_listing + '\n')
            # Generate the `Release.gpg' file by signing the `Release' file with GPG?
            gpg_key_file = os.path.join(directory, 'Release.gpg')
            if gpg_key:
                logger.debug("Generating file: %s", format_path(gpg_key_file))
                initialize_gnupg()
                command = "{gpg} --armor --sign --detach-sign --output Release.gpg Release"
                execute(command.format(gpg=gpg_key.gpg_command),
                        directory=temporary_directory,
                        logger=logger)
            elif os.path.isfile(gpg_key_file):
                # XXX If 1) no GPG key was provided, 2) apt doesn't require the
                # repository to be signed and 3) `Release.gpg' exists from a
                # previous run, this file should be removed so we don't create an
                # inconsistent repository index (when `Release' is updated but
                # `Release.gpg' is not updated the signature becomes invalid).
                os.unlink(gpg_key_file)
            # Move the generated files into the repository directory.
            for entry in os.listdir(temporary_directory):
                shutil.copy(os.path.join(temporary_directory, entry),
                            os.path.join(directory, entry))
            logger.info("Finished updating trivial repository in %s.", timer)
        finally:
            shutil.rmtree(temporary_directory)
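For reference, a call to this function could look as follows. The repository path and ``Release`` fields are made up for illustration, and the import path assumes the function lives in ``deb_pkg_tools.repo`` as in the upstream project:

from deb_pkg_tools.repo import update_repository

update_repository('/srv/apt/internal',
                  release_fields=dict(origin='Example Org',
                                      label='Internal packages'))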
Example #5
def update_repository(directory, release_fields={}, gpg_key=None, cache=None):
    """
    Create or update a `trivial repository`_ using the Debian commands
    ``dpkg-scanpackages`` (reimplemented as :py:func:`scan_packages()`) and
    ``apt-ftparchive``; it also uses the external programs ``gpg`` and ``gzip``.
    Raises :py:exc:`.ResourceLockedException` when the given repository
    directory is being updated by another process.

    :param directory: The pathname of a directory with ``*.deb`` packages.
    :param release_fields: An optional dictionary with fields to set inside the
                           ``Release`` file.
    :param gpg_key: The :py:class:`.GPGKey` object used to sign the repository.
                    Defaults to the result of :py:func:`select_gpg_key()`.
    :param cache: The :py:class:`.PackageCache` to use (defaults to ``None``).
    """
    with atomic_lock(directory):
        timer = Timer()
        gpg_key = gpg_key or select_gpg_key(directory)
        # Figure out when the repository contents were last updated.
        contents_last_updated = os.path.getmtime(directory)
        for archive in find_package_archives(directory):
            contents_last_updated = max(contents_last_updated, os.path.getmtime(archive.filename))
        # Figure out when the repository metadata was last updated.
        try:
            metadata_files = ['Packages', 'Packages.gz', 'Release']
            # XXX If 1) no GPG key was provided, 2) apt doesn't require the
            # repository to be signed and 3) `Release.gpg' doesn't exist, it should
            # not cause an unnecessary repository update. That would turn the
            # conditional update into an unconditional update, which is not the
            # intention here :-)
            if os.path.isfile(os.path.join(directory, 'Release.gpg')) or gpg_key:
                metadata_files.append('Release.gpg')
            metadata_last_updated = max(os.path.getmtime(os.path.join(directory, fn)) for fn in metadata_files)
        except Exception:
            metadata_last_updated = 0
        # If the repository doesn't actually need to be updated we'll skip the update.
        if metadata_last_updated >= contents_last_updated:
            logger.info("Contents of repository %s didn't change, so no need to update it.", directory)
            return
        # The generated files `Packages', `Packages.gz', `Release' and `Release.gpg'
        # are created in a temporary directory. Only after all of the files have
        # been successfully generated are they moved to the repository directory.
        # There are two reasons for this:
        #
        # 1. If the repository directory is being served to apt-get clients we
        #    don't want them to catch us in the middle of updating the repository
        #    because it will be in an inconsistent state.
        #
        # 2. If we fail to generate one of the files it's better not to have
        #    changed any of them, for the same reason as point one :-)
        logger.info("%s trivial repository %s ..", "Updating" if metadata_last_updated else "Creating", directory)
        temporary_directory = tempfile.mkdtemp()
        try:
            # Generate the `Packages' file.
            logger.debug("Generating file: %s", format_path(os.path.join(directory, 'Packages')))
            scan_packages(repository=directory,
                          packages_file=os.path.join(temporary_directory, 'Packages'),
                          cache=cache)
            # Generate the `Packages.gz' file by compressing the `Packages' file.
            logger.debug("Generating file: %s", format_path(os.path.join(directory, 'Packages.gz')))
            execute("gzip < Packages > Packages.gz", directory=temporary_directory, logger=logger)
            # Generate the `Release' file.
            logger.debug("Generating file: %s", format_path(os.path.join(directory, 'Release')))
            # Get APT::FTPArchive::Release::* options from configuration file.
            release_fields = dict((k.lower(), v) for k, v in release_fields.items())
            for name, value in load_config(directory).items():
                if name.startswith('release-'):
                    name = re.sub('^release-', '', name)
                    if name not in release_fields:
                        release_fields[name] = value
            # Override APT::FTPArchive::Release::* options from configuration file
            # with options given to update_repository() explicitly by the caller.
            options = []
            for name, value in release_fields.items():
                name = 'APT::FTPArchive::Release::%s' % name.capitalize()
                options.append('-o %s' % pipes.quote('%s=%s' % (name, value)))
            command = "LANG= apt-ftparchive %s release ." % ' '.join(options)
            release_listing = execute(command, capture=True, directory=temporary_directory, logger=logger)
            with open(os.path.join(temporary_directory, 'Release'), 'w') as handle:
                handle.write(release_listing + '\n')
            # Generate the `Release.gpg' file by signing the `Release' file with GPG?
            gpg_key_file = os.path.join(directory, 'Release.gpg')
            if gpg_key:
                logger.debug("Generating file: %s", format_path(gpg_key_file))
                initialize_gnupg()
                command = "{gpg} --armor --sign --detach-sign --output Release.gpg Release"
                execute(command.format(gpg=gpg_key.gpg_command), directory=temporary_directory, logger=logger)
            elif os.path.isfile(gpg_key_file):
                # XXX If 1) no GPG key was provided, 2) apt doesn't require the
                # repository to be signed and 3) `Release.gpg' exists from a
                # previous run, this file should be removed so we don't create an
                # inconsistent repository index (when `Release' is updated but
                # `Release.gpg' is not updated the signature becomes invalid).
                os.unlink(gpg_key_file)
            # Move the generated files into the repository directory.
            for entry in os.listdir(temporary_directory):
                shutil.copy(os.path.join(temporary_directory, entry), os.path.join(directory, entry))
            logger.info("Finished updating trivial repository in %s.", timer)
        finally:
            shutil.rmtree(temporary_directory)
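The option handling above boils down to a small transformation: every ``Release`` field becomes an ``-o APT::FTPArchive::Release::Name=value`` option on the apt-ftparchive command line. An isolated version of that logic for illustration (sorting added only to make the output deterministic):

import pipes  # on Python 3, shlex.quote() is the equivalent

def build_release_options(release_fields):
    options = []
    for name, value in sorted(release_fields.items()):
        qualified = 'APT::FTPArchive::Release::%s' % name.capitalize()
        options.append('-o %s' % pipes.quote('%s=%s' % (qualified, value)))
    return ' '.join(options)

print(build_release_options(dict(origin='Example Org', label='Testing')))
# -o APT::FTPArchive::Release::Label=Testing -o 'APT::FTPArchive::Release::Origin=Example Org'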
Example #6
def update_repository(directory, release_fields={}, gpg_key=None, cache=None):
    """
    Create or update a `trivial repository`_.

    :param directory: The pathname of a directory with ``*.deb`` packages.
    :param release_fields: An optional dictionary with fields to set inside the
                           ``Release`` file.
    :param gpg_key: The :class:`.GPGKey` object used to sign the repository.
                    Defaults to the result of :func:`select_gpg_key()`.
    :param cache: The :class:`.PackageCache` to use (defaults to :data:`None`).
    :raises: :exc:`.ResourceLockedException` when the given repository
             directory is being updated by another process.

    This function is based on the Debian programs dpkg-scanpackages_ and
    apt-ftparchive_ and also uses gpg_ and gzip_. The following files are
    generated:

    ===============  ==========================================================
    Filename         Description
    ===============  ==========================================================
    ``Packages``     Provides the metadata of all ``*.deb`` packages in the
                     `trivial repository`_ as a single text file. Generated
                     using :func:`scan_packages()` (as a faster alternative
                     to dpkg-scanpackages_).
    ``Packages.gz``  A compressed version of the package metadata generated
                     using gzip_.
    ``Release``      Metadata about the release and hashes of the ``Packages``
                     and ``Packages.gz`` files. Generated using
                     apt-ftparchive_.
    ``Release.gpg``  An ASCII-armored detached GPG signature of the ``Release``
                     file. Generated using ``gpg --armor --sign
                     --detach-sign``.
    ``InRelease``    The contents of the ``Release`` file and its GPG signature
                     combined into a single human readable file. Generated
                     using ``gpg --armor --sign --clearsign``.
    ===============  ==========================================================

    For more details about the ``Release.gpg`` and ``InRelease`` files please
    refer to the Debian wiki's section on secure-apt_.

    .. _apt-ftparchive: https://manpages.debian.org/apt-ftparchive
    .. _dpkg-scanpackages: https://manpages.debian.org/dpkg-scanpackages
    .. _gpg: https://manpages.debian.org/gpg
    .. _gzip: https://manpages.debian.org/gzip
    .. _secure-apt: https://wiki.debian.org/SecureApt
    """
    with atomic_lock(directory):
        timer = Timer()
        gpg_key = gpg_key or select_gpg_key(directory)
        # Figure out when the repository contents were last updated.
        contents_last_updated = os.path.getmtime(directory)
        for archive in find_package_archives(directory):
            contents_last_updated = max(contents_last_updated, os.path.getmtime(archive.filename))
        # Figure out when the repository metadata was last updated.
        try:
            metadata_files = ['Packages', 'Packages.gz', 'Release']
            # XXX If 1) no GPG key was provided, 2) apt doesn't require the
            # repository to be signed and 3) the signed files `Release.gpg'
            # and `InRelease' don't exist, their absence should not cause an
            # unnecessary repository update. That would turn the conditional
            # update into an unconditional update, which is not the
            # intention here :-)
            for signed_file in 'Release.gpg', 'InRelease':
                if os.path.isfile(os.path.join(directory, signed_file)) or gpg_key:
                    metadata_files.append(signed_file)
            metadata_last_updated = max(os.path.getmtime(os.path.join(directory, fn)) for fn in metadata_files)
        except Exception:
            metadata_last_updated = 0
        # If the repository doesn't actually need to be updated we'll skip the update.
        if metadata_last_updated >= contents_last_updated:
            logger.info("Contents of repository %s didn't change, so no need to update it.", directory)
            return
        # The generated files `Packages', `Packages.gz', `Release' and `Release.gpg'
        # are created in a temporary directory. Only after all of the files have
        # been successfully generated are they moved to the repository directory.
        # There are two reasons for this:
        #
        # 1. If the repository directory is being served to apt-get clients we
        #    don't want them to catch us in the middle of updating the repository
        #    because it will be in an inconsistent state.
        #
        # 2. If we fail to generate one of the files it's better not to have
        #    changed any of them, for the same reason as point one :-)
        logger.info("%s trivial repository %s ..", "Updating" if metadata_last_updated else "Creating", directory)
        temporary_directory = tempfile.mkdtemp(prefix='deb-pkg-tools-', suffix='-update-repo-stage')
        logger.debug("Using temporary directory: %s", temporary_directory)
        try:
            # Generate the `Packages' file.
            logger.debug("Generating file: %s", format_path(os.path.join(directory, 'Packages')))
            scan_packages(repository=directory,
                          packages_file=os.path.join(temporary_directory, 'Packages'),
                          cache=cache)
            # Generate the `Packages.gz' file by compressing the `Packages' file.
            logger.debug("Generating file: %s", format_path(os.path.join(directory, 'Packages.gz')))
            execute("gzip < Packages > Packages.gz", directory=temporary_directory, logger=logger)
            # Generate the `Release' file.
            logger.debug("Generating file: %s", format_path(os.path.join(directory, 'Release')))
            # Get APT::FTPArchive::Release::* options from configuration file.
            release_fields = dict((k.lower(), v) for k, v in release_fields.items())
            for name, value in load_config(directory).items():
                if name.startswith('release-'):
                    name = re.sub('^release-', '', name)
                    if name not in release_fields:
                        release_fields[name] = value
            # Override APT::FTPArchive::Release::* options from configuration file
            # with options given to update_repository() explicitly by the caller.
            options = []
            for name, value in release_fields.items():
                name = 'APT::FTPArchive::Release::%s' % name.capitalize()
                options.append('-o %s' % pipes.quote('%s=%s' % (name, value)))
            command = "LANG= apt-ftparchive %s release ." % ' '.join(options)
            release_listing = execute(command, capture=True, directory=temporary_directory, logger=logger)
            with open(os.path.join(temporary_directory, 'Release'), 'w') as handle:
                handle.write(release_listing + '\n')
            # Generate the `Release.gpg' and `InRelease' files by signing the `Release' file with GPG?
            gpg_key_file = os.path.join(directory, 'Release.gpg')
            in_release_file = os.path.join(directory, 'InRelease')
            if gpg_key:
                initialize_gnupg()
                logger.debug("Generating file: %s", format_path(gpg_key_file))
                command = "{gpg} --armor --sign --detach-sign --output Release.gpg Release"
                execute(command.format(gpg=gpg_key.gpg_command), directory=temporary_directory, logger=logger)
                logger.debug("Generating file: %s", format_path(in_release_file))
                command = "{gpg} --armor --sign --clearsign --output InRelease Release"
                execute(command.format(gpg=gpg_key.gpg_command), directory=temporary_directory, logger=logger)
            else:
                # XXX If 1) no GPG key was provided, 2) apt doesn't require
                # the repository to be signed and 3) `Release.gpg' and/or
                # `InRelease' exist from a previous run, those files should
                # be removed so we don't create an inconsistent repository
                # index (when `Release' is updated but the signatures are
                # not, they become invalid).
                for stale_file in gpg_key_file, in_release_file:
                    if os.path.isfile(stale_file):
                        os.unlink(stale_file)
            # Move the generated files into the repository directory.
            for entry in os.listdir(temporary_directory):
                shutil.copy(os.path.join(temporary_directory, entry), os.path.join(directory, entry))
            logger.info("Finished updating trivial repository in %s.", timer)
        finally:
            shutil.rmtree(temporary_directory)
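The two signing steps can also be reproduced in isolation. A minimal sketch using the standard library instead of deb-pkg-tools' ``execute()`` helper (this assumes ``gpg`` is installed with a usable default key; the flags are the same ones used above):

import subprocess

def sign_release(directory, gpg_command='gpg'):
    # `Release.gpg' is an ASCII-armored detached signature of `Release'.
    subprocess.check_call('%s --armor --sign --detach-sign --output Release.gpg Release'
                          % gpg_command, cwd=directory, shell=True)
    # `InRelease' combines the contents of `Release' and its signature in
    # one clear-signed file.
    subprocess.check_call('%s --armor --sign --clearsign --output InRelease Release'
                          % gpg_command, cwd=directory, shell=True)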