Example #1
    def test_package_building(self, repository=None, overrides={}, contents={}):
        """Test building of Debian binary packages."""
        with Context() as finalizers:
            build_directory = finalizers.mkdtemp()
            control_fields = merge_control_fields(TEST_PACKAGE_FIELDS, overrides)
            # Create the package template.
            os.mkdir(os.path.join(build_directory, 'DEBIAN'))
            with open(os.path.join(build_directory, 'DEBIAN', 'control'), 'wb') as handle:
                control_fields.dump(handle)
            if contents:
                for filename, data in contents.items():
                    filename = os.path.join(build_directory, filename)
                    directory = os.path.dirname(filename)
                    makedirs(directory)
                    with open(filename, 'w') as handle:
                        handle.write(data)
            else:
                with open(os.path.join(build_directory, 'DEBIAN', 'conffiles'), 'wb') as handle:
                    handle.write(b'/etc/file1\n')
                    handle.write(b'/etc/file2\n')
                # Create the directory with configuration files.
                os.mkdir(os.path.join(build_directory, 'etc'))
                touch(os.path.join(build_directory, 'etc', 'file1'))
                touch(os.path.join(build_directory, 'etc', 'file3'))
                # Create a directory that should be cleaned up by clean_package_tree().
                makedirs(os.path.join(build_directory, 'tmp', '.git'))
                # Create a file that should be cleaned up by clean_package_tree().
                with open(os.path.join(build_directory, 'tmp', '.gitignore'), 'w') as handle:
                    handle.write('\n')
            # Build the package (without any contents :-).
            returncode, output = run_cli(main, '--build', build_directory)
            assert returncode == 0
            package_file = os.path.join(tempfile.gettempdir(),
                                        '%s_%s_%s.deb' % (control_fields['Package'],
                                                          control_fields['Version'],
                                                          control_fields['Architecture']))
            assert os.path.isfile(package_file)
            if repository:
                shutil.move(package_file, repository)
                return os.path.join(repository, os.path.basename(package_file))
            else:
                finalizers.register(os.unlink, package_file)
                # Verify the package metadata.
                fields, contents = inspect_package(package_file)
                for name in TEST_PACKAGE_FIELDS:
                    assert fields[name] == TEST_PACKAGE_FIELDS[name]
                # Verify that the package contains the `/' and `/tmp'
                # directories (since it doesn't contain any actual files).
                assert contents['/'].permissions[0] == 'd'
                assert contents['/'].permissions[1:] == 'rwxr-xr-x'
                assert contents['/'].owner == 'root'
                assert contents['/'].group == 'root'
                assert contents['/tmp/'].permissions[0] == 'd'
                assert contents['/tmp/'].owner == 'root'
                assert contents['/tmp/'].group == 'root'
                # Verify that clean_package_tree() cleaned up properly
                # (`/tmp/.git' and `/tmp/.gitignore' have been cleaned up).
                assert '/tmp/.git/' not in contents
                assert '/tmp/.gitignore' not in contents
                return package_file
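
This test helper doubles as a package factory for the rest of the test suite. A hedged sketch of how it might be invoked with a custom payload follows; the repository path, field overrides and file contents are purely illustrative and not taken from the project:

# Hypothetical invocation from another test method of the same test case;
# all values below are illustrative only.
package_file = self.test_package_building(
    repository='/tmp/example-repo',
    overrides=dict(Package='example-package', Version='2.0'),
    contents={'usr/share/doc/example-package/README': 'Hello world!\n'},
)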
Example #2
    def set_value(self, value):
        """
        Set the cache entry's value.

        :param value: The metadata to save in the cache.
        """
        # Cache the value in memory.
        self.in_memory = dict(
            last_modified=self.last_modified,
            pathname=self.pathname,
            revision=CACHE_FORMAT_REVISION,
            value=value,
        )
        # Cache the value in memcached.
        self.set_memcached()
        # Cache the value on the filesystem.
        directory, filename = os.path.split(self.cache_file)
        temporary_file = os.path.join(directory,
                                      '.%s-%i' % (filename, os.getpid()))
        try:
            # Try to write the cache file.
            self.write_file(temporary_file)
        except EnvironmentError as e:
            # We may be missing the cache directory.
            if e.errno == errno.ENOENT:
                # Make sure the cache directory exists.
                makedirs(directory)
                # Try to write the cache file again.
                self.write_file(temporary_file)
            else:
                # Don't swallow exceptions we can't handle.
                raise
        # Move the temporary file into place, trusting the
        # filesystem to handle this operation atomically.
        os.rename(temporary_file, self.cache_file)
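
The tail end of set_value() is a compact instance of the write-then-rename pattern: write to a hidden temporary file in the same directory, then rename it over the real cache file so readers never observe a half-written file. Below is a minimal, self-contained sketch of the same idea; the helper name, the use of JSON, and os.replace() (Python 3.3+) are assumptions for illustration, not part of deb-pkg-tools:

import errno
import json
import os


def atomic_write_json(pathname, value):
    """Write `value` to `pathname` as JSON without exposing partial files."""
    directory, filename = os.path.split(pathname)
    temporary_file = os.path.join(directory, '.%s-%i' % (filename, os.getpid()))
    try:
        with open(temporary_file, 'w') as handle:
            json.dump(value, handle)
    except EnvironmentError as e:
        # The cache directory may not exist yet: create it and retry once.
        if e.errno != errno.ENOENT:
            raise
        os.makedirs(directory, exist_ok=True)
        with open(temporary_file, 'w') as handle:
            json.dump(value, handle)
    # On POSIX this rename is atomic as long as source and target live on
    # the same filesystem, which they do thanks to the shared directory.
    os.replace(temporary_file, pathname)

On older interpreters os.rename() behaves the same way on POSIX, which is what the example above relies on.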
Example #3
def create_control_file(control_file, control_fields):
    """
    Create a Debian control file.

    :param control_file: The filename of the control file to create (a string).
    :param control_fields: A dictionary with control file fields. This
                           dictionary is merged with the values in
                           :data:`DEFAULT_CONTROL_FIELDS`.
    :raises: :exc:`~exceptions.ValueError` when a mandatory binary control
             field is not present in the provided control fields (see also
             :data:`MANDATORY_BINARY_CONTROL_FIELDS`).
    """
    logger.debug("Creating control file: %s", format_path(control_file))
    # Merge the defaults with the fields defined by the caller.
    merged_fields = merge_control_fields(DEFAULT_CONTROL_FIELDS, control_fields)
    # Sanity check for mandatory fields that are missing.
    missing_fields = [f for f in MANDATORY_BINARY_CONTROL_FIELDS if f not in merged_fields]
    if missing_fields:
        raise ValueError("Missing %s! (%s)" % (pluralize(len(missing_fields), "mandatory binary package control field"),
                                               concatenate(sorted(missing_fields))))
    # Make sure the parent directory of the control file exists.
    makedirs(os.path.dirname(control_file))
    # Remove the control file if it already exists in case it's a hard link to
    # an inode with multiple hard links that should _not_ be changed by us.
    if os.path.exists(control_file):
        os.unlink(control_file)
    # Write the control file.
    with open(control_file, 'wb') as handle:
        merged_fields.dump(handle)
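
For context, here is a hedged usage sketch of the function above. The import path, target path and field values are assumed for illustration; the exact set of required fields is whatever MANDATORY_BINARY_CONTROL_FIELDS contains:

# Illustrative only: the path and field values are made up.
from deb_pkg_tools.control import create_control_file

create_control_file('/tmp/example-build/DEBIAN/control', {
    'Package': 'example-package',
    'Version': '1.0',
    'Architecture': 'all',
    'Maintainer': 'Example Maintainer <maintainer@example.com>',
    'Description': 'An example binary package',
})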
Example #4
    def initialize(self):
        """
        Initialize (create and/or upgrade) the package cache database.
        """
        if self.db is None:
            # Create any missing directories.
            makedirs(os.path.dirname(self.filename))
            with atomic_lock(self.filename):
                # Open the SQLite database connection, enable autocommit.
                self.db = sqlite3.connect(database=self.filename, isolation_level=None)
                # Initialize the database schema.
                self.upgrade_schema(1, '''
                    create table package_cache (
                        pathname text primary key,
                        timestamp real not null,
                        control_fields blob null,
                        package_fields blob null,
                        contents blob null
                    );
                ''')
            # Enable 8-bit bytestrings so we can store binary data.
            try:
                self.db.text_factory = bytes
            except NameError:
                self.db.text_factory = str
            # Use a custom row factory to implement lazy evaluation.
            self.db.row_factory = functools.partial(CachedPackage, cache=self)
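
upgrade_schema() itself does not appear in this listing. One common way to implement this kind of numbered, run-once migration on SQLite is to track the schema revision in PRAGMA user_version; the sketch below only illustrates that idea and is not the deb-pkg-tools implementation:

# Sketch of numbered schema migrations using PRAGMA user_version.
# NOT the deb-pkg-tools implementation of upgrade_schema(); it only
# demonstrates the "apply statements once per revision" behaviour.
import sqlite3


def upgrade_schema(db, revision, statements):
    """Execute `statements` once, when the stored revision is older than `revision`."""
    current = db.execute('pragma user_version').fetchone()[0]
    if current < revision:
        db.executescript(statements)
        # PRAGMA does not support bound parameters, hence the string formatting.
        db.execute('pragma user_version = %d' % revision)


db = sqlite3.connect(':memory:', isolation_level=None)
upgrade_schema(db, 1, 'create table package_cache (pathname text primary key);')
upgrade_schema(db, 1, 'create table package_cache (pathname text primary key);')  # no-op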
Example #5
def create_control_file(control_file, control_fields):
    """
    Create a Debian control file.

    :param control_file: The filename of the control file to create (a string).
    :param control_fields: A dictionary with control file fields. This
                           dictionary is merged with the values in
                           :data:`DEFAULT_CONTROL_FIELDS`.
    :raises: :exc:`~exceptions.ValueError` when a mandatory binary control
             field is not present in the provided control fields (see also
             :data:`MANDATORY_BINARY_CONTROL_FIELDS`).
    """
    logger.debug("Creating control file: %s", format_path(control_file))
    # Merge the defaults with the fields defined by the caller.
    merged_fields = merge_control_fields(DEFAULT_CONTROL_FIELDS, control_fields)
    # Sanity check for mandatory fields that are missing.
    missing_fields = [f for f in MANDATORY_BINARY_CONTROL_FIELDS if f not in merged_fields]
    if missing_fields:
        raise ValueError(
            "Missing %s! (%s)"
            % (
                pluralize(len(missing_fields), "mandatory binary package control field"),
                concatenate(sorted(missing_fields)),
            )
        )
    # Make sure the parent directory of the control file exists.
    makedirs(os.path.dirname(control_file))
    # Remove the control file if it already exists in case it's a hard link to
    # an inode with multiple hard links that should _not_ be changed by us.
    if os.path.exists(control_file):
        os.unlink(control_file)
    # Write the control file.
    with open(control_file, "wb") as handle:
        merged_fields.dump(handle)
Example #6
def create_control_file(control_file, control_fields):
    """
    Create a Debian control file.

    :param control_file: The filename of the control file to create (a string).
    :param control_fields: A dictionary with control file fields. This
                           dictionary is merged with the values in
                           :data:`DEFAULT_CONTROL_FIELDS`.
    :raises: See :func:`check_mandatory_fields()`.
    """
    logger.debug("Creating control file: %s", format_path(control_file))
    # Merge the defaults with the fields defined by the caller.
    merged_fields = merge_control_fields(DEFAULT_CONTROL_FIELDS,
                                         control_fields)
    # Sanity check for mandatory fields that are missing.
    check_mandatory_fields(merged_fields)
    # Make sure the parent directory of the control file exists.
    makedirs(os.path.dirname(control_file))
    # Remove the control file if it already exists in case it's a hard link to
    # an inode with multiple hard links that should _not_ be changed by us.
    if os.path.exists(control_file):
        os.unlink(control_file)
    # Write the control file.
    with open(control_file, 'wb') as handle:
        merged_fields.dump(handle)
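
check_mandatory_fields() is not shown in this listing, but based on the inline check in Examples #3 and #5 it presumably amounts to the following sketch (the real implementation may differ in details; pluralize() and concatenate() are the same helpers used in those examples):

# Reconstructed from the inline check in Examples #3 and #5.
def check_mandatory_fields(control_fields):
    """Raise ValueError when mandatory binary control fields are missing."""
    missing_fields = [f for f in MANDATORY_BINARY_CONTROL_FIELDS if f not in control_fields]
    if missing_fields:
        raise ValueError("Missing %s! (%s)"
                         % (pluralize(len(missing_fields), "mandatory binary package control field"),
                            concatenate(sorted(missing_fields))))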
Example #7
def create_directory(pathname):
    """
    Create a GnuPG directory with sane permissions (to avoid GnuPG warnings).

    :param pathname: The directory to create (a string).
    """
    makedirs(pathname)
    os.chmod(pathname, 0o700)
Example #8
def copy_package_files(from_directory, to_directory, hard_links=True):
    """
    Copy package files to a temporary directory, using hard links when possible.

    :param from_directory: The pathname of a directory tree suitable for
                           packaging with ``dpkg-deb --build``.
    :param to_directory: The pathname of a temporary build directory.
    :param hard_links: Use hard links to speed up copying when possible.

    This function copies a directory tree suitable for packaging with
    ``dpkg-deb --build`` to a temporary build directory so that individual
    files can be replaced without changing the original directory tree. If the
    build directory is on the same file system as the source directory, hard
    links are used to speed up the copy. This function is used by
    :func:`build_package()`.
    """
    logger.info("Copying files (%s) to temporary directory (%s) ..",
                format_path(from_directory), format_path(to_directory))
    command = ['cp', '-a']
    makedirs(to_directory)
    if hard_links and ALLOW_HARD_LINKS:
        # Check whether we can use hard links to speed up the copy. In the past
        # this used the following simple and obvious check:
        #
        #   os.stat(source_directory).st_dev == os.stat(build_directory).st_dev
        #
        # However this expression holds true inside schroot, yet `cp -al' fails
        # when trying to create the hard links! This is why the following code now
        # tries to create an actual hard link to verify that `cp -al' can be used.
        test_file_from = None
        test_file_to = None
        try:
            # Find a unique filename that we can create and destroy without
            # touching any of the caller's files.
            while True:
                test_name = 'deb-pkg-tools-hard-link-test-%d' % random.randint(
                    1, 1000)
                test_file_from = os.path.join(from_directory, test_name)
                test_file_to = os.path.join(to_directory, test_name)
                if not os.path.isfile(test_file_from):
                    break
            # Create the test file.
            with open(test_file_from, 'w') as handle:
                handle.write('test')
            os.link(test_file_from, test_file_to)
            logger.debug("Speeding up file copy using hard links ..")
            command.append('-l')
        except (IOError, OSError):
            pass
        finally:
            for test_file in [test_file_from, test_file_to]:
                if test_file and os.path.isfile(test_file):
                    os.unlink(test_file)
    # I know this looks really funky, but this is a valid use of shell escaping
    # and globbing (obviously I tested it ;-).
    command.append('%s/*' % pipes.quote(from_directory))
    command.append(pipes.quote(to_directory))
    execute(' '.join(command), logger=logger)
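
The hard-link probe buried inside copy_package_files() is worth isolating: instead of comparing st_dev values (which can be misleading inside schroot), it simply tries to create a link and checks whether that works. A condensed, standalone sketch of that probe follows; the helper name is made up for illustration:

import os
import random


def hard_links_supported(from_directory, to_directory):
    """Check whether hard links can be created from one directory into another."""
    test_file_from = test_file_to = None
    try:
        # Find a unique filename that doesn't clash with the caller's files.
        while True:
            test_name = 'hard-link-test-%d' % random.randint(1, 1000)
            test_file_from = os.path.join(from_directory, test_name)
            test_file_to = os.path.join(to_directory, test_name)
            if not os.path.isfile(test_file_from):
                break
        # Create the test file and try to link it into the other directory.
        with open(test_file_from, 'w') as handle:
            handle.write('test')
        os.link(test_file_from, test_file_to)
        return True
    except (IOError, OSError):
        # Typically EXDEV (cross-device link) or a restricted environment.
        return False
    finally:
        # Clean up whichever test files were actually created.
        for test_file in (test_file_from, test_file_to):
            if test_file and os.path.isfile(test_file):
                os.unlink(test_file)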
Example #9
    def test_makedirs(self):
        """Test that makedirs() can deal with race conditions."""
        with Context() as finalizers:
            parent = finalizers.mkdtemp()
            child = os.path.join(parent, 'nested')
            # This will create the directory.
            makedirs(child)
            # This should not complain that the directory already exists.
            makedirs(child)
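
This test only passes if makedirs() treats an already existing directory (possibly created by a concurrent process) as success. The project's own implementation is not part of this listing; a minimal race-tolerant version could look like this sketch:

# Minimal sketch of a race-tolerant makedirs(); not the deb-pkg-tools
# implementation, just the behaviour the test above depends on.
import errno
import os


def makedirs(pathname):
    """Create a directory and its missing parents, tolerating existing directories."""
    try:
        os.makedirs(pathname)
    except OSError as e:
        # Another process may have created the directory in the meantime.
        if e.errno != errno.EEXIST or not os.path.isdir(pathname):
            raise

On Python 3.2+ the same effect can be had with os.makedirs(pathname, exist_ok=True).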
Example #10
def copy_package_files(from_directory, to_directory, hard_links=True):
    """
    Copy package files to a temporary directory, using hard links when possible.

    :param from_directory: The pathname of a directory tree suitable for
                           packaging with ``dpkg-deb --build``.
    :param to_directory: The pathname of a temporary build directory.
    :param hard_links: Use hard links to speed up copying when possible.

    This function copies a directory tree suitable for packaging with
    ``dpkg-deb --build`` to a temporary build directory so that individual
    files can be replaced without changing the original directory tree. If the
    build directory is on the same file system as the source directory, hard
    links are used to speed up the copy. This function is used by
    :func:`build_package()`.
    """
    logger.info("Copying files (%s) to temporary directory (%s) ..",
                format_path(from_directory), format_path(to_directory))
    command = ['cp', '-a']
    makedirs(to_directory)
    if hard_links and ALLOW_HARD_LINKS:
        # Check whether we can use hard links to speed up the copy. In the past
        # this used the following simple and obvious check:
        #
        #   os.stat(source_directory).st_dev == os.stat(build_directory).st_dev
        #
        # However this expression holds true inside schroot, yet `cp -al' fails
        # when trying to create the hard links! This is why the following code now
        # tries to create an actual hard link to verify that `cp -al' can be used.
        test_file_from = None
        test_file_to = None
        try:
            # Find a unique filename that we can create and destroy without
            # touching any of the caller's files.
            while True:
                test_name = 'deb-pkg-tools-hard-link-test-%d' % random.randint(1, 1000)
                test_file_from = os.path.join(from_directory, test_name)
                test_file_to = os.path.join(to_directory, test_name)
                if not os.path.isfile(test_file_from):
                    break
            # Create the test file.
            with open(test_file_from, 'w') as handle:
                handle.write('test')
            os.link(test_file_from, test_file_to)
            logger.debug("Speeding up file copy using hard links ..")
            command.append('-l')
        except (IOError, OSError):
            pass
        finally:
            for test_file in [test_file_from, test_file_to]:
                if test_file and os.path.isfile(test_file):
                    os.unlink(test_file)
    # I know this looks really funky, but this is a valid use of shell escaping
    # and globbing (obviously I tested it ;-).
    command.append('%s/*' % pipes.quote(from_directory))
    command.append(pipes.quote(to_directory))
    execute(' '.join(command), logger=logger)
Example #11
def initialize_gnupg():
    """
    Make sure the ``~/.gnupg`` directory exists.

    Older versions of GPG can/will fail when the ``~/.gnupg`` directory doesn't
    exist (e.g. in a newly created chroot). GPG itself creates the directory
    after noticing that it's missing, but then still fails! Later runs work
    fine however. To avoid this problem we make sure ``~/.gnupg`` exists before
    we run GPG.
    """
    makedirs(parse_path('~/.gnupg'))
Example #12
    def initialize(self):
        """
        Initialize (create and/or upgrade) the package cache database.
        """
        if self.db is None:
            # Create any missing directories.
            makedirs(os.path.dirname(self.filename))
            with atomic_lock(self.filename):
                # Open the SQLite database connection, enable autocommit.
                self.db = sqlite3.connect(database=self.filename,
                                          isolation_level=None)
                # Initialize the database schema.
                self.upgrade_schema(
                    1, '''
                    create table package_cache (
                        pathname text primary key,
                        timestamp real not null,
                        control_fields blob null,
                        package_fields blob null,
                        contents blob null
                    );
                ''')
                # In deb-pkg-tools 1.32.1 the parsing of the `Pre-Depends'
                # field was changed. Because of this change data cached by
                # older versions of deb-pkg-tools cannot be used by newer
                # versions of deb-pkg-tools.
                self.upgrade_schema(2, 'delete from package_cache;')
                # In deb-pkg-tools 1.35 the parsing of the `Breaks' field was
                # changed. Because of this change data cached by older versions
                # of deb-pkg-tools cannot be used by newer versions of
                # deb-pkg-tools.
                self.upgrade_schema(3, 'delete from package_cache;')
            # Enable 8-bit bytestrings so we can store binary data.
            try:
                # Python 3.x.
                self.db.text_factory = bytes
            except NameError:
                # Python 2.x.
                self.db.text_factory = str
            # Use a custom row factory to implement lazy evaluation. Previously
            # this used functools.partial() to inject self (a PackageCache
            # object) into the CachedPackage constructor, however as of Python
            # 3.4.2 this causes the following error to be raised:
            #
            #   TypeError: Row() does not take keyword arguments
            #   https://travis-ci.org/xolox/python-deb-pkg-tools/jobs/44186883#L746
            #
            # Looks like this was caused by the changes referenced in
            # http://bugs.python.org/issue21975.
            class CachedPackagePartial(CachedPackage):
                cache = self

            self.db.row_factory = CachedPackagePartial
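
The long comment above explains why functools.partial() stopped working as a row factory: sqlite3.Row does not accept keyword arguments, so the per-connection context is smuggled in through a class attribute on a throwaway subclass. A self-contained sketch of that workaround follows; the class and variable names are illustrative and unrelated to deb-pkg-tools:

# Self-contained illustration of the class-attribute row factory workaround.
import sqlite3


class ExampleRow(sqlite3.Row):

    # Injected per connection via a throwaway subclass, because
    # sqlite3.Row() refuses keyword arguments in its constructor.
    cache = None

    def describe(self):
        return 'row %s from cache %r' % (tuple(self), self.cache)


connection = sqlite3.connect(':memory:')


class BoundRow(ExampleRow):
    cache = 'example-cache'


connection.row_factory = BoundRow
connection.execute('create table package_cache (pathname text)')
connection.execute("insert into package_cache values ('/tmp/example.deb')")
print(connection.execute('select * from package_cache').fetchone().describe())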
Example #13
    def initialize(self):
        """
        Initialize (create and/or upgrade) the package cache database.
        """
        if self.db is None:
            # Create any missing directories.
            makedirs(os.path.dirname(self.filename))
            with atomic_lock(self.filename):
                # Open the SQLite database connection, enable autocommit.
                self.db = sqlite3.connect(database=self.filename, isolation_level=None)
                # Initialize the database schema.
                self.upgrade_schema(1, '''
                    create table package_cache (
                        pathname text primary key,
                        timestamp real not null,
                        control_fields blob null,
                        package_fields blob null,
                        contents blob null
                    );
                ''')
                # In deb-pkg-tools 1.32.1 the parsing of the `Pre-Depends'
                # field was changed. Because of this change data cached by
                # older versions of deb-pkg-tools cannot be used by newer
                # versions of deb-pkg-tools.
                self.upgrade_schema(2, 'delete from package_cache;')
                # In deb-pkg-tools 1.35 the parsing of the `Breaks' field was
                # changed. Because of this change data cached by older versions
                # of deb-pkg-tools cannot be used by newer versions of
                # deb-pkg-tools.
                self.upgrade_schema(3, 'delete from package_cache;')
            # Enable 8-bit bytestrings so we can store binary data.
            try:
                # Python 3.x.
                self.db.text_factory = bytes
            except NameError:
                # Python 2.x.
                self.db.text_factory = str
            # Use a custom row factory to implement lazy evaluation. Previously
            # this used functools.partial() to inject self (a PackageCache
            # object) into the CachedPackage constructor, however as of Python
            # 3.4.2 this causes the following error to be raised:
            #
            #   TypeError: Row() does not take keyword arguments
            #   https://travis-ci.org/xolox/python-deb-pkg-tools/jobs/44186883#L746
            #
            # Looks like this was caused by the changes referenced in
            # http://bugs.python.org/issue21975.
            class CachedPackagePartial(CachedPackage):
                cache = self
            self.db.row_factory = CachedPackagePartial
Example #14
def create_control_file(control_file, control_fields):
    """
    Create a Debian control file.

    :param control_file: The filename of the control file to create (a string).
    :param control_fields: A dictionary with control file fields. This
                           dictionary is merged with the values in
                           :data:`DEFAULT_CONTROL_FIELDS`.
    :raises: See :func:`check_mandatory_fields()`.
    """
    logger.debug("Creating control file: %s", format_path(control_file))
    # Merge the defaults with the fields defined by the caller.
    merged_fields = merge_control_fields(DEFAULT_CONTROL_FIELDS, control_fields)
    # Sanity check for mandatory fields that are missing.
    check_mandatory_fields(merged_fields)
    # Make sure the parent directory of the control file exists.
    makedirs(os.path.dirname(control_file))
    # Remove the control file if it already exists in case it's a hard link to
    # an inode with multiple hard links that should _not_ be changed by us.
    if os.path.exists(control_file):
        os.unlink(control_file)
    # Write the control file.
    with open(control_file, 'wb') as handle:
        merged_fields.dump(handle)
Example #15
    def __init__(self, name=None, description=None, secret_key_file=None, public_key_file=None, key_id=None):
        """
        Initialize a GPG key object.

        :param name: The name of the GPG key pair (a string). Used only when
                     the key pair is generated because it doesn't exist yet.
        :param description: The description of the GPG key pair (a string).
                            Used only when the key pair is generated because it
                            doesn't exist yet.
        :param secret_key_file: The absolute pathname of the secret key file (a
                                string). Defaults to ``~/.gnupg/secring.gpg``.
        :param public_key_file: The absolute pathname of the public key file (a
                                string). Defaults to ``~/.gnupg/pubring.gpg``.
        :param key_id: The key ID of an existing key pair to use (a string). If
                       this argument is provided then the key pair's secret and
                       public key files must already exist.

        This method initializes a GPG key object in one of several ways:

        1. If `key_id` is specified then the GPG key must have been created
           previously. If `secret_key_file` and `public_key_file` are not
           specified they default to ``~/.gnupg/secring.gpg`` and
           ``~/.gnupg/pubring.gpg``. In this case `key_id` is the only
           required argument.

           The following example assumes that the provided GPG key ID is
           defined in the default keyring of the current user:

           >>> from deb_pkg_tools.gpg import GPGKey
           >>> key = GPGKey(key_id='58B6B02B')
           >>> key.gpg_command
           'gpg --no-default-keyring --secret-keyring /home/peter/.gnupg/secring.gpg --keyring /home/peter/.gnupg/pubring.gpg --recipient 58B6B02B'

        2. If `secret_key_file` and `public_key_file` are specified but the
           files don't exist yet, a GPG key will be generated for you. In this
           case `name` and `description` are required arguments and `key_id`
           must be :data:`None` (the default). An example:

           >>> name = 'deb-pkg-tools'
           >>> description = 'Automatic signing key for deb-pkg-tools'
           >>> secret_key_file = '/home/peter/.deb-pkg-tools/automatic-signing-key.sec'
           >>> public_key_file = '/home/peter/.deb-pkg-tools/automatic-signing-key.pub'
           >>> key = GPGKey(name, description, secret_key_file, public_key_file)
           >>> key.gpg_command
           'gpg --no-default-keyring --secret-keyring /home/peter/.deb-pkg-tools/automatic-signing-key.sec --keyring /home/peter/.deb-pkg-tools/automatic-signing-key.pub'
        """
        # If the secret or public key file is provided, the other key file must
        # be provided as well.
        if secret_key_file and not public_key_file:
            raise Exception("You provided a GPG secret key file without a public key file; please provide both!")
        elif public_key_file and not secret_key_file:
            raise Exception("You provided a GPG public key file without a secret key file; please provide both!")

        # If neither of the key files is provided we'll default to the
        # locations that GnuPG uses by default.
        if not secret_key_file and not public_key_file:
            gnupg_directory = parse_path('~/.gnupg')
            secret_key_file = os.path.join(gnupg_directory, 'secring.gpg')
            public_key_file = os.path.join(gnupg_directory, 'pubring.gpg')

        # If a key ID was specified then the two key files must already exist;
        # we won't generate them because that makes no sense :-)
        if key_id and not os.path.isfile(secret_key_file):
            text = "The provided GPG secret key file (%s) doesn't exist but a key ID was specified!"
            raise Exception(text % secret_key_file)
        if key_id and not os.path.isfile(public_key_file):
            text = "The provided GPG public key file (%s) doesn't exist but a key ID was specified!"
            raise Exception(text % public_key_file)

        # If we're going to generate a GPG key for the caller we don't want to
        # overwrite a secret or public key file without its counterpart. We'll
        # also need a name and description for the generated key.
        existing_files = list(filter(os.path.isfile, [secret_key_file, public_key_file]))
        if len(existing_files) not in (0, 2):
            text = "Refusing to overwrite existing key file! (%s)"
            raise Exception(text % existing_files[0])
        elif len(existing_files) == 0 and not (name and description):
            logger.error("GPG key pair doesn't exist! (%s and %s)",
                         format_path(secret_key_file),
                         format_path(public_key_file))
            raise Exception("To generate a GPG key you must provide a name and description!")

        # Store the arguments.
        self.name = name
        self.description = description
        self.secret_key_file = secret_key_file
        self.public_key_file = public_key_file
        self.key_id = key_id

        # Generate the GPG key pair if required.
        if not existing_files:

            # Make sure the directories of the secret/public key files exist.
            for filename in [secret_key_file, public_key_file]:
                makedirs(os.path.dirname(filename))

            # Generate a file with batch instructions
            # suitable for `gpg --batch --gen-key'.
            fd, gpg_script = tempfile.mkstemp()
            with open(gpg_script, 'w') as handle:
                handle.write(textwrap.dedent('''
                    Key-Type: DSA
                    Key-Length: 1024
                    Subkey-Type: ELG-E
                    Subkey-Length: 1024
                    Name-Real: {name}
                    Name-Comment: {description}
                    Name-Email: none
                    Expire-Date: 0
                    %pubring {public_key_file}
                    %secring {secret_key_file}
                    %commit
                ''').format(name=self.name,
                            description=self.description,
                            secret_key_file=self.secret_key_file,
                            public_key_file=self.public_key_file))

            # Generate the GPG key pair.
            logger.info("Generating GPG key pair %s (%s) ..", self.name, self.description)
            logger.debug("Private key: %s", format_path(self.secret_key_file))
            logger.debug("Public key: %s", format_path(self.public_key_file))
            logger.info("Please note: Generating a GPG key pair can take a long time. "
                        "If you are logged into a virtual machine or a remote server "
                        "over SSH, now is a good time to familiarize yourself with "
                        "the concept of entropy and how to make more of it :-)")
            start_time = time.time()
            initialize_gnupg()
            with EntropyGenerator():
                execute('gpg', '--batch', '--gen-key', gpg_script, logger=logger)
            logger.info("Finished generating GPG key pair in %s.",
                        format_timespan(time.time() - start_time))
            os.unlink(gpg_script)
Example #16
    def __init__(self,
                 name=None,
                 description=None,
                 secret_key_file=None,
                 public_key_file=None,
                 key_id=None):
        """
        Initialize a GPG key object.

        :param name: The name of the GPG key pair (a string). Used only when
                     the key pair is generated because it doesn't exist yet.
        :param description: The description of the GPG key pair (a string).
                            Used only when the key pair is generated because it
                            doesn't exist yet.
        :param secret_key_file: The absolute pathname of the secret key file (a
                                string). Defaults to ``~/.gnupg/secring.gpg``.
        :param public_key_file: The absolute pathname of the public key file (a
                                string). Defaults to ``~/.gnupg/pubring.gpg``.
        :param key_id: The key ID of an existing key pair to use (a string). If
                       this argument is provided then the key pair's secret and
                       public key files must already exist.

        This method initializes a GPG key object in one of several ways:

        1. If `key_id` is specified then the GPG key must have been created
           previously. If `secret_key_file` and `public_key_file` are not
           specified they default to ``~/.gnupg/secring.gpg`` and
           ``~/.gnupg/pubring.gpg``. In this case `key_id` is the only
           required argument.

           The following example assumes that the provided GPG key ID is
           defined in the default keyring of the current user:

           >>> from deb_pkg_tools.gpg import GPGKey
           >>> key = GPGKey(key_id='58B6B02B')
           >>> key.gpg_command
           'gpg --no-default-keyring --secret-keyring /home/peter/.gnupg/secring.gpg --keyring /home/peter/.gnupg/pubring.gpg --recipient 58B6B02B'

        2. If `secret_key_file` and `public_key_file` are specified but the
           files don't exist yet, a GPG key will be generated for you. In this
           case `name` and `description` are required arguments and `key_id`
           must be :data:`None` (the default). An example:

           >>> name = 'deb-pkg-tools'
           >>> description = 'Automatic signing key for deb-pkg-tools'
           >>> secret_key_file = '/home/peter/.deb-pkg-tools/automatic-signing-key.sec'
           >>> public_key_file = '/home/peter/.deb-pkg-tools/automatic-signing-key.pub'
           >>> key = GPGKey(name, description, secret_key_file, public_key_file)
           >>> key.gpg_command
           'gpg --no-default-keyring --secret-keyring /home/peter/.deb-pkg-tools/automatic-signing-key.sec --keyring /home/peter/.deb-pkg-tools/automatic-signing-key.pub'
        """
        # If the secret or public key file is provided, the other key file must
        # be provided as well.
        if secret_key_file and not public_key_file:
            raise Exception(
                "You provided a GPG secret key file without a public key file; please provide both!"
            )
        elif public_key_file and not secret_key_file:
            raise Exception(
                "You provided a GPG public key file without a secret key file; please provide both!"
            )

        # If neither of the key files is provided we'll default to the
        # locations that GnuPG uses by default.
        if not secret_key_file and not public_key_file:
            gnupg_directory = parse_path('~/.gnupg')
            secret_key_file = os.path.join(gnupg_directory, 'secring.gpg')
            public_key_file = os.path.join(gnupg_directory, 'pubring.gpg')

        # If a key ID was specified then the two key files must already exist;
        # we won't generate them because that makes no sense :-)
        if key_id and not os.path.isfile(secret_key_file):
            text = "The provided GPG secret key file (%s) doesn't exist but a key ID was specified!"
            raise Exception(text % secret_key_file)
        if key_id and not os.path.isfile(public_key_file):
            text = "The provided GPG public key file (%s) doesn't exist but a key ID was specified!"
            raise Exception(text % public_key_file)

        # If we're going to generate a GPG key for the caller we don't want to
        # overwrite a secret or public key file without its counterpart. We'll
        # also need a name and description for the generated key.
        existing_files = list(
            filter(os.path.isfile, [secret_key_file, public_key_file]))
        if len(existing_files) not in (0, 2):
            text = "Refusing to overwrite existing key file! (%s)"
            raise Exception(text % existing_files[0])
        elif len(existing_files) == 0 and not (name and description):
            logger.error("GPG key pair doesn't exist! (%s and %s)",
                         format_path(secret_key_file),
                         format_path(public_key_file))
            raise Exception(
                "To generate a GPG key you must provide a name and description!"
            )

        # Store the arguments.
        self.name = name
        self.description = description
        self.secret_key_file = secret_key_file
        self.public_key_file = public_key_file
        self.key_id = key_id

        # Generate the GPG key pair if required.
        if not existing_files:

            # Make sure the directories of the secret/public key files exist.
            for filename in [secret_key_file, public_key_file]:
                makedirs(os.path.dirname(filename))

            # Generate a file with batch instructions
            # suitable for `gpg --batch --gen-key'.
            fd, gpg_script = tempfile.mkstemp()
            with open(gpg_script, 'w') as handle:
                handle.write(
                    textwrap.dedent('''
                    Key-Type: DSA
                    Key-Length: 1024
                    Subkey-Type: ELG-E
                    Subkey-Length: 1024
                    Name-Real: {name}
                    Name-Comment: {description}
                    Name-Email: none
                    Expire-Date: 0
                    %pubring {public_key_file}
                    %secring {secret_key_file}
                    %commit
                ''').format(name=self.name,
                            description=self.description,
                            secret_key_file=self.secret_key_file,
                            public_key_file=self.public_key_file))

            # Generate the GPG key pair.
            logger.info("Generating GPG key pair %s (%s) ..", self.name,
                        self.description)
            logger.debug("Private key: %s", format_path(self.secret_key_file))
            logger.debug("Public key: %s", format_path(self.public_key_file))
            logger.info(
                "Please note: Generating a GPG key pair can take a long time. "
                "If you are logged into a virtual machine or a remote server "
                "over SSH, now is a good time to familiarize yourself with "
                "the concept of entropy and how to make more of it :-)")
            start_time = time.time()
            initialize_gnupg()
            with EntropyGenerator():
                execute('gpg',
                        '--batch',
                        '--gen-key',
                        gpg_script,
                        logger=logger)
            logger.info("Finished generating GPG key pair in %s.",
                        format_timespan(time.time() - start_time))
            os.unlink(gpg_script)