Example #1
0
def check_version_conflicts(dependency_set, cache=None):
    """
    Check for version conflicts in a dependency set.

    :param dependency_set: A list of filenames (strings) of ``*.deb`` files.
    :param cache: The :class:`.PackageCache` to use (defaults to :data:`None`).
    :raises: :exc:`VersionConflictFound` when one or more version
             conflicts are found.

    For each Debian binary package archive given, check if a newer version of
    the same package exists in the same repository (directory). This analysis
    can be very slow. To make it faster you can use the
    :class:`.PackageCache`.
    """
    timer = Timer()
    conflicts = []
    archives = [parse_filename(filename) for filename in dependency_set]
    spinner = Spinner(total=len(archives))
    logger.info("Checking for version conflicts in %i package(s) ..", len(archives))
    for position, package in enumerate(archives, start=1):
        if package.newer_versions:
            # Describe the conflicting package followed by the newer
            # version(s) that shadow it inside the same repository.
            conflicts.append(compact("""
                    Dependency set includes {dependency} but newer version(s)
                    of that package also exist and will take precedence:
            """, dependency=format_path(package.filename)))
            conflicts.append("\n".join(" - %s" % format_path(newer.filename)
                                       for newer in sorted(package.newer_versions)))
        spinner.step(label="Checking for version conflicts", progress=position)
    spinner.clear()
    if not conflicts:
        logger.info("No version conflicts found (took %s).", timer)
    else:
        conflicts.insert(0, "One or more version conflicts found:")
        raise VersionConflictFound('\n\n'.join(conflicts))
Example #2
0
def clean_package_tree(directory,
                       remove_dirs=DIRECTORIES_TO_REMOVE,
                       remove_files=FILES_TO_REMOVE):
    """
    Clean up files that should not be included in a Debian package from the
    given directory.

    :param directory: The pathname of the directory to clean (a string).
    :param remove_dirs: An iterable with :mod:`fnmatch` patterns of directory
                        base names that should not be included in the package
                        (e.g. version control directories like ``.git`` and
                        ``.hg``).
    :param remove_files: An iterable with :mod:`fnmatch` patterns of file base
                         names that should not be included in the package
                         (e.g. version control files like ``.gitignore`` and
                         ``.hgignore``).

    Matching is done on the base name of each directory and file. This
    function assumes it is safe to unlink files from the given directory
    (which it should be when :py:func:`copy_package_files()` was previously
    called, e.g. by :py:func:`build_package()`).
    """
    for parent, subdirectories, filenames in os.walk(directory):
        doomed_dirs = [os.path.join(parent, entry) for entry in subdirectories
                       if any(fnmatch.fnmatch(entry, pattern) for pattern in remove_dirs)]
        for pathname in doomed_dirs:
            logger.debug("Cleaning up directory: %s", format_path(pathname))
            shutil.rmtree(pathname)
        doomed_files = [os.path.join(parent, entry) for entry in filenames
                        if any(fnmatch.fnmatch(entry, pattern) for pattern in remove_files)]
        for pathname in doomed_files:
            logger.debug("Cleaning up file: %s", format_path(pathname))
            os.unlink(pathname)
Example #3
0
def show_package_metadata(archive):
    """
    Show the metadata and contents of a Debian archive on the terminal.

    :param archive: The pathname of an existing ``*.deb`` archive (a string).
    """
    control_fields, contents = inspect_package(archive)
    say(highlight("Package metadata from %s:"), format_path(archive))
    for field_name in sorted(control_fields):
        value = control_fields[field_name]
        # Installed-Size is expressed in kilobytes; render it human readable.
        if field_name == 'Installed-Size':
            value = format_size(int(value) * 1024)
        say(" - %s %s", highlight(field_name + ":"), value)
    say(highlight("Package contents from %s:"), format_path(archive))
    for pathname, entry in sorted(contents.items()):
        # Right align the size in a ten character column.
        size = format_size(entry.size, keep_width=True).rjust(10)
        if entry.target:
            # Show where a symbolic link points to.
            pathname += ' -> ' + entry.target
        say("{permissions} {owner} {group} {size} {modified} {pathname}",
            permissions=entry.permissions,
            owner=entry.owner,
            group=entry.group,
            size=size,
            modified=entry.modified,
            pathname=pathname)
Example #4
0
def clean_package_tree(directory, remove_dirs=DIRECTORIES_TO_REMOVE, remove_files=FILES_TO_REMOVE):
    """
    Clean up files that should not be included in a Debian package from the
    given directory. Uses the :py:mod:`fnmatch` module for directory and
    filename matching, applied to the base name of each directory and file.
    This function assumes it is safe to unlink files from the given directory
    (which it should be when :py:func:`copy_package_files()` was previously
    called, e.g. by :py:func:`build_package()`).

    :param directory: The pathname of the directory to clean (a string).
    :param remove_dirs: An iterable with filename patterns of directories that
                        should not be included in the package (e.g. version
                        control directories like ``.git`` and ``.hg``).
    :param remove_files: An iterable with filename patterns of files that
                         should not be included in the package (e.g. version
                         control files like ``.gitignore`` and
                         ``.hgignore``).
    """
    def is_unwanted(name, patterns):
        # True when the base name matches any of the fnmatch patterns.
        return any(fnmatch.fnmatch(name, pattern) for pattern in patterns)

    for root, dirs, files in os.walk(directory):
        for name in dirs:
            if is_unwanted(name, remove_dirs):
                pathname = os.path.join(root, name)
                logger.debug("Cleaning up directory: %s", format_path(pathname))
                shutil.rmtree(pathname)
        for name in files:
            if is_unwanted(name, remove_files):
                pathname = os.path.join(root, name)
                logger.debug("Cleaning up file: %s", format_path(pathname))
                os.unlink(pathname)
Example #5
0
def copy_package_files(from_directory, to_directory, hard_links=True):
    """
    Copy package files to a temporary directory, using hard links when possible.

    :param from_directory: The pathname of a directory tree suitable for
                           packaging with ``dpkg-deb --build``.
    :param to_directory: The pathname of a temporary build directory.
    :param hard_links: Use hard links to speed up copying when possible.

    This function copies a directory tree suitable for packaging with
    ``dpkg-deb --build`` to a temporary build directory so that individual
    files can be replaced without changing the original directory tree. If the
    build directory is on the same file system as the source directory, hard
    links are used to speed up the copy. This function is used by
    :func:`build_package()`.
    """
    logger.info("Copying files (%s) to temporary directory (%s) ..",
                format_path(from_directory), format_path(to_directory))
    # `cp -a' preserves mode, ownership and timestamps (all of which matter
    # for the contents of a Debian package).
    command = ['cp', '-a']
    makedirs(to_directory)
    if hard_links and ALLOW_HARD_LINKS:
        # Check whether we can use hard links to speed up the copy. In the past
        # this used the following simple and obvious check:
        #
        #   os.stat(source_directory).st_dev == os.stat(build_directory).st_dev
        #
        # However this expression holds true inside schroot, yet `cp -al' fails
        # when trying to create the hard links! This is why the following code now
        # tries to create an actual hard link to verify that `cp -al' can be used.
        test_file_from = None
        test_file_to = None
        try:
            # Find a unique filename that we can create and destroy without
            # touching any of the caller's files.
            while True:
                test_name = 'deb-pkg-tools-hard-link-test-%d' % random.randint(
                    1, 1000)
                test_file_from = os.path.join(from_directory, test_name)
                test_file_to = os.path.join(to_directory, test_name)
                if not os.path.isfile(test_file_from):
                    break
            # Create the test file.
            with open(test_file_from, 'w') as handle:
                handle.write('test')
            # This os.link() call is the actual probe: it raises OSError when
            # hard links between the two directories are not possible.
            os.link(test_file_from, test_file_to)
            logger.debug("Speeding up file copy using hard links ..")
            command.append('-l')
        except (IOError, OSError):
            # Hard links are not supported between the two locations; fall
            # back to a regular (slower) copy.
            pass
        finally:
            # Clean up whichever probe files were actually created (os.link()
            # may have failed before test_file_to came into existence).
            for test_file in [test_file_from, test_file_to]:
                if test_file and os.path.isfile(test_file):
                    os.unlink(test_file)
    # I know this looks really funky, but this is a valid use of shell escaping
    # and globbing (obviously I tested it ;-).
    command.append('%s/*' % pipes.quote(from_directory))
    command.append(pipes.quote(to_directory))
    execute(' '.join(command), logger=logger)
Example #6
0
def copy_package_files(from_directory, to_directory, hard_links=True):
    """
    Copy package files to a temporary directory, using hard links when possible.

    :param from_directory: The pathname of a directory tree suitable for
                           packaging with ``dpkg-deb --build``.
    :param to_directory: The pathname of a temporary build directory.
    :param hard_links: Use hard links to speed up copying when possible.

    This function copies a directory tree suitable for packaging with
    ``dpkg-deb --build`` to a temporary build directory so that individual
    files can be replaced without changing the original directory tree. If the
    build directory is on the same file system as the source directory, hard
    links are used to speed up the copy. This function is used by
    :func:`build_package()`.
    """
    logger.info("Copying files (%s) to temporary directory (%s) ..",
                format_path(from_directory), format_path(to_directory))
    command = ['cp', '-a']
    makedirs(to_directory)
    if hard_links and ALLOW_HARD_LINKS:
        # Check whether we can use hard links to speed up the copy. A simple
        # device number comparison (os.stat(...).st_dev) is not reliable: it
        # holds true inside schroot even though `cp -al' fails there. Instead
        # an actual hard link is created to verify that `cp -al' will work.
        test_file_from = None
        test_file_to = None
        try:
            # Find a unique filename that we can create and destroy without
            # touching any of the caller's files.
            while True:
                probe = 'deb-pkg-tools-hard-link-test-%d' % random.randint(1, 1000)
                test_file_from = os.path.join(from_directory, probe)
                test_file_to = os.path.join(to_directory, probe)
                if not os.path.isfile(test_file_from):
                    break
            # Create the probe file and try to hard link it into the
            # destination directory.
            with open(test_file_from, 'w') as handle:
                handle.write('test')
            os.link(test_file_from, test_file_to)
            logger.debug("Speeding up file copy using hard links ..")
            command.append('-l')
        except (IOError, OSError):
            # Hard links aren't supported; fall back to a regular copy.
            pass
        finally:
            # Remove whichever probe files were actually created.
            for leftover in (test_file_from, test_file_to):
                if leftover and os.path.isfile(leftover):
                    os.unlink(leftover)
    # I know this looks really funky, but this is a valid use of shell escaping
    # and globbing (obviously I tested it ;-).
    command.append('%s/*' % pipes.quote(from_directory))
    command.append(pipes.quote(to_directory))
    execute(' '.join(command), logger=logger)
def generate_screenshots():
    """
    Generate screenshots from shell scripts.

    For every ``docs/examples/*.sh`` script a terminal window is launched
    (running the script via this very file), a screen shot of the window is
    taken with ImageMagick and written to ``docs/images/<name>.png``.

    Requires the external programs ``urxvt``, ``import``, ``convert`` and
    ``wmctrl`` (and optionally ``qtile-run``) to be installed.
    """
    # Locate the repository checkout relative to this script.
    this_script = os.path.abspath(__file__)
    this_directory = os.path.dirname(this_script)
    repository = os.path.join(this_directory, os.pardir)
    examples_directory = os.path.join(repository, 'docs', 'examples')
    images_directory = os.path.join(repository, 'docs', 'images')
    for shell_script in sorted(glob.glob(os.path.join(examples_directory, '*.sh'))):
        basename, extension = os.path.splitext(os.path.basename(shell_script))
        image_file = os.path.join(images_directory, '%s.png' % basename)
        logger.info("Generating %s by running %s ..",
                    format_path(image_file),
                    format_path(shell_script))
        # Re-run this file with the shell script as an argument (presumably
        # dispatching to an interpret_script() mode -- not visible here).
        command_line = [sys.executable, __file__, shell_script]
        # A random window title makes it possible to unambiguously find (and
        # later close) the window with `import' and `wmctrl' below.
        random_title = random_string(25)
        # Generate the urxvt command line.
        urxvt_command = [
            'urxvt',
            # Enforce a default geometry.
            '-geometry', '98x30',
            # Set the text and background color.
            '-fg', TEXT_COLOR,
            '-bg', BACKGROUND_COLOR,
            # Set the font name and pixel size.
            '-fn', 'xft:%s:pixelsize=%i' % (FONT_NAME, FONT_SIZE),
            # Set the window title.
            '-title', random_title,
            # Hide scrollbars.
            '+sb',
        ]
        if which('qtile-run'):
            # I've been using tiling window managers for years now, at the
            # moment 'qtile' is my window manager of choice. It requires the
            # following special handling to enable the 'urxvt' window to float,
            # which in turn enables it to respect the '--geometry' option.
            urxvt_command.insert(0, 'qtile-run')
            urxvt_command.insert(1, '-f')
        # Apply the Ubuntu color scheme to urxvt.
        for index, css_color in enumerate(EIGHT_COLOR_PALETTE):
            urxvt_command.extend(('--color%i' % index, css_color))
        # Add the command that should run inside the terminal.
        urxvt_command.extend(('-e', 'sh', '-c', 'setterm -cursor off; %s' % quote(command_line)))
        # Launch urxvt.
        execute(*urxvt_command, asynchronous=True)
        # Make sure we close the urxvt window.
        try:
            # Wait for urxvt to start up. If I were to improve this I could
            # instead wait for the creation of a file by interpret_script().
            time.sleep(10)
            # Take a screen shot of the window using ImageMagick.
            execute('import', '-window', random_title, image_file)
            # Auto-trim the screen shot, then give it a 5px border.
            execute('convert', image_file, '-trim',
                    '-bordercolor', BACKGROUND_COLOR,
                    '-border', '5', image_file)
        finally:
            execute('wmctrl', '-c', random_title)
def load_config_file(configuration_file=None):
    """
    Load a configuration file with backup directories and rotation schemes.

    :param configuration_file: Override the pathname of the configuration file
                               to load (a string or :data:`None`).
    :returns: A generator of tuples with three values each:

              1. A location created using :func:`coerce_location()` (which
                 receives the execution context options parsed from the
                 section).
              2. A dictionary with the rotation scheme.
              3. A dictionary with additional options.
    :raises: :exc:`~exceptions.ValueError` when `configuration_file` is given
             but doesn't exist or can't be loaded.

    When `configuration_file` isn't given :data:`LOCAL_CONFIG_FILE` and
    :data:`GLOBAL_CONFIG_FILE` are checked and the first configuration file
    that exists is loaded. This function is used by :class:`RotateBackups` to
    discover user defined rotation schemes and by :mod:`rotate_backups.cli` to
    discover directories for which backup rotation is configured.
    """
    parser = configparser.RawConfigParser()
    if configuration_file:
        logger.verbose("Reading configuration file %s ..",
                       format_path(configuration_file))
        # RawConfigParser.read() returns the list of files it could parse.
        loaded_files = parser.read(configuration_file)
        if len(loaded_files) == 0:
            msg = "Failed to read configuration file! (%s)"
            raise ValueError(msg % configuration_file)
    else:
        # The first configuration file that can be read wins.
        for config_file in LOCAL_CONFIG_FILE, GLOBAL_CONFIG_FILE:
            pathname = parse_path(config_file)
            if parser.read(pathname):
                logger.verbose("Reading configuration file %s ..",
                               format_path(pathname))
                break
    for section in parser.sections():
        items = dict(parser.items(section))
        context_options = {}
        if coerce_boolean(items.get('use-sudo')):
            context_options['sudo'] = True
        if items.get('ssh-user'):
            context_options['ssh_user'] = items['ssh-user']
        # The section name doubles as the location expression.
        location = coerce_location(section, **context_options)
        # Only the frequency names actually present in the section are
        # included in the rotation scheme.
        rotation_scheme = dict((name, coerce_retention_period(items[name]))
                               for name in SUPPORTED_FREQUENCIES
                               if name in items)
        options = dict(include_list=split(items.get('include-list', '')),
                       exclude_list=split(items.get('exclude-list', '')),
                       io_scheduling_class=items.get('ionice'),
                       strict=coerce_boolean(items.get('strict', 'yes')),
                       prefer_recent=coerce_boolean(
                           items.get('prefer-recent', 'no')))
        yield location, rotation_scheme, options
Example #9
0
    def hash_contents(self):
        """
        Hash the text file using the SHA1 algorithm.

        :returns: A string containing a hexadecimal SHA1 digest.
        """
        logger.debug("Calculating SHA1 of %s", format_path(self.filename))
        digest = hashlib.sha1(self.context.read_file(self.filename)).hexdigest()
        logger.debug("SHA1 of %s is %s", format_path(self.filename), digest)
        return digest
Example #10
0
    def write_file(self, filename, contents):
        """
        Write a text file and provide feedback to the user.

        :param filename: The pathname of the file to write (a string).
        :param contents: The new contents of the file (a byte string;
                         ``b"\\n"`` is appended after stripping trailing
                         whitespace).
        """
        logger.info("Writing file: %s", format_path(filename))
        # Normalize trailing whitespace to exactly one newline.
        normalized = contents.rstrip() + b"\n"
        self.context.write_file(filename, normalized)
        line_count = len(normalized.splitlines())
        logger.debug("Wrote %s to %s.",
                     pluralize(line_count, "line"),
                     format_path(filename))
Example #11
0
    def write_file(self, filename, contents):
        """
        Write a text file and provide feedback to the user.

        :param filename: The pathname of the file to write (a string).
        :param contents: The new contents of the file (a byte string --
                         ``b"\\n"`` is concatenated below, so a text string
                         would raise :exc:`TypeError`).
        """
        logger.info("Writing file: %s", format_path(filename))
        # Normalize trailing whitespace to exactly one final newline.
        contents = contents.rstrip() + b"\n"
        self.context.write_file(filename, contents)
        logger.debug("Wrote %s to %s.",
                     pluralize(len(contents.splitlines()), "line"),
                     format_path(filename))
Example #12
0
def load_config_file(configuration_file=None):
    """
    Load a configuration file with backup directories and rotation schemes.

    :param configuration_file: Override the pathname of the configuration file
                               to load (a string or :data:`None`).
    :returns: A generator of tuples with three values each:

              1. A location created using :func:`coerce_location()` (which
                 receives the execution context options parsed from the
                 section).
              2. A dictionary with the rotation scheme.
              3. A dictionary with additional options.
    :raises: :exc:`~exceptions.ValueError` when `configuration_file` is given
             but doesn't exist or can't be loaded.

    When `configuration_file` isn't given :data:`LOCAL_CONFIG_FILE` and
    :data:`GLOBAL_CONFIG_FILE` are checked and the first configuration file
    that exists is loaded. This function is used by :class:`RotateBackups` to
    discover user defined rotation schemes and by :mod:`rotate_backups.cli` to
    discover directories for which backup rotation is configured.
    """
    parser = configparser.RawConfigParser()
    if configuration_file:
        logger.verbose("Reading configuration file %s ..", format_path(configuration_file))
        # parser.read() returns the list of files it managed to parse.
        if not parser.read(configuration_file):
            msg = "Failed to read configuration file! (%s)"
            raise ValueError(msg % configuration_file)
    else:
        # The first configuration file that can be read wins.
        for candidate in (LOCAL_CONFIG_FILE, GLOBAL_CONFIG_FILE):
            pathname = parse_path(candidate)
            if parser.read(pathname):
                logger.verbose("Reading configuration file %s ..", format_path(pathname))
                break
    for section in parser.sections():
        items = dict(parser.items(section))
        context_options = {}
        if coerce_boolean(items.get('use-sudo')):
            context_options['sudo'] = True
        if items.get('ssh-user'):
            context_options['ssh_user'] = items['ssh-user']
        # The section name doubles as the location expression.
        location = coerce_location(section, **context_options)
        rotation_scheme = {name: coerce_retention_period(items[name])
                           for name in SUPPORTED_FREQUENCIES
                           if name in items}
        options = dict(include_list=split(items.get('include-list', '')),
                       exclude_list=split(items.get('exclude-list', '')),
                       io_scheduling_class=items.get('ionice'),
                       strict=coerce_boolean(items.get('strict', 'yes')),
                       prefer_recent=coerce_boolean(items.get('prefer-recent', 'no')))
        yield location, rotation_scheme, options
Example #13
0
    def read_file(self, filename):
        """
        Read a text file and provide feedback to the user.

        :param filename: The pathname of the file to read (a string).
        :returns: The contents of the file (a string).
        """
        logger.info("Reading file: %s", format_path(filename))
        contents = self.context.read_file(filename)
        line_count = len(contents.splitlines())
        suffix = 's' if line_count != 1 else ''
        logger.debug("Read %i line%s from %s",
                     line_count, suffix, format_path(filename))
        return contents.rstrip()
Example #14
0
    def read_file(self, filename):
        """
        Read a text file and provide feedback to the user.

        :param filename: The pathname of the file to read (a string).
        :returns: The contents of the file (a string).
        """
        logger.info("Reading file: %s", format_path(filename))
        contents = self.context.read_file(filename)
        line_count = len(contents.splitlines())
        logger.debug("Read %s from %s.",
                     pluralize(line_count, 'line'),
                     format_path(filename))
        return contents.rstrip()
Example #15
0
    def execute_file(self, filename):
        """
        Execute a file and provide feedback to the user.

        :param filename: The pathname of the file to execute (a string).
        :returns: Whatever the executed file returns on stdout (a string).
        """
        logger.info("Executing file: %s", format_path(filename))
        contents = self.context.execute(filename, capture=True).stdout
        num_lines = len(contents.splitlines())
        # Bug fix: the format string used to contain "% of" (a space-flagged
        # "%o" octal conversion) which fails because pluralize() returns a
        # string; "%s" is the intended conversion.
        logger.debug("Execution of %s yielded %s of output.",
                     format_path(filename),
                     pluralize(num_lines, 'line'))
        return contents.rstrip()
Example #16
0
 def old_checksum(self):
     """Get the checksum stored in :attr:`checksum_file` (a string or :data:`None`)."""
     if not self.context.is_file(self.checksum_file):
         # No checksum was saved previously.
         return None
     logger.debug("Reading saved checksum from %s ..", format_path(self.checksum_file))
     checksum = self.context.read_file(self.checksum_file).decode('ascii')
     logger.debug("Saved checksum is %s.", checksum)
     return checksum
Example #17
0
def generate_ssh_key_pair():
    """
    Generate an SSH key pair for communication between the host system and
    containers created with Redock. Requires the ``ssh-keygen`` program.

    :raises: :exc:`Exception` when ``ssh-keygen`` exits with a nonzero status.
    """
    create_configuration_directory()
    logger.verbose("Checking if we need to generate a new SSH key pair ..")
    if os.path.isfile(PRIVATE_SSH_KEY):
        logger.verbose("SSH key pair was previously generated: %s", format_path(PRIVATE_SSH_KEY))
        return
    logger.info("No existing SSH key pair found, generating new key: %s", format_path(PRIVATE_SSH_KEY))
    # -N "" requests a key without a pass phrase so it can be used
    # non-interactively.
    command = ["ssh-keygen", "-t", "rsa", "-f", PRIVATE_SSH_KEY, "-N", "", "-C", "root@%s" % socket.gethostname()]
    ssh_keygen = subprocess.Popen(command)
    if ssh_keygen.wait() != 0:
        msg = "Failed to generate SSH key pair! (command exited with code %d: %s)"
        # Bug fix: the original used the Python 2 only statement form
        # `raise Exception, msg % ...' which is a SyntaxError on Python 3.
        raise Exception(msg % (ssh_keygen.returncode, quote_command_line(command)))
Example #18
0
def check_package(archive, cache=None):
    """
    Perform static checks on a package's dependency set.

    :param archive: The pathname of an existing ``*.deb`` archive (a string).
    :param cache: The :py:class:`.PackageCache` to use (defaults to ``None``).
    :raises: :py:class:`BrokenPackage` when one or more checks failed.
    """
    timer = Timer()
    logger.info("Checking %s ..", format_path(archive))
    dependency_set = collect_related_packages(archive, cache=cache)
    problems = []
    # Check for duplicate files in the dependency set (a ValueError from this
    # check is deliberately ignored).
    try:
        check_duplicate_files(dependency_set, cache=cache)
    except BrokenPackage as e:
        problems.append(e)
    except ValueError:
        pass
    # Check for version conflicts in the dependency set.
    try:
        check_version_conflicts(dependency_set, cache=cache)
    except BrokenPackage as e:
        problems.append(e)
    if not problems:
        logger.info("Finished checking in %s, no problems found.", timer)
    elif len(problems) == 1:
        # Re-raise a single failure unchanged.
        raise problems[0]
    else:
        # Combine multiple failures into one report.
        raise BrokenPackage('\n\n'.join(map(str, problems)))
Example #19
0
def create_control_file(control_file, control_fields):
    """
    Create a Debian control file.

    :param control_file: The filename of the control file to create (a string).
    :param control_fields: A dictionary with control file fields. This
                           dictionary is merged with the values in
                           :data:`DEFAULT_CONTROL_FIELDS`.
    :raises: See :func:`check_mandatory_fields()`.
    """
    logger.debug("Creating control file: %s", format_path(control_file))
    # Caller supplied fields take precedence over the defaults.
    merged_fields = merge_control_fields(DEFAULT_CONTROL_FIELDS, control_fields)
    # Bail out early when mandatory fields are missing.
    check_mandatory_fields(merged_fields)
    # The parent directory of the control file may not exist yet.
    makedirs(os.path.dirname(control_file))
    # Break any existing hard link before writing: the control file may share
    # an inode with other files that must _not_ be changed by us.
    if os.path.exists(control_file):
        os.unlink(control_file)
    with open(control_file, 'wb') as handle:
        merged_fields.dump(handle)
Example #20
0
def create_configuration_directory():
    """Make sure Redock's local configuration directory exists."""
    if os.path.isdir(REDOCK_CONFIG_DIR):
        # Nothing to do.
        return
    logger.info("Creating directory: %s", format_path(REDOCK_CONFIG_DIR))
    os.makedirs(REDOCK_CONFIG_DIR)
Example #21
0
    def load(self, exists=True):
        """
        Load the runtime configuration from disk. If the file doesn't exist yet
        an empty configuration is returned. The configuration contains a
        version number which enables graceful upgrades to the format.

        :param exists: Whether the configuration is expected to exist; when
                       :data:`False` nothing is read and a default
                       configuration is returned.
        :returns: A dictionary with runtime configuration data.
        """
        if exists:
            self.logger.verbose("Loading runtime configuration from %s ..", format_path(CONFIG_FILE))
        state = {}
        handle = self.handle
        close = False
        if (not handle) and os.path.isfile(CONFIG_FILE):
            # Bug fix: pickle.load() requires a file opened in binary mode;
            # the file was previously opened in text mode, which breaks on
            # Python 3.
            handle = open(CONFIG_FILE, 'rb')
            close = True
        if exists and handle:
            try:
                state = pickle.load(handle)
            finally:
                # Close the handle even when unpickling fails, but only if we
                # opened it ourselves (a caller supplied handle is theirs).
                if close:
                    handle.close()
        version = state.get("version", 0)
        if version == 0:
            # First run (or a pre-versioned file): initialize the defaults.
            state["containers"] = dict()
            state["version"] = CONFIG_VERSION
        self.logger.debug("Initialized configuration: %r", state)
        return state
Example #22
0
def create_control_file(control_file, control_fields):
    """
    Create a Debian control file.

    :param control_file: The filename of the control file to create (a string).
    :param control_fields: A dictionary with control file fields. This
                           dictionary is merged with the values in
                           :data:`DEFAULT_CONTROL_FIELDS`.
    :raises: :exc:`~exceptions.ValueError` when a mandatory binary control
             field is not present in the provided control fields (see also
             :data:`MANDATORY_BINARY_CONTROL_FIELDS`).
    """
    logger.debug("Creating control file: %s", format_path(control_file))
    # Merge the defaults with the fields defined by the caller.
    merged_fields = merge_control_fields(DEFAULT_CONTROL_FIELDS, control_fields)
    # Sanity check for mandatory fields that are missing.
    # NOTE(review): the `in' test presumably honors Deb822's case insensitive
    # field names -- confirm the type returned by merge_control_fields().
    missing_fields = [f for f in MANDATORY_BINARY_CONTROL_FIELDS if f not in merged_fields]
    if missing_fields:
        raise ValueError("Missing %s! (%s)" % (pluralize(len(missing_fields), "mandatory binary package control field"),
                                               concatenate(sorted(missing_fields))))
    # Make sure the parent directory of the control file exists.
    makedirs(os.path.dirname(control_file))
    # Remove the control file if it already exists in case it's a hard link to
    # an inode with multiple hard links that should _not_ be changed by us.
    if os.path.exists(control_file):
        os.unlink(control_file)
    # Write the control file (in binary mode, as expected by dump()).
    with open(control_file, 'wb') as handle:
        merged_fields.dump(handle)
Example #23
0
    def add_to_cache(self, modules_directory, file_in_cache):
        """
        Add a ``node_modules`` directory to the cache.

        :param modules_directory: The pathname of the ``node_modules`` directory (a string).
        :param file_in_cache: The pathname of the archive in the cache (a string).
        :raises: Any exceptions raised by the :mod:`executor.contexts` module.

        The tar archive is generated under a temporary name inside the cache
        directory and atomically renamed into place, so that concurrent
        npm-accel commands never pick up a partially generated cache entry.
        """
        timer = Timer()
        logger.info("Adding to cache (%s) ..", format_path(file_in_cache))
        # Make sure the cache directory exists.
        cache_directory = os.path.dirname(file_in_cache)
        self.context.execute('mkdir', '-p', cache_directory)
        # atomic_write() provides the temporary filename and performs the
        # rename-into-place when the block completes.
        with self.context.atomic_write(file_in_cache) as temporary_file:
            self.context.execute('tar', '-cf', temporary_file, '-C', modules_directory, '.')
        self.write_metadata(file_in_cache)
        logger.verbose("Took %s to add directory to cache.", timer)
Example #24
0
def create_control_file(control_file, control_fields):
    """
    Create a Debian control file.

    :param control_file: The filename of the control file to create (a string).
    :param control_fields: A dictionary with control file fields. This
                           dictionary is merged with the values in
                           :data:`DEFAULT_CONTROL_FIELDS`.
    :raises: :exc:`~exceptions.ValueError` when a mandatory binary control
             field is not present in the provided control fields (see also
             :data:`MANDATORY_BINARY_CONTROL_FIELDS`).
    """
    logger.debug("Creating control file: %s", format_path(control_file))
    # Merge the caller's fields into the defaults.
    merged_fields = merge_control_fields(DEFAULT_CONTROL_FIELDS, control_fields)
    # Refuse to create a control file that lacks mandatory fields.
    missing_fields = sorted(f for f in MANDATORY_BINARY_CONTROL_FIELDS if f not in merged_fields)
    if missing_fields:
        raise ValueError("Missing %s! (%s)" % (
            pluralize(len(missing_fields), "mandatory binary package control field"),
            concatenate(missing_fields),
        ))
    # Make sure the parent directory of the control file exists.
    makedirs(os.path.dirname(control_file))
    # The control file may be a hard link to an inode with multiple hard
    # links that should _not_ be changed by us, so remove it first.
    if os.path.exists(control_file):
        os.unlink(control_file)
    # Write the control file.
    with open(control_file, "wb") as handle:
        merged_fields.dump(handle)
Пример #25
0
    def install(self, directory, silent=False):
        """
        Install Node.js package(s) listed in a ``package.json`` file.

        :param directory: The pathname of a directory with a ``package.json`` file (a string).
        :param silent: Used to set :attr:`~executor.ExternalCommand.silent`.
        :returns: The result of :func:`extract_dependencies()`.
        """
        stopwatch = Timer()
        dependencies = self.extract_dependencies(os.path.join(directory, 'package.json'))
        logger.info("Installing Node.js package(s) in %s ..",
                    format_path(directory))
        # Guard clause: without dependencies there's nothing to install.
        if not dependencies:
            logger.info("Nothing to do! (no dependencies to install)")
            return dependencies
        modules_directory = os.path.join(directory, 'node_modules')
        file_in_cache = self.get_cache_file(dependencies)
        logger.verbose("Checking the cache (%s) ..", file_in_cache)
        if self.read_from_cache and self.context.is_file(file_in_cache):
            # Cache hit: unpack the previously generated archive.
            self.install_from_cache(file_in_cache, modules_directory)
            logger.info(
                "Done! Took %s to install %s from cache.", stopwatch,
                pluralize(len(dependencies), "dependency", "dependencies"))
        else:
            # Cache miss: perform a real installation (and optionally
            # populate the cache for the next run).
            self.installer_method(directory, silent=silent)
            self.prune_dependencies(directory)
            if self.write_to_cache:
                self.add_to_cache(modules_directory, file_in_cache)
            logger.info(
                "Done! Took %s to install %s using npm.", stopwatch,
                pluralize(len(dependencies), "dependency", "dependencies"))
        self.clean_cache()
        return dependencies
Пример #26
0
def check_package(archive, cache=None):
    """
    Perform static checks on a package's dependency set.

    :param archive: The pathname of an existing ``*.deb`` archive (a string).
    :param cache: The :class:`.PackageCache` to use (defaults to :data:`None`).
    :raises: :exc:`BrokenPackage` when one or more checks failed.
    """
    stopwatch = Timer()
    logger.info("Checking %s ..", format_path(archive))
    dependency_set = collect_related_packages(archive, cache=cache)
    problems = []
    # First check: duplicate files in the dependency set.
    try:
        check_duplicate_files(dependency_set, cache=cache)
    except BrokenPackage as e:
        problems.append(e)
    except ValueError:
        # Deliberately silenced.
        pass
    # Second check: version conflicts in the dependency set.
    try:
        check_version_conflicts(dependency_set, cache=cache)
    except BrokenPackage as e:
        problems.append(e)
    if not problems:
        logger.info("Finished checking in %s, no problems found.", stopwatch)
    elif len(problems) == 1:
        # A single failure is re-raised as-is to preserve its type.
        raise problems[0]
    else:
        # Multiple failures are combined into one report.
        raise BrokenPackage('\n\n'.join(map(str, problems)))
Пример #27
0
def deactivate_repository(directory):
    """
    Deactivate a trivial Debian package repository that was previously
    activated using :py:func:`activate_repository()`.

    :param directory: The pathname of a directory with ``*.deb`` packages.

    .. warning:: This function requires ``root`` privileges to:

                 1. delete a ``*.list`` file in ``/etc/apt/sources.list.d`` and
                 2. run ``apt-get update``.

                 This function will use ``sudo`` to gain ``root`` privileges
                 when it's not already running as ``root``.
    """
    directory = os.path.realpath(directory)
    logger.debug("Deactivating repository: %s", format_path(directory))
    # The `sources.list' file was named after the SHA1 of the directory's
    # pathname (see activate_repository()) so reconstruct that name here.
    sources_file = os.path.join('/etc/apt/sources.list.d', '%s.list' % sha1(directory))
    logger.debug("Removing file: %s", sources_file)
    execute('rm', '-f', sources_file, sudo=ALLOW_SUDO, logger=logger)
    # Refresh the package list so apt forgets about the repository.
    logger.debug("Updating package list ..")
    execute("apt-get update", sudo=ALLOW_SUDO, logger=logger)
Пример #28
0
def collect_related_packages_helper(candidate_archives, given_archive, cache, interactive):
    """
    Internal helper for package collection to enable simple conflict resolution.

    :param candidate_archives: A dictionary mapping package names to
                               collections of package archives (the candidate
                               versions to choose between).
    :param given_archive: The parsed package archive whose related packages
                          should be collected.
    :param cache: The package cache passed on to :func:`inspect_package_fields()`.
    :param interactive: Whether the spinner is actually rendered (passed on
                        to :class:`Spinner`).
    :returns: A list of the collected package archives.
    :raises: :exc:`CollectedPackagesConflict` when one or more collected
             archives don't satisfy the collected relationship sets.
    """
    # Enable mutation of the candidate archives data structure inside the scope
    # of this function without mutating the original data structure.
    candidate_archives = copy.deepcopy(candidate_archives)
    # Prepare some internal state.
    archives_to_scan = [given_archive]
    collected_archives = []
    relationship_sets = set()
    # Render an interactive spinner as a simple means of feedback to the operator.
    with Spinner(label="Collecting related packages", interactive=interactive, timer=Timer()) as spinner:
        # Loop to collect the related packages (breadth-first: newly selected
        # archives are appended and scanned in later iterations).
        while archives_to_scan:
            selected_archive = archives_to_scan.pop(0)
            logger.debug("Scanning %s ..", format_path(selected_archive.filename))
            # Find the relationships of the given package.
            control_fields = inspect_package_fields(selected_archive.filename, cache)
            for field_name in DEPENDENCY_FIELDS:
                if field_name in control_fields:
                    relationship_sets.add(control_fields[field_name])
            # For each group of package archives sharing the same package name ..
            for package_name in sorted(candidate_archives):
                # For each version of the package ..
                for package_archive in list(candidate_archives[package_name]):
                    package_matches = match_relationships(package_archive, relationship_sets)
                    spinner.step()
                    if package_matches is True:
                        logger.debug("Package archive matched all relationships: %s", package_archive.filename)
                        # Move the selected version of the package archive from the
                        # candidates to the list of selected package archives.
                        collected_archives.append(package_archive)
                        # Prepare to scan and collect dependencies of the selected
                        # package archive in a future iteration of the outermost
                        # (while) loop.
                        archives_to_scan.append(package_archive)
                        # Ignore all other versions of the package inside this call
                        # to collect_related_packages_helper().
                        candidate_archives.pop(package_name)
                        # Break out of the loop to avoid scanning other versions of
                        # this package archive; we've made our choice now.
                        break
                    elif package_matches is False:
                        # If we're sure we can exclude this version of the package
                        # from future iterations it could be worth it to speed up
                        # the process on big repositories / dependency sets.
                        candidate_archives[package_name].remove(package_archive)
                        # Keep looking for a match in another version.
                    elif package_matches is None:
                        # Break out of the loop that scans multiple versions of the
                        # same package because none of the relationship sets collected
                        # so far reference the name of this package (this is intended
                        # as a harmless optimization).
                        break
    # Check for conflicts in the collected set of related package archives.
    conflicts = [a for a in collected_archives if not match_relationships(a, relationship_sets)]
    if conflicts:
        raise CollectedPackagesConflict(conflicts)
    else:
        return collected_archives
Пример #29
0
    def update_file(self, force=None):
        """
        Update the file with the contents of the files in the ``.d`` directory.

        :param force: Override the value of :attr:`force` (a boolean or
                      :data:`None`).
        :raises: :exc:`RefuseToOverwrite` when :attr:`force` is :data:`False`
                 and the contents of :attr:`filename` were modified.
        """
        if force is None:
            force = self.force
        if not self.context.is_directory(self.directory):
            # Create the .d directory.
            logger.info("Creating directory %s ..", format_path(self.directory))
            self.context.execute('mkdir', '-p', self.directory, tty=False)
            # Move the original file into the .d directory.
            local_file = os.path.join(self.directory, 'local')
            logger.info("Moving %s to %s ..", format_path(self.filename), format_path(local_file))
            self.context.execute('mv', self.filename, local_file, tty=False)
        # Read the modular configuration file(s): executable entries are run
        # and their output captured, other entries are read verbatim.
        blocks = []
        for entry in natsort(self.context.list_entries(self.directory)):
            if not entry.startswith('.'):
                filename = os.path.join(self.directory, entry)
                if self.context.is_executable(filename):
                    blocks.append(self.execute_file(filename))
                else:
                    blocks.append(self.read_file(filename))
        contents = b"\n\n".join(blocks)
        # Make sure the generated file was not modified? We skip this on the
        # first run, when the original file was just moved into the newly
        # created directory (see above).
        if all(map(self.context.is_file, (self.filename, self.checksum_file))):
            logger.info("Checking for local changes to %s ..", format_path(self.filename))
            if self.new_checksum != self.old_checksum:
                if force:
                    # Bug fix: the template previously lacked the {filename}
                    # placeholder so the kwarg below was never interpolated.
                    logger.warning(compact(
                        """
                        The contents of the file to generate ({filename})
                        were modified but --force was used so overwriting
                        anyway!
                        """,
                        filename=format_path(self.filename),
                    ))
                else:
                    raise RefuseToOverwrite(compact(
                        """
                        The contents of the file to generate ({filename})
                        were modified and I'm refusing to overwrite it! If
                        you're sure you want to proceed, use the --force
                        option or delete the file {checksum_file} and
                        retry.
                        """,
                        filename=format_path(self.filename),
                        checksum_file=format_path(self.checksum_file),
                    ))
        # Update the generated configuration file.
        self.write_file(self.filename, contents)
        # Update the checksum file.
        self.context.write_file(self.checksum_file, self.new_checksum)
Пример #30
0
def parse_cmdline_arguments(
        args: typing.List[str]) -> typing.Optional[argparse.Namespace]:
    """Build the magneticod command line parser and parse ``args`` with it."""
    parser = argparse.ArgumentParser(
        description="Autonomous BitTorrent DHT crawler and metadata fetcher.",
        epilog=textwrap.dedent("""\
            Copyright (C) 2017  Mert Bora ALPER <*****@*****.**>
            Dedicated to Cemile Binay, in whose hands I thrived.

            This program is free software: you can redistribute it and/or modify it under
            the terms of the GNU Affero General Public License as published by the Free
            Software Foundation, either version 3 of the License, or (at your option) any
            later version.

            This program is distributed in the hope that it will be useful, but WITHOUT ANY
            WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
            PARTICULAR PURPOSE.  See the GNU Affero General Public License for more
            details.

            You should have received a copy of the GNU Affero General Public License along
            with this program.  If not, see <http://www.gnu.org/licenses/>.
        """),
        allow_abbrev=False,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    # Network endpoint of the DHT node (host:port; port 0 lets the OS pick).
    parser.add_argument(
        "--node-addr", action="store", type=parse_ip_port, required=False,
        default="0.0.0.0:0",
        help="the address of the (DHT) node magneticod will use")
    # Upper bound on accepted torrent metadata size.
    parser.add_argument(
        "--max-metadata-size", type=parse_size,
        default=DEFAULT_MAX_METADATA_SIZE,
        help="Limit metadata size to protect memory overflow. Provide in human friendly format eg. 1 M, 1 GB")
    # SQLite database location, defaulting to the platform's user data directory.
    default_database_dir = os.path.join(
        appdirs.user_data_dir("magneticod"), "database.sqlite3")
    parser.add_argument(
        "--database-file", type=str, default=default_database_dir,
        help="Path to database file (default: {})".format(
            humanfriendly.format_path(default_database_dir)))
    parser.add_argument(
        "-d", "--debug", action="store_const", dest="loglevel",
        const=logging.DEBUG, default=logging.INFO,
        help="Print debugging information in addition to normal processing.")
    return parser.parse_args(args)
Пример #31
0
def smart_copy(src, dst):
    """
    Create a hard link to or copy of a file.

    :param src: The pathname of the source file (a string).
    :param dst: The pathname of the target file (a string).

    A hard link `dst` pointing to `src` is attempted first; when that fails
    a regular file copy from `src` to `dst` is performed instead. This is
    used by :func:`collect_packages()` in an attempt to conserve disk space
    when copying package archives between repositories on the same filesystem.
    """
    try:
        os.link(src, dst)
    except Exception:
        # Hard linking failed (e.g. across filesystems): fall back to a copy.
        logger.debug("Copying %s -> %s using regular file copy ..", format_path(src), format_path(dst))
        shutil.copy(src, dst)
        return
    logger.debug("Copied %s -> %s using hard link ..", format_path(src), format_path(dst))
Пример #32
0
 def new_checksum(self):
     """Get the SHA1 digest of the contents of :attr:`filename` (a string)."""
     if not self.context.is_file(self.filename):
         # Implicitly None when the file doesn't exist (yet).
         return None
     friendly_name = format_path(self.filename)
     logger.debug("Calculating SHA1 of %s ..", friendly_name)
     checksum = hashlib.sha1(self.context.read_file(self.filename)).hexdigest()
     logger.debug("The SHA1 digest of %s is %s.", friendly_name, checksum)
     return checksum
Пример #33
0
def show_package_metadata(archive):
    """Print the control fields and contents of a ``*.deb`` archive to standard output."""
    control_fields, contents = inspect_package(archive)
    print("Package metadata from %s:" % format_path(archive))
    for field_name in sorted(control_fields):
        value = control_fields[field_name]
        if field_name == 'Installed-Size':
            # Installed-Size is expressed in kilobytes.
            value = format_size(int(value) * 1024)
        print(" - %s: %s" % (field_name, value))
    print("Package contents from %s:" % format_path(archive))
    for pathname, entry in sorted(contents.items()):
        # Right align the size in a ten character wide column.
        size = format_size(entry.size, keep_width=True).rjust(10)
        if entry.target:
            pathname += ' -> ' + entry.target
        print("{permissions} {owner} {group} {size} {modified} {pathname}".format(
            permissions=entry.permissions, owner=entry.owner,
            group=entry.group, size=size, modified=entry.modified,
            pathname=pathname))
Пример #34
0
 def __exit__(self, type, value, traceback):
     """Save the configuration (unless an exception was raised), then release the lock and close the file."""
     if type is not None:
         self.logger.warn("Not saving configuration! (an exception was raised: %s)", value)
     else:
         self.logger.verbose("Saving configuration to %s ..", format_path(CONFIG_FILE))
         # Rewrite the file in place: rewind, dump, then drop any leftovers.
         self.handle.seek(0)
         pickle.dump(self.state, self.handle)
         self.handle.truncate()
     # Release the advisory lock before closing the handle.
     fcntl.flock(self.handle.fileno(), fcntl.LOCK_UN)
     self.handle.close()
     self.handle = None
Пример #35
0
    def update_file(self, force=None):
        """
        Update the file with the contents of the files in the ``.d`` directory.

        :param force: Override the value of :attr:`force` (a boolean or
                      :data:`None`).
        :raises: :exc:`RefuseToOverwrite` when :attr:`force` is :data:`False`
                 and the contents of :attr:`filename` were modified.
        """
        if force is None:
            force = self.force
        if not self.context.is_directory(self.directory):
            # Create the .d directory.
            logger.info("Creating directory %s", format_path(self.directory))
            self.context.execute('mkdir', '-p', self.directory, tty=False)
            # Move the original file into the .d directory.
            local_file = os.path.join(self.directory, 'local')
            logger.info("Moving %s to %s", format_path(self.filename), format_path(local_file))
            self.context.execute('mv', self.filename, local_file, tty=False)
        # Read the modularized configuration file(s), skipping hidden entries.
        blocks = []
        for filename in natsort(self.context.list_entries(self.directory)):
            if not filename.startswith('.'):
                blocks.append(self.read_file(os.path.join(self.directory, filename)))
        contents = b"\n\n".join(blocks)
        # Make sure the generated file was not modified? We skip this on the
        # first run, when the original file was just moved into the newly
        # created directory (see above).
        if all(map(self.context.is_file, (self.filename, self.checksum_file))):
            logger.info("Checking for local changes to %s ..", format_path(self.filename))
            if self.hash_contents() != self.context.read_file(self.checksum_file):
                if force:
                    # Bug fix: the template previously lacked the {filename}
                    # placeholder so the kwarg below was never interpolated.
                    logger.warning(compact(
                        """
                        The contents of the file to generate ({filename})
                        were modified but --force was used so overwriting
                        anyway!
                        """,
                        filename=format_path(self.filename),
                    ))
                else:
                    raise RefuseToOverwrite(compact(
                        """
                        The contents of the file to generate ({filename})
                        were modified and I'm refusing to overwrite it! If
                        you're sure you want to proceed, use the --force
                        option or delete the file {checksum_file} and
                        retry.
                        """,
                        filename=format_path(self.filename),
                        checksum_file=format_path(self.checksum_file),
                    ))
        # Update the generated configuration file.
        self.write_file(self.filename, contents)
        # Update the checksum file.
        self.context.write_file(self.checksum_file, self.hash_contents())
Пример #36
0
def create_full_backup(app, backup_type):
    """Create a full backup of a database defined in attr:´app´

    :param app: :class:`dict` with configuration returned by :func:`read_config()`
    :param backup_type: the key backup type to include in backup filename: daily, weekly, monthly, yearly
    :return: ``True`` if local and remote backup created correctly, ``False`` otherwise
    """
    log.info("starting full backup_{} to '{}'".format(backup_type, app['cfg']['app_name']))

    filestamp = time.strftime('%Y-%m-%d_%H-%M')
    # NOTE(review): 'local_backup_dir' is joined by plain concatenation, so it
    # is assumed to end with a path separator -- TODO confirm in read_config().
    backup_file = '{}{}_{}_{}.{}'.format(app['cfg']['local_backup_dir'], app['cfg']['app_name'], filestamp, backup_type,
                                         'gz')

    try:
        os.makedirs(os.path.dirname(backup_file), exist_ok=True)
        timer = Timer()

        # SECURITY: interpolating the password into the command line exposes
        # it to other local users via the process list; consider a MySQL
        # option file (--defaults-extra-file) instead.
        mysql_cmd = '/opt/lamp/mysql/bin/mysqldump '
        mysql_cmd += '--opt --triggers --events --user={} --password={} --databases {}'.format(
            app['custom']['db_user'], app['custom']['db_password'], app['custom']['db_name'])

        gzip_cmd = 'gzip -c > {}'.format(format_path(backup_file))

        # Dump the database and pipe the captured output through gzip.
        mysql_cmd_res = execute(mysql_cmd, logger=log, capture=True)
        cmd_result = execute(gzip_cmd, logger=log, input=mysql_cmd_res, error_message='error en gzip')

        if cmd_result:
            log.info(
                "finish full backup_{} to '{}:{} in {}'".format(backup_type, app['cfg']['app_name'],
                                                                format_path(backup_file), timer))
            cmd_result = upload_backup(app, backup_file)
        else:
            log.error("error creating full backup_{} to '{}'".format(backup_type, app['cfg']['app_name']))
        return cmd_result
    except ExternalCommandFailed as ex:
        log.error(
            "error creating full backup_{} to '{} :{}'".format(backup_type, app['cfg']['app_name'], ex.error_message))
        # Bug fix: honor the documented contract by returning False on
        # failure instead of falling off the end and returning None.
        return False
Пример #37
0
def activate_repository(directory, gpg_key=None):
    """
    Activate a local trivial repository.

    :param directory: The pathname of a directory with ``*.deb`` packages.
    :param gpg_key: The :class:`.GPGKey` object used to sign the repository.
                    Defaults to the result of :func:`select_gpg_key()`.

    This function sets everything up so that a trivial Debian package
    repository can be used to install packages without a webserver. This uses
    the ``file://`` URL scheme to point :man:`apt-get` to a directory on the
    local file system.

    .. warning:: This function requires ``root`` privileges to:

                 1. create the directory ``/etc/apt/sources.list.d``,
                 2. create a ``*.list`` file in ``/etc/apt/sources.list.d`` and
                 3. run ``apt-get update``.

                 This function will use :man:`sudo` to gain ``root`` privileges
                 when it's not already running as ``root``.

    .. seealso:: :data:`ALLOW_SUDO`
    """
    directory = os.path.realpath(directory)
    logger.debug("Activating repository: %s", format_path(directory))
    # Generate the `sources.list' file (named after the SHA1 of the
    # directory's pathname so repeated activations are idempotent).
    sources_directory = '/etc/apt/sources.list.d'
    execute('mkdir', '-p', sources_directory, sudo=ALLOW_SUDO, logger=logger)
    sources_file = os.path.join(sources_directory, '%s.list' % sha1(directory))
    logger.debug("Generating file: %s", sources_file)
    sources_entry = ['deb']
    if apt_supports_trusted_option():
        sources_entry.append('[trusted=yes]')
    sources_entry.extend(['file://%s' % directory, './'])
    execute("echo {text} > {file}".format(text=pipes.quote(' '.join(sources_entry)),
                                          file=pipes.quote(sources_file)),
            sudo=ALLOW_SUDO,
            logger=logger)
    # Make apt-get accept the repository signing key?
    gpg_key = gpg_key or select_gpg_key(directory)
    if gpg_key:
        logger.info("Installing GPG key for automatic signing ..")
        initialize_gnupg()
        execute('{gpg} --armor --export | apt-key add -'.format(gpg=gpg_key.gpg_command),
                sudo=ALLOW_SUDO,
                logger=logger)
    # Update the package list (make sure it works).
    logger.debug("Updating package list ..")
    execute("apt-get update", sudo=ALLOW_SUDO, logger=logger)
Пример #38
0
def smart_copy(src, dst):
    """
    Create a hard link to or copy of a file.

    :param src: The pathname of the source file (a string).
    :param dst: The pathname of the target file (a string).

    This function first tries to create a hard link `dst` pointing to `src`
    and only when that fails performs a regular file copy from `src` to
    `dst`. This is used by :func:`collect_packages()` in an attempt to
    conserve disk space when copying package archives between repositories
    on the same filesystem.
    """
    try:
        os.link(src, dst)
    except Exception:
        hard_linked = False
    else:
        hard_linked = True
    if hard_linked:
        logger.debug("Copied %s -> %s using hard link ..", format_path(src),
                     format_path(dst))
    else:
        # Hard linking failed (e.g. across filesystems): fall back to a copy.
        logger.debug("Copying %s -> %s using regular file copy ..",
                     format_path(src), format_path(dst))
        shutil.copy(src, dst)
Пример #39
0
def show_package_metadata(archive):
    """Dump the control fields and contents of a ``*.deb`` archive to standard output."""
    control_fields, contents = inspect_package(archive)
    print("Package metadata from %s:" % format_path(archive))
    for field_name in sorted(control_fields):
        value = control_fields[field_name]
        if field_name == 'Installed-Size':
            # Installed-Size is expressed in kilobytes.
            value = format_size(int(value) * 1024)
        print(" - %s: %s" % (field_name, value))
    print("Package contents from %s:" % format_path(archive))
    for pathname, entry in sorted(contents.items()):
        # Right align the size in a ten character wide column.
        size = format_size(entry.size, keep_width=True).rjust(10)
        if entry.target:
            pathname += ' -> ' + entry.target
        print("%s %s %s %s %s %s" % (entry.permissions, entry.owner,
                                     entry.group, size, entry.modified,
                                     pathname))
Пример #40
0
 def parser(self):
     """A :class:`configparser.RawConfigParser` object with :attr:`available_files` loaded."""
     parser = configparser.RawConfigParser()
     for filename in self.available_files:
         friendly_name = format_path(filename)
         logger.debug("Loading configuration file: %s", friendly_name)
         # RawConfigParser.read() returns the list of files it managed to
         # parse, so an empty result means this file failed to load.
         if not parser.read(filename):
             self.report_issue("Failed to load configuration file! (%s)", friendly_name)
     logger.debug("Loaded %s from %s.",
                  pluralize(len(parser.sections()), "section"),
                  pluralize(len(self.available_files), "configuration file"))
     return parser
Пример #41
0
 def entries(self):
     """A list of :class:`PasswordEntry` objects."""
     stopwatch = Timer()
     logger.info("Scanning %s ..", format_path(self.directory))
     listing = self.context.capture("find", "-type", "f", "-name", "*.gpg", "-print0")
     # Build one entry per `*.gpg' file; os.path.normpath() removes the
     # leading `./' prefixes that `find' adds because it searches the
     # working directory.
     passwords = [
         PasswordEntry(name=os.path.normpath(base), store=self)
         for base, ext in map(os.path.splitext, split(listing, "\0"))
         if ext == ".gpg"
     ]
     logger.verbose("Found %s in %s.", pluralize(len(passwords), "password"), stopwatch)
     return natsort(passwords, key=lambda e: e.name)
Пример #42
0
def check_version_conflicts(dependency_set, cache=None):
    """
    Check for version conflicts in a dependency set.

    For each Debian binary package archive given, check if a newer version of
    the same package exists in the same repository (directory). This analysis
    can be very slow. To make it faster you can use the
    :py:class:`.PackageCache`.

    :param dependency_set: A list of filenames (strings) of ``*.deb`` files.
    :param cache: The :py:class:`.PackageCache` to use (defaults to ``None``).
    :raises: :py:class:`VersionConflictFound` when one or more version
             conflicts are found.
    """
    stopwatch = Timer()
    archives = [parse_filename(fn) for fn in dependency_set]
    spinner = Spinner(total=len(archives))
    logger.info("Checking for version conflicts in %i package(s) ..",
                len(archives))
    summary = []
    for i, archive in enumerate(archives, start=1):
        newer_versions = archive.newer_versions
        if newer_versions:
            summary.append(compact("""
                    Dependency set includes {dependency} but newer version(s)
                    of that package also exist and will take precedence:
            """, dependency=format_path(archive.filename)))
            summary.append("\n".join(" - %s" % format_path(a.filename)
                                     for a in sorted(newer_versions)))
        spinner.step(label="Checking for version conflicts", progress=i)
    spinner.clear()
    if summary:
        summary.insert(0, "One or more version conflicts found:")
        raise VersionConflictFound('\n\n'.join(summary))
    logger.info("No version conflicts found (took %s).", stopwatch)
Пример #43
0
def show_package_metadata(archive):
    """
    Show the metadata and contents of a Debian archive on the terminal.

    :param archive: The pathname of an existing ``*.deb`` archive (a string).
    """
    control_fields, contents = inspect_package(archive)
    say(highlight("Package metadata from %s:"), format_path(archive))
    for field_name in sorted(control_fields):
        value = control_fields[field_name]
        if field_name == 'Installed-Size':
            # Installed-Size is expressed in kilobytes.
            value = format_size(int(value) * 1024)
        say(" - %s %s", highlight(field_name + ":"), value)
    say(highlight("Package contents from %s:"), format_path(archive))
    for pathname, entry in sorted(contents.items()):
        # Right align the size in a ten character wide column.
        size = format_size(entry.size, keep_width=True).rjust(10)
        if entry.target:
            pathname += ' -> ' + entry.target
        say("{permissions} {owner} {group} {size} {modified} {pathname}",
            permissions=entry.permissions, owner=entry.owner,
            group=entry.group, size=size, modified=entry.modified,
            pathname=pathname)
Пример #44
0
def dwim(profile=DEFAULT_PROFILE):
    """
    Evaluate the user's profile script.

    :param profile: The pathname of the profile script (a string, defaults
                    to :data:`DEFAULT_PROFILE`). A leading ``~`` is expanded
                    to the user's home directory.
    """
    logger.info("Initializing dwim %s ..", __version__)
    filename = os.path.expanduser(profile)
    # The global namespace exposed to the profile script.
    environment = dict(
        __file__=filename,
        __name__='dwimrc',
        determine_network_location=determine_network_location,
        launch_program=launch_program,
        LaunchStatus=LaunchStatus,
        set_random_background=set_random_background,
        wait_for_internet_connection=wait_for_internet_connection,
    )
    logger.info("Loading %s ..", format_path(filename))
    # Use exec(compile(...)) instead of execfile() because execfile() was
    # removed in Python 3; compiling with the real filename preserves
    # accurate tracebacks that point into the profile script.
    with open(filename) as handle:
        source = handle.read()
    exec(compile(source, filename, 'exec'), environment, environment)
Пример #45
0
def activate_repository(directory, gpg_key=None):
    """
    Activate a local trivial repository.

    :param directory: The pathname of a directory with ``*.deb`` packages.
    :param gpg_key: The :class:`.GPGKey` object used to sign the repository.
                    Defaults to the result of :func:`select_gpg_key()`.

    This function sets everything up so that a trivial Debian package
    repository can be used to install packages without a webserver. This uses
    the ``file://`` URL scheme to point ``apt-get`` to a directory on the local
    file system.

    .. warning:: This function requires ``root`` privileges to:

                 1. create the directory ``/etc/apt/sources.list.d``,
                 2. create a ``*.list`` file in ``/etc/apt/sources.list.d`` and
                 3. run ``apt-get update``.

                 This function will use ``sudo`` to gain ``root`` privileges
                 when it's not already running as ``root``.
    """
    directory = os.path.realpath(directory)
    logger.debug("Activating repository: %s", format_path(directory))
    # Generate the `sources.list' file.
    sources_directory = '/etc/apt/sources.list.d'
    execute('mkdir', '-p', sources_directory, sudo=ALLOW_SUDO, logger=logger)
    sources_file = os.path.join(sources_directory, '%s.list' % sha1(directory))
    logger.debug("Generating file: %s", sources_file)
    tokens = ['deb']
    if apt_supports_trusted_option():
        # Tell recent apt versions to trust this unsigned/local repository.
        tokens.append('[trusted=yes]')
    tokens.append('file://%s' % directory)
    tokens.append('./')
    execute("echo {text} > {file}".format(text=pipes.quote(' '.join(tokens)),
                                          file=pipes.quote(sources_file)),
            sudo=ALLOW_SUDO, logger=logger)
    # Make apt-get accept the repository signing key?
    gpg_key = gpg_key or select_gpg_key(directory)
    if gpg_key:
        logger.info("Installing GPG key for automatic signing ..")
        initialize_gnupg()
        execute('{gpg} --armor --export | apt-key add -'.format(gpg=gpg_key.gpg_command),
                sudo=ALLOW_SUDO, logger=logger)
    # Update the package list (make sure it works).
    logger.debug("Updating package list ..")
    execute("apt-get update", sudo=ALLOW_SUDO, logger=logger)
Пример #46
0
def load_config(repository):
    """Load the configuration section whose `directory` pattern matches `repository`."""
    repository = os.path.abspath(repository)
    # User configuration takes precedence over system wide configuration.
    for config_dir in (config.user_config_directory, config.system_config_directory):
        config_file = os.path.join(config_dir, config.repo_config_file)
        if not os.path.isfile(config_file):
            continue
        logger.debug("Loading configuration from %s ..", format_path(config_file))
        parser = configparser.RawConfigParser()
        parser.read(config_file)
        sections = {name: dict(parser.items(name)) for name in parser.sections()}
        # Options in the [default] section apply to every repository.
        defaults = sections.get('default', {})
        logger.debug("Found %i sections: %s", len(sections), concatenate(parser.sections()))
        for name, options in sections.items():
            directory = options.get('directory')
            if directory and fnmatch.fnmatch(repository, directory):
                defaults.update(options)
                return defaults
    return {}
def testfile(filename, verbose=False):
    """
    Run the doctests in a file and report the results.

    :param filename: The pathname of the file with doctests (a string).
    :param verbose: Whether :func:`doctest.testfile()` runs verbosely
                    (a boolean, defaults to :data:`False`).
    :returns: The number of failed doctests (an integer, 0 means success).
    """
    logger.info("Checking %s", format_path(filename))
    printer = CustomPrettyPrinter()
    filename = os.path.abspath(filename)
    cwd_save = os.getcwd()
    os.chdir(SAMPLES_DIRECTORY)
    try:
        results = doctest.testfile(filename=filename,
                                   module_relative=False,
                                   globs=dict(repr=printer.pformat),
                                   optionflags=doctest.NORMALIZE_WHITESPACE,
                                   verbose=verbose)
        if results.attempted > 0:
            if results.failed == 0:
                logger.info("Evaluated %i doctests, all passed!", results.attempted)
            else:
                logger.error("Evaluated %i doctests, %i failed!", results.attempted, results.failed)
    finally:
        # Restore the working directory even when doctest.testfile() raises,
        # otherwise subsequent code runs from SAMPLES_DIRECTORY.
        os.chdir(cwd_save)
    return results.failed
Пример #48
0
    def install_from_cache(self, file_in_cache, modules_directory):
        """
        Populate a ``node_modules`` directory by unpacking an archive from the cache.

        :param file_in_cache: The pathname of the archive in the cache (a string).
        :param modules_directory: The pathname of the ``node_modules`` directory (a string).
        :raises: Any exceptions raised by the :mod:`executor.contexts` module.

        If the directory already exists it will be removed and recreated in
        order to remove any existing contents before the archive is unpacked.
        """
        timer = Timer()
        logger.info("Installing from cache (%s)..", format_path(file_in_cache))
        # Start from an empty directory so no stale files linger.
        self.clear_directory(modules_directory)
        logger.verbose("Unpacking archive (%s) ..", file_in_cache)
        unpack_command = ['tar', '-xf', file_in_cache, '-C', modules_directory]
        self.context.execute(*unpack_command)
        self.write_metadata(file_in_cache)
        logger.verbose("Took %s to install from cache.", timer)
Пример #49
0
 def get_email_body(self, uid):
     """
     Get the body of an email from the local cache or the server.

     :param uid: The IMAP UID of the message (an integer).
     :returns: An :class:`EmailMessageParser` with the raw message body.
     """
     local_copy = os.path.join(self.archive.data_directory, "gtalk",
                               self.account_name, "%i.eml" % uid)
     formatted_path = format_path(local_copy)
     if os.path.isfile(local_copy):
         logger.verbose("Reading email with UID %s from %s ..", uid,
                        formatted_path)
         with open(local_copy, encoding="ascii") as handle:
             return EmailMessageParser(raw_body=handle.read(), uid=uid)
     else:
         # Bug fix: the format string lacked the second %s placeholder for
         # `formatted_path' (compare the "Reading ..." call above).
         logger.verbose("Downloading email with UID %s to %s ..", uid,
                        formatted_path)
         response = self.client.uid("fetch", str(uid), "(RFC822)")
         data = self.check_response(
             response, "Failed to download conversation with UID %s!", uid)
         raw_body = data[0][1].decode("ascii")
         # Cache the raw message locally so the next call avoids the network.
         with open(local_copy, "w") as handle:
             handle.write(raw_body)
         return EmailMessageParser(raw_body=raw_body, uid=uid)
def interpret_script(shell_script):
    """Make it appear as if commands are typed into the terminal."""
    with CaptureOutput() as capturer:
        shell = subprocess.Popen(['bash', '-'], stdin=subprocess.PIPE)
        # A fake prompt makes the transcript look like an interactive session.
        fake_prompt = ansi_wrap('$', color='green')
        with open(shell_script) as handle:
            for line in handle:
                # Echo each command before feeding it to the shell.
                sys.stdout.write(fake_prompt + ' ' + line)
                sys.stdout.flush()
                shell.stdin.write(line)
                shell.stdin.flush()
            shell.stdin.close()
        # Give the shell some time to finish producing its output.
        time.sleep(12)
        # Get the text that was shown in the terminal.
        captured_output = capturer.get_text()
    # Store the text that was shown in the terminal.
    base_name, _extension = os.path.splitext(shell_script)
    transcript_file = '%s.txt' % base_name
    logger.info("Updating %s ..", format_path(transcript_file))
    with open(transcript_file, 'w') as handle:
        handle.write(ansi_strip(captured_output))
Пример #51
0
def patch_control_file(control_file, overrides):
    """
    Patch the fields of a Debian control file.

    :param control_file: The filename of the control file to patch (a string).
    :param overrides: A dictionary with fields that should override default
                      name/value pairs. Values of the fields `Depends`,
                      `Provides`, `Replaces` and `Conflicts` are merged
                      while values of other fields are overwritten.
    """
    logger.debug("Patching control file: %s", format_path(control_file))
    # Parse the current contents of the control file.
    with open(control_file) as handle:
        existing_fields = Deb822(handle)
    # Merge the overrides into the existing fields.
    patched = merge_control_fields(existing_fields, overrides)
    # Remove the file first so we never modify other hard links to the
    # same inode.
    os.unlink(control_file)
    # Write out the patched control fields.
    with open(control_file, 'wb') as handle:
        patched.dump(handle)
Пример #52
0
def load_config(repository):
    """Find the configuration section that applies to `repository` (a pathname)."""
    repository = os.path.abspath(repository)
    search_path = (config.user_config_directory,
                   config.system_config_directory)
    for config_dir in search_path:
        config_file = os.path.join(config_dir, config.repo_config_file)
        if not os.path.isfile(config_file):
            continue
        logger.debug("Loading configuration from %s ..",
                     format_path(config_file))
        parser = configparser.RawConfigParser()
        parser.read(config_file)
        sections = {name: dict(parser.items(name))
                    for name in parser.sections()}
        # The [default] section provides fallback values for every match.
        defaults = sections.get('default', {})
        logger.debug("Found %i sections: %s", len(sections),
                     concatenate(parser.sections()))
        for name, options in sections.items():
            directory = options.get('directory')
            if directory and fnmatch.fnmatch(repository, directory):
                defaults.update(options)
                return defaults
    return {}
Пример #53
0
def patch_control_file(control_file, overrides):
    """
    Patch the fields of a Debian control file.

    :param control_file: The filename of the control file to patch (a string).
    :param overrides: A dictionary with fields that should override default
                      name/value pairs. Values of the fields `Depends`,
                      `Provides`, `Replaces` and `Conflicts` are merged
                      while values of other fields are overwritten.
    """
    logger.debug("Patching control file: %s", format_path(control_file))
    # Load the fields currently stored in the control file.
    with open(control_file) as handle:
        current_fields = Deb822(handle)
    # Compute the patched set of fields.
    merged = merge_control_fields(current_fields, overrides)
    # Unlink before rewriting so other hard links to the same inode are
    # left untouched.
    os.unlink(control_file)
    # Dump the merged fields back to the control file.
    with open(control_file, "wb") as handle:
        merged.dump(handle)
Пример #54
0
def collect_packages(archives, directory, prompt=True, cache=None):
    """Interactively copy package archives and their dependencies to a directory."""
    # Find all related packages.
    related_archives = set()
    for filename in archives:
        related_archives.add(parse_filename(filename))
        related_archives.update(collect_related_packages(filename, cache=cache))
    # Ignore package archives that are already in the target directory.
    relevant_archives = set()
    for archive in related_archives:
        destination = os.path.join(directory, os.path.basename(archive.filename))
        if not os.path.isfile(destination):
            relevant_archives.add(archive)
    if not relevant_archives:
        return
    # Interactively move the package archives.
    relevant_archives = sorted(relevant_archives)
    pluralized = pluralize(len(relevant_archives), "package archive",
                           "package archives")
    print("Found %s:" % pluralized)
    for file_to_collect in relevant_archives:
        print(" - %s" % format_path(file_to_collect.filename))
    try:
        if prompt:
            # Ask permission to copy the file(s). Use a name that doesn't
            # shadow the `prompt' parameter.
            question = "Copy %s to %s? [Y/n] " % (pluralized,
                                                  format_path(directory))
            assert raw_input(question).lower() in ('', 'y', 'yes')
        # Copy the file(s).
        for file_to_collect in relevant_archives:
            copy_from = file_to_collect.filename
            copy_to = os.path.join(directory, os.path.basename(copy_from))
            logger.debug("Copying %s -> %s ..", format_path(copy_from),
                         format_path(copy_to))
            shutil.copy(copy_from, copy_to)
        logger.info("Done! Copied %s to %s.", pluralized,
                    format_path(directory))
    except (AssertionError, KeyboardInterrupt, EOFError) as e:
        if isinstance(e, KeyboardInterrupt):
            # Control-C interrupts the prompt without emitting a newline. We'll
            # print one manually so the console output doesn't look funny.
            sys.stderr.write('\n')
        logger.warning("Not copying archive(s) to %s! (aborted by user)",
                       format_path(directory))
        if isinstance(e, KeyboardInterrupt):
            # Maybe we shouldn't actually swallow Control-C, it can make
            # for a very unfriendly user experience... :-)
            raise
Пример #55
0
def set_random_background(command, directory):
    """
    Set a random desktop wallpaper / background.

    :param command: The command to set the wallpaper (a string containing an
                    ``{image}`` marker).
    :param directory: The pathname of a directory containing wallpapers (a
                      string).
    :raises: :exc:`~exceptions.ValueError` when the `command` string doesn't
             contain an ``{image}`` placeholder or when no image files are
             found inside `directory`.
    """
    if '{image}' not in command:
        raise ValueError("The 1st argument should contain an {image} marker!")
    backgrounds = []
    logger.verbose("Searching for desktop backgrounds in %s ..", directory)
    for root, dirs, files in os.walk(directory):
        for filename in files:
            if filename.lower().endswith(('.jpg', '.jpeg', '.png')):
                backgrounds.append(os.path.join(root, filename))
    logger.verbose("Found %s.", pluralize(len(backgrounds), "desktop background"))
    if not backgrounds:
        # Fail with a clear message instead of the opaque IndexError that
        # random.choice() raises on an empty sequence.
        raise ValueError("Failed to find any desktop backgrounds in %s!" % directory)
    selected_background = random.choice(backgrounds)
    logger.info("Selected random background: %s", format_path(selected_background))
    execute(command.format(image=quote(selected_background)))
Пример #56
0
def deactivate_repository(directory):
    """
    Deactivate a local repository that was previously activated using :func:`activate_repository()`.

    :param directory: The pathname of a directory with ``*.deb`` packages.

    .. warning:: This function requires ``root`` privileges to:

                 1. delete a ``*.list`` file in ``/etc/apt/sources.list.d`` and
                 2. run ``apt-get update``.

                 This function will use ``sudo`` to gain ``root`` privileges
                 when it's not already running as ``root``.
    """
    directory = os.path.realpath(directory)
    logger.debug("Deactivating repository: %s", format_path(directory))
    # Remove the `sources.list' file.
    list_basename = '%s.list' % sha1(directory)
    sources_file = os.path.join('/etc/apt/sources.list.d', list_basename)
    logger.debug("Removing file: %s", sources_file)
    execute('rm', '-f', sources_file, sudo=ALLOW_SUDO, logger=logger)
    # Update the package list (cleanup).
    logger.debug("Updating package list ..")
    execute("apt-get update", sudo=ALLOW_SUDO, logger=logger)
Пример #57
0
def create_control_file(control_file, control_fields):
    """
    Create a Debian control file.

    :param control_file: The filename of the control file to create (a string).
    :param control_fields: A dictionary with control file fields. This
                           dictionary is merged with the values in
                           :data:`DEFAULT_CONTROL_FIELDS`.
    :raises: See :func:`check_mandatory_fields()`.
    """
    logger.debug("Creating control file: %s", format_path(control_file))
    # Fields supplied by the caller take precedence over the defaults.
    merged_fields = merge_control_fields(DEFAULT_CONTROL_FIELDS, control_fields)
    # Refuse to create a control file that's missing mandatory fields.
    check_mandatory_fields(merged_fields)
    # Make sure the parent directory of the control file exists.
    makedirs(os.path.dirname(control_file))
    # If the control file is a hard link to an inode with multiple links we
    # must not change that inode, so break the link by removing the file.
    if os.path.exists(control_file):
        os.unlink(control_file)
    # Serialize the merged fields to the control file.
    with open(control_file, 'wb') as handle:
        merged_fields.dump(handle)