Example #1
class IndeterminateStandardOut(ProgressViewBase):
    """ custom output for progress reporting """
    def __init__(self, out=None, spinner=None):
        super(IndeterminateStandardOut, self).__init__(
            out if out else sys.stderr)
        self.spinner = spinner

    def write(self, args):
        """
        writes the progress
        :param args: dictionary containing key 'message'
        """
        if self.spinner is None:
            self.spinner = Spinner(  # pylint: disable=no-member
                label='In Progress', stream=self.out, hide_cursor=False)
        msg = args.get('message', 'In Progress')
        try:
            self.spinner.step(label=msg)
        except OSError:
            pass

    def clear(self):
        # Guard against clear() being called before write() has created the spinner.
        if self.spinner is not None:
            self.spinner.clear()
        self.out.flush()

    def flush(self):
        self.out.flush()
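A progress view like this is driven by a controller that calls write() repeatedly and clear() once the operation finishes. A minimal sketch of that call sequence (hypothetical usage; the messages are illustrative, not Azure CLI's actual controller protocol):

view = IndeterminateStandardOut()
view.write({'message': 'Uploading'})   # first call lazily creates the spinner
view.write({'message': 'Finalizing'})  # later calls just update the label
view.clear()                           # erase the spinner when done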
Example #2
    def __enter__(self):
        """Atomically lock the given pathname."""
        spinner = Spinner()
        timer = Timer()
        while True:
            if makedirs(self.lock_directory):
                return
            elif self.wait:
                spinner.step(label="Waiting for lock on %s: %s .." %
                             (self.pathname, timer))
                time.sleep(0.1)
            else:
                msg = "Failed to lock %s for exclusive access!"
                raise ResourceLockedException(msg % self.pathname)
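Assuming the surrounding class exposes this lock as a context manager, usage would look something like the sketch below (the class name and constructor arguments are assumptions, since only __enter__() is shown above):

with DirectoryLock(pathname='/srv/data', wait=True):
    ...  # exclusive access while the lock directory exists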
Example #3
    def get_hashes(self, **options):
        """
        Get the hashes of the blocks in a file.

        :param options: See :func:`get_url()`.
        :returns: A dictionary that maps byte offsets into the file
                  (integers) to the hashes of the blocks starting at
                  those offsets (strings).
        """
        results = {}
        options.update(filename=self.filename)
        if self.hostname:
            logger.info("Requesting hashes from server ..")
            request_url = self.get_url("hashes", **options)
            logger.debug("Requesting %s ..", request_url)
            response = requests.get(request_url, stream=True)
            response.raise_for_status()
            for line in response.iter_lines(decode_unicode=True):
                offset, _, digest = line.partition("\t")
                results[int(offset)] = digest
        else:
            progress = 0
            block_size = options["block_size"]
            total = os.path.getsize(options["filename"])
            with Spinner(label="Computing hashes", total=total) as spinner:
                for offset, digest in compute_hashes(**options):
                    results[offset] = digest
                    progress += block_size
                    spinner.step(progress)
        return results
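The returned dictionary makes it cheap to diff two files block by block. A minimal sketch, assuming source_hashes and target_hashes were both produced by get_hashes() with the same block size:

changed_offsets = [offset for offset, digest in source_hashes.items()
                   if target_hashes.get(offset) != digest]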
Example #4
    def wait_for_process(self, timeout=0, use_spinner=None):
        """
        Wait until the process ends or the timeout expires.

        :param timeout: The number of seconds to wait for the process to
                        terminate after we've asked it nicely (defaults
                        to zero which means we wait indefinitely).
        :param use_spinner: Whether or not to display an interactive spinner
                            on the terminal (using :class:`~humanfriendly.Spinner`)
                            to explain to the user what they are waiting for:

                            - :data:`True` enables the spinner,
                            - :data:`False` disables the spinner,
                            - :data:`None` (the default) means the spinner is
                              enabled when the program is connected to an
                              interactive terminal, otherwise it's disabled.
        :returns: A :class:`~humanfriendly.Timer` object telling you how long
                  it took to wait for the process.
        """
        with Timer(resumable=True) as timer:
            with Spinner(interactive=use_spinner, timer=timer) as spinner:
                while self.is_running:
                    if timeout and timer.elapsed_time >= timeout:
                        break
                    spinner.step(label="Waiting for process %i to terminate" %
                                 self.pid)
                    spinner.sleep()
            return timer
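Because the method returns the timer, callers can report how long the wait took. A minimal sketch (the process object here is hypothetical):

timer = process.wait_for_process(timeout=30)
print('Process %i gone after %s.' % (process.pid, timer))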
Example #5
def wait_for_processes(processes):
    """
    Wait for the given processes to end.

    Prints an overview of running processes to the terminal once a second so
    the user knows what they are waiting for.

    This function is not specific to :mod:`proc.cron` at all (it doesn't
    even need to know what cron jobs are), it just waits until all of the given
    processes have ended.

    :param processes: A list of :class:`~proc.tree.ProcessNode` objects.
    """
    wait_timer = Timer()
    running_processes = list(processes)
    for process in running_processes:
        logger.info("Waiting for process %i: %s (runtime is %s)", process.pid,
                    quote(process.cmdline),
                    format_timespan(round(process.runtime)))
    with Spinner(timer=wait_timer) as spinner:
        while True:
            for process in list(running_processes):
                if not process.is_alive:
                    running_processes.remove(process)
            if not running_processes:
                break
            num_processes = pluralize(len(running_processes), "process",
                                      "processes")
            process_ids = concatenate(str(p.pid) for p in running_processes)
            spinner.step(label="Waiting for %s: %s" %
                         (num_processes, process_ids))
            spinner.sleep()
    logger.info("All processes have finished, we're done waiting (took %s).",
                wait_timer.rounded)
Example #6
    def __init__(self, cli_ctx, message="Running"):
        self.cli_ctx = cli_ctx
        self.message = message
        self.hook = self.cli_ctx.get_progress_controller(
            det=False,
            spinner=Spinner(  # pylint: disable=no-member
                label='Running',
                stream=sys.stderr,
                hide_cursor=False))
Example #7
def run_command(command_line):
    """Run an external command and show a spinner while the command is running."""
    timer = Timer()
    spinner_label = "Waiting for command: %s" % " ".join(
        map(pipes.quote, command_line))
    with Spinner(label=spinner_label, timer=timer) as spinner:
        process = subprocess.Popen(command_line)
        while True:
            spinner.step()
            spinner.sleep()
            if process.poll() is not None:
                break
    sys.exit(process.returncode)
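Note that run_command() finishes by calling sys.exit() with the command's return code, so it is meant to be the last call in a command line entry point. For example (the command shown is arbitrary):

if __name__ == '__main__':
    run_command(['sleep', '5'])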
Example #8
    def transfer_changes(self, offsets):
        """
        Helper for :func:`synchronize()` to transfer the differences.

        :param offsets: A list of integers with the byte offsets of the blocks
                        to copy from :attr:`source` to :attr:`target`.
        """
        timer = Timer()
        transfer_size = self.compute_transfer_size(offsets)
        formatted_size = format_size(transfer_size, binary=True)
        action = "download" if self.source.hostname else "upload"
        logger.info("Will %s %s totaling %s.", action, pluralize(len(offsets), "block"), formatted_size)
        if self.dry_run:
            return
        # Make sure the target file has the right size.
        if not (self.target.exists and self.source.file_size == self.target.file_size):
            self.target.resize(self.source.file_size)
        # Transfer changed blocks in parallel.
        pool = WorkerPool(
            concurrency=self.concurrency,
            generator_fn=functools.partial(iter, offsets),
            worker_fn=functools.partial(
                transfer_block_fn, block_size=self.block_size, source=self.source, target=self.target
            ),
        )
        spinner = Spinner(label="%sing changed blocks" % action.capitalize(), total=len(offsets))
        with pool, spinner:
            for i, result in enumerate(pool, start=1):
                spinner.step(progress=i)
        logger.info(
            "%sed %i blocks (%s) in %s (%s/s).",
            action.capitalize(),
            len(offsets),
            formatted_size,
            timer,
            format_size(transfer_size / timer.elapsed_time, binary=True),
        )
Example #9
    def wait_until_connected(self):
        """
        Wait until connections are being accepted.

        :raises: :exc:`TimeoutError` when the SSH server isn't fast enough to
                 initialize.
        """
        timer = Timer()
        with Spinner(timer=timer) as spinner:
            while not self.is_connected:
                if timer.elapsed_time > self.wait_timeout:
                    raise TimeoutError(format(
                        "Failed to establish connection to %s within configured timeout of %s!",
                        self.endpoint, format_timespan(self.wait_timeout),
                    ))
                spinner.step(label="Waiting for %s to accept connections" % self.endpoint)
                spinner.sleep()
        logger.debug("Waited %s for %s to accept connections.", timer, self.endpoint)
Example #10
def start_gpg_agent(timeout=LAUNCH_TIMEOUT):
    """
    Start a new gpg-agent daemon in the background.

    :param timeout: The timeout for the newly launched GPG agent daemon to
                    start (a number, defaults to :data:`LAUNCH_TIMEOUT`).
    :returns: The return value of :func:`find_gpg_agent_info()`.
    """
    timer = Timer()
    logger.info("Starting a new GPG agent daemon ..")
    execute('gpg-agent', '--daemon', asynchronous=True, silent=True)
    with Spinner(timer=timer) as spinner:
        while timer.elapsed_time < timeout:
            gpg_agent_info = find_gpg_agent_info()
            if gpg_agent_info:
                logger.debug("Waited %s for GPG agent daemon to start.", timer)
                return gpg_agent_info
            spinner.step(label="Waiting for GPG agent daemon")
            spinner.sleep()
    logger.warning("Failed to locate spawned GPG agent! (waited for %s)", timer)
Example #11
    def __RunThreads(self):
        # Run the timer thread.
        thread = Thread(target=self.__RunTimer)
        thread.start()
        # The EMAIL flood only uses a single thread.
        if self.name == "EMAIL":
            self.threads_count = 1
        # Create the flood threads.
        for _ in range(self.threads_count):
            thread = Thread(target=self.__RunFlood)
            self.threads.append(thread)
        # Start the flood threads, reporting progress as a percentage.
        with Spinner(
                label=f"Starting {self.threads_count} threads",
                total=100,
        ) as spinner:
            for index, thread in enumerate(self.threads):
                thread.start()
                spinner.step(100 / len(self.threads) * (index + 1))
        # Wait for the flood threads to stop.
        for index, thread in enumerate(self.threads):
            thread.join()
            print(f"Stopped thread {index + 1}")
Example #12
def check_version_conflicts(dependency_set, cache=None):
    """
    Check for version conflicts in a dependency set.

    :param dependency_set: A list of filenames (strings) of ``*.deb`` files.
    :param cache: The :class:`.PackageCache` to use (defaults to :data:`None`).
    :raises: :exc:`VersionConflictFound` when one or more version
             conflicts are found.

    For each Debian binary package archive given, check if a newer version of
    the same package exists in the same repository (directory). This analysis
    can be very slow. To make it faster you can use the
    :class:`.PackageCache`.
    """
    timer = Timer()
    summary = []
    dependency_set = list(map(parse_filename, dependency_set))
    spinner = Spinner(total=len(dependency_set))
    logger.info("Checking for version conflicts in %i package(s) ..",
                len(dependency_set))
    for i, archive in enumerate(dependency_set, start=1):
        if archive.newer_versions:
            summary.append(
                compact("""
                    Dependency set includes {dependency} but newer version(s)
                    of that package also exist and will take precedence:
            """,
                        dependency=format_path(archive.filename)))
            summary.append("\n".join(" - %s" % format_path(a.filename)
                                     for a in sorted(archive.newer_versions)))
        spinner.step(label="Checking for version conflicts", progress=i)
    spinner.clear()
    if summary:
        summary.insert(0, "One or more version conflicts found:")
        raise VersionConflictFound('\n\n'.join(summary))
    else:
        logger.info("No version conflicts found (took %s).", timer)
Example #13
def scan_packages(repository, packages_file=None, cache=None):
    """
    A reimplementation of the ``dpkg-scanpackages -m`` command in Python.

    Updates a ``Packages`` file based on the Debian package archive(s) found in
    the given directory. Uses :class:`.PackageCache` to (optionally) speed
    up the process significantly by caching package metadata and hashes on
    disk. This explains why this function can be much faster than the
    :man:`dpkg-scanpackages` program.

    :param repository: The pathname of a directory containing Debian
                       package archives (a string).
    :param packages_file: The pathname of the ``Packages`` file to update
                          (a string). Defaults to the ``Packages`` file in
                          the given directory.
    :param cache: The :class:`.PackageCache` to use (defaults to :data:`None`).
    """
    # By default the `Packages' file inside the repository is updated.
    if not packages_file:
        packages_file = os.path.join(repository, 'Packages')
    # Update the `Packages' file.
    timer = Timer()
    package_archives = glob.glob(os.path.join(repository, '*.deb'))
    num_packages = len(package_archives)
    spinner = Spinner(total=num_packages)
    with open(packages_file, 'wb') as handle:
        for i, archive in enumerate(optimize_order(package_archives), start=1):
            fields = dict(inspect_package_fields(archive, cache=cache))
            fields.update(get_packages_entry(archive, cache=cache))
            deb822_dict = unparse_control_fields(fields)
            deb822_dict.dump(handle)
            handle.write(b'\n')
            spinner.step(label="Scanning package metadata", progress=i)
    spinner.clear()
    logger.debug("Wrote %i entries to output Packages file in %s.",
                 num_packages, timer)
Example #14
    def get_spinner(self, timer):
        """Get a :class:`.Spinner` to be used by :func:`run()`."""
        if isinstance(self.spinner, Spinner):
            return self.spinner
        else:
            return Spinner(interactive=self.spinner, timer=timer)
Example #15
def check_duplicate_files(dependency_set, cache=None):
    """
    Check a collection of Debian package archives for conflicts.

    :param dependency_set: A list of filenames (strings) of ``*.deb`` files.
    :param cache: The :class:`.PackageCache` to use (defaults to :data:`None`).
    :raises: :exc:`exceptions.ValueError` when less than two package
             archives are given (the duplicate check obviously only works if
             there are packages to compare :-).
    :raises: :exc:`DuplicateFilesFound` when duplicate files are found
             within a group of package archives.

    This check looks for duplicate files in package archives that concern
    different packages. Ignores groups of packages that have their 'Provides'
    and 'Replaces' fields set to a common value. Other variants of 'Conflicts'
    are not supported yet.

    Because this analysis involves both the package control file fields and the
    pathnames of files installed by packages it can be really slow. To make it
    faster you can use the :class:`.PackageCache`.
    """
    timer = Timer()
    dependency_set = list(map(parse_filename, dependency_set))
    # Make sure we have something useful to work with.
    num_archives = len(dependency_set)
    if num_archives < 2:
        msg = "To check for duplicate files you need to provide two or more packages archives! (%i given)"
        raise ValueError(msg % num_archives)
    # Build up a global map of all files contained in the given package archives.
    global_contents = collections.defaultdict(set)
    global_fields = {}
    spinner = Spinner(total=num_archives)
    logger.info("Checking for duplicate files in %i package archives ..",
                num_archives)
    for i, archive in enumerate(optimize_order(dependency_set), start=1):
        spinner.step(label="Scanning %i package archives" % num_archives,
                     progress=i)
        fields, contents = inspect_package(archive.filename, cache=cache)
        global_fields[archive.filename] = fields
        for pathname, stat in contents.items():
            if not stat.permissions.startswith('d'):
                global_contents[pathname].add(archive)
    spinner.clear()
    # Count the number of duplicate files between sets of conflicting packages
    # for more user friendly reporting.
    duplicate_files = collections.defaultdict(
        lambda: dict(count=0, filenames=[]))
    for pathname, packages in global_contents.items():
        if len(packages) > 1:
            # Override the sort key to be the filename because we don't need
            # to properly sort by version (which is slow on large collections).
            key = tuple(sorted(packages, key=lambda p: p.filename))
            duplicate_files[key]['count'] += 1
            duplicate_files[key]['filenames'].append(pathname)
    for packages, information in sorted(duplicate_files.items()):
        # Never report multiple versions of the same package.
        if len(set(package.name for package in packages)) == 1:
            duplicate_files.pop(packages)
            continue

        # We check for one common case where it's easy to guarantee that
        # we're not dealing with broken packages: All of the packages have
        # marked each other as conflicting via the combination of the
        # fields `Provides:' and `Conflicts:'.
        def find_virtual_name(field_name):
            package_names = set()
            for archive in packages:
                field = global_fields[archive.filename].get(field_name)
                if field:
                    package_names |= field.names
                else:
                    return
            if len(package_names) == 1:
                return list(package_names)[0]

        marked_conflicts = find_virtual_name('Conflicts')
        marked_provides = find_virtual_name('Provides')
        if marked_conflicts and marked_conflicts == marked_provides:
            duplicate_files.pop(packages)
    # Boring string formatting, trying to find a way to clearly present conflicts.
    summary = []
    for packages, information in sorted(duplicate_files.items()):
        block = []
        conflicts = pluralize(information['count'], 'conflict', 'conflicts')
        block.append("Found %s between %i packages:\n" %
                     (conflicts, len(packages)))
        for i, package in enumerate(sorted(packages), start=1):
            block.append("  %i. %s\n" % (i, package.filename))
        block.append("These packages contain %s:\n" % conflicts)
        for i, filename in enumerate(sorted(information['filenames']),
                                     start=1):
            block.append("  %i. %s\n" % (i, filename))
        summary.append(''.join(block))
    if summary:
        archives_involved = set(
            itertools.chain.from_iterable(duplicate_files.keys()))
        files = pluralize(len(duplicate_files), 'duplicate file',
                          'duplicate files')
        archives = pluralize(len(archives_involved), 'package archive',
                             'package archives')
        summary.insert(0, "Found %s in %s!\n" % (files, archives))
        summary.append(
            compact("""
            Hint: If the package contents are correct you can resolve these
            conflicts by marking the packages as conflicting. You do this by
            adding the 'Conflicts' and 'Provides' fields and setting them to a
            common value. That should silence this message.
        """))
        delimiter = '%s\n' % ('-' * 79)
        raise DuplicateFilesFound(delimiter.join(summary))
    else:
        logger.info("No conflicting files found (took %s).", timer)