Code example #1
File: __init__.py Project: ProjetoDSL/PerfMoon
 def kill_workers(self, max_memory_active=0, max_memory_idle=0, timeout=0, dry_run=False):
     killed = set()
     num_checked = 0
     for worker in self.killable_workers:
         if worker.pid not in killed:
             kill_worker = False
             memory_usage_threshold = max_memory_active if worker.is_active else max_memory_idle
             if memory_usage_threshold and worker.memory_usage > memory_usage_threshold:
                 logger.info("Killing %s using %s (%s) ..",
                             worker, format_size(worker.memory_usage),
                             worker.request or 'last request unknown')
                 kill_worker = True
             elif timeout and worker.is_active and getattr(worker, 'ss', 0) > timeout:
                 logger.info("Killing %s hanging for %s since last request (%s) ..",
                             worker, format_timespan(worker.ss),
                             worker.request or 'unknown')
                 kill_worker = True
             if kill_worker:
                 if not dry_run:
                     worker.process.kill()
                 killed.add(worker.pid)
                 if worker.is_active:
                     self.num_killed_active += 1
                 else:
                     self.num_killed_idle += 1
         num_checked += 1
     if killed:
         logger.info("Killed %i of %s.", len(killed), pluralize(num_checked, "Apache worker"))
     else:
         logger.info("No Apache workers killed (found %s within resource usage limits).",
                     pluralize(num_checked, "worker"))
     return list(killed)
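For reference, here is a minimal, standalone sketch (not taken from the project above) of what the humanfriendly helpers used in this example return; the exact output strings may differ slightly between humanfriendly versions:

from humanfriendly import format_size, pluralize

# pluralize() picks the singular or plural form based on the count.
print(pluralize(1, "Apache worker"))  # "1 Apache worker"
print(pluralize(3, "Apache worker"))  # "3 Apache workers"

# format_size() renders a byte count in a human readable way.
print(format_size(1000 ** 2))  # "1 MB"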
Code example #2
def format_timespan(num_seconds):
    """
    Adapted from the humanfriendly library, with the time units changed to
    their abbreviations.
    """

    if num_seconds < 60:
        # Fast path.
        rounded_number = round_number(num_seconds, False)
        return pluralize(rounded_number, 's', 's')
    else:
        # Slow path.
        result = []
        for unit in reversed(time_units):
            if num_seconds >= unit['divider']:
                count = int(num_seconds / unit['divider'])
                num_seconds %= unit['divider']
                result.append(
                    pluralize(count, unit['singular'], unit['plural']))
        if len(result) == 1:
            # A single count/unit combination.
            return result[0]
        else:
            # Remove insignificant data from the formatted timespan and format
            # it in a readable way.
            return concatenate(result[:2])
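For comparison, the unmodified humanfriendly.format_timespan() that this helper was adapted from behaves roughly as follows (illustrative outputs in the comments; wording may differ between humanfriendly versions):

from humanfriendly import format_timespan

print(format_timespan(30))    # "30 seconds"
print(format_timespan(90))    # "1 minute and 30 seconds"
print(format_timespan(3900))  # "1 hour and 5 minutes"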
Code example #3
    def rotate_concurrent(self, *locations, **kw):
        """
        Rotate the backups in the given locations concurrently.

        :param locations: One or more values accepted by :func:`coerce_location()`.
        :param kw: Any keyword arguments are passed on to :func:`rotate_backups()`.

        This function uses :func:`rotate_backups()` to prepare rotation
        commands for the given locations and then it removes backups in
        parallel, one backup per mount point at a time.

        The idea behind this approach is that parallel rotation is most useful
        when the files to be removed are on different disks and so multiple
        devices can be utilized at the same time.

        Because mount points are per system :func:`rotate_concurrent()` will
        also parallelize over backups located on multiple remote systems.
        """
        timer = Timer()
        pool = CommandPool(concurrency=10)
        logger.info("Scanning %s ..", pluralize(len(locations), "backup location"))
        for location in locations:
            for cmd in self.rotate_backups(location, prepare=True, **kw):
                pool.add(cmd)
        if pool.num_commands > 0:
            backups = pluralize(pool.num_commands, "backup")
            logger.info("Preparing to rotate %s (in parallel) ..", backups)
            pool.run()
            logger.info("Successfully rotated %s in %s.", backups, timer)
Code example #4
 async def download_all_conversations(self, conversation_list):
     """Download conversations from Google Hangouts."""
     timer = Timer()
     for conversation in conversation_list.get_all(include_archived=True):
         try:
             await self.download_conversation(conversation)
         except Exception:
             logger.warning("Skipping conversation due to synchronization error ..", exc_info=True)
             self.stats.failed_conversations += 1
         self.stats.show()
     summary = []
     if self.stats.conversations_added > 0:
         summary.append(pluralize(self.stats.conversations_added, "conversation"))
     if self.stats.messages_added > 0:
         summary.append(pluralize(self.stats.messages_added, "message"))
     if summary:
         logger.info("Added %s in %s.", concatenate(summary), timer)
     else:
         logger.info("No new conversations or messages found (took %s to check).", timer)
     if self.stats.failed_conversations > 0:
         logger.warning(
             "Skipped %s due to synchronization %s!",
             pluralize(self.stats.failed_conversations, "conversation"),
             "errors" if self.stats.failed_conversations > 1 else "error",
         )
     if self.stats.skipped_conversations > 0:
         logger.notice(
             "Skipped %s due to previous synchronization %s! (use --force to retry %s)",
             pluralize(self.stats.skipped_conversations, "conversation"),
             "errors" if self.stats.skipped_conversations > 1 else "error",
             "them" if self.stats.skipped_conversations > 1 else "it",
         )
Code example #5
    def rotate_concurrent(self, *locations, **kw):
        """
        Rotate the backups in the given locations concurrently.

        :param locations: One or more values accepted by :func:`coerce_location()`.
        :param kw: Any keyword arguments are passed on to :func:`rotate_backups()`.

        This function uses :func:`rotate_backups()` to prepare rotation
        commands for the given locations and then it removes backups in
        parallel, one backup per mount point at a time.

        The idea behind this approach is that parallel rotation is most useful
        when the files to be removed are on different disks and so multiple
        devices can be utilized at the same time.

        Because mount points are per system :func:`rotate_concurrent()` will
        also parallelize over backups located on multiple remote systems.
        """
        timer = Timer()
        pool = CommandPool(concurrency=10)
        logger.info("Scanning %s ..",
                    pluralize(len(locations), "backup location"))
        for location in locations:
            for cmd in self.rotate_backups(location, prepare=True, **kw):
                pool.add(cmd)
        if pool.num_commands > 0:
            backups = pluralize(pool.num_commands, "backup")
            logger.info("Preparing to rotate %s (in parallel) ..", backups)
            pool.run()
            logger.info("Successfully rotated %s in %s.", backups, timer)
Code example #6
 def synchronize(self):
     """Download RFC822 encoded Google Talk conversations using IMAP and import the embedded chat messages."""
     self.login_to_server()
     self.select_chats_folder()
     # Check for emails to download and/or import.
     to_download = self.find_uids_to_download()
     to_import = self.find_uids_to_import()
     to_process = to_download | to_import
     to_import |= to_download
     if to_process:
         summary = []
         if to_download:
             summary.append("downloading %s" %
                            pluralize(len(to_download), "email"))
         if to_import:
             summary.append("importing %s" %
                            pluralize(len(to_import), "email"))
         logger.info("%s ..", ", ".join(summary).capitalize())
         for i, uid in enumerate(sorted(to_process), start=1):
             logger.info("Processing email with UID %s (%.2f%%) ..", uid,
                         i / (len(to_process) / 100.0))
             email = self.get_email_body(uid)
             if email.parsed_body:
                 with self.stats:
                     if email.parsed_body.is_multipart():
                         self.parse_multipart_email(email)
                     else:
                         self.parse_singlepart_email(email)
             else:
                 logger.verbose(
                     "Skipping conversation %s with empty mail body.", uid)
             self.archive.commit_changes()
     else:
         logger.info("Nothing to do! (no new messages)")
     self.client.logout()
Code example #7
def format_timespan(num_seconds):
    """
    Adapted from the humanfriendly library, with the time units changed to
    their abbreviations.
    """

    if num_seconds < 60:
        # Fast path.
        rounded_number = round_number(num_seconds, False)
        return pluralize(rounded_number, 's', 's')
    else:
        # Slow path.
        result = []
        for unit in reversed(time_units):
            if num_seconds >= unit['divider']:
                count = int(num_seconds / unit['divider'])
                num_seconds %= unit['divider']
                result.append(pluralize(count, unit['singular'], unit['plural']))
        if len(result) == 1:
            # A single count/unit combination.
            return result[0]
        else:
            # Remove insignificant data from the formatted timespan and format
            # it in a readable way.
            return concatenate(result[:2])
Code example #8
 def find_uids_to_download(self):
     """Determine the UIDs of the email messages to be downloaded."""
     timer = Timer()
     # Load the UID values of the Google Talk conversations in the local database.
     logger.verbose(
         "Discovering conversations available in local archive ..")
     conversation_uids = (self.session.query(
         Conversation.external_id).filter(
             Conversation.account == self.account).filter(
                 Conversation.external_id != None))
     message_uids = (self.session.query(Message.external_id).join(
         Message.conversation).filter(
             Conversation.account == self.account).filter(
                 Message.external_id != None))
     logger.debug("Query: %s", conversation_uids.union(message_uids))
     local_uids = set(
         int(row[0]) for row in conversation_uids.union(message_uids))
     # Discover the UID values of the conversations available remotely.
     logger.verbose("Discovering conversations available on server ..")
     response = self.client.uid("search", None, "ALL")
     data = self.check_response(response,
                                "Search for available messages failed!")
     remote_uids = set(map(int, data[0].split()))
     # Discover the UID values of the conversations that we're missing.
     missing_uids = remote_uids - local_uids
     logger.verbose(
         "Found %s, %s and %s (took %s).",
         pluralize(len(local_uids), "local conversation"),
         pluralize(len(remote_uids), "remote conversation"),
         pluralize(len(missing_uids), "conversation to download",
                   "conversations to download"),
         timer,
     )
     return missing_uids
Code example #9
    def run(self):
        """
        Keep spawning commands and collecting results until all commands have run.

        :returns: The value of :attr:`results`.
        :raises: Any exceptions raised by :func:`collect()`.

        This method calls :func:`spawn()` and :func:`collect()` in a loop until
        all commands registered using :func:`add()` have run and finished. If
        :func:`collect()` raises an exception any running commands are
        terminated before the exception is propagated to the caller.

        If you're writing code where you want to own the main loop then
        consider calling :func:`spawn()` and :func:`collect()` directly instead
        of using :func:`run()`.

        When :attr:`concurrency` is set to one, specific care is taken to make
        sure that the callbacks configured by :attr:`.start_event` and
        :attr:`.finish_event` are called in the expected (intuitive) order.
        """
        # Start spawning processes to execute the commands.
        timer = Timer()
        logger.debug("Preparing to run %s with a concurrency of %i ..",
                     pluralize(self.num_commands, "command"), self.concurrency)
        try:
            with Spinner(interactive=self.spinner, timer=timer) as spinner:
                num_started = 0
                num_collected = 0
                while not self.is_finished:
                    # When concurrency is set to one (I know, initially it
                    # sounds like a silly use case, bear with me) I want the
                    # start_event and finish_event callbacks of external
                    # commands to fire in the right order. The following
                    # conditional is intended to accomplish this goal.
                    if self.concurrency > (num_started - num_collected):
                        num_started += self.spawn()
                    num_collected += self.collect()
                    spinner.step(label=format(
                        "Waiting for %i/%i %s",
                        self.num_commands - self.num_finished,
                        self.num_commands,
                        "command" if self.num_commands == 1 else "commands",
                    ))
                    spinner.sleep()
        except Exception:
            if self.num_running > 0:
                logger.warning(
                    "Command pool raised exception, terminating running commands!"
                )
            # Terminate commands that are still running.
            self.terminate()
            # Re-raise the exception to the caller.
            raise
        # Collect the output and return code of any commands not yet collected.
        self.collect()
        logger.debug("Finished running %s in %s.",
                     pluralize(self.num_commands, "command"), timer)
        # Report the results to the caller.
        return self.results
Code example #10
File: concurrent.py Project: xolox/python-executor
    def run(self):
        """
        Keep spawning commands and collecting results until all commands have run.

        :returns: The value of :attr:`results`.
        :raises: Any exceptions raised by :func:`collect()`.

        This method calls :func:`spawn()` and :func:`collect()` in a loop until
        all commands registered using :func:`add()` have run and finished. If
        :func:`collect()` raises an exception any running commands are
        terminated before the exception is propagated to the caller.

        If you're writing code where you want to own the main loop then
        consider calling :func:`spawn()` and :func:`collect()` directly instead
        of using :func:`run()`.

        When :attr:`concurrency` is set to one, specific care is taken to make
        sure that the callbacks configured by :attr:`.start_event` and
        :attr:`.finish_event` are called in the expected (intuitive) order.
        """
        # Start spawning processes to execute the commands.
        timer = Timer()
        logger.debug("Preparing to run %s with a concurrency of %i ..",
                     pluralize(self.num_commands, "command"),
                     self.concurrency)
        try:
            with Spinner(interactive=self.spinner, timer=timer) as spinner:
                num_started = 0
                num_collected = 0
                while not self.is_finished:
                    # When concurrency is set to one (I know, initially it
                    # sounds like a silly use case, bear with me) I want the
                    # start_event and finish_event callbacks of external
                    # commands to fire in the right order. The following
                    # conditional is intended to accomplish this goal.
                    if self.concurrency > (num_started - num_collected):
                        num_started += self.spawn()
                    num_collected += self.collect()
                    spinner.step(label=format(
                        "Waiting for %i/%i %s",
                        self.num_commands - self.num_finished, self.num_commands,
                        "command" if self.num_commands == 1 else "commands",
                    ))
                    spinner.sleep()
        except Exception:
            if self.num_running > 0:
                logger.warning("Command pool raised exception, terminating running commands!")
            # Terminate commands that are still running.
            self.terminate()
            # Re-raise the exception to the caller.
            raise
        # Collect the output and return code of any commands not yet collected.
        self.collect()
        logger.debug("Finished running %s in %s.",
                     pluralize(self.num_commands, "command"),
                     timer)
        # Report the results to the caller.
        return self.results
Code example #11
    def ranked_mirrors(self):
        """
        A list of :class:`CandidateMirror` objects (ordered from best to worst).

        The value of this property is computed by concurrently testing the
        mirrors in :attr:`available_mirrors` for the following details:

        - availability (:attr:`~CandidateMirror.is_available`)
        - connection speed (:attr:`~CandidateMirror.bandwidth`)
        - update status (:attr:`~CandidateMirror.is_updating`)

        The number of mirrors to test is limited to :attr:`max_mirrors` and you
        can change the number of simultaneous HTTP connections allowed by
        setting :attr:`concurrency`.
        """
        timer = Timer()
        # Sort the candidates based on the currently available information
        # (and transform the input argument into a list in the process).
        mirrors = sorted(self.available_mirrors,
                         key=lambda c: c.sort_key,
                         reverse=True)
        # Limit the number of candidates to a reasonable number?
        if self.max_mirrors and len(mirrors) > self.max_mirrors:
            mirrors = mirrors[:self.max_mirrors]
        # Prepare the Release.gpg URLs to fetch.
        mapping = dict((c.release_gpg_url, c) for c in mirrors)
        num_mirrors = pluralize(len(mapping), "mirror")
        logger.info("Checking %s for availability and performance ..",
                    num_mirrors)
        # Concurrently fetch the Release.gpg files.
        with AutomaticSpinner(label="Checking mirrors"):
            for url, data, elapsed_time in fetch_concurrent(
                    mapping.keys(), concurrency=self.concurrency):
                candidate = mapping[url]
                candidate.release_gpg_contents = data
                candidate.release_gpg_latency = elapsed_time
        # Concurrently check for Archive-Update-in-Progress markers.
        update_mapping = dict((c.archive_update_in_progress_url, c)
                              for c in mirrors if c.is_available)
        logger.info("Checking %s for Archive-Update-in-Progress marker ..",
                    pluralize(len(update_mapping), "mirror"))
        with AutomaticSpinner(label="Checking mirrors"):
            for url, data, elapsed_time in fetch_concurrent(
                    update_mapping.keys(), concurrency=self.concurrency):
                update_mapping[url].is_updating = data is not None
        # Sanity check our results.
        mirrors = list(mapping.values())
        logger.info("Finished checking %s (took %s).", num_mirrors, timer)
        if not any(c.is_available for c in mirrors):
            raise Exception("It looks like all %s are unavailable!" %
                            num_mirrors)
        if all(c.is_updating for c in mirrors):
            logger.warning("It looks like all %s are being updated?!",
                           num_mirrors)
        return sorted(mirrors, key=lambda c: c.sort_key, reverse=True)
Code example #12
File: __init__.py Project: jzoldak/pip-accel
    def install_dependencies(self, requirement):
        """
        If :py:mod:`pip_accel` fails to build a binary distribution, it will
        call this method as a last chance to install missing dependencies. If
        this function does not raise an exception, :py:mod:`pip_accel` will
        retry the build once.

        :param requirement: A :py:class:`.Requirement` object.
        :returns: ``True`` when missing system packages were installed,
                  ``False`` otherwise.
        :raises: :py:exc:`.DependencyInstallationRefused` when automatic
                 installation is disabled or refused by the operator.
        :raises: :py:exc:`.DependencyInstallationFailed` when the installation
                 of missing system packages fails.
        """
        install_timer = Timer()
        missing_dependencies = self.find_missing_dependencies(requirement)
        if missing_dependencies:
            # Compose the command line for the install command.
            install_command = shlex.split(self.install_command) + missing_dependencies
            if os.getuid() != 0:
                # Prepend `sudo' to the command line.
                install_command.insert(0, 'sudo')
            # Always suggest the installation command to the operator.
            logger.info("You seem to be missing %s: %s",
                        pluralize(len(missing_dependencies), "dependency", "dependencies"),
                        concatenate(missing_dependencies))
            logger.info("You can install %s with this command: %s",
                        "it" if len(missing_dependencies) == 1 else "them", " ".join(install_command))
            if self.config.auto_install is False:
                # Refuse automatic installation and don't prompt the operator when the configuration says no.
                self.installation_refused(requirement, missing_dependencies, "automatic installation is disabled")
            # Get the operator's permission to install the missing package(s).
            if self.config.auto_install or self.confirm_installation(requirement, missing_dependencies, install_command):
                logger.info("Got permission to install %s.",
                            pluralize(len(missing_dependencies), "dependency", "dependencies"))
            else:
                logger.error("Refused installation of missing %s!",
                             "dependency" if len(missing_dependencies) == 1 else "dependencies")
                self.installation_refused(requirement, missing_dependencies, "manual installation was refused")
            if subprocess.call(install_command) == 0:
                logger.info("Successfully installed %s in %s.",
                            pluralize(len(missing_dependencies), "dependency", "dependencies"),
                            install_timer)
                return True
            else:
                logger.error("Failed to install %s.",
                             pluralize(len(missing_dependencies), "dependency", "dependencies"))
                msg = "Failed to install %s required by Python package %s! (%s)"
                raise DependencyInstallationFailed(msg % (pluralize(len(missing_dependencies), "system package", "system packages"),
                                                          requirement.name, concatenate(missing_dependencies)))
        return False
Code example #13
 def parser(self):
     """A :class:`configparser.RawConfigParser` object with :attr:`available_files` loaded."""
     parser = configparser.RawConfigParser()
     for filename in self.available_files:
         friendly_name = format_path(filename)
         logger.debug("Loading configuration file: %s", friendly_name)
         loaded_files = parser.read(filename)
         if len(loaded_files) == 0:
             self.report_issue("Failed to load configuration file! (%s)", friendly_name)
     logger.debug("Loaded %s from %s.",
                  pluralize(len(parser.sections()), "section"),
                  pluralize(len(self.available_files), "configuration file"))
     return parser
Code example #14
 def show(self):
     """Show statistics about imported conversations, messages, contacts, etc."""
     additions = []
     if self.conversations_added > 0:
         additions.append(pluralize(self.conversations_added, "conversation"))
     if self.messages_added > 0:
         additions.append(pluralize(self.messages_added, "message"))
     if self.contacts_added > 0:
         additions.append(pluralize(self.contacts_added, "contact"))
     if self.email_addresses_added > 0:
         additions.append(pluralize(self.contacts_added, "email address", "email addresses"))
     if self.telephone_numbers_added > 0:
         additions.append(pluralize(self.telephone_numbers_added, "telephone number"))
     if additions:
         logger.info("Imported %s.", concatenate(additions))
Code example #15
File: __init__.py Project: fjgauthier/pip-accel
    def unpack_source_dists(self, arguments, use_wheels=False):
        """
        Check whether there are local source distributions available for all
        requirements, unpack the source distribution archives and find the
        names and versions of the requirements. By using the ``pip install
        --download`` command we avoid reimplementing the following pip
        features:

        - Parsing of ``requirements.txt`` (including recursive parsing)
        - Resolution of possibly conflicting pinned requirements
        - Unpacking source distributions in multiple formats
        - Finding the name & version of a given source distribution

        :param arguments: The command line arguments to ``pip install ...`` (a
                          list of strings).
        :param use_wheels: Whether pip and pip-accel are allowed to use wheels_
                           (``False`` by default for backwards compatibility
                           with callers that use pip-accel as a Python API).
        :returns: A list of :py:class:`pip_accel.req.Requirement` objects.
        :raises: Any exceptions raised by pip, for example
                 :py:exc:`pip.exceptions.DistributionNotFound` when not all
                 requirements can be satisfied.
        """
        unpack_timer = Timer()
        logger.info("Unpacking distribution(s) ..")
        with PatchedAttribute(pip_install_module, 'PackageFinder', CustomPackageFinder):
            requirements = self.get_pip_requirement_set(arguments, use_remote_index=False, use_wheels=use_wheels)
            logger.info("Finished unpacking %s in %s.", pluralize(len(requirements), "distribution"), unpack_timer)
            return requirements
Code example #16
def discover_mirrors():
    """
    Discover available Ubuntu mirrors.

    :returns: A set of :class:`.CandidateMirror` objects that have their
              :attr:`~.CandidateMirror.mirror_url` property set and may have
              the :attr:`~.CandidateMirror.last_updated` property set.
    :raises: If no mirrors are discovered an exception is raised.

    This only queries :data:`MIRROR_SELECTION_URL` to
    discover available Ubuntu mirrors. Here's an example run:

    >>> from apt_smart.backends.ubuntu import discover_mirrors
    >>> from pprint import pprint
    >>> pprint(discover_mirrors())

    """
    timer = Timer()
    mirrors = set()
    mirrors = discover_mirror_selection()
    if not mirrors:
        logger.warning("Failed to discover any Ubuntu mirrors! (using %s)" %
                       MIRROR_SELECTION_URL)
        logger.info("Trying to use %s as fallback" % MIRRORS_URL)
        mirrors = discover_mirrors_old()
    elif len(mirrors) < 2:
        logger.warning("Too few mirrors, trying to use %s to find more" %
                       MIRRORS_URL)
        mirrors |= discover_mirrors_old()  # add mirrors from discover_mirrors_old()
    logger.info("Discovered %s in %s.", pluralize(len(mirrors),
                                                  "Ubuntu mirror"), timer)
    return mirrors
Code example #17
File: __init__.py Project: yhat/pip-accel
    def unpack_source_dists(self, arguments):
        """
        Check whether there are local source distributions available for all
        requirements, unpack the source distribution archives and find the
        names and versions of the requirements. By using the ``pip install
        --no-install`` command we avoid reimplementing the following pip
        features:

        - Parsing of ``requirements.txt`` (including recursive parsing)
        - Resolution of possibly conflicting pinned requirements
        - Unpacking source distributions in multiple formats
        - Finding the name & version of a given source distribution

        :param arguments: The command line arguments to ``pip install ...`` (a
                          list of strings).
        :returns: A list of :py:class:`pip_accel.req.Requirement` objects.
        :raises: Any exceptions raised by pip, for example
                 :py:exc:`pip.exceptions.DistributionNotFound` when not all
                 requirements can be satisfied.
        """
        unpack_timer = Timer()
        logger.info("Unpacking source distribution(s) ..")
        # Install our custom package finder to force --no-index behavior.
        original_package_finder = pip_index_module.PackageFinder
        pip_install_module.PackageFinder = CustomPackageFinder
        try:
            requirements = self.get_pip_requirement_set(arguments, use_remote_index=False)
            logger.info("Finished unpacking %s in %s.",
                        pluralize(len(requirements), "source distribution"),
                        unpack_timer)
            return requirements
        finally:
            # Make sure to remove our custom package finder.
            pip_install_module.PackageFinder = original_package_finder
Code example #18
def wait_for_processes(processes):
    """
    Wait for the given processes to end.

    Prints an overview of running processes to the terminal once a second so
    the user knows what they are waiting for.

    This function is not specific to :mod:`proc.cron` at all (it doesn't
    even need to know what cron jobs are), it just waits until all of the given
    processes have ended.

    :param processes: A list of :class:`~proc.tree.ProcessNode` objects.
    """
    wait_timer = Timer()
    running_processes = list(processes)
    for process in running_processes:
        logger.info("Waiting for process %i: %s (runtime is %s)", process.pid,
                    quote(process.cmdline),
                    format_timespan(round(process.runtime)))
    with Spinner(timer=wait_timer) as spinner:
        while True:
            for process in list(running_processes):
                if not process.is_alive:
                    running_processes.remove(process)
            if not running_processes:
                break
            num_processes = pluralize(len(running_processes), "process",
                                      "processes")
            process_ids = concatenate(str(p.pid) for p in running_processes)
            spinner.step(label="Waiting for %s: %s" %
                         (num_processes, process_ids))
            spinner.sleep()
    logger.info("All processes have finished, we're done waiting (took %s).",
                wait_timer.rounded)
Code example #19
    def __init__(self, config):
        """
        Initialize a cache manager.

        Automatically initializes instances of all registered cache backends
        based on setuptools' support for entry points which makes it possible
        for external Python packages to register additional cache backends
        without any modifications to pip-accel.

        :param config: The pip-accel configuration (a :py:class:`.Config`
                       object).
        """
        self.config = config
        for entry_point in get_entry_map('pip-accel',
                                         'pip_accel.cache_backends').values():
            logger.debug("Importing cache backend: %s",
                         entry_point.module_name)
            __import__(entry_point.module_name)
        # Initialize instances of all registered cache backends (sorted by
        # priority so that e.g. the local file system is checked before S3).
        self.backends = sorted(
            (b(self.config)
             for b in registered_backends if b != AbstractCacheBackend),
            key=lambda b: b.PRIORITY)
        logger.debug("Initialized %s: %s",
                     pluralize(len(self.backends), "cache backend"),
                     concatenate(map(repr, self.backends)))
Code example #20
def create_control_file(control_file, control_fields):
    """
    Create a Debian control file.

    :param control_file: The filename of the control file to create (a string).
    :param control_fields: A dictionary with control file fields. This
                           dictionary is merged with the values in
                           :data:`DEFAULT_CONTROL_FIELDS`.
    :raises: :exc:`~exceptions.ValueError` when a mandatory binary control
             field is not present in the provided control fields (see also
             :data:`MANDATORY_BINARY_CONTROL_FIELDS`).
    """
    logger.debug("Creating control file: %s", format_path(control_file))
    # Merge the defaults with the fields defined by the caller.
    merged_fields = merge_control_fields(DEFAULT_CONTROL_FIELDS, control_fields)
    # Sanity check for mandatory fields that are missing.
    missing_fields = [f for f in MANDATORY_BINARY_CONTROL_FIELDS if f not in merged_fields]
    if missing_fields:
        raise ValueError("Missing %s! (%s)" % (pluralize(len(missing_fields), "mandatory binary package control field"),
                                               concatenate(sorted(missing_fields))))
    # Make sure the parent directory of the control file exists.
    makedirs(os.path.dirname(control_file))
    # Remove the control file if it already exists in case it's a hard link to
    # an inode with multiple hard links that should _not_ be changed by us.
    if os.path.exists(control_file):
        os.unlink(control_file)
    # Write the control file.
    with open(control_file, 'wb') as handle:
        merged_fields.dump(handle)
Code example #21
File: __init__.py Project: jzoldak/pip-accel
    def install_requirements(self, requirements, **kw):
        """
        Manually install all requirements from binary distributions.

        :param requirements: A list of :py:class:`pip_accel.req.Requirement` objects.
        :param kw: Any keyword arguments are passed on to
                   :py:func:`~pip_accel.bdist.BinaryDistributionManager.install_binary_dist()`.
        """
        install_timer = Timer()
        logger.info("Installing from binary distributions ..")
        pip = os.path.join(sys.prefix, 'bin', 'pip')
        for requirement in requirements:
            if run('{pip} uninstall --yes {package} >/dev/null 2>&1', pip=pip, package=requirement.name):
                logger.info("Uninstalled previously installed package %s.", requirement.name)
            if requirement.is_editable:
                logger.debug("Installing %s (%s) in editable form using pip.", requirement.name, requirement.version)
                if not run('{pip} install --no-deps --editable {url} >/dev/null 2>&1', pip=pip, url=requirement.url):
                    msg = "Failed to install %s (%s) in editable form!"
                    raise Exception(msg % (requirement.name, requirement.version))
            else:
                binary_distribution = self.bdists.get_binary_dist(requirement)
                self.bdists.install_binary_dist(binary_distribution, **kw)
        logger.info("Finished installing %s in %s.",
                    pluralize(len(requirements), "requirement"),
                    install_timer)
Code example #22
File: __init__.py Project: jzoldak/pip-accel
    def unpack_source_dists(self, arguments):
        """
        Check whether there are local source distributions available for all
        requirements, unpack the source distribution archives and find the
        names and versions of the requirements. By using the ``pip install
        --no-install`` command we avoid reimplementing the following pip
        features:

        - Parsing of ``requirements.txt`` (including recursive parsing)
        - Resolution of possibly conflicting pinned requirements
        - Unpacking source distributions in multiple formats
        - Finding the name & version of a given source distribution

        :param arguments: The command line arguments to ``pip install ...`` (a
                          list of strings).
        :returns: A list of :py:class:`pip_accel.req.Requirement` objects.
        :raises: Any exceptions raised by pip, for example
                 :py:exc:`pip.exceptions.DistributionNotFound` when not all
                 requirements can be satisfied.
        """
        unpack_timer = Timer()
        logger.info("Unpacking source distribution(s) ..")
        # Install our custom package finder to force --no-index behavior.
        original_package_finder = pip_index_module.PackageFinder
        pip_install_module.PackageFinder = CustomPackageFinder
        try:
            requirements = self.get_pip_requirement_set(arguments, use_remote_index=False)
            logger.info("Finished unpacking %s in %s.",
                        pluralize(len(requirements), "source distribution"),
                        unpack_timer)
            return requirements
        finally:
            # Make sure to remove our custom package finder.
            pip_install_module.PackageFinder = original_package_finder
Code example #23
    def find_egg_info_file(self, pattern=''):
        """
        Find pip metadata files in unpacked source distributions.

        When pip unpacks a source distribution archive it creates a directory
        ``pip-egg-info`` which contains the package metadata in a declarative
        and easy to parse format. This method finds such metadata files.

        :param pattern: The :mod:`glob` pattern to search for (a string).
        :returns: The matched filename (a string) or :data:`None` when no file matches.
        """
        full_pattern = os.path.join(self.requirement.source_directory,
                                    'pip-egg-info', '*.egg-info', pattern)
        logger.debug("Looking for %r file(s) using pattern %r ..", pattern,
                     full_pattern)
        matches = glob.glob(full_pattern)
        if len(matches) > 1:
            msg = "Source distribution directory of %s (%s) contains multiple *.egg-info directories: %s"
            raise Exception(
                msg % (self.requirement.project_name, self.requirement.version,
                       concatenate(matches)))
        elif matches:
            logger.debug("Matched %s: %s.",
                         pluralize(len(matches), "file", "files"),
                         concatenate(matches))
            return matches[0]
        else:
            logger.debug("No matching %r files found.", pattern)
Code example #24
File: cli.py Project: ProjetoDSL/PerfMoon
def report_memory_usage(lines, label, memory_usage):
    lines.append("")
    workers = pluralize(len(memory_usage), "worker")
    lines.append("Memory usage of %s (%s):" % (label, workers))
    lines.append(" - Minimum: %s" % format_size(memory_usage.min))
    lines.append(" - Average: %s" % format_size(memory_usage.average))
    lines.append(" - Maximum: %s" % format_size(memory_usage.max))
Code example #25
 def render_conversation_summary(self, conversation):
     """Render a summary of which conversation a message is part of."""
     # Gather the names of the participants in the conversation, but exclude the
     # operator's name from private conversations (we can safely assume they
     # know who they are 😇).
     participants = sorted(
         set(contact.unambiguous_name if conversation.is_group_conversation
             else (contact.full_name or UNKNOWN_CONTACT_LABEL)
             for contact in conversation.participants
             if conversation.is_group_conversation
             or not self.is_operator(contact)))
     parts = [
         self.get_backend_name(conversation.account.backend),
         "group" if conversation.is_group_conversation else "private",
         "chat",
     ]
     if conversation.name:
         parts.append(
             self.generate_html("conversation_name",
                                html.escape(conversation.name)))
     parts.append("with")
     participants_html = concatenate(map(html.escape, participants))
     if conversation.is_group_conversation:
         parts.append(pluralize(len(participants), "participant"))
         parts.append("(%s)" % participants_html)
     else:
         parts.append(
             self.generate_html("conversation_name", participants_html))
     if conversation.account.name_is_significant:
         parts.append("in %s account" % conversation.account.name)
     return " ".join(parts)
Code example #26
def create_control_file(control_file, control_fields):
    """
    Create a Debian control file.

    :param control_file: The filename of the control file to create (a string).
    :param control_fields: A dictionary with control file fields. This
                           dictionary is merged with the values in
                           :data:`DEFAULT_CONTROL_FIELDS`.
    :raises: :exc:`~exceptions.ValueError` when a mandatory binary control
             field is not present in the provided control fields (see also
             :data:`MANDATORY_BINARY_CONTROL_FIELDS`).
    """
    logger.debug("Creating control file: %s", format_path(control_file))
    # Merge the defaults with the fields defined by the caller.
    merged_fields = merge_control_fields(DEFAULT_CONTROL_FIELDS, control_fields)
    # Sanity check for mandatory fields that are missing.
    missing_fields = [f for f in MANDATORY_BINARY_CONTROL_FIELDS if f not in merged_fields]
    if missing_fields:
        raise ValueError(
            "Missing %s! (%s)"
            % (
                pluralize(len(missing_fields), "mandatory binary package control field"),
                concatenate(sorted(missing_fields)),
            )
        )
    # Make sure the parent directory of the control file exists.
    makedirs(os.path.dirname(control_file))
    # Remove the control file if it already exists in case it's a hard link to
    # an inode with multiple hard links that should _not_ be changed by us.
    if os.path.exists(control_file):
        os.unlink(control_file)
    # Write the control file.
    with open(control_file, "wb") as handle:
        merged_fields.dump(handle)
Code example #27
File: __init__.py Project: jonatlib/pip-accel
    def unpack_source_dists(self, arguments, use_wheels=False):
        """
        Find and unpack local source distributions and discover their metadata.

        :param arguments: The command line arguments to ``pip install ...`` (a
                          list of strings).
        :param use_wheels: Whether pip and pip-accel are allowed to use wheels_
                           (:data:`False` by default for backwards compatibility
                           with callers that use pip-accel as a Python API).
        :returns: A list of :class:`pip_accel.req.Requirement` objects.
        :raises: Any exceptions raised by pip, for example
                 :exc:`pip.exceptions.DistributionNotFound` when not all
                 requirements can be satisfied.

        This function checks whether there are local source distributions
        available for all requirements, unpacks the source distribution
        archives and finds the names and versions of the requirements. By using
        the ``pip install --download`` command we avoid reimplementing the
        following pip features:

        - Parsing of ``requirements.txt`` (including recursive parsing).
        - Resolution of possibly conflicting pinned requirements.
        - Unpacking source distributions in multiple formats.
        - Finding the name & version of a given source distribution.
        """
        unpack_timer = Timer()
        logger.info("Unpacking distribution(s) ..")
        with PatchedAttribute(pip_install_module, 'PackageFinder', CustomPackageFinder):
            requirements = self.get_pip_requirement_set(arguments, use_remote_index=False, use_wheels=use_wheels)
            logger.info("Finished unpacking %s in %s.", pluralize(len(requirements), "distribution"), unpack_timer)
            return requirements
Code example #28
File: concurrent.py Project: xolox/python-executor
 def error_message(self):
     """An error message that explains which commands *failed unexpectedly* (a string)."""
     summary = format("%i out of %s failed unexpectedly:",
                      self.pool.num_failed,
                      pluralize(self.pool.num_commands, "command"))
     details = "\n".join(" - %s" % cmd.error_message for cmd in self.commands)
     return summary + "\n\n" + details
Code example #29
    def ranked_mirrors(self):
        """
        A list of :class:`CandidateMirror` objects (ordered from best to worst).

        The value of this property is computed by concurrently testing the
        mirrors in :attr:`available_mirrors` for the following details:

        - availability (:attr:`~CandidateMirror.is_available`)
        - connection speed (:attr:`~CandidateMirror.bandwidth`)
        - update status (:attr:`~CandidateMirror.is_updating`)

        The number of mirrors to test is limited to :attr:`max_mirrors` and you
        can change the number of simultaneous HTTP connections allowed by
        setting :attr:`concurrency`.
        """
        timer = Timer()
        # Sort the candidates based on the currently available information
        # (and transform the input argument into a list in the process).
        mirrors = sorted(self.available_mirrors, key=lambda c: c.sort_key, reverse=True)
        # Limit the number of candidates to a reasonable number?
        if self.max_mirrors and len(mirrors) > self.max_mirrors:
            mirrors = mirrors[:self.max_mirrors]
        # Prepare the Release.gpg URLs to fetch.
        mapping = dict((c.release_gpg_url, c) for c in mirrors)
        num_mirrors = pluralize(len(mapping), "mirror")
        logger.info("Checking %s for availability and performance ..", num_mirrors)
        # Concurrently fetch the Release.gpg files.
        with AutomaticSpinner(label="Checking mirrors"):
            for url, data, elapsed_time in fetch_concurrent(mapping.keys(), concurrency=self.concurrency):
                candidate = mapping[url]
                candidate.release_gpg_contents = data
                candidate.release_gpg_latency = elapsed_time
        # Concurrently check for Archive-Update-in-Progress markers.
        update_mapping = dict((c.archive_update_in_progress_url, c) for c in mirrors if c.is_available)
        logger.info("Checking %s for Archive-Update-in-Progress marker ..",
                    pluralize(len(update_mapping), "mirror"))
        with AutomaticSpinner(label="Checking mirrors"):
            for url, data, elapsed_time in fetch_concurrent(update_mapping.keys(), concurrency=self.concurrency):
                update_mapping[url].is_updating = data is not None
        # Sanity check our results.
        mirrors = list(mapping.values())
        logger.info("Finished checking %s (took %s).", num_mirrors, timer)
        if not any(c.is_available for c in mirrors):
            raise Exception("It looks like all %s are unavailable!" % num_mirrors)
        if all(c.is_updating for c in mirrors):
            logger.warning("It looks like all %s are being updated?!", num_mirrors)
        return sorted(mirrors, key=lambda c: c.sort_key, reverse=True)
Code example #30
File: cli.py Project: xolox/python-apache-manager
def report_memory_usage(lines, label, memory_usage):
    """Create a textual summary of Apache worker memory usage."""
    lines.append("")
    workers = pluralize(len(memory_usage), "worker")
    lines.append("Memory usage of %s (%s):" % (label, workers))
    lines.append(" - Minimum: %s" % format_size(memory_usage.min))
    lines.append(" - Average: %s" % format_size(memory_usage.average))
    lines.append(" - Maximum: %s" % format_size(memory_usage.max))
Code example #31
 def error_message(self):
     """An error message that explains which commands *failed unexpectedly* (a string)."""
     summary = format("%i out of %s failed unexpectedly:",
                      self.pool.num_failed,
                      pluralize(self.pool.num_commands, "command"))
     details = "\n".join(" - %s" % cmd.error_message
                         for cmd in self.commands)
     return summary + "\n\n" + details
Code example #32
def report_memory_usage(lines, label, memory_usage):
    """Create a textual summary of Apache worker memory usage."""
    lines.append("")
    workers = pluralize(len(memory_usage), "worker")
    lines.append("Memory usage of %s (%s):" % (label, workers))
    lines.append(" - Minimum: %s" % format_size(memory_usage.min))
    lines.append(" - Average: %s" % format_size(memory_usage.average))
    lines.append(" - Maximum: %s" % format_size(memory_usage.max))
Code example #33
def discover_mirrors():
    """
    Discover available Ubuntu mirrors by querying :data:`MIRRORS_URL`.

    :returns: A set of :class:`.CandidateMirror` objects that have their
              :attr:`~.CandidateMirror.mirror_url` property set and may have
              the :attr:`~.CandidateMirror.last_updated` property set.
    :raises: If no mirrors are discovered an exception is raised.

    An example run:

    >>> from apt_mirror_updater.backends.ubuntu import discover_mirrors
    >>> from pprint import pprint
    >>> pprint(discover_mirrors())
    set([CandidateMirror(mirror_url='http://archive.ubuntu.com/ubuntu/'),
         CandidateMirror(mirror_url='http://ftp.nluug.nl/os/Linux/distr/ubuntu/'),
         CandidateMirror(mirror_url='http://ftp.snt.utwente.nl/pub/os/linux/ubuntu/'),
         CandidateMirror(mirror_url='http://ftp.tudelft.nl/archive.ubuntu.com/'),
         CandidateMirror(mirror_url='http://mirror.1000mbps.com/ubuntu/'),
         CandidateMirror(mirror_url='http://mirror.amsiohosting.net/archive.ubuntu.com/'),
         CandidateMirror(mirror_url='http://mirror.i3d.net/pub/ubuntu/'),
         CandidateMirror(mirror_url='http://mirror.nforce.com/pub/linux/ubuntu/'),
         CandidateMirror(mirror_url='http://mirror.nl.leaseweb.net/ubuntu/'),
         CandidateMirror(mirror_url='http://mirror.transip.net/ubuntu/ubuntu/'),
         ...])
    """
    timer = Timer()
    mirrors = set()
    logger.info("Discovering Ubuntu mirrors at %s ..", MIRRORS_URL)
    response = fetch_url(MIRRORS_URL, retry=True)
    soup = BeautifulSoup(response, 'html.parser')
    for table in soup.findAll('table'):
        for tr in table.findAll('tr'):
            for a in tr.findAll('a', href=True):
                # Check if the link looks like a mirror URL.
                if (a['href'].startswith(('http://', 'https://'))
                        and a['href'].endswith('/ubuntu/')):
                    # Try to figure out how far behind the mirror is reported to be.
                    last_updated = None
                    text = u''.join(tr.findAll(text=True))
                    for status_label, num_seconds in MIRROR_STATUSES:
                        if status_label in text:
                            last_updated = num_seconds
                            break
                    # Add the mirror to our overview.
                    mirrors.add(
                        CandidateMirror(
                            mirror_url=a['href'],
                            last_updated=last_updated,
                        ))
                    # Skip to the next row.
                    break
    if not mirrors:
        raise Exception("Failed to discover any Ubuntu mirrors! (using %s)" %
                        MIRRORS_URL)
    logger.info("Discovered %s in %s.", pluralize(len(mirrors),
                                                  "Ubuntu mirror"), timer)
    return mirrors
Code example #34
    def install_requirements(self, requirements, **kw):
        """
        Manually install a requirement set from binary and/or wheel distributions.

        :param requirements: A list of :class:`pip_accel.req.Requirement` objects.
        :param kw: Any keyword arguments are passed on to
                   :func:`~pip_accel.bdist.BinaryDistributionManager.install_binary_dist()`.
        :returns: The number of packages that were just installed (an integer).
        """
        install_timer = Timer()
        install_types = []
        if any(not req.is_wheel for req in requirements):
            install_types.append('binary')
        if any(req.is_wheel for req in requirements):
            install_types.append('wheel')
        logger.info("Installing from %s distributions ..",
                    concatenate(install_types))
        # Track installed files by default (unless the caller specifically opted out).
        kw.setdefault('track_installed_files', True)
        num_installed = 0
        for requirement in requirements:
            # If we're upgrading over an older version, first remove the
            # old version to make sure we don't leave files from old
            # versions around.
            if is_installed(requirement.name):
                uninstall(requirement.name)
            # When installing setuptools we need to uninstall distribute,
            # otherwise distribute will shadow setuptools and all sorts of
            # strange issues can occur (e.g. upgrading to the latest
            # setuptools to gain wheel support and then having everything
            # blow up because distribute doesn't know about wheels).
            if requirement.name == 'setuptools' and is_installed('distribute'):
                uninstall('distribute')
            if requirement.is_editable:
                logger.debug("Installing %s in editable form using pip.",
                             requirement)
                command = InstallCommand()
                opts, args = command.parse_args(
                    ['--no-deps', '--editable', requirement.source_directory])
                command.run(opts, args)
            elif requirement.is_wheel:
                logger.info("Installing %s wheel distribution using pip ..",
                            requirement)
                wheel_version = pip_wheel_module.wheel_version(
                    requirement.source_directory)
                pip_wheel_module.check_compatibility(wheel_version,
                                                     requirement.name)
                requirement.pip_requirement.move_wheel_files(
                    requirement.source_directory)
            else:
                binary_distribution = self.bdists.get_binary_dist(requirement)
                self.bdists.install_binary_dist(binary_distribution, **kw)
            num_installed += 1
        logger.info("Finished installing %s in %s.",
                    pluralize(num_installed, "requirement"), install_timer)
        return num_installed
Code example #35
File: server.py Project: pombredanne/python-executor
 def port_number(self):
     """A dynamically selected port number that was not in use at the moment it was selected (an integer)."""
     self.logger.debug("Looking for a free ephemeral port (for %s traffic) ..", self.scheme.upper())
     for i in itertools.count(1):
         port_number = random.randint(49152, 65535)
         if not self.is_connected(port_number):
             self.logger.debug("Took %s to select free ephemeral port (%s).",
                               pluralize(i, "attempt"),
                               self.render_location(port_number=port_number))
             return port_number
Code example #36
    def __init__(self, **kw):
        """
        Initialize a :class:`PropertyManager` object.

        :param kw: Any keyword arguments are passed on to :func:`set_properties()`.
        """
        self.set_properties(**kw)
        missing_properties = self.missing_properties
        if missing_properties:
            msg = "missing %s" % pluralize(len(missing_properties), "required argument")
            raise TypeError("%s (%s)" % (msg, concatenate(missing_properties)))
Code example #37
    def __init__(self, **kw):
        """
        Initialize a :class:`PropertyManager` object.

        :param kw: Any keyword arguments are passed on to :func:`set_properties()`.
        """
        self.set_properties(**kw)
        missing_properties = self.missing_properties
        if missing_properties:
            msg = "missing %s" % pluralize(len(missing_properties), "required argument")
            raise TypeError("%s (%s)" % (msg, concatenate(missing_properties)))
Code example #38
File: tcp.py Project: xolox/python-executor
 def port_number(self):
     """A dynamically selected free ephemeral port number (an integer between 49152 and 65535)."""
     timer = Timer()
     logger.debug("Looking for free ephemeral port number ..")
     for i in itertools.count(1):
         value = self.ephemeral_port_number
         set_property(self, 'port_number', value)
         if not self.is_connected:
             logger.debug("Found free ephemeral port number %s after %s (took %s).",
                          self, pluralize(i, "attempt"), timer)
             return value
Code example #39
File: __init__.py Project: xolox/python-gentag
    def add_object(self, value, *tags):
        """
        Add an object to the scope.

        :param value: The object to add (any hashable value).
        :param tags: The names of tags to associate the object with.
        """
        logger.debug("Tagging object %r with %s: %s",
                     value, pluralize(len(tags), "tag"),
                     ", ".join(map(str, tags)))
        for name in tags:
            self.tags[name].objects.add(value)
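
add_object() assumes that self.tags maps tag names to objects exposing an .objects set. A simplified stand-alone sketch of that data structure (invented for illustration, not the actual gentag classes) could look like this:

# Simplified sketch of the mapping assumed by add_object(); the real
# gentag Tag/Scope classes are more elaborate than this.
from collections import defaultdict

class Tag(object):
    def __init__(self):
        self.objects = set()

tags = defaultdict(Tag)
for name in ('python', 'tagging'):
    tags[name].objects.add('some-object')

print(sorted(tags['python'].objects))  # -> ['some-object']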
Code example #40
File: __init__.py Project: jzoldak/pip-accel
    def installation_refused(self, requirement, missing_dependencies, reason):
        """
        Raise :py:exc:`.DependencyInstallationRefused` with a user friendly message.

        :param requirement: A :py:class:`.Requirement` object.
        :param missing_dependencies: A list of strings with missing dependencies.
        :param reason: The reason why installation was refused (a string).
        """
        msg = "Missing %s (%s) required by Python package %s (%s) but %s!"
        raise DependencyInstallationRefused(msg % (pluralize(len(missing_dependencies), "system package", "system packages"),
                                                   concatenate(missing_dependencies), requirement.name, requirement.version,
                                                   reason))
Code example #41
 def port_number(self):
     """A dynamically selected free ephemeral port number (an integer between 49152 and 65535)."""
     timer = Timer()
     logger.debug("Looking for free ephemeral port number ..")
     for i in itertools.count(1):
         value = self.ephemeral_port_number
         set_property(self, 'port_number', value)
         if not self.is_connected:
             logger.debug(
                 "Found free ephemeral port number %s after %s (took %s).",
                 self, pluralize(i, "attempt"), timer)
             return value
Code example #42
def discover_mirror_selection():
    """Discover "geographically suitable" Ubuntu mirrors."""
    timer = Timer()
    logger.info("Identifying fast Ubuntu mirrors using %s ..", MIRROR_SELECTION_URL)
    data = fetch_url(MIRROR_SELECTION_URL, retry=False)
    dammit = UnicodeDammit(data)
    mirrors = set(
        CandidateMirror(mirror_url=mirror_url.strip())
        for mirror_url in dammit.unicode_markup.splitlines()
        if mirror_url and not mirror_url.isspace()
    )
    logger.debug("Found %s in %s.", pluralize(len(mirrors), "fast Ubuntu mirror"), timer)
    return mirrors
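
fetch_url(), UnicodeDammit and CandidateMirror are project helpers that are not shown in this excerpt. Assuming MIRROR_SELECTION_URL serves a plain-text list of mirror URLs (one per line, e.g. Ubuntu's mirrors.txt service), the discovery step can be sketched with the standard library alone:

# Rough stand-alone sketch (assumptions: the URL returns a UTF-8 plain-text
# list of mirror URLs, one per line; CandidateMirror objects are replaced
# by plain strings).
from urllib.request import urlopen

MIRROR_SELECTION_URL = 'http://mirrors.ubuntu.com/mirrors.txt'  # assumed value

def discover_mirror_urls(url=MIRROR_SELECTION_URL):
    with urlopen(url) as response:
        text = response.read().decode('utf-8', errors='replace')
    return {line.strip() for line in text.splitlines() if line.strip()}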
Code example #43
    def write_file(self, filename, contents):
        """
        Write a text file and provide feedback to the user.

        :param filename: The pathname of the file to write (a string).
        :param contents: The new contents of the file (a string).
        """
        logger.info("Writing file: %s", format_path(filename))
        contents = contents.rstrip() + b"\n"
        self.context.write_file(filename, contents)
        logger.debug("Wrote %s to %s.",
                     pluralize(len(contents.splitlines()), "line"),
                     format_path(filename))
Code example #45
    def execute_file(self, filename):
        """
        Execute a file and provide feedback to the user.

        :param filename: The pathname of the file to execute (a string).
        :returns: Whatever the executed file returns on stdout (a string).
        """
        logger.info("Executing file: %s", format_path(filename))
        contents = self.context.execute(filename, capture=True).stdout
        num_lines = len(contents.splitlines())
        logger.debug("Execution of %s yielded % of output.",
                     format_path(filename),
                     pluralize(num_lines, 'line'))
        return contents.rstrip()
Code example #46
File: __init__.py Project: fjgauthier/pip-accel
    def installation_refused(self, requirement, missing_dependencies, reason):
        """
        Raise :py:exc:`.DependencyInstallationRefused` with a user friendly message.

        :param requirement: A :py:class:`.Requirement` object.
        :param missing_dependencies: A list of strings with missing dependencies.
        :param reason: The reason why installation was refused (a string).
        """
        msg = "Missing %s (%s) required by Python package %s (%s) but %s!"
        raise DependencyInstallationRefused(
            msg %
            (pluralize(len(missing_dependencies), "system package",
                       "system packages"), concatenate(missing_dependencies),
             requirement.name, requirement.version, reason))
Code example #47
File: __init__.py Project: jonatlib/pip-accel
    def install_requirements(self, requirements, **kw):
        """
        Manually install a requirement set from binary and/or wheel distributions.

        :param requirements: A list of :class:`pip_accel.req.Requirement` objects.
        :param kw: Any keyword arguments are passed on to
                   :func:`~pip_accel.bdist.BinaryDistributionManager.install_binary_dist()`.
        :returns: The number of packages that were just installed (an integer).
        """
        install_timer = Timer()
        install_types = []
        if any(not req.is_wheel for req in requirements):
            install_types.append('binary')
        if any(req.is_wheel for req in requirements):
            install_types.append('wheel')
        logger.info("Installing from %s distributions ..", concatenate(install_types))
        # Track installed files by default (unless the caller specifically opted out).
        kw.setdefault('track_installed_files', True)
        num_installed = 0
        for requirement in requirements:
            # If we're upgrading over an older version, first remove the
            # old version to make sure we don't leave files from old
            # versions around.
            if is_installed(requirement.name):
                uninstall(requirement.name)
            # When installing setuptools we need to uninstall distribute,
            # otherwise distribute will shadow setuptools and all sorts of
            # strange issues can occur (e.g. upgrading to the latest
            # setuptools to gain wheel support and then having everything
            # blow up because distribute doesn't know about wheels).
            if requirement.name == 'setuptools' and is_installed('distribute'):
                uninstall('distribute')
            if requirement.is_editable:
                logger.debug("Installing %s in editable form using pip.", requirement)
                command = InstallCommand()
                opts, args = command.parse_args(['--no-deps', '--editable', requirement.source_directory])
                command.run(opts, args)
            elif requirement.is_wheel:
                logger.info("Installing %s wheel distribution using pip ..", requirement)
                wheel_version = pip_wheel_module.wheel_version(requirement.source_directory)
                pip_wheel_module.check_compatibility(wheel_version, requirement.name)
                requirement.pip_requirement.move_wheel_files(requirement.source_directory)
            else:
                binary_distribution = self.bdists.get_binary_dist(requirement)
                self.bdists.install_binary_dist(binary_distribution, **kw)
            num_installed += 1
        logger.info("Finished installing %s in %s.",
                    pluralize(num_installed, "requirement"),
                    install_timer)
        return num_installed
Code example #48
    def __init__(self, config):
        """
        Initialize the system package dependency manager.

        :param config: The pip-accel configuration (a :class:`.Config`
                       object).
        """
        # Defaults for unsupported systems.
        self.list_command = 'true'
        self.install_command = 'true'
        self.dependencies = {}
        # Keep a reference to the pip-accel configuration.
        self.config = config
        # Initialize the platform specific package manager interface.
        directory = os.path.dirname(os.path.abspath(__file__))
        for filename in sorted(os.listdir(directory)):
            pathname = os.path.join(directory, filename)
            if filename.endswith('.ini') and os.path.isfile(pathname):
                logger.debug("Loading configuration from %s ..", pathname)
                parser = configparser.RawConfigParser()
                parser.read(pathname)
                # Check if the package manager is supported.
                supported_command = parser.get('commands', 'supported')
                logger.debug("Checking if configuration is supported: %s",
                             supported_command)
                with open(os.devnull, 'wb') as null_device:
                    if subprocess.call(supported_command,
                                       shell=True,
                                       stdout=null_device,
                                       stderr=subprocess.STDOUT) == 0:
                        logger.debug(
                            "System package manager configuration is supported!"
                        )
                        # Get the commands to list and install system packages.
                        self.list_command = parser.get('commands', 'list')
                        self.install_command = parser.get(
                            'commands', 'install')
                        # Get the known dependencies.
                        self.dependencies = dict(
                            (n.lower(), v.split())
                            for n, v in parser.items('dependencies'))
                        logger.debug(
                            "Loaded dependencies of %s: %s",
                            pluralize(len(self.dependencies),
                                      "Python package"),
                            concatenate(sorted(self.dependencies)))
                    else:
                        logger.debug(
                            "Command failed, assuming configuration doesn't apply .."
                        )
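
The loop above reads one *.ini file per supported platform, expecting a [commands] section with supported, list and install entries and a [dependencies] section that maps Python package names to system packages. A hypothetical configuration (invented names and values, shown only to illustrate the expected layout) parses with the same configparser calls:

# Hypothetical platform configuration, parsed the same way as the *.ini
# files above (Python 3 configparser; names and values are invented).
import configparser

EXAMPLE_INI = """\
[commands]
supported = which apt-get
list = dpkg -l
install = apt-get install --yes

[dependencies]
lxml = libxml2-dev libxslt1-dev
psycopg2 = libpq-dev
"""

parser = configparser.RawConfigParser()
parser.read_string(EXAMPLE_INI)
dependencies = dict((n.lower(), v.split()) for n, v in parser.items('dependencies'))
print(dependencies['lxml'])  # -> ['libxml2-dev', 'libxslt1-dev']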
Code example #49
    def read_file(self, filename):
        """
        Read a text file and provide feedback to the user.

        :param filename: The pathname of the file to read (a string).
        :returns: The contents of the file (a string).
        """
        logger.info("Reading file: %s", format_path(filename))
        contents = self.context.read_file(filename)
        num_lines = len(contents.splitlines())
        logger.debug("Read %s from %s.",
                     pluralize(num_lines, 'line'),
                     format_path(filename))
        return contents.rstrip()
Code example #50
 def stats_cmd(self, arguments):
     """Show some statistics about the local chat archive."""
     logger.info("Statistics about %s:", format_path(self.database_file))
     logger.info(" - Number of contacts: %i", self.num_contacts)
     logger.info(" - Number of conversations: %i", self.num_conversations)
     logger.info(" - Number of messages: %i", self.num_messages)
     logger.info(" - Database file size: %s",
                 format_size(os.path.getsize(self.database_file)))
     logger.info(
         " - Size of %s: %s",
         pluralize(self.num_messages, "plain text chat message"),
         format_size(
             self.session.query(
                 func.coalesce(func.sum(func.length(Message.text)),
                               0)).scalar()),
     )
     logger.info(
         " - Size of %s: %s",
         pluralize(self.num_html_messages, "HTML formatted chat message"),
         format_size(
             self.session.query(
                 func.coalesce(func.sum(func.length(Message.html)),
                               0)).scalar()),
     )
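
Both size figures come from a single aggregate query of the form COALESCE(SUM(LENGTH(column)), 0). A self-contained SQLAlchemy sketch of that query (a minimal Message model invented for illustration; SQLAlchemy 1.4+ assumed) is:

# Minimal sketch of the aggregate used above; the Message model and the
# in-memory SQLite database are invented for illustration.
from sqlalchemy import Column, Integer, Text, create_engine, func
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Message(Base):
    __tablename__ = 'messages'
    id = Column(Integer, primary_key=True)
    text = Column(Text)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add_all([Message(text='hello'), Message(text='world!')])
    session.commit()
    total = session.query(func.coalesce(func.sum(func.length(Message.text)), 0)).scalar()
    print(total)  # -> 11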
Code example #51
File: __init__.py Project: jzoldak/pip-accel
    def find_known_dependencies(self, requirement):
        """
        Find the known dependencies of a Python package.

        :param requirement: A :py:class:`.Requirement` object.
        :returns: A list of strings with system package names.
        """
        logger.info("Checking for known dependencies of %s ..", requirement.name)
        known_dependencies = sorted(self.dependencies.get(requirement.name.lower(), []))
        if known_dependencies:
            logger.info("Found %s: %s", pluralize(len(known_dependencies), "known dependency", "known dependencies"),
                        concatenate(known_dependencies))
        else:
            logger.info("No known dependencies... Maybe you have a suggestion?")
        return known_dependencies