Example #1
0
    def _get_pkgs_install_date(self, pkg_dicts):
        """Fill in "install_date" for each package dict, when determinable.

        Uses the timestamp (stat %Y) of each package's dpkg list file as a
        proxy for the install date. Packages whose list file is missing are
        left untouched.

        Parameters
        ----------
        pkg_dicts : list of dict
            Package dicts with at least a "name" key; updated in place.
        """
        # Convert package names to dpkg list filenames. Built once and
        # reused below when mapping results back onto pkg_dicts.
        queries = [self._pkg_name_to_dpkg_list_file(p["name"])
                   for p in pkg_dicts]
        # Call stat in batches; a missing file is filtered, not fatal
        exec_gen = execute_command_batch(
            self._session, ['stat', '-c', '%n: %Y'], queries,
            cmd_err_filter('No such file or directory'))
        # Parse and accumulate stat results in a dict
        results = {}
        for (out, _, exc) in exec_gen:
            if exc:
                out = exc.stdout  # One file not found, so continue
            # Parse the output and store by filename
            for outline in out.splitlines():
                # rsplit with maxsplit=1 so a filename that itself
                # contains ": " cannot break the parse
                (fname, ftime) = outline.rsplit(": ", 1)
                results[fname] = str(
                    pytz.utc.localize(
                        datetime.utcfromtimestamp(float(ftime))))

        # Now look the packages up in the results, reusing the filenames
        # computed above instead of recomputing them per package
        for p, fname in zip(pkg_dicts, queries):
            if fname in results:
                p["install_date"] = results[fname]
Example #2
0
 def _get_pkgs_details_from_apt_cache_show(self, pkg_dicts):
     """Update pkg_dicts in place with details from "apt-cache show".

     For each package dict, fills in source_name, source_version, size,
     md5, sha1, and sha256 when "apt-cache show" reports them; packages
     not found by apt-cache are left unchanged (with a warning).

     Parameters
     ----------
     pkg_dicts : list of dict
         Package dicts with "name", "architecture", and "version" keys.
     """
     # Convert package names to name:arch=version format
     queries = ["%(name)s:%(architecture)s=%(version)s" % p
                for p in pkg_dicts]
     # Call "apt-cache show" in batches
     exec_gen = execute_command_batch(self._session, ['apt-cache', 'show'],
                                      queries)
     # Parse and accumulate "apt-cache show" results
     results = (parse_apt_cache_show_pkgs_output(out)
                for (out, _, _) in exec_gen)
     # Combine sequence of lists
     results = itertools.chain.from_iterable(results)
     # Turn apt-cache show results into a lookup table by package name
     results = self.create_lookup_from_apt_cache_show(results)
     # Loop through each package and find the respective apt-cache results
     for p in pkg_dicts:
         r = results.get("%(name)s:%(architecture)s" % p)
         if not r:
             # Lazy %-style args: only formatted if the warning is emitted
             lgr.warning("Was unable to run apt-cache show for %s",
                         p["name"])
             continue
         # Update the dictionary with found results (if present)
         for f in ("source_name", "source_version", "size", "md5",
                   "sha1", "sha256"):
             if f in r:
                 p[f] = r[f]
Example #3
0
 def _get_pkgs_arch_and_version(self, pkg_dicts):
     """Update pkg_dicts in place with installed architecture and version.

     Runs "dpkg -s" (whose output shares the "apt-cache show" format) and
     copies "architecture" and "version" onto each package dict found;
     packages dpkg does not know about are left unchanged (with a
     warning).

     Parameters
     ----------
     pkg_dicts : list of dict
         Package dicts with "name" and (possibly null) "architecture".
     """

     def _query(p):
         # "architecture" may be null, so only qualify the name when set
         return (p["name"] if not p["architecture"]
                 else "%(name)s:%(architecture)s" % p)

     # Convert package names to name[:arch] format. Built once and reused
     # below instead of being recomputed per package in the loop.
     queries = [_query(p) for p in pkg_dicts]
     # Call "dpkg -s" in batches
     exec_gen = execute_command_batch(self._session, ['dpkg', '-s'],
                                      queries)
     # Parse and accumulate "dpkg -s" results
     # dpkg -s uses the same output as apt-cache show pkg
     results = (parse_apt_cache_show_pkgs_output(out)
                for (out, _, _) in exec_gen)
     # Combine sequence of lists
     results = itertools.chain.from_iterable(results)
     # Turn dpkg -s results into a lookup table by package name
     results = self.create_lookup_from_apt_cache_show(results)
     # Loop through each package and find the respective dpkg results
     for p, query in zip(pkg_dicts, queries):
         r = results.get(query)
         if not r:
             # Lazy %-style args: only formatted if the warning is emitted
             lgr.warning("Was unable to run dpkg -s for %s", p["name"])
             continue
         # Update the dictionary with found results
         p["architecture"] = r["architecture"]
         p["version"] = r["version"]
Example #4
0
 def _get_packagefields_for_files(self, files):
     """Map each queried file path to the package fields that own it.

     Parameters
     ----------
     files : iterable of str
         Paths to look up via "dpkg-query -S". Paths matched by no
         package are simply absent from the result (the dpkg-query
         "no path found" error is filtered, not fatal).

     Returns
     -------
     dict
         Mapping of found path -> dict of package fields (name,
         architecture, ...).

     Raises
     ------
     ValueError
         If a parsed dpkg-query record carries no path.
     """
     # Call dpkg query in batches
     exec_gen = execute_command_batch(
         self._session, ['dpkg-query', '-S'], files,
         cmd_err_filter('no path found matching pattern'))
     # Parse and accumulate stat results in a dict
     file_to_package_dict = {}
     for (out, _, exc) in exec_gen:
         if exc:
             out = exc.stdout  # One file not found, so continue
         # Now go through the output and assign packages to files
         for outline in out.splitlines():
             # Parse package name (architecture) and path
             # TODO: Handle query of /bin/sh better
             outdict = self._parse_dpkgquery_line(outline)
             if not outdict:
                 lgr.debug("Skipping line %s", outline)
                 continue
             # Pull the found path from the dictionary
             found_name = outdict.pop('path')
             if not found_name:
                 # A record with no path is a hard failure; the previous
                 # message claimed "skipping", which the code never did
                 raise ValueError(
                     "Record %s got no path defined" % repr(outdict)
                 )
             # Associate the file to the package name (and architecture)
             pkg = outdict
             lgr.debug("Identified file %r to belong to package %s",
                       found_name, pkg)
             file_to_package_dict[found_name] = pkg
     return file_to_package_dict
Example #5
0
    def install_packages(self, session=None):
        """Install each environment's local, non-editable packages.

        Creates the environment's virtualenv if its path does not exist,
        then pip-installs the traced name==version specs in batches.

        Parameters
        ----------
        session : optional
            Session to execute commands in; defaults to a local session.
        """
        if session is None:
            session = get_local_session()
        for env in self.environments:
            # TODO: Deal with system and editable packages.
            specs = ["{0}=={1}".format(pkg.name, pkg.version)
                     for pkg in env.packages
                     if pkg.local and not pkg.editable]
            if not specs:
                lgr.info("No local, non-editable packages found")
                continue

            # TODO: Right now we just use the python to invoke "virtualenv
            # --python=..." when the directory doesn't exist, but we should
            # eventually use the yet-to-exist "satisfies" functionality to
            # check whether an existing virtual environment has the right
            # python (and maybe other things).
            ver = parse_semantic_version(env.python_version)
            pyver = "{0}.{1}".format(ver.major, ver.minor)

            if not session.exists(env.path):
                # The location and version of virtualenv are recorded at the
                # time of tracing, but should we use these values?  For now,
                # use a plain "virtualenv" below on the basis that we just use
                # "apt-get" and "git" elsewhere.
                session.execute_command(
                    ["virtualenv", "--python=python" + pyver, env.path])
            # Drain the generator so the batched pip installs actually run
            for _ in execute_command_batch(
                    session, [env.path + "/bin/pip", "install"], specs):
                pass
Example #6
0
def _pip_batched_show(session, which_pip, pkgs):
    """Yield (pkg, info) pairs from batched "pip show -f" invocations.

    Each batch's stacked output is split on the "---" lines pip emits
    between package entries, and each entry is parsed individually.
    """
    separator = re.compile(r"^---$", flags=re.MULTILINE)
    outputs = execute_command_batch(session, [which_pip, "show", "-f"], pkgs)
    chunks = itertools.chain.from_iterable(
        separator.split(stacked) for stacked, _, _ in outputs)
    for name, chunk in zip(pkgs, chunks):
        yield name, parse_pip_show(chunk)
Example #7
0
 def _get_pkgs_versions_and_sources(self, pkg_dicts):
     """Update pkg_dicts in place with a "versions" table from apt-cache.

     For each package, "versions" maps an available version string to the
     list of short source names providing it. As a side effect, every
     newly encountered apt source line is named and registered in
     self._apt_sources and self._source_line_to_name_map.

     Parameters
     ----------
     pkg_dicts : list of dict
         Package dicts with "name" and "architecture" keys.
     """
     # Convert package names to name:arch format
     queries = ["%(name)s:%(architecture)s" % p for p in pkg_dicts]
     # Call apt-cache policy in batches
     exec_gen = execute_command_batch(self._session,
                                      ['apt-cache', 'policy'],
                                      queries)
     # Parse results into a single generator
     results = (parse_apt_cache_policy_pkgs_output(out)
                for (out, _, _) in exec_gen)
     # Combine sequence of dicts
     results = join_sequence_of_dicts(results)
     # Loop through each package and find the respective apt-cache results
     for p in pkg_dicts:
         ver = results.get("%(name)s:%(architecture)s" % p)
         if not ver:
             # Fall back to the bare package name
             ver = results.get(p["name"])
         if not ver:
             # Lazy %-style args: only formatted if the warning is emitted
             lgr.warning("Was unable to get version table for %s",
                         p["name"])
             continue
         # Now construct the version table
         ver_dict = {}
         for v in ver.get("versions"):
             key = v["version"]
             ver_dict[key] = []
             for s in v.get("sources"):
                 s = s["source"]
                 # If we haven't named the source yet, name it
                 if s not in self._source_line_to_name_map:
                     # Make sure we can find the source
                     if s not in self._all_apt_sources:
                         lgr.warning("Cannot find source %s", s)
                         continue
                     # Grab and name the source
                     source = self._all_apt_sources[s]
                     src_name = self._get_apt_source_name(source)
                     source.name = src_name
                     # Now add the source to our used sources
                     self._apt_sources[src_name] = source
                     # add the name for easy future lookup
                     self._source_line_to_name_map[s] = src_name
                 # Look up and add the short name for the source
                 ver_dict[key].append(
                     self._source_line_to_name_map[s])
         p["versions"] = ver_dict