Example 1
def load_package_groups_data_g(paths=[],
                               data=_DATA_0,
                               profkey="system_profile"):
    """
    :param paths: A list of package group data dirs
    """
    sysprof = bunch.bunchify(data.get(profkey, {}))

    for path in paths:
        logging.debug("Loading profiles from: " + path)
        pgdata = load_profiles(path)

        for grp in pgdata.get("groups", []):
            instif = grp.get("install_if", '')
            grp["install_if"] = parse_install_pred(instif, sysprof, False)
            logging.debug("install_if: %s -> %s" % (instif, grp["install_if"]))

            # TODO: Is 'type' of the packages (mandatory | default | optional)
            # to be checked?
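            # Note: grp["install_if"] is a single boolean per group, so all
            # of the group's packages go to install_pkgs or to remove_pkgs
            # as a whole.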
            inst_pkgs = RU.uniq(
                (p["name"]
                 for p in grp.get("packages", []) if grp["install_if"]),
                use_set=True)
            uninst_pkgs = RU.uniq(
                (p["name"]
                 for p in grp.get("packages", []) if not grp["install_if"]),
                use_set=True)
            grp["install_pkgs"] = inst_pkgs
            grp["remove_pkgs"] = uninst_pkgs

            yield grp
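Note: RU.uniq itself is not shown on this page. Here is a minimal sketch
consistent with how it is called in these examples (assumed semantics,
covering the use_set option used above and the key/reverse options used in
the analyze() example further down; rpmkit's real implementation may
differ):

def uniq(items, use_set=False, key=None, reverse=False):
    """Deduplicate items (a sketch, not rpmkit's verified code).

    :param use_set: If True, deduplicate hashable items via a set;
        input order is not preserved
    :param key: Optional sort key applied before deduplication
    :param reverse: Sort in reverse order when a key is given
    """
    if use_set:
        return list(set(items))

    if key is not None:
        items = sorted(items, key=key, reverse=reverse)

    result = []
    for item in items:
        if item not in result:  # equality-based, so dicts also work
            result.append(item)
    return result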
Example 2
def make_dependencies_dag(root, reqs=None, rreqs=None):
    """
    Make directed acyclic graph of RPM dependencies.

    see also:

    * http://en.wikipedia.org/wiki/Directed_acyclic_graph
    * http://en.wikipedia.org/wiki/Strongly_connected_component

    :param root: RPM Database root dir
    :param reqs: A dict representing RPM deps, {x: [package_requires_x]}.
    :param rreqs: A dict representing RPM deps, {x: [package_required_by_x]}.

    :return: A networkx.DiGraph instance representing the DAG of RPM deps.
    """
    if rreqs is None:
        rreqs = RU.make_reversed_requires_dict(root)

    if reqs is None:
        reqs = RU.make_requires_dict(root)
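    # NOTE: reqs is computed above but not used below;
    # make_dependency_graph is called with rreqs only.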

    g = make_dependency_graph(root, rreqs=rreqs)

    # Degenerate strongly connected components:
    for scc in list(NX.strongly_connected_components(g)):
        scc = sorted(U.uniq(scc))  # TODO: Is this needed?

        if len(scc) == 1:  # Skip SCCs of length 1.
            continue

        _degenerate_nodes(g, scc, "Strongly Connected Components")

    # Degenerate cyclic nodes:
    for cns in NX.simple_cycles(g):
        cns = sorted(U.uniq(cns))  # TODO: Likewise

        # Should not happen, as self-cyclic nodes were removed in advance.
        assert len(cns) != 1, "Self cyclic node: " + cns[0]

        _degenerate_nodes(g, cns, "Cyclic nodes")

    assert NX.is_directed_acyclic_graph(g), \
        "Something is still missing to turn the dep. graph into a DAG..."

    return g
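The helper _degenerate_nodes is not shown in this excerpt. A minimal sketch
of what it might do (assumed behavior, not rpmkit's actual implementation)
is to collapse the given node set into a single representative node while
rewiring its edges:

def _degenerate_nodes(g, nodes, reason):
    """Collapse ``nodes`` into their first (sorted) member, rewiring
    incoming and outgoing edges in place.  A sketch only; the real helper
    may record ``reason`` or pick the representative differently.
    """
    nodes = sorted(nodes)
    rep, rest = nodes[0], set(nodes[1:])
    for n in rest:
        for pred in list(g.predecessors(n)):   # rewire incoming edges
            if pred != rep and pred not in rest:
                g.add_edge(pred, rep)
        for succ in list(g.successors(n)):     # rewire outgoing edges
            if succ != rep and succ not in rest:
                g.add_edge(rep, succ)
        g.remove_node(n)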
Example 3
def _notice_to_errata(notice):
    """
    Notice metadata examples:

    packages:

     'pkglist': [
        {'name': 'Red Hat Enterprise Linux Server (v. 6 for 64-bit x86_64)',
         'packages': [
            {'arch': 'x86_64',
             'epoch': '0',
             'filename': 'xorg-x11-drv-fbdev-0.4.3-16.el6.x86_64.rpm',
             'name': 'xorg-x11-drv-fbdev',
             'release': '16.el6',
             'src': 'xorg-x11-drv-fbdev-0.4.3-16.el6.src.rpm',
             'sum': ('sha256', '8f3da83bb19c3776053c543002c9...'),
             'version': '0.4.3'},
             ...],
        },
        ...
     ]

    cve in notice_metadata["references"]:

    {'href': 'https://www.redhat.com/security/data/cve/CVE-2013-1994.html',
     'id': 'CVE-2013-1994',
     'title': 'CVE-2013-1994',
     'type': 'cve'}
    """
    nmd = notice.get_metadata()

    errata = dict(advisory=nmd["update_id"],
                  synopsis=nmd["title"],
                  description=nmd["description"],
                  update_date=nmd["updated"],
                  issue_date=nmd["issued"],
                  solution=nmd["solution"],
                  type=nmd["type"],
                  severity=nmd.get("severity", "N/A"))

    errata["bzs"] = [
        normalize_bz(bz) for bz in filter(
            lambda r: r.get("type") == "bugzilla", nmd.get("references", []))
    ]
    errata["cves"] = [
        normalize_cve(cve) for cve in filter(lambda r: r.get("type") == "cve",
                                             nmd.get("references", []))
    ]

    errata["packages"] = RU.concat(nps["packages"]
                                   for nps in nmd.get("pkglist", []))

    errata["package_names"] = ','.join(
        RU.uniq(p["name"] for p in errata["packages"]))
    errata["url"] = rpmkit.updateinfo.utils.errata_url(errata["advisory"])

    return errata
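For illustration, a hypothetical minimal usage with a stub notice object
(the stub class and its metadata values are invented for this sketch; real
notices come from updateinfo parsing):

class StubNotice(object):
    """A hypothetical stand-in for an updateinfo notice."""
    def __init__(self, metadata):
        self._metadata = metadata

    def get_metadata(self):
        return self._metadata

notice = StubNotice(dict(update_id="RHSA-2013:1620", title="...",
                         description="...", updated="2013-11-21",
                         issued="2013-11-21", solution="...",
                         type="security", severity="Moderate",
                         references=[], pkglist=[]))
errata = _notice_to_errata(notice)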
Example 4
def load_packages(pf):
    """
    Load the list of RPM package labels from the given file.

    :param pf: Packages list file.
    """
    labels = RU.uniq(load_packages_g(pf))
    LOG.info("Loaded %d RPM labels from %s" % (len(labels), pf))

    return labels
Example 5
File: main.py Project: ssato/rpmkit
def errata_complement_g(errata, updates, score=0):
    """
    TODO: What should be complemented?

    :param errata: A list of errata
    :param updates: A list of update packages
    :param score: CVSS score threshold; CVE details are fetched when positive
    """
    unas = set(p2na(u) for u in updates)
    for e in errata:
        e["id"] = errata_to_int(e)  # Sorting key
        e["updates"] = U.uniq(p for p in e.get("packages", []) if p2na(p)
                              in unas)
        e["update_names"] = list(set(u["name"] for u in e["updates"]))

        # TODO: Dirty hack to strip extra whitespace from the beginning and
        # the end of the synopsis of some errata.
        e["synopsis"] = e["synopsis"].strip()

        if score > 0:
            e["cves"] = [fetch_cve_details(cve) for cve in e.get("cves", [])]

        yield e
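p2na is not defined in this excerpt; judging from its use, it maps a
package dict to a hashable (name, arch) pair. A hypothetical one-liner
consistent with that (an assumption, not rpmkit's verified code):

def p2na(pkg):
    """Map a package dict to a (name, arch) pair (assumed behavior)."""
    return (pkg["name"], pkg["arch"])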
Example 6
def ucat(xss):
    """Concatenate the given iterables and deduplicate the result."""
    return RU.uniq(RU.concat(xss))
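For instance, assuming RU.uniq preserves first-seen order:

ucat([[1, 2], [2, 3], [3, 4]])  # -> [1, 2, 3, 4]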
Example 7
File: main.py Project: ssato/rpmkit
def analyze(host, score=0, keywords=ERRATA_KEYWORDS, core_rpms=[],
            period=(), refdir=None, nevra_keys=NEVRA_KEYS):
    """
    :param host: Host object that :func:`prepare` returns
    :param score: CVSS base metrics score
    :param keywords: Keyword list to filter 'important' RHBAs
    :param core_rpms: Core RPMs to filter errata by
    :param period: Period of errata in format of YYYY[-MM[-DD]],
        ex. ("2014-10-01", "2014-11-01")
    :param refdir: A dir holding reference data previously generated to
        compute delta (updates since that data)
    """
    base = host.base
    workdir = host.workdir

    timestamp = datetime.datetime.now().strftime("%F %T")
    metadata = bunch.bunchify(dict(id=host.id, root=host.root,
                                   workdir=host.workdir, repos=host.repos,
                                   backend=host.base.name, score=score,
                                   keywords=keywords,
                                   installed=len(host.installed),
                                   hosts=[host.id, ],
                                   generated=timestamp))
    # pylint: disable=maybe-no-member
    LOG.debug(_("%s: Dump metadata for %s"), host.id, host.root)
    # pylint: enable=maybe-no-member
    U.json_dump(metadata.toDict(), os.path.join(workdir, "metadata.json"))

    us = U.uniq(base.list_updates(), key=itemgetter(*nevra_keys))
    es = base.list_errata()
    es = U.uniq(errata_complement_g(es, us, score), key=itemgetter("id"),
                reverse=True)
    LOG.info(_("%s: Found %d Errata, %d Update RPMs"), host.id, len(es),
             len(us))

    LOG.debug(_("%s: Dump Errata and Update RPMs list..."), host.id)
    U.json_dump(dict(data=es, ), errata_list_path(workdir))
    U.json_dump(dict(data=us, ), updates_file_path(workdir))

    host.errata = es
    host.updates = us
    ips = host.installed

    LOG.info(_("%s: Analyze and dump results of errata data in %s"),
             host.id, workdir)
    dump_results(workdir, ips, es, us, score, keywords, core_rpms)

    if period:
        (start_date, end_date) = period_to_dates(*period)
        LOG.info(_("%s: Analyze errata in period: %s ~ %s"),
                 host.id, start_date, end_date)
        pes = [e for e in es if errata_in_period(e, start_date, end_date)]

        pdir = os.path.join(workdir, "%s_%s" % (start_date, end_date))
        if not os.path.exists(pdir):
            LOG.debug(_("%s: Creating period working dir %s"), host.id, pdir)
            os.makedirs(pdir)

        dump_results(pdir, ips, pes, us, score, keywords, core_rpms, False)

    if refdir:
        LOG.debug(_("%s [delta]: Analyze delta errata data by refering %s"),
                  host.id, refdir)
        (es, us) = compute_delta(refdir, es, us)
        LOG.info(_("%s [delta]: Found %d Errata, %d Update RPMs"), host.id,
                 len(es), len(us))

        deltadir = os.path.join(workdir, "delta")
        if not os.path.exists(deltadir):
            LOG.debug(_("%s: Creating delta working dir %s"),
                      host.id, deltadir)
            os.makedirs(deltadir)

        U.json_dump(dict(data=es, ), errata_list_path(deltadir))
        U.json_dump(dict(data=us, ), updates_file_path(deltadir))

        LOG.info(_("%s: Analyze and dump results of delta errata in %s"),
                 host.id, deltadir)
        dump_results(deltadir, ips, es, us, score, keywords, core_rpms)
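A hypothetical invocation (prepare() and its argument are assumptions based
on the docstring above; the real signature may differ):

host = prepare("/")  # assumed: returns the host object analyze() expects
analyze(host, score=7.0, period=("2014-10-01", "2014-11-01"))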