Example #1
def process_archives(graph, archives):
    for archive in archives:
        if os.path.isfile(archive):
            with extract(archive) as ex:
                ctx = create_context(ex.tmp_dir)
                broker = dr.Broker()
                broker[ctx.__class__] = ctx
                yield dr.run(graph, broker=broker)
        else:
            ctx = create_context(archive)
            broker = dr.Broker()
            broker[ctx.__class__] = ctx
            yield dr.run(graph, broker=broker)
Example #2
def _get_system_facts(archive_path):
    facts = {}
    default_packages = (
        "insights.specs.default",
        "insights.specs.insights_archive",
        "insights.combiners",
        "insights.parsers"
    )
    for pkg in default_packages:
        dr.load_components(pkg)
    broker = dr.Broker()
    try:
        with archives.extract(archive_path) as ex:
            ctx = create_context(ex.tmp_dir, HostArchiveContext)
            broker[ctx.__class__] = ctx
            broker = dr.run(components=[Specs.machine_id, LsEtc],
                            broker=broker)
            if Specs.machine_id in broker:
                facts["id"] = broker[Specs.machine_id].content[0].strip()
            if LsEtc in broker:
                facts["satellite_managed"] = any(
                    broker[LsEtc].dir_contains(*satellite_file)
                    for satellite_file in SATELLITE_MANAGED_FILES.values()
                    if satellite_file[0] in broker[LsEtc]
                )
    except InvalidContentType:
        LOGGER.error("Unable to parse archive.")
    return facts
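This example also depends on a module-level SATELLITE_MANAGED_FILES mapping that is not shown. A hypothetical shape consistent with the dir_contains(*satellite_file) call, where each value is a (directory, filename) pair, could be (paths here are illustrative only):

# Illustrative assumption only: each value is a (directory, filename) pair, so
# LsEtc.dir_contains(*pair) checks whether that directory listing contains the file.
SATELLITE_MANAGED_FILES = {
    "satellite": ("/etc/rhsm/ca", "katello-server-ca.pem"),
}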
Example #3
def process_archives(archives):
    for archive in archives:
        with extract(archive) as ex:
            ctx = create_context(ex.tmp_dir)
            broker = dr.Broker()
            broker[ctx.__class__] = ctx
            yield dr.run(broker=broker)
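Because this variant calls dr.run without an explicit graph, the desired component packages must already be loaded. A minimal usage sketch under that assumption (the archive path is a placeholder):

from insights import dr

# load the components dr.run should execute for each archive
for pkg in ("insights.specs.default", "insights.specs.insights_archive",
            "insights.parsers", "insights.combiners"):
    dr.load_components(pkg)

for broker in process_archives(["example-archive.tar.gz"]):
    # each yielded broker maps executed components to their results
    print(len(broker.instances), "components produced a value")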
Example #4
def get_rule_hit_info(archive, rule_name, timeout=None, tmp_dir=None):
    # We have to load everything again for multiprocessing or clustering to
    # work. Even though imports are cached internally after the first call, we
    # can still optimize a bit with this LOADED hack.
    rule_func = dr.get_component(rule_name)

    if not LOADED[0]:
        load_default_plugins()
        LOADED[0] = True

    # this is also cached behind get_deps after the first call.
    graph, bool_deps = get_deps(rule_func)

    with extract(archive, timeout=timeout, extract_dir=tmp_dir) as arc:
        ctx = create_context(arc.tmp_dir, None)
        broker = dr.Broker()
        broker[ctx.__class__] = ctx

        results = dr.run(graph, broker=broker)

        rule_result = results.get(rule_func)
        rhr = results.get(RedHatRelease)

        result = extract_hits(bool_deps, results)

        result["archive"] = archive
        result["key"] = rule_result.get_key() if rule_result else None
        result["type"] = rule_result.__class__.__name__ if rule_result else None
        result["make_fail"] = True if rule_result and isinstance(rule_result, make_response) else False
        result["major"] = rhr.major if rhr else -1
        result["minor"] = rhr.minor if rhr else -1
        return result
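The LOADED flag above exists so that, when this function is fanned out to worker processes, each process loads the default plugins only once. A sketch of that fan-out, assuming multiprocessing.Pool and a list of archive paths (the helper name is not from the original source):

from functools import partial
from multiprocessing import Pool

def run_rule_over_archives(archives, rule_name, processes=4):
    # each worker process pays the plugin-loading cost once, on its first archive
    worker = partial(get_rule_hit_info, rule_name=rule_name)
    with Pool(processes=processes) as pool:
        return list(pool.imap_unordered(worker, archives))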
Example #5
    def process(self, broker, path):
        for w in self.watchers:
            w.watch_broker(broker)

        result = None

        try:
            self.fire("pre_extract", broker, path)

            with extract(path,
                         timeout=self.extract_timeout,
                         extract_dir=self.extract_tmp_dir) as extraction:
                ctx = create_context(extraction.tmp_dir)
                broker[ctx.__class__] = ctx

                self.fire("on_extract", ctx, broker, extraction)

                output = StringIO()
                with self.Formatter(broker, stream=output):
                    dr.run(self.target_components, broker=broker)
                output.seek(0)
                result = output.read()
                self.fire("on_engine_success", broker, result)
                return result
        except Exception as ex:
            self.fire("on_engine_failure", broker, ex)
            raise
        finally:
            self.fire("on_engine_complete", broker)
Example #6
def create_evaluator(tmp_dir, system_id):
    from insights.core.hydration import create_context

    broker = dr.Broker()
    ctx = create_context(tmp_dir)
    broker[ctx.__class__] = ctx
    if system_id:
        return InsightsEvaluator(broker=broker, system_id=system_id)
    return SingleEvaluator(broker=broker)
Example #7
    def _evaluate(self, broker, path):
        ctx = create_context(path)
        broker[ctx.__class__] = ctx

        output = StringIO()
        with self._Format(broker, stream=output):
            dr.run(self._target_components, broker=broker)
        output.seek(0)
        return output.read().encode("utf-8")
Example #8
    def parse(self):
        # pylint: disable=too-many-branches
        """Parse given archive."""
        ARCHIVE_PARSE_COUNT.inc()
        default_packages = ("insights.specs.default",
                            "insights.specs.insights_archive",
                            "insights.combiners", "insights.parsers")
        for pkg in default_packages:
            dr.load_components(pkg)
        broker = dr.Broker()

        with archives.extract(self.archive_path) as ex:
            ctx = create_context(ex.tmp_dir, HostArchiveContext)
            broker[ctx.__class__] = ctx
            broker = dr.run(components=[Installed, DnfModules, YumReposD],
                            broker=broker)

            if Installed in broker:
                pkglist = broker[Installed]
                self._delete_blacklisted_packages(pkglist.packages)
                for pkg_name in pkglist.packages:
                    pkg = pkglist.get_max(pkg_name)
                    self.package_list.append("%s-%s:%s-%s.%s" %
                                             (pkg.name, pkg.epoch, pkg.version,
                                              pkg.release, pkg.arch))
            else:
                RPMDB_PARSE_FAILURE.inc()
                LOGGER.error("Unable to parse package list from archive.")
                return

            if YumReposD in broker:
                repolist = broker[YumReposD]
                for repo_file in repolist:
                    if repo_file.file_name == 'redhat.repo':
                        for repo in repo_file:
                            enabled = repo_file[repo].get('enabled', '1').lower()
                            if enabled in ('1', 'true', 'enabled', 'yes', 'on'):
                                self.repo_list.append(repo)
                        break

            if not self.repo_list:
                REPOLIST_PARSE_FAILURE.inc()
                LOGGER.warning("Unable to parse RHSM repo list from archive.")

            if DnfModules in broker:
                for module in broker[DnfModules]:
                    for module_name in module.sections():
                        self.modules_list.append({
                            'module_name': module_name,
                            'module_stream': module.get(module_name, 'stream')
                        })
Example #9
    def _get_system_profile(archive_path):
        profile = {}
        default_packages = ("insights.specs.default",
                            "insights.specs.insights_archive",
                            "insights.combiners", "insights.parsers")
        for pkg in default_packages:
            dr.load_components(pkg)
        broker = dr.Broker()
        try:
            with archives.extract(archive_path) as ex:
                ctx = create_context(ex.tmp_dir, HostArchiveContext)
                broker[ctx.__class__] = ctx
                broker = dr.run(components=[Specs.machine_id, Installed,
                                            DnfModules, YumReposD],
                                broker=broker)
                if Specs.machine_id in broker:
                    profile["id"] = broker[Specs.machine_id].content[0].strip()
                profile["installed_packages"] = []
                if Installed in broker:
                    pkglist = broker[Installed]
                    for pkg_name in pkglist.packages:
                        pkg = pkglist.get_max(pkg_name)
                        profile["installed_packages"].append(pkg.nevra)

                profile["yum_repos"] = []
                if YumReposD in broker:
                    repolist = broker[YumReposD]
                    for repo_file in repolist:
                        if repo_file.file_name == 'redhat.repo':
                            for repo in repo_file:
                                enabled = repo_file[repo].get('enabled', '1').lower()
                                if enabled in ('1', 'true', 'enabled', 'yes', 'on'):
                                    profile["yum_repos"].append(repo)
                            break

                profile["dnf_modules"] = []
                if DnfModules in broker:
                    for module in broker[DnfModules]:
                        for module_name in module.sections():
                            profile["dnf_modules"].append({
                                'name':
                                module_name,
                                'stream':
                                module.get(module_name, 'stream')
                            })
                LOGGER.info(profile)
        except InvalidContentType:
            LOGGER.error("Unable to parse archive.")
        return profile
Example #10
    def parse(self):
        # pylint: disable=too-many-branches
        """Parse given archive."""
        ARCHIVE_PARSE_COUNT.inc()
        default_packages = ("insights.specs.default",
                            "insights.specs.insights_archive",
                            "insights.combiners", "insights.parsers")
        for pkg in default_packages:
            dr.load_components(pkg)
        broker = dr.Broker()

        with archives.extract(self.archive_path) as ex:
            ctx = create_context(ex.tmp_dir, HostArchiveContext)
            broker[ctx.__class__] = ctx
            broker = dr.run(components=[Installed,
                                        SubscriptionManagerReposListEnabled,
                                        DnfModules],
                            broker=broker)

            if Installed in broker:
                pkglist = broker[Installed]
                self._delete_blacklisted_packages(pkglist.packages)
                for pkg_name in pkglist.packages:
                    pkg = pkglist.get_max(pkg_name)
                    self.package_list.append("%s-%s:%s-%s.%s" %
                                             (pkg.name, pkg.epoch, pkg.version,
                                              pkg.release, pkg.arch))
            else:
                RPMDB_PARSE_FAILURE.inc()
                LOGGER.error("Unable to parse package list from archive.")
                return

            if SubscriptionManagerReposListEnabled in broker:
                repolist = broker[SubscriptionManagerReposListEnabled]
                for repo_record in repolist.records:
                    repo_label = repo_record.get("Repo ID", None)
                    if repo_label:
                        self.repo_list.append(repo_label)

            if not self.repo_list:
                REPOLIST_PARSE_FAILURE.inc()
                LOGGER.warning("Unable to parse RHSM repo list from archive.")

            if DnfModules in broker:
                for module in broker[DnfModules]:
                    for module_name in module.sections():
                        self.modules_list.append({
                            'module_name': module_name,
                            'module_stream': module.get(module_name, 'stream')
                        })
Example #11
def _get_system_facts(archive_path):
    facts = {}
    default_packages = ("insights.specs.default",
                        "insights.specs.insights_archive",
                        "insights.combiners", "insights.parsers")
    for pkg in default_packages:
        dr.load_components(pkg)
    broker = dr.Broker()
    try:
        with archives.extract(archive_path) as ex:
            ctx = create_context(ex.tmp_dir, HostArchiveContext)
            broker[ctx.__class__] = ctx
            broker = dr.run(components=[Specs.machine_id], broker=broker)
            if Specs.machine_id in broker:
                facts["id"] = broker[Specs.machine_id].content[0].strip()
    except InvalidContentType:
        LOGGER.error("Unable to parse archive.")
    return facts
Example #12
def create_broker(path):
    ctx = create_context(path)
    broker = dr.Broker()
    broker[ctx.__class__] = ctx
    return broker
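A short usage sketch for this helper, assuming the relevant component packages have already been loaded (create_broker only hydrates a context and wires it into a fresh broker); the path below is a placeholder:

from insights import dr

dr.load_components("insights.specs.default")
dr.load_components("insights.parsers")

broker = create_broker("/path/to/extracted/archive")
results = dr.run(broker=broker)  # runs all loaded components against the context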