def process(self, broker, path):
        """Extract the archive at *path*, run the configured components, and
        return the formatted output.

        Lifecycle events are fired around every phase so registered watchers
        can observe progress; ``on_engine_complete`` always fires, even when
        extraction or evaluation raises.
        """
        for watcher in self.watchers:
            watcher.watch_broker(broker)

        rendered = None

        try:
            self.fire("pre_extract", broker, path)

            with extract(path, timeout=self.timeout,
                         extract_dir=self.tmp_dir) as extraction:
                # Register the detected context so components keyed on its
                # type can resolve.
                context = create_context(extraction.tmp_dir)
                broker[context.__class__] = context

                self.fire("on_extract", context, broker, extraction)

                buf = StringIO()
                with self.Format(broker, stream=buf):
                    dr.run(self.comps, broker=broker)
                buf.seek(0)
                rendered = buf.read()
                self.fire("on_engine_success", broker, rendered)
                return rendered
        except Exception as exc:
            self.fire("on_engine_failure", broker, exc)
            raise
        finally:
            self.fire("on_engine_complete", broker)
Example no. 2
0
    def process(self, broker, path):
        """Extract the archive at *path*, run the target components, and
        return the formatted result.

        Watcher and engine lifecycle events are fired at each phase;
        ``on_engine_complete`` fires unconditionally via ``finally``.
        """
        for observer in self.watchers:
            observer.watch_broker(broker)

        result = None

        try:
            self.fire("pre_extract", broker, path)

            with extract(path,
                         timeout=self.extract_timeout,
                         extract_dir=self.extract_tmp_dir) as extraction:
                # initialize_broker may replace the broker, so rebind it.
                ctx, broker = initialize_broker(extraction.tmp_dir,
                                                broker=broker)

                self.fire("on_extract", ctx, broker, extraction)

                sink = StringIO()
                with self.Formatter(broker, stream=sink):
                    dr.run(self.target_components, broker=broker)
                result = sink.getvalue()
                self.fire("on_engine_success", broker, result)
                return result
        except Exception as failure:
            self.fire("on_engine_failure", broker, failure)
            raise
        finally:
            self.fire("on_engine_complete", broker)
Example no. 3
0
def run_graph(seed_broker, g, output_dir):
    """Run graph *g* seeded from *seed_broker*, persisting results of the
    observed component types into subdirectories of *output_dir*.
    """
    persisted_types = [
        plugins.datasource, plugins.parser, plugins.combiner, plugins.rule
    ]
    broker = dr.Broker(seed_broker)
    for component_type in persisted_types:
        # One subdirectory per component type, named after the type.
        target_dir = os.path.join(output_dir, dr.get_simple_name(component_type))
        fs.ensure_path(target_dir)
        broker.add_observer(persister(target_dir), component_type)
    dr.run(g, broker)
Example no. 4
0
def process_archives(graph, archives):
    """Run *graph* against every archive in *archives*, yielding one broker
    per archive.

    Each entry may be a compressed archive file (extracted to a temporary
    directory first) or an already-extracted directory; both branches run the
    same *graph*.
    """
    for archive in archives:
        if os.path.isfile(archive):
            with extract(archive) as ex:
                ctx = create_context(ex.tmp_dir)
                broker = dr.Broker()
                broker[ctx.__class__] = ctx
                # Fix: *graph* was previously dropped in this branch
                # (``dr.run(broker=broker)``), so file archives silently ran
                # the default component set instead of the requested graph.
                yield dr.run(graph, broker=broker)
        else:
            ctx = create_context(archive)
            broker = dr.Broker()
            broker[ctx.__class__] = ctx
            yield dr.run(graph, broker=broker)
Example no. 5
0
def process_archives(archives):
    """Extract each archive in *archives* and yield the broker produced by
    running the default component set against it.
    """
    for archive_path in archives:
        with extract(archive_path) as extraction:
            context = create_context(extraction.tmp_dir)
            fresh_broker = dr.Broker()
            fresh_broker[context.__class__] = context
            # No explicit graph: dr.run falls back to its default components.
            yield dr.run(broker=fresh_broker)
Example no. 6
0
    def process_dir(self, path):
        """Yield ``(datasource_name, file_path, stream_builder)`` triples for
        every datasource provider collected from the directory at *path*.

        A broker is built for the directory and the default components are
        run; each provider is then paired with a transform pipeline that
        stamps records with archive-level metadata.
        """
        broker = create_broker(path)
        broker = dr.run(broker=broker)

        # Archive-wide metadata applied to every record produced below.
        archive_meta = meta(hostname=get_hostname(broker),
                            uname=get_uname(broker),
                            release=get_release(broker),
                            version=get_version(broker),
                            **self.kwargs)

        datasources = get_datasources(broker)
        for d in datasources:
            name = dr.get_simple_name(d)
            # NOTE(review): "large" datasources appear to be read per line
            # (line_reader) rather than whole-file — confirm against readers.
            large = is_large(name)
            reader = line_reader if large else file_reader

            # A datasource may yield a single provider or a list; normalize.
            providers = broker[d]
            if not isinstance(providers, list):
                providers = [providers]

            for p in providers:
                file_meta = meta(path=p.path, target=name)
                transform = compose(archive_meta, file_meta, to_dict)
                if large:
                    transform = compose(line_counter(), transform)
                # Lift the per-record transform to operate over a stream.
                stream_transform = liftI(transform)
                yield (name, p.path, compose(stream_transform, reader))
Example no. 7
0
def test_spec_factory():
    """Running dostuff's dependency graph should populate Stuff.smpl_file."""
    host_ctx = HostContext()
    broker = dr.Broker()
    broker[HostContext] = host_ctx
    graph = dr.get_dependency_graph(dostuff)
    broker = dr.run(graph, broker)
    assert dostuff in broker, broker.tracebacks
    assert broker[Stuff.smpl_file].content == file_content
 def _get_system_facts(archive_path):
     """Extract the archive at *archive_path* and return a dict of facts.

     Facts collected when available:
       * ``id`` -- the machine id.
       * ``satellite_managed`` -- whether any known Satellite marker file is
         present (checked against the LsEtc listing).
     Returns an empty/partial dict if the archive cannot be parsed.
     """
     facts = {}
     default_packages = (
         "insights.specs.default",
         "insights.specs.insights_archive",
         "insights.combiners",
         "insights.parsers"
     )
     # Components must be loaded before dr.run can resolve them.
     for pkg in default_packages:
         dr.load_components(pkg)
     broker = dr.Broker()
     try:
         with archives.extract(archive_path) as ex:
             ctx = create_context(ex.tmp_dir, HostArchiveContext)
             broker[ctx.__class__] = ctx
             broker = dr.run(components=[Specs.machine_id, LsEtc],
                             broker=broker)
             if Specs.machine_id in broker:
                 facts["id"] = broker[Specs.machine_id].content[0].strip()
             if LsEtc in broker:
                 # True if any configured Satellite marker file exists; the
                 # guard skips directories absent from the listing.
                 facts["satellite_managed"] = any([broker[LsEtc].dir_contains(*satellite_file)
                                                   for satellite_file in SATELLITE_MANAGED_FILES.values()
                                                   if satellite_file[0] in broker[LsEtc]])
     except InvalidContentType:
         LOGGER.error("Unable to parse archive.")
     return facts
Example no. 9
0
def process_facts(facts, meta, broker, use_pandas=False):
    """Load *facts* and *meta* into *broker* and run the cluster components.

    When *use_pandas* is true, each fact value is wrapped in a DataFrame;
    pandas is imported lazily so it is only required in that mode.
    """
    if use_pandas:
        import pandas as pd

    broker[ClusterMeta] = meta
    for fact_name, fact_value in facts.items():
        if use_pandas:
            broker[fact_name] = pd.DataFrame(fact_value)
        else:
            broker[fact_name] = fact_value
    return dr.run(dr.COMPONENTS[dr.GROUPS.cluster], broker=broker)
Example no. 10
0
def run_input_data(component, input_data):
    """Run *component*'s dependency graph over *input_data* and return the
    resulting broker, printing any collected tracebacks for debugging.
    """
    broker = dr.Broker()
    for key, value in input_data.data.items():
        broker[key] = value

    dependency_graph = dr.get_dependency_graph(component)
    broker = dr.run(dependency_graph, broker=broker)
    for traceback_text in broker.tracebacks.values():
        print(traceback_text)
    return broker
    def parse(self):
        # pylint: disable=too-many-branches
        """Parse the given archive, populating ``package_list``,
        ``repo_list`` (from yum.repos.d redhat.repo enabled repos) and
        ``modules_list`` on this object. Returns early if no package list
        can be parsed.
        """
        ARCHIVE_PARSE_COUNT.inc()
        default_packages = ("insights.specs.default",
                            "insights.specs.insights_archive",
                            "insights.combiners", "insights.parsers")
        # Components must be loaded before dr.run can resolve them.
        for pkg in default_packages:
            dr.load_components(pkg)
        broker = dr.Broker()

        with archives.extract(self.archive_path) as ex:
            ctx = create_context(ex.tmp_dir, HostArchiveContext)
            broker[ctx.__class__] = ctx
            broker = dr.run(components=[Installed, DnfModules, YumReposD],
                            broker=broker)

            if Installed in broker:
                pkglist = broker[Installed]
                self._delete_blacklisted_packages(pkglist.packages)
                # Record the highest version of each installed package as
                # name-epoch:version-release.arch.
                for pkg_name in pkglist.packages:
                    pkg = pkglist.get_max(pkg_name)
                    self.package_list.append("%s-%s:%s-%s.%s" %
                                             (pkg.name, pkg.epoch, pkg.version,
                                              pkg.release, pkg.arch))
            else:
                # Without a package list the rest of the parse is useless.
                RPMDB_PARSE_FAILURE.inc()
                LOGGER.error("Unable to parse package list from archive.")
                return

            if YumReposD in broker:
                repolist = broker[YumReposD]
                for repo_file in repolist:
                    if repo_file.file_name == 'redhat.repo':
                        # Collect repos enabled via any accepted truthy value;
                        # missing 'enabled' defaults to enabled ('1').
                        for repo in repo_file:
                            if repo_file[repo].get(
                                    'enabled',
                                    '1').lower() in ('1', 'true', 'enabled',
                                                     'yes', 'on'):
                                self.repo_list.append(repo)
                        break

            if not self.repo_list:
                REPOLIST_PARSE_FAILURE.inc()
                LOGGER.warning("Unable to parse RHSM repo list from archive.")

            if DnfModules in broker:
                for module in broker[DnfModules]:
                    for module_name in module.sections():
                        self.modules_list.append({
                            'module_name':
                            module_name,
                            'module_stream':
                            module.get(module_name, 'stream')
                        })
Example no. 12
0
    def _get_system_profile(archive_path):
        """Extract the archive at *archive_path* and build a system profile.

        The returned dict may contain ``id`` (machine id) and always contains
        ``installed_packages``, ``yum_repos`` and ``dnf_modules`` lists
        (possibly empty). On an unparseable archive an empty dict is returned.
        """
        profile = {}
        default_packages = ("insights.specs.default",
                            "insights.specs.insights_archive",
                            "insights.combiners", "insights.parsers")
        # Components must be loaded before dr.run can resolve them.
        for pkg in default_packages:
            dr.load_components(pkg)
        broker = dr.Broker()
        try:
            with archives.extract(archive_path) as ex:
                ctx = create_context(ex.tmp_dir, HostArchiveContext)
                broker[ctx.__class__] = ctx
                broker = dr.run(components=[
                    Specs.machine_id, Installed, DnfModules, YumReposD
                ],
                                broker=broker)
                if Specs.machine_id in broker:
                    profile["id"] = broker[Specs.machine_id].content[0].strip()
                profile["installed_packages"] = []
                if Installed in broker:
                    pkglist = broker[Installed]
                    # Keep only the highest version of each package (NEVRA).
                    for pkg_name in pkglist.packages:
                        pkg = pkglist.get_max(pkg_name)
                        profile["installed_packages"].append(pkg.nevra)

                profile["yum_repos"] = []
                if YumReposD in broker:
                    repolist = broker[YumReposD]
                    for repo_file in repolist:
                        if repo_file.file_name == 'redhat.repo':
                            # Collect repos enabled via any accepted truthy
                            # value; missing 'enabled' defaults to enabled.
                            for repo in repo_file:
                                if repo_file[repo].get(
                                        'enabled',
                                        '1').lower() in ('1', 'true',
                                                         'enabled', 'yes',
                                                         'on'):
                                    profile["yum_repos"].append(repo)
                            break

                profile["dnf_modules"] = []
                if DnfModules in broker:
                    for module in broker[DnfModules]:
                        for module_name in module.sections():
                            profile["dnf_modules"].append({
                                'name':
                                module_name,
                                'stream':
                                module.get(module_name, 'stream')
                            })
                LOGGER.info(profile)
        except InvalidContentType:
            LOGGER.error("Unable to parse archive.")
        return profile
Example no. 13
0
def test_run():
    """Both stage3 and stage4 should resolve from the shared "common" value."""
    broker = dr.Broker()
    broker["common"] = 3
    combined_graph = dr.get_dependency_graph(stage3)
    combined_graph.update(dr.get_dependency_graph(stage4))
    broker = dr.run(combined_graph, broker)

    for stage in (stage3, stage4):
        assert stage in broker.instances
        assert broker[stage] == 3
Example no. 14
0
    def parse(self):
        # pylint: disable=too-many-branches
        """Parse the given archive, populating ``package_list``,
        ``repo_list`` (from subscription-manager's enabled repos) and
        ``modules_list`` on this object. Returns early if no package list
        can be parsed.
        """
        ARCHIVE_PARSE_COUNT.inc()
        default_packages = ("insights.specs.default",
                            "insights.specs.insights_archive",
                            "insights.combiners", "insights.parsers")
        # Components must be loaded before dr.run can resolve them.
        for pkg in default_packages:
            dr.load_components(pkg)
        broker = dr.Broker()

        with archives.extract(self.archive_path) as ex:
            ctx = create_context(ex.tmp_dir, HostArchiveContext)
            broker[ctx.__class__] = ctx
            broker = dr.run(components=[
                Installed, SubscriptionManagerReposListEnabled, DnfModules
            ],
                            broker=broker)

            if Installed in broker:
                pkglist = broker[Installed]
                self._delete_blacklisted_packages(pkglist.packages)
                # Record the highest version of each installed package as
                # name-epoch:version-release.arch.
                for pkg_name in pkglist.packages:
                    pkg = pkglist.get_max(pkg_name)
                    self.package_list.append("%s-%s:%s-%s.%s" %
                                             (pkg.name, pkg.epoch, pkg.version,
                                              pkg.release, pkg.arch))
            else:
                # Without a package list the rest of the parse is useless.
                RPMDB_PARSE_FAILURE.inc()
                LOGGER.error("Unable to parse package list from archive.")
                return

            if SubscriptionManagerReposListEnabled in broker:
                repolist = broker[SubscriptionManagerReposListEnabled]
                for repo_record in repolist.records:
                    repo_label = repo_record.get("Repo ID", None)
                    if repo_label:
                        self.repo_list.append(repo_label)

            if not self.repo_list:
                REPOLIST_PARSE_FAILURE.inc()
                LOGGER.warning("Unable to parse RHSM repo list from archive.")

            if DnfModules in broker:
                for module in broker[DnfModules]:
                    for module_name in module.sections():
                        self.modules_list.append({
                            'module_name':
                            module_name,
                            'module_stream':
                            module.get(module_name, 'stream')
                        })
 def _get_system_facts(archive_path):
     """Extract the archive at *archive_path* and return basic system facts.

     Only ``id`` (the machine id) is currently collected, when present.
     Returns an empty dict if the archive cannot be parsed.
     """
     facts = {}
     default_packages = ("insights.specs.default",
                         "insights.specs.insights_archive",
                         "insights.combiners", "insights.parsers")
     # Components must be loaded before dr.run can resolve them.
     for pkg in default_packages:
         dr.load_components(pkg)
     broker = dr.Broker()
     try:
         with archives.extract(archive_path) as ex:
             ctx = create_context(ex.tmp_dir, HostArchiveContext)
             broker[ctx.__class__] = ctx
             broker = dr.run(components=[Specs.machine_id], broker=broker)
             if Specs.machine_id in broker:
                 facts["id"] = broker[Specs.machine_id].content[0].strip()
     except InvalidContentType:
         LOGGER.error("Unable to parse archive.")
     return facts
Example no. 16
0
 def run_components(self):
     """Run every component in the ``single`` group against this object's broker."""
     dr.run(dr.COMPONENTS[dr.GROUPS.single], broker=self.broker)
Example no. 17
0
def process_facts(facts, meta, broker, cluster_graph):
    """Load *facts* into *broker* as DataFrames, attach *meta*, and run
    *cluster_graph*, returning the resulting broker.
    """
    broker[ClusterMeta] = meta
    for fact_name, fact_value in facts.items():
        broker[fact_name] = pd.DataFrame(fact_value)
    return dr.run(cluster_graph, broker=broker)
Example no. 18
0
 def run_components(self, graph=None):
     """Run *graph* (or, by default, the ``single`` component group) against this object's broker."""
     dr.run(graph or dr.COMPONENTS[dr.GROUPS.single], broker=self.broker)
Example no. 19
0
def process_facts(facts, meta, broker):
    """Stage cluster *facts* (as DataFrames) and *meta* in *broker*, then run
    the cluster component group and return the resulting broker.
    """
    broker[ClusterMeta] = meta
    for name in facts:
        broker[name] = pd.DataFrame(facts[name])
    return dr.run(dr.COMPONENTS[dr.GROUPS.cluster], broker=broker)