def process_archives(graph, archives):
    for archive in archives:
        if os.path.isfile(archive):
            with extract(archive) as ex:
                ctx = create_context(ex.tmp_dir)
                broker = dr.Broker()
                broker[ctx.__class__] = ctx
                # Pass the graph here as well so both branches evaluate the
                # same set of components.
                yield dr.run(graph, broker=broker)
        else:
            ctx = create_context(archive)
            broker = dr.Broker()
            broker[ctx.__class__] = ctx
            yield dr.run(graph, broker=broker)
def main():
    args = parse_args()
    logging.basicConfig(level=logging.DEBUG if args.verbose else logging.WARN)

    ctx = HostContext()
    broker = dr.Broker()
    broker[HostContext] = ctx

    out_path = args.output

    dr.load_components("insights.specs.default")
    dr.load_components("insights.specs.insights_archive")
    dr.load_components("insights.specs.sos_archive")
    dr.load_components("insights.parsers")
    dr.load_components("insights.combiners")
    for path in args.plugins:
        dr.load_components(path)

    graphs = dr.get_subgraphs(dr.COMPONENTS[dr.GROUPS.single])
    worker_args = [(broker, g, out_path) for g in graphs]
    if args.parallel:
        run_parallel(worker_args)
    else:
        run_serial(worker_args)
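The `run_serial` and `run_parallel` helpers are not shown in this example. A minimal sketch of what `run_serial` might look like, assuming each worker tuple feeds the `run_graph` function that appears later in this section:

def run_serial(worker_args):
    # Hedged sketch, not the source implementation: run each subgraph
    # in-process against the shared seed broker. The tuple layout
    # (seed_broker, graph, out_path) matches worker_args built in main().
    for seed_broker, graph, out_path in worker_args:
        run_graph(seed_broker, graph, out_path)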
def test_spec_factory():
    hn = HostContext()
    broker = dr.Broker()
    broker[HostContext] = hn
    broker = dr.run(dr.get_dependency_graph(dostuff), broker)
    assert dostuff in broker, broker.tracebacks
    assert broker[Stuff.smpl_file].content == file_content
def _get_system_facts(archive_path):
    facts = {}

    default_packages = (
        "insights.specs.default",
        "insights.specs.insights_archive",
        "insights.combiners",
        "insights.parsers"
    )
    for pkg in default_packages:
        dr.load_components(pkg)

    broker = dr.Broker()
    try:
        with archives.extract(archive_path) as ex:
            ctx = create_context(ex.tmp_dir, HostArchiveContext)
            broker[ctx.__class__] = ctx
            broker = dr.run(components=[Specs.machine_id, LsEtc], broker=broker)

            if Specs.machine_id in broker:
                facts["id"] = broker[Specs.machine_id].content[0].strip()
            if LsEtc in broker:
                facts["satellite_managed"] = any(
                    broker[LsEtc].dir_contains(*satellite_file)
                    for satellite_file in SATELLITE_MANAGED_FILES.values()
                    if satellite_file[0] in broker[LsEtc]
                )
    except InvalidContentType:
        LOGGER.error("Unable to parse archive.")

    return facts
def process_archives(archives):
    for archive in archives:
        with extract(archive) as ex:
            ctx = create_context(ex.tmp_dir)
            broker = dr.Broker()
            broker[ctx.__class__] = ctx
            yield dr.run(broker=broker)
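Since `process_archives` is a generator, each archive's broker is produced lazily. A hedged usage sketch, with placeholder archive paths:

# Hedged usage sketch; the archive paths are illustrative placeholders.
for broker in process_archives(["/tmp/a.tar.gz", "/tmp/b.tar.gz"]):
    print(sorted(dr.get_name(c) for c in broker.instances))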
def hydrate_new_dir(path, broker=None):
    broker = broker or dr.Broker()
    for root, dirs, names in os.walk(path):
        for name in names:
            p = os.path.join(root, name)
            with open(p) as f:
                serde.hydrate(serde.ser.load(f), broker)
    return SingleEvaluator(broker=broker)
def process_facts(facts, meta, use_pandas=False):
    if use_pandas:
        import pandas as pd

    broker = dr.Broker()
    broker[ClusterMeta] = meta
    for k, v in facts.items():
        broker[k] = pd.DataFrame(v) if use_pandas else v
    return dr.run(dr.COMPONENTS[dr.GROUPS.cluster], broker=broker)
def create_evaluator(tmp_dir, system_id):
    from insights.core.hydration import create_context

    broker = dr.Broker()
    ctx = create_context(tmp_dir)
    broker[ctx.__class__] = ctx
    if system_id:
        return InsightsEvaluator(broker=broker, system_id=system_id)
    return SingleEvaluator(broker=broker)
def run_input_data(component, input_data):
    broker = dr.Broker()
    for k, v in input_data.data.items():
        broker[k] = v

    graph = dr.get_dependency_graph(component)
    broker = dr.run(graph, broker=broker)
    for v in broker.tracebacks.values():
        print(v)
    return broker
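Anything exposing a `.data` mapping of components to pre-seeded values can drive `run_input_data`. A minimal sketch with a hypothetical stand-in class:

# FakeInput is a hypothetical stand-in, not part of the source; any object
# with a .data mapping of components to values satisfies run_input_data().
class FakeInput(object):
    def __init__(self, data):
        self.data = data

broker = run_input_data(stage3, FakeInput({"common": 3}))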
def run_graph(seed_broker, g, output_dir):
    to_save = [plugins.datasource, plugins.parser, plugins.combiner, plugins.rule]
    broker = dr.Broker(seed_broker)
    for _type in to_save:
        path = os.path.join(output_dir, dr.get_simple_name(_type))
        fs.ensure_path(path)
        broker.add_observer(persister(path), _type)
    dr.run(g, broker)
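`persister` is not shown in this example. Broker observers in insights-core are called with `(component, broker)` after each component fires, so a hedged sketch of a persister factory might look like this; what it writes (just the component name and outcome) is purely illustrative:

# Hedged sketch of the persister() factory assumed above, not the source
# implementation. Observers registered via Broker.add_observer are invoked
# as observer(component, broker).
import json
import os
from insights.core import dr

def persister(out_path):
    def observer(comp, broker):
        record = {"name": dr.get_name(comp), "succeeded": comp in broker}
        fname = dr.get_name(comp).replace(".", "_") + ".json"
        with open(os.path.join(out_path, fname), "w") as f:
            json.dump(record, f)
    return observer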
def parse(self):  # pylint: disable=too-many-branches
    """Parse given archive."""
    ARCHIVE_PARSE_COUNT.inc()
    default_packages = ("insights.specs.default",
                        "insights.specs.insights_archive",
                        "insights.combiners",
                        "insights.parsers")
    for pkg in default_packages:
        dr.load_components(pkg)
    broker = dr.Broker()

    with archives.extract(self.archive_path) as ex:
        ctx = create_context(ex.tmp_dir, HostArchiveContext)
        broker[ctx.__class__] = ctx
        broker = dr.run(components=[Installed, DnfModules, YumReposD], broker=broker)

        if Installed in broker:
            pkglist = broker[Installed]
            self._delete_blacklisted_packages(pkglist.packages)
            for pkg_name in pkglist.packages:
                pkg = pkglist.get_max(pkg_name)
                self.package_list.append("%s-%s:%s-%s.%s" % (pkg.name, pkg.epoch,
                                                             pkg.version, pkg.release,
                                                             pkg.arch))
        else:
            RPMDB_PARSE_FAILURE.inc()
            LOGGER.error("Unable to parse package list from archive.")
            return

        if YumReposD in broker:
            repolist = broker[YumReposD]
            for repo_file in repolist:
                # Only repos defined in redhat.repo are considered; stop
                # scanning once that file has been processed.
                if repo_file.file_name == 'redhat.repo':
                    for repo in repo_file:
                        if repo_file[repo].get('enabled', '1').lower() in \
                                ('1', 'true', 'enabled', 'yes', 'on'):
                            self.repo_list.append(repo)
                    break

        if not self.repo_list:
            REPOLIST_PARSE_FAILURE.inc()
            LOGGER.warning("Unable to parse RHSM repo list from archive.")

        if DnfModules in broker:
            for module in broker[DnfModules]:
                for module_name in module.sections():
                    self.modules_list.append({
                        'module_name': module_name,
                        'module_stream': module.get(module_name, 'stream')
                    })
def initialize_broker(path, context=None, broker=None):
    ctx = create_context(path, context=context)
    broker = broker or dr.Broker()
    if isinstance(ctx, ClusterArchiveContext):
        return ctx, broker

    broker[ctx.__class__] = ctx
    if isinstance(ctx, SerializedArchiveContext):
        h = Hydration(ctx.root)
        broker = h.hydrate(broker=broker)
    return ctx, broker
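A hedged usage sketch of `initialize_broker`; the path is a placeholder, and calling `dr.run` with no explicit components evaluates whatever components have been loaded:

# Illustrative call; "/tmp/extracted_archive" is a placeholder path.
ctx, broker = initialize_broker("/tmp/extracted_archive")
broker = dr.run(broker=broker)  # runs all currently loaded components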
def _get_system_profile(archive_path):
    profile = {}

    default_packages = ("insights.specs.default",
                        "insights.specs.insights_archive",
                        "insights.combiners",
                        "insights.parsers")
    for pkg in default_packages:
        dr.load_components(pkg)

    broker = dr.Broker()
    try:
        with archives.extract(archive_path) as ex:
            ctx = create_context(ex.tmp_dir, HostArchiveContext)
            broker[ctx.__class__] = ctx
            broker = dr.run(components=[Specs.machine_id, Installed,
                                        DnfModules, YumReposD],
                            broker=broker)

            if Specs.machine_id in broker:
                profile["id"] = broker[Specs.machine_id].content[0].strip()

            profile["installed_packages"] = []
            if Installed in broker:
                pkglist = broker[Installed]
                for pkg_name in pkglist.packages:
                    pkg = pkglist.get_max(pkg_name)
                    profile["installed_packages"].append(pkg.nevra)

            profile["yum_repos"] = []
            if YumReposD in broker:
                repolist = broker[YumReposD]
                for repo_file in repolist:
                    # Only repos defined in redhat.repo are considered; stop
                    # scanning once that file has been processed.
                    if repo_file.file_name == 'redhat.repo':
                        for repo in repo_file:
                            if repo_file[repo].get('enabled', '1').lower() in \
                                    ('1', 'true', 'enabled', 'yes', 'on'):
                                profile["yum_repos"].append(repo)
                        break

            profile["dnf_modules"] = []
            if DnfModules in broker:
                for module in broker[DnfModules]:
                    for module_name in module.sections():
                        profile["dnf_modules"].append({
                            'name': module_name,
                            'stream': module.get(module_name, 'stream')
                        })
            LOGGER.info(profile)
    except InvalidContentType:
        LOGGER.error("Unable to parse archive.")

    return profile
def test_run():
    broker = dr.Broker()
    broker["common"] = 3
    graph = dr.get_dependency_graph(stage3)
    graph.update(dr.get_dependency_graph(stage4))
    broker = dr.run(graph, broker)
    assert stage3 in broker.instances
    assert broker[stage3] == 3
    assert stage4 in broker.instances
    assert broker[stage4] == 3
def parse(self):  # pylint: disable=too-many-branches
    """Parse given archive."""
    ARCHIVE_PARSE_COUNT.inc()
    default_packages = ("insights.specs.default",
                        "insights.specs.insights_archive",
                        "insights.combiners",
                        "insights.parsers")
    for pkg in default_packages:
        dr.load_components(pkg)
    broker = dr.Broker()

    with archives.extract(self.archive_path) as ex:
        ctx = create_context(ex.tmp_dir, HostArchiveContext)
        broker[ctx.__class__] = ctx
        broker = dr.run(components=[Installed,
                                    SubscriptionManagerReposListEnabled,
                                    DnfModules],
                        broker=broker)

        if Installed in broker:
            pkglist = broker[Installed]
            self._delete_blacklisted_packages(pkglist.packages)
            for pkg_name in pkglist.packages:
                pkg = pkglist.get_max(pkg_name)
                self.package_list.append("%s-%s:%s-%s.%s" % (pkg.name, pkg.epoch,
                                                             pkg.version, pkg.release,
                                                             pkg.arch))
        else:
            RPMDB_PARSE_FAILURE.inc()
            LOGGER.error("Unable to parse package list from archive.")
            return

        if SubscriptionManagerReposListEnabled in broker:
            repolist = broker[SubscriptionManagerReposListEnabled]
            for repo_record in repolist.records:
                repo_label = repo_record.get("Repo ID", None)
                if repo_label:
                    self.repo_list.append(repo_label)

        if not self.repo_list:
            REPOLIST_PARSE_FAILURE.inc()
            LOGGER.warning("Unable to parse RHSM repo list from archive.")

        if DnfModules in broker:
            for module in broker[DnfModules]:
                for module_name in module.sections():
                    self.modules_list.append({
                        'module_name': module_name,
                        'module_stream': module.get(module_name, 'stream')
                    })
def test_run_incremental():
    broker = dr.Broker()
    broker["dep1"] = 1
    broker["dep2"] = 2
    broker["common"] = 3
    graph = dr.get_dependency_graph(stage1)
    graph.update(dr.get_dependency_graph(stage2))
    graph.update(dr.get_dependency_graph(stage3))
    graph.update(dr.get_dependency_graph(stage4))
    brokers = list(dr.run_incremental(graph, broker))
    assert len(brokers) == 3
def hydrate(payload, broker=None):
    broker = broker or dr.Broker()
    name = payload["name"]
    key = dr.get_component(name) or name

    results = unmarshal(payload["results"])
    if results:
        broker[key] = results

    errors = unmarshal(payload["errors"])
    if errors:
        broker.exceptions[key] = errors
    return broker
def main(): dr.load_components("insights.parsers") dr.load_components("insights.combiners") broker = dr.Broker() if len(sys.argv) > 1: evaluator = hydrate_old_archive(path=sys.argv[1], tmp_dir="/tmp") else: evaluator = hydrate_new_dir("output") evaluator.process() broker = evaluator.broker pprint(broker.instances) pprint(dict(broker.exceptions))
def hydrate(self, broker=None):
    """
    Loads a Broker from a previously saved one. A Broker is created if
    one isn't provided.
    """
    broker = broker or dr.Broker()
    for path in glob(os.path.join(self.meta_data, "*")):
        try:
            with open(path) as f:
                doc = ser.load(f)
                res = self._hydrate_one(doc)
                comp, results, exec_time, ser_time = res
                if results:
                    broker[comp] = results
                    broker.exec_times[comp] = exec_time + ser_time
        except Exception as ex:
            log.warning(ex)
    return broker
def _get_system_facts(archive_path): facts = {} default_packages = ("insights.specs.default", "insights.specs.insights_archive", "insights.combiners", "insights.parsers") for pkg in default_packages: dr.load_components(pkg) broker = dr.Broker() try: with archives.extract(archive_path) as ex: ctx = create_context(ex.tmp_dir, HostArchiveContext) broker[ctx.__class__] = ctx broker = dr.run(components=[Specs.machine_id], broker=broker) if Specs.machine_id in broker: facts["id"] = broker[Specs.machine_id].content[0].strip() except InvalidContentType: LOGGER.error("Unable to parse archive.") return facts
def create_broker(path):
    ctx = create_context(path)
    broker = dr.Broker()
    broker[ctx.__class__] = ctx
    return broker
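A minimal sketch of how `create_broker` might be driven end to end; the path is a placeholder, and `Hostname` stands in for any loaded parser:

# Minimal driver sketch; the path is a placeholder and Hostname stands in
# for any parser whose dependency graph you want to evaluate.
from insights.core import dr
from insights.parsers.hostname import Hostname

broker = create_broker("/tmp/extracted_archive")
broker = dr.run(dr.get_dependency_graph(Hostname), broker=broker)
if Hostname in broker:
    print(broker[Hostname].fqdn)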