def load_plugins():
    """Load the plugins we care about."""
    for pkg in ("insights.specs.default",
                "insights.parsers",
                "insights.combiners",
                "telemetry.rules.plugins",
                "prodsec"):
        dr.load_components(pkg)
def _get_system_facts(archive_path):
    """
    Extract an insights archive and collect basic system facts.

    Args:
        archive_path (str): path to the archive file on disk.

    Returns:
        dict: may contain "id" (machine id read from Specs.machine_id)
        and "satellite_managed" (bool); empty when the archive content
        cannot be parsed.
    """
    facts = {}
    default_packages = ("insights.specs.default",
                        "insights.specs.insights_archive",
                        "insights.combiners",
                        "insights.parsers")
    for pkg in default_packages:
        dr.load_components(pkg)
    broker = dr.Broker()
    try:
        with archives.extract(archive_path) as ex:
            ctx = create_context(ex.tmp_dir, HostArchiveContext)
            broker[ctx.__class__] = ctx
            broker = dr.run(components=[Specs.machine_id, LsEtc], broker=broker)
            if Specs.machine_id in broker:
                facts["id"] = broker[Specs.machine_id].content[0].strip()
            if LsEtc in broker:
                # Generator expression (was a list comprehension) lets any()
                # short-circuit on the first satellite marker file found.
                facts["satellite_managed"] = any(
                    broker[LsEtc].dir_contains(*satellite_file)
                    for satellite_file in SATELLITE_MANAGED_FILES.values()
                    if satellite_file[0] in broker[LsEtc]
                )
    except InvalidContentType:
        LOGGER.error("Unable to parse archive.")
    return facts
def generate_tests(metafunc, test_func, package_names, pattern=None):
    """
    This function hooks in to pytest's test collection framework and
    provides a test for every (input_data, expected) tuple that is
    generated from all @archive_provider-decorated functions.

    Args:
        metafunc: pytest metafunc object for the collected test.
        test_func: the test function this hook should parametrize.
        package_names (str | list | tuple): package(s) to load components from.
        pattern (str | None): regex of component names to include (default all).
    """
    if metafunc.function is test_func:
        # isinstance (was `type(...) not in (list, tuple)`) also accepts
        # subclasses of list/tuple, per standard Python type-check idiom.
        if not isinstance(package_names, (list, tuple)):
            package_names = [package_names]
        for package_name in package_names:
            load_components(package_name, include=pattern or ".*", exclude=None)
        args = []
        ids = []
        slow_mode = metafunc.config.getoption("--runslow")
        fast_mode = metafunc.config.getoption("--smokey")
        for f in tests.ARCHIVE_GENERATORS:
            # Full stride=1 sweep only when --runslow was requested.
            ts = f(stride=1 if slow_mode else f.stride)
            if fast_mode:
                # Smoke mode: only the first case per generator.
                ts = islice(ts, 0, 1)
            for t in ts:
                args.append(t)
                input_data_name = t[2].name if not isinstance(t[2], list) else "multi-node"
                ids.append("#".join([get_name(f), input_data_name]))
        metafunc.parametrize("component,compare_func,input_data,expected", args, ids=ids)
def get_archives(module_name, system_filter):
    """Yield demo archives under ``module_name`` whose names contain ``system_filter``."""
    load_components(module_name)
    module = sys.modules[module_name]
    for sub_name in module.__all__:
        submodule = sys.modules[".".join([module_name, sub_name])]
        if not hasattr(submodule, "demo"):
            continue
        for archive in submodule.demo:
            if system_filter in archive.name:
                yield archive
def parse(self):  # pylint: disable=too-many-branches
    """Parse given archive."""
    ARCHIVE_PARSE_COUNT.inc()
    for pkg in ("insights.specs.default",
                "insights.specs.insights_archive",
                "insights.combiners",
                "insights.parsers"):
        dr.load_components(pkg)
    broker = dr.Broker()
    with archives.extract(self.archive_path) as extraction:
        ctx = create_context(extraction.tmp_dir, HostArchiveContext)
        broker[ctx.__class__] = ctx
        broker = dr.run(components=[Installed, DnfModules, YumReposD], broker=broker)

        # Without an RPM database there is nothing useful to report.
        if Installed not in broker:
            RPMDB_PARSE_FAILURE.inc()
            LOGGER.error("Unable to parse package list from archive.")
            return
        installed = broker[Installed]
        self._delete_blacklisted_packages(installed.packages)
        for name in installed.packages:
            newest = installed.get_max(name)
            self.package_list.append("%s-%s:%s-%s.%s" % (
                newest.name, newest.epoch, newest.version, newest.release, newest.arch))

        # Enabled repos are read from redhat.repo only.
        if YumReposD in broker:
            for repo_file in broker[YumReposD]:
                if repo_file.file_name == 'redhat.repo':
                    for repo in repo_file:
                        state = repo_file[repo].get('enabled', '1').lower()
                        if state in ('1', 'true', 'enabled', 'yes', 'on'):
                            self.repo_list.append(repo)
                    break
        if not self.repo_list:
            REPOLIST_PARSE_FAILURE.inc()
            LOGGER.warning("Unable to parse RHSM repo list from archive.")

        if DnfModules in broker:
            for module in broker[DnfModules]:
                for module_name in module.sections():
                    self.modules_list.append({
                        'module_name': module_name,
                        'module_stream': module.get(module_name, 'stream'),
                    })
def _get_system_profile(archive_path):
    """Extract an archive and build a system profile dict (id, packages, repos, modules)."""
    profile = {}
    for pkg in ("insights.specs.default",
                "insights.specs.insights_archive",
                "insights.combiners",
                "insights.parsers"):
        dr.load_components(pkg)
    broker = dr.Broker()
    try:
        with archives.extract(archive_path) as extraction:
            ctx = create_context(extraction.tmp_dir, HostArchiveContext)
            broker[ctx.__class__] = ctx
            broker = dr.run(
                components=[Specs.machine_id, Installed, DnfModules, YumReposD],
                broker=broker)

            if Specs.machine_id in broker:
                profile["id"] = broker[Specs.machine_id].content[0].strip()

            profile["installed_packages"] = []
            if Installed in broker:
                installed = broker[Installed]
                for name in installed.packages:
                    profile["installed_packages"].append(installed.get_max(name).nevra)

            # Enabled repos are read from redhat.repo only.
            profile["yum_repos"] = []
            if YumReposD in broker:
                for repo_file in broker[YumReposD]:
                    if repo_file.file_name == 'redhat.repo':
                        for repo in repo_file:
                            state = repo_file[repo].get('enabled', '1').lower()
                            if state in ('1', 'true', 'enabled', 'yes', 'on'):
                                profile["yum_repos"].append(repo)
                        break

            profile["dnf_modules"] = []
            if DnfModules in broker:
                for module in broker[DnfModules]:
                    for module_name in module.sections():
                        profile["dnf_modules"].append({
                            'name': module_name,
                            'stream': module.get(module_name, 'stream'),
                        })
        LOGGER.info(profile)
    except InvalidContentType:
        LOGGER.error("Unable to parse archive.")
    return profile
def parse(self):  # pylint: disable=too-many-branches
    """Parse given archive."""
    ARCHIVE_PARSE_COUNT.inc()
    for pkg in ("insights.specs.default",
                "insights.specs.insights_archive",
                "insights.combiners",
                "insights.parsers"):
        dr.load_components(pkg)
    broker = dr.Broker()
    with archives.extract(self.archive_path) as extraction:
        ctx = create_context(extraction.tmp_dir, HostArchiveContext)
        broker[ctx.__class__] = ctx
        broker = dr.run(
            components=[Installed, SubscriptionManagerReposListEnabled, DnfModules],
            broker=broker)

        # Without an RPM database there is nothing useful to report.
        if Installed not in broker:
            RPMDB_PARSE_FAILURE.inc()
            LOGGER.error("Unable to parse package list from archive.")
            return
        installed = broker[Installed]
        self._delete_blacklisted_packages(installed.packages)
        for name in installed.packages:
            newest = installed.get_max(name)
            self.package_list.append("%s-%s:%s-%s.%s" % (
                newest.name, newest.epoch, newest.version, newest.release, newest.arch))

        if SubscriptionManagerReposListEnabled in broker:
            for record in broker[SubscriptionManagerReposListEnabled].records:
                label = record.get("Repo ID", None)
                if label:
                    self.repo_list.append(label)
        if not self.repo_list:
            REPOLIST_PARSE_FAILURE.inc()
            LOGGER.warning("Unable to parse RHSM repo list from archive.")

        if DnfModules in broker:
            for module in broker[DnfModules]:
                for module_name in module.sections():
                    self.modules_list.append({
                        'module_name': module_name,
                        'module_stream': module.get(module_name, 'stream'),
                    })
def load_plugins(alibs):
    """
    Load the plugins we care about and any that have been defined
    on the command line.

    Args:
        alibs: iterable of extra component package/module names to load.

    Returns:
        bool: True when everything loaded; False if an ImportError occurred.
    """
    # Load core components
    try:
        dr.load_components("insights.specs.default", continue_on_error=False)
        dr.load_components("insights.parsers", continue_on_error=False)
        dr.load_components("insights.combiners", continue_on_error=False)
    except ImportError:
        print(
            "**** Error encountered loading core components, please confirm that you have "
        )
        # Fixed typo in the user-facing message: "you" -> "your".
        print("properly installed core into your virtual environment ****")
        return False
    # Load optional components if desired
    try:
        for al in alibs:
            dr.load_components(al, continue_on_error=False)
    except ImportError:
        print(
            "**** Error encountered loading plugins, please confirm that you have **** "
        )
        print(
            "**** properly defined the components you are using on the command line ****"
        )
        return False
    return True
def main():
    """Hydrate an archive (argv[1]) or the local "output" dir and dump broker state."""
    dr.load_components("insights.parsers")
    dr.load_components("insights.combiners")
    broker = dr.Broker()
    if len(sys.argv) > 1:
        evaluator = hydrate_old_archive(path=sys.argv[1], tmp_dir="/tmp")
    else:
        evaluator = hydrate_new_dir("output")
    evaluator.process()
    # The evaluator's broker replaces the freshly-created one.
    broker = evaluator.broker
    pprint(broker.instances)
    pprint(dict(broker.exceptions))
def main(filename):
    """
    Write a components cross-reference report (specs, parsers, combiners)
    to ``filename``.

    Args:
        filename (str): path of the report file to create/overwrite.
    """
    dr.load_components("insights.specs.default")
    dr.load_components("insights.specs.insights_archive")
    dr.load_components("insights.specs.sos_archive")
    dr.load_components("insights.parsers")
    dr.load_components("insights.combiners")
    # Python 3 removed both the cmp() builtin and sorted()'s comparator
    # argument; sort by component name via key= instead (same ordering).
    parsers = sorted((c for c in dr.DELEGATES if is_parser(c)), key=dr.get_name)
    combiners = sorted((c for c in dr.DELEGATES if is_combiner(c)), key=dr.get_name)
    specs = sorted(
        (c for c in dr.DELEGATES
         if is_datasource(c) and dr.get_module_name(c) == 'insights.specs'),
        key=dr.get_name)
    with open(filename, "w") as fh:
        fh.write("Components Cross-Reference\n")
        fh.write("==========================\n")
        fh.write("Specs Dependents\n")
        fh.write("----------------\n")
        for spec in specs:
            info = dict(name=dr.get_name(spec))
            info['dependents'] = []
            for d in dr.get_dependents(spec):
                info['dependents'].append({
                    'name': dr.get_name(d),
                    'dependents': [dr.get_name(sd) for sd in dr.get_dependents(d)]
                })
            print_spec_info(info, fh)
        blank_line(fh)
        fh.write("Parser Dependents/Dependencies\n")
        fh.write("------------------------------\n")
        for pars in parsers:
            print_info(component_info(pars), fh)
        blank_line(fh)
        fh.write("Combiner Dependents/Dependencies\n")
        fh.write("--------------------------------\n")
        for comb in combiners:
            print_info(component_info(comb), fh)
def _get_system_facts(archive_path):
    """Extract an archive and return facts containing the machine id (key "id")."""
    facts = {}
    for pkg in ("insights.specs.default",
                "insights.specs.insights_archive",
                "insights.combiners",
                "insights.parsers"):
        dr.load_components(pkg)
    broker = dr.Broker()
    try:
        with archives.extract(archive_path) as extraction:
            ctx = create_context(extraction.tmp_dir, HostArchiveContext)
            broker[ctx.__class__] = ctx
            broker = dr.run(components=[Specs.machine_id], broker=broker)
            if Specs.machine_id in broker:
                facts["id"] = broker[Specs.machine_id].content[0].strip()
    except InvalidContentType:
        LOGGER.error("Unable to parse archive.")
    return facts
def init():
    """Initialize logging and load every configured plugin package."""
    util.initialize_logging()
    for pkg in config["plugin_packages"]:
        dr.load_components(pkg)
def main():
    """Render and print an HTML report mapping each spec to its definition, rules and filters."""
    # config = get_config()
    dr.load_components("insights.specs.default")
    dr.load_components("insights.parsers")
    dr.load_components("insights.combiners")
    dr.load_components("telemetry.rules.plugins")
    dr.load_components("prodsec")
    # Keep only "top-level" datasources: those with no datasource dependents.
    ds = dr.COMPONENTS_BY_TYPE[datasource]
    specs = []
    for c in ds:
        if not is_datasource(c):
            continue
        if not any(is_datasource(d) for d in dr.get_dependents(c)):
            specs.append(c)
    deps = defaultdict(dict)
    pspec = ''
    for spec in sorted(specs, key=dr.get_name):
        info = dict(name=dr.get_simple_name(spec))
        f = filters.get_filters(spec)
        info['dependents'] = []
        spds = None
        # Find a backing datasource whose qualname lives in DefaultSpecs, if any.
        d = [d for d in dr.get_dependencies(spec) if is_datasource(d)]
        for dp in d:
            c = dr.get_dependencies(dp)
            for cdeps in c:
                if is_datasource(cdeps) and '__qualname__' in cdeps.func_dict and 'DefaultSpecs' in cdeps.func_dict['__qualname__']:
                    spds = cdeps
        # NOTE(review): `cp` is reassigned each iteration, so only the last
        # dependency's rendered doc survives this loop — confirm intended.
        for d in dr.get_dependencies(spec):
            cp = ''
            lines = []
            if d.__doc__ and "Returns the first" in d.__doc__:
                # Turn the comma-separated docstring into an HTML <ul> list.
                lines = d.__doc__.replace(',', '\n')
                lines = lines.splitlines()
                head = [lines[0]]
                top = ["<ul>"]
                bottom = ["</ul>"]
                if spds:
                    lines = [l.replace('Command:', '') for l in lines]
                    lines = [l.replace('Path:', '') for l in lines]
                    lines = ["<li>" + l + "</li>" for l in lines[1:]]
                    # lines = ["<li>" + spds.func_doc + ',' + l + "</li>" for l in lines[1:]]
                else:
                    lines = ["<li>" + l + "</li>" for l in lines[1:]]
                cp = "\n".join(head + top + lines + bottom)
            else:
                # NOTE(review): mutates d.__doc__ in place; repeated runs
                # would keep prepending spds.func_doc — confirm acceptable.
                if spds:
                    d.__doc__ = d.__doc__.replace('Command:', '')
                    d.__doc__ = d.__doc__.replace('Path:', '')
                    d.__doc__ = spds.func_doc + ', ' + d.__doc__
                cp = d.__doc__
        for d in dr.get_dependents(spec):
            # Skip a dependent with the same simple name as the previous one.
            if dr.get_simple_name(pspec) == dr.get_simple_name(d):
                continue
            pspec = d
            p = [dr.get_name(sd) for sd in dr.get_dependents(d)]
            rules = sorted([x.rsplit('.', 2)[1] for x in p])
            deps[info['name']][info['name'] + "_spec-def"] = cp
            deps[info['name']][info['name'] + "_rules"] = ", ".join(rules)
            deps[info['name']][info['name'] + "_filters"] = f
    report = Environment().from_string(REPORT).render(
        report_date=datetime.date.today().strftime("%B %d, %Y"),
        specs=deps)
    print(report)
def main():
    """Run all single-host component subgraphs, serially or in parallel."""
    args = parse_args()
    logging.basicConfig(level=logging.DEBUG if args.verbose else logging.WARN)

    broker = dr.Broker()
    broker[HostContext] = HostContext()
    out_path = args.output

    for pkg in ("insights.specs.default",
                "insights.specs.insights_archive",
                "insights.specs.sos_archive",
                "insights.parsers",
                "insights.combiners"):
        dr.load_components(pkg)
    for plugin_path in args.plugins:
        dr.load_components(plugin_path)

    graphs = dr.get_subgraphs(dr.COMPONENTS[dr.GROUPS.single])
    worker_args = [(broker, graph, out_path) for graph in graphs]
    if args.parallel:
        run_parallel(worker_args)
    else:
        run_serial(worker_args)