def add_filter(component, patterns):
    """
    Add a filter or list of filters to a component. When the component is
    a datasource, the filter will be directly added to that datasource. In
    cases when the component is a parser or combiner, the filter will be
    added to underlying filterable datasources by traversing dependency
    graph. A filter is a simple string, and it matches if it is contained
    anywhere within a line.

    Args:
        component (component): The component to filter, can be datasource,
            parser or combiner.
        patterns (str, [str]): A string, list of strings, or set of strings
            to add to the datasource's filters.

    Raises:
        TypeError: If ``patterns`` is not a string, list, or set.
        Exception: If a pattern is empty, or the component is a raw or
            non-filterable datasource.
    """
    def inner(component, patterns):
        # Any cached filter set for this component is now stale.
        if component in _CACHE:
            del _CACHE[component]

        types = six.string_types + (list, set)
        if not isinstance(patterns, types):
            raise TypeError(
                "Filter patterns must be of type string, list, or set.")

        # Normalize to a set before validating and merging.
        if isinstance(patterns, six.string_types):
            patterns = set([patterns])
        elif isinstance(patterns, list):
            patterns = set(patterns)

        for pat in patterns:
            if not pat:
                # Fixed typo: "empy" -> "empty". An empty pattern would
                # match every line, defeating filtering entirely.
                raise Exception("Filter patterns must not be empty.")

        FILTERS[component] |= patterns

    if not plugins.is_datasource(component):
        # Parser/combiner: push the filters down to every filterable
        # datasource in its dependency graph.
        for dep in dr.run_order(dr.get_dependency_graph(component)):
            if plugins.is_datasource(dep):
                d = dr.get_delegate(dep)
                if d.filterable:
                    inner(dep, patterns)
    else:
        delegate = dr.get_delegate(component)
        if delegate.raw:
            raise Exception("Filters aren't applicable to raw datasources.")
        if not delegate.filterable:
            raise Exception("Filters aren't applicable to %s." % dr.get_name(component))
        inner(component, patterns)
def _resolve_registry_points(cls, base, dct):
    """
    Wire the RegistryPoints and datasources declared in *dct* into *cls*'s
    registry and the dependency graph, honoring RegistryPoints inherited
    from *base*.
    """
    module = cls.__module__
    parents = [p for p in cls.__mro__ if p not in (cls, SpecSet, object)]

    for name, value in dct.items():
        if isinstance(value, RegistryPoint):
            # Record the RegistryPoint in this class's registry under its
            # attribute name.
            value.__name__ = name
            cls.registry[name] = value

        if is_datasource(value):
            # Give the datasource proper identity metadata and expose it
            # through a descriptor on the class.
            value.__qualname__ = ".".join([cls.__name__, name])
            value.__name__ = name
            value.__module__ = module
            setattr(cls, name, SpecDescriptor(value))

            if name in base.registry:
                # A datasource sharing its name with a RegistryPoint on the
                # base class becomes an implementation of that point: the
                # point depends on the datasource, which is hidden from
                # component listings.
                point = base.registry[name]
                dr.add_dependency(point, value)
                dr.mark_hidden(value)

                # Datasources override previously defined datasources of
                # the same name for contexts they all depend on: tell the
                # same-named datasources on parent spec classes not to run
                # under contexts the new datasource will handle.
                _register_context_handler(parents, value)
def _get_available_models(broker, group=dr.GROUPS.single):
    """
    Given a broker populated with datasources, return everything that
    could run based on them.
    """
    state = set(broker.instances)
    models = {}
    rule_like = (plugins.rule, plugins.condition, plugins.incident)

    for comp in dr.run_order(dr.COMPONENTS[group]):
        # Only delegated, non-datasource components are candidates.
        if comp not in dr.DELEGATES or plugins.is_datasource(comp):
            continue
        if dr.DELEGATES[comp].get_missing_dependencies(state):
            continue

        if plugins.is_type(comp, rule_like):
            name = "_".join(
                [dr.get_base_module_name(comp), dr.get_simple_name(comp)])
        else:
            name = dr.get_simple_name(comp)

        if name in models:
            # Name collision: re-key both components by their fully
            # qualified dotted names.
            prev = models.pop(name)
            models[dr.get_name(prev).replace(".", "_")] = prev
            name = dr.get_name(comp).replace(".", "_")

        models[name] = comp
        state.add(comp)

    return models
def get_datasources(broker):
    """Return the Specs datasources that have an instance in *broker*."""
    members = (getattr(Specs, name) for name in dir(Specs))
    all_datasources = {m for m in members if is_datasource(m)}
    return all_datasources & set(broker.instances)
def add_filter(ds, patterns):
    """
    Add a filter or list of filters to a datasource. A filter is a simple
    string, and it matches if it is contained anywhere within a line.

    Args:
        ds (@datasource component): The datasource to filter
        patterns (str, [str]): A string, list of strings, or set of strings
            to add to the datasource's filters.

    Raises:
        Exception: If ``ds`` is not a filterable, non-raw datasource, or if
            any pattern is empty.
        TypeError: If ``patterns`` is not a string, list, or set.
    """
    if not plugins.is_datasource(ds):
        raise Exception("Filters are applicable only to datasources.")

    delegate = dr.get_delegate(ds)
    if delegate.raw:
        raise Exception("Filters aren't applicable to raw datasources.")
    if not delegate.filterable:
        raise Exception("Filters aren't applicable to %s." % dr.get_name(ds))

    # Any cached filter set for this datasource is now stale.
    if ds in _CACHE:
        del _CACHE[ds]

    # Normalize to a set before validating and merging.
    if isinstance(patterns, six.string_types):
        patterns = set([patterns])
    elif isinstance(patterns, (list, set)):
        patterns = set(patterns)
    else:
        raise TypeError("patterns must be string, list, or set.")

    # Consistent with the component-level add_filter: an empty pattern
    # would match every line, defeating filtering entirely.
    for pat in patterns:
        if not pat:
            raise Exception("Filter patterns must not be empty.")

    FILTERS[ds] |= patterns
def get_filters(component, filters=None):
    """
    Collect the filter strings registered for a datasource, including
    those registered on any of its dependents (recursively).
    """
    acc = filters or set()
    if not plugins.is_datasource(component):
        return acc
    # Membership check avoids materializing an entry for unfiltered
    # components in FILTERS.
    if component in FILTERS:
        acc.update(FILTERS[component])
    for dependent in dr.get_dependents(component):
        acc.update(get_filters(dependent, acc))
    return acc
def inner(c, filters=None):
    """Recursively gather filters for *c* and its dependents; empty when
    filtering is disabled or *c* is not a datasource."""
    acc = filters or set()
    if not ENABLED or not plugins.is_datasource(c):
        return acc
    # Membership check avoids materializing an entry in FILTERS.
    if c in FILTERS:
        acc.update(FILTERS[c])
    for dependent in dr.get_dependents(c):
        acc.update(inner(dependent, acc))
    return acc
def main(filename):
    """Write a cross-reference report of specs, parsers, and combiners to
    *filename*."""
    for module in ("insights.specs.default",
                   "insights.specs.insights_archive",
                   "insights.specs.sos_archive",
                   "insights.specs.jdr_archive",
                   "insights.parsers",
                   "insights.combiners"):
        dr.load_components(module)

    parsers = sorted((c for c in dr.DELEGATES if is_parser(c)),
                     key=dr.get_name)
    combiners = sorted((c for c in dr.DELEGATES if is_combiner(c)),
                       key=dr.get_name)
    specs = sorted((c for c in dr.DELEGATES
                    if is_datasource(c) and dr.get_module_name(c) == 'insights.specs'),
                   key=dr.get_name)

    with open(filename, "w") as fh:
        fh.write("Components Cross-Reference\n")
        fh.write("==========================\n")

        fh.write("Specs Dependents\n")
        fh.write("----------------\n")
        for spec in specs:
            info = {
                'name': dr.get_name(spec),
                'dependents': [
                    {
                        'name': dr.get_name(dep),
                        'dependents': [dr.get_name(sub)
                                       for sub in dr.get_dependents(dep)],
                    }
                    for dep in dr.get_dependents(spec)
                ],
            }
            print_spec_info(info, fh)

        blank_line(fh)
        fh.write("Parser Dependents/Dependencies\n")
        fh.write("------------------------------\n")
        for parser in parsers:
            print_info(component_info(parser), fh)

        blank_line(fh)
        fh.write("Combiner Dependents/Dependencies\n")
        fh.write("--------------------------------\n")
        for combiner in combiners:
            print_info(component_info(combiner), fh)
def get_datasources(self, comp, broker):
    """ Get the most relevant activated datasources for each rule. """
    ds = []
    for candidate in dr.get_dependency_graph(comp):
        if candidate not in broker or not is_datasource(candidate):
            continue
        values = broker[candidate]
        if not isinstance(values, list):
            values = [values]
        for value in values:
            # Only ContentProvider results carry a command/path to report.
            if isinstance(value, ContentProvider):
                ds.append(value.cmd or value.path or "python implementation")
    return ds
def _resolve_registry_points(cls, base, dct):
    """
    Wire the RegistryPoints and datasources declared in *dct* into *cls*'s
    registry and the dependency graph, honoring RegistryPoints inherited
    from *base*.
    """
    module = cls.__module__
    # Ancestors of cls (excluding cls itself, SpecSet, and object).
    parents = [x for x in cls.__mro__ if x not in (cls, SpecSet, object)]
    for k, v in dct.items():
        if isinstance(v, RegistryPoint):
            # Record the RegistryPoint in this class's registry under its
            # attribute name.
            v.__name__ = k
            cls.registry[k] = v
        if is_datasource(v):
            # Give the datasource proper identity metadata and expose it
            # through a descriptor on the class.
            v.__qualname__ = ".".join([cls.__name__, k])
            v.__name__ = k
            v.__module__ = module
            setattr(cls, k, SpecDescriptor(v))
            if k in base.registry:
                # A datasource sharing its name with a RegistryPoint on the
                # base class becomes an implementation of that point: the
                # point depends on the datasource, which is hidden from
                # component listings.
                point = base.registry[k]
                dr.add_dependency(point, v)
                dr.mark_hidden(v)
                # Presumably tells same-named datasources on parent spec
                # classes not to run for contexts this datasource handles
                # (see the commented sibling definition) — confirm against
                # _register_context_handler.
                _register_context_handler(parents, v)
def _show_tree(self, node, indent="", depth=None, dep_getter=dr.get_dependencies):
    """Render *node* and its dependencies as a list of indented lines,
    recursing until *depth* (None means unlimited)."""
    if depth is not None and depth == 0:
        return []

    lines = []
    color = self._get_color(node)

    if plugins.is_datasource(node):
        lines.extend(
            self._show_datasource(node, self._broker.get(node), indent=indent))
    else:
        label = ansiformat(
            color,
            "{n} ({t}".format(n=dr.get_name(node), t=self._get_type_name(node)))
        suffix = self._get_rule_value(node)
        lines.append(indent + label + suffix + ansiformat(color, ")"))

    # Exceptions hang off the node with a dashed connector.
    dashes = "\u250A\u254C\u254C\u254C\u254C\u254C"
    if node in self._broker.exceptions:
        for ex in self._broker.exceptions[node]:
            lines.append(indent + dashes + ansiformat(color, str(ex)))

    child_indent = indent + "\u250A "
    child_depth = None if depth is None else depth - 1
    for dep in dep_getter(node):
        lines.extend(
            self._show_tree(dep, child_indent,
                            depth=child_depth,
                            dep_getter=dep_getter))
    return lines
def main():
    """
    Load all default specs, parsers, and combiners, then render a report
    (via the module-level REPORT Jinja template) describing each top-level
    spec: its definition text, the rules that depend on it, and its
    filters.
    """
    # config = get_config()
    dr.load_components("insights.specs.default")
    dr.load_components("insights.parsers")
    dr.load_components("insights.combiners")
    dr.load_components("telemetry.rules.plugins")
    dr.load_components("prodsec")

    ds = dr.COMPONENTS_BY_TYPE[datasource]

    # Keep only "top-level" datasources: those with no datasource dependents.
    specs = []
    for c in ds:
        if not is_datasource(c):
            continue
        if not any(is_datasource(d) for d in dr.get_dependents(c)):
            specs.append(c)

    deps = defaultdict(dict)
    pspec = ''
    for spec in sorted(specs, key=dr.get_name):
        info = dict(name=dr.get_simple_name(spec))
        f = filters.get_filters(spec)
        info['dependents'] = []

        # Look two levels down the dependency graph for a DefaultSpecs
        # implementation of this spec, if any.
        spds = None
        d = [d for d in dr.get_dependencies(spec) if is_datasource(d)]
        for dp in d:
            c = dr.get_dependencies(dp)
            for cdeps in c:
                # NOTE(review): func_dict is the Py2-era spelling of
                # __dict__ on functions — this relies on the datasource
                # being a plain function; confirm under Py3.
                if is_datasource(cdeps) and '__qualname__' in cdeps.func_dict and 'DefaultSpecs' in cdeps.func_dict['__qualname__']:
                    spds = cdeps

        # Build an HTML description (cp) of the spec from its docstring.
        # NOTE(review): cp keeps only the value from the LAST dependency in
        # this loop — verify that is intended.
        for d in dr.get_dependencies(spec):
            cp = ''
            lines = []
            if d.__doc__ and "Returns the first" in d.__doc__:
                # "first_of"-style docstrings list alternatives separated
                # by commas; turn them into an HTML bullet list.
                lines = d.__doc__.replace(',', '\n')
                lines = lines.splitlines()
                head = [lines[0]]
                top = ["<ul>"]
                bottom = ["</ul>"]
                if spds:
                    lines = [l.replace('Command:', '') for l in lines]
                    lines = [l.replace('Path:', '') for l in lines]
                    lines = ["<li>" + l + "</li>" for l in lines[1:]]
                    # lines = ["<li>" + spds.func_doc + ',' + l + "</li>" for l in lines[1:]]
                else:
                    lines = ["<li>" + l + "</li>" for l in lines[1:]]
                cp = "\n".join(head + top + lines + bottom)
            else:
                if spds:
                    # NOTE(review): mutates the component's __doc__ in
                    # place — later passes over the same component see the
                    # rewritten docstring.
                    d.__doc__ = d.__doc__.replace('Command:', '')
                    d.__doc__ = d.__doc__.replace('Path:', '')
                    d.__doc__ = spds.func_doc + ', ' + d.__doc__
                cp = d.__doc__

        # Record, per dependent (skipping consecutive same-named ones via
        # pspec), the spec definition, its rule names, and its filters.
        for d in dr.get_dependents(spec):
            if dr.get_simple_name(pspec) == dr.get_simple_name(d):
                continue
            pspec = d
            p = [dr.get_name(sd) for sd in dr.get_dependents(d)]
            # Rule name = middle component of the dotted name.
            rules = sorted([x.rsplit('.', 2)[1] for x in p])
            deps[info['name']][info['name'] + "_spec-def"] = cp
            deps[info['name']][info['name'] + "_rules"] = ", ".join(rules)
            deps[info['name']][info['name'] + "_filters"] = f

    report = Environment().from_string(REPORT).render(
        report_date=datetime.date.today().strftime("%B %d, %Y"), specs=deps)
    print(report)