def _get_local_osds(self):
    if not self.cli_cache['ceph_volume_lvm_list']:
        return

    s = FileSearcher()
    sd = SequenceSearchDef(start=SearchDef(r"^=+\s+osd\.(\d+)\s+=+.*"),
                           body=SearchDef([r"\s+osd\s+(fsid)\s+(\S+)\s*",
                                           r"\s+(devices)\s+([\S]+)\s*"]),
                           tag="ceph-lvm")
    s.add_search_term(sd, path=self.cli_cache['ceph_volume_lvm_list'])
    local_osds = []
    for results in s.search().find_sequence_sections(sd).values():
        osd_id = None
        fsid = None
        dev = None
        for result in results:
            if result.tag == sd.start_tag:
                osd_id = int(result.get(1))
            elif result.tag == sd.body_tag:
                if result.get(1) == "fsid":
                    fsid = result.get(2)
                elif result.get(1) == "devices":
                    dev = result.get(2)

        local_osds.append(CephOSD(osd_id, fsid, dev))

    return local_osds
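# A minimal, runnable sketch (not from the source) of what the start and
# body expressions above are written to match in `ceph-volume lvm list`
# output; the sample lines and fsid are invented.
import re

header = "====== osd.1 ======="
assert re.match(r"^=+\s+osd\.(\d+)\s+=+.*", header).group(1) == "1"

fsid_row = "  osd fsid      11111111-2222-3333-4444-555555555555"
m = re.match(r"\s+osd\s+(fsid)\s+(\S+)\s*", fsid_row)
assert m.group(2).startswith("11111111")

dev_row = "  devices       /dev/vdb"
m = re.match(r"\s+(devices)\s+([\S]+)\s*", dev_row)
assert m.group(2) == "/dev/vdb"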
def udev_bcache_devs(self):
    """ If bcache devices exist, fetch their information and return as a
    list. """
    if self._bcache_devs:
        return self._bcache_devs

    udevadm_info = self.cli.udevadm_info_exportdb()
    if not udevadm_info:
        return self._bcache_devs

    s = FileSearcher()
    sdef = SequenceSearchDef(start=SearchDef(r"^P: .+/(bcache\S+)"),
                             body=SearchDef(r"^S: disk/by-uuid/(\S+)"),
                             tag="bcacheinfo")
    s.add_search_term(sdef, utils.mktemp_dump('\n'.join(udevadm_info)))
    results = s.search()
    devs = []
    for section in results.find_sequence_sections(sdef).values():
        dev = {}
        for r in section:
            if r.tag == sdef.start_tag:
                dev["name"] = r.get(1)
            else:
                dev["by-uuid"] = r.get(1)

        devs.append(dev)

    self._bcache_devs = devs
    return self._bcache_devs
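# Hypothetical fragment of `udevadm info --export-db` output (real output
# has many more keys per device) showing what the sequence above extracts:
import re

p_line = "P: /devices/virtual/block/bcache0"
s_line = "S: disk/by-uuid/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"
assert re.match(r"^P: .+/(bcache\S+)", p_line).group(1) == "bcache0"
assert re.match(r"^S: disk/by-uuid/(\S+)", s_line).group(1).endswith("eeee")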
def test_filesearcher_network_info(self):
    filepath = os.path.join(HotSOSConfig.DATA_ROOT, 'sos_commands',
                            'networking', 'ip_-d_address')
    filepath2 = os.path.join(HotSOSConfig.DATA_ROOT, 'sos_commands',
                             'networking', 'ip_-s_-d_link')
    ip = "10.0.0.128"
    mac = "22:c2:7b:1c:12:1b"
    s = FileSearcher()

    sd = SearchDef(r".+({}).+".format(ip))
    s.add_search_term(sd, filepath)
    sd = SearchDef(r"^\s+link/ether\s+({})\s+.+".format(mac))
    s.add_search_term(sd, filepath2)

    results = s.search()
    self.assertEqual(set(results.files), set([filepath, filepath2]))
    self.assertEqual(len(results.find_by_path(filepath)), 1)
    self.assertEqual(len(results.find_by_path(filepath2)), 2)

    self.assertEqual(results.find_by_path(filepath)[0].linenumber, 38)
    for result in results.find_by_path(filepath):
        self.assertEqual(result.get(1), ip)

    expected = {52: mac,
                141: mac}
    for result in results.find_by_path(filepath2):
        ln = result.linenumber
        self.assertEqual(result.tag, None)
        self.assertEqual(result.get(1), expected[ln])
def test_sequence_searcher_section_start_end_same(self):
    """
    Test scenario:
     * multiple sections that end with start of the next
     * start def matches unique start
     * end def matches any start
    """
    with tempfile.NamedTemporaryFile(mode='w', delete=False) as ftmp:
        ftmp.write(SEQ_TEST_7)
        ftmp.close()
        s = FileSearcher()
        sd = SequenceSearchDef(start=SearchDef(r"^section (2)"),
                               body=SearchDef(r"\d_\d"),
                               end=SearchDef(r"^section (\d+)"),
                               tag="seq-search-test7")
        s.add_search_term(sd, path=ftmp.name)
        results = s.search()
        sections = results.find_sequence_sections(sd)
        self.assertEqual(len(sections), 1)
        for section_id in sections:
            for r in sections[section_id]:
                if r.tag == sd.start_tag:
                    self.assertEqual(r.get(1), "2")
                elif r.tag == sd.body_tag:
                    self.assertTrue(r.get(0) in ["2_1"])

        os.remove(ftmp.name)
def memory_searchdef(self):
    start = SearchDef([r"^Status of node '([^']*)'$",
                       r"^Status of node ([^']*) ...$"])
    body = SearchDef(r"^\s+\[{total,([0-9]+)}.+")
    end = SearchDef(r"^$")
    return SequenceSearchDef(start=start, body=body, end=end, tag='memory')
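# Sketch of the two header styles the start expressions above allow for
# (rabbitmq 3.6.x quotes the node name, 3.8.x does not); node name invented.
import re

v36 = "Status of node 'rabbit@host1'"
v38 = "Status of node rabbit@host1 ..."
assert re.match(r"^Status of node '([^']*)'$", v36).group(1) == "rabbit@host1"
assert re.match(r"^Status of node ([^']*) ...$", v38).group(1) == \
    "rabbit@host1"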
def connections_searchdef(self):
    start = SearchDef([r"^Connections:$",
                       r"^Listing connections ...$"])
    # Again, the user and protocol columns are inverted between 3.6.x and
    # 3.8.x so we have to catch both and decide.
    body = SearchDef(r"^<(rabbit[^>.]*)(?:[.][0-9]+)+>.+(?:[A-Z]+\s+"
                     r"{[\d,]+}\s+(\S+)|\d+\s+{[\d,]+}\s+\S+\s+(\S+)).+"
                     r"{\"connection_name\",\"([^:]+):\d+:.+$")
    end = SearchDef(r"^$")
    return SequenceSearchDef(start=start, body=body, end=end,
                             tag='connections')
def queues_searchdef(self):
    start = SearchDef([r"^Queues on ([^:]+):",
                       (r"^Listing queues for vhost ([^:]+) "
                        r"...")])
    # NOTE: we don't use a list for the body here because we need to know
    # which expression matched so that we can know in which order to
    # retrieve the columns since their order is inverted between 3.6.x
    # and 3.8.x
    body = SearchDef(r"^(?:<([^.\s]+)[.0-9]+>\s+(\S+)|"
                     r"(\S+)\s+(?:\S+\s+){4}<([^.\s]+)[.0-9]"
                     r"+>)\s+.+")
    end = SearchDef(r"^$")
    return SequenceSearchDef(start=start, body=body, end=end, tag='queues')
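# Hypothetical queue rows shaped to exercise both alternations of the body
# expression above (column order differs between 3.6.x and 3.8.x); these
# are not verbatim rabbitmqctl output.
import re

body = (r"^(?:<([^.\s]+)[.0-9]+>\s+(\S+)|"
        r"(\S+)\s+(?:\S+\s+){4}<([^.\s]+)[.0-9]"
        r"+>)\s+.+")
v36 = "<rabbit@host1.3.1.0>   myqueue   0"
v38 = "myqueue 0 0 0 0 <rabbit@host1.3.1.0> running"
assert re.match(body, v36).group(2) == "myqueue"  # pid column first (3.6.x)
assert re.match(body, v38).group(3) == "myqueue"  # queue name first (3.8.x)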
def stats(self):
    """ Get ip link info for the interface. """
    counters = self.cache_load()
    if counters:
        return counters

    s = FileSearcher()
    seqdef = SequenceSearchDef(
                # match start of interface
                start=SearchDef(IP_IFACE_NAME_TEMPLATE.format(self.name)),
                # match body of interface
                body=SearchDef(r".+"),
                # match next interface or EOF
                end=SearchDef([IP_IFACE_NAME, IP_EOF]),
                tag="ifaces")
    f_ip_link_show = mktemp_dump(''.join(self.cli_helper.ip_link()))
    s.add_search_term(seqdef, path=f_ip_link_show)
    results = s.search()
    os.unlink(f_ip_link_show)
    stats_raw = []
    for section in results.find_sequence_sections(seqdef).values():
        for result in section:
            if result.tag == seqdef.body_tag:
                stats_raw.append(result.get(0))

    if not stats_raw:
        return {}

    # NOTE: we only expect one match
    counters = {}
    for i, line in enumerate(stats_raw):
        ret = re.compile(r"\s+([RT]X):\s+.+").findall(line)
        if ret:
            rxtx = ret[0].lower()
            ret = re.compile(r"\s*([a-z]+)\s*").findall(line)
            if ret:
                for j, column in enumerate(ret):
                    value = int(stats_raw[i + 1].split()[j])
                    if column in ['packets', 'dropped', 'errors',
                                  'overrun']:
                        if rxtx not in counters:
                            counters[rxtx] = {}

                        counters[rxtx][column] = value

    if counters:
        self.cache_save(counters)
        return counters

    return {}
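# Rough shape of the `ip -s link` counter block the loop above walks
# (numbers invented): the first pass finds the RX:/TX: header, the second
# collects the lower-case column names whose values sit on the next line.
import re

header = "    RX:  bytes packets errors dropped overrun mcast"
values = "    1000000 2000 0 1 0 0"
assert re.compile(r"\s+([RT]X):\s+.+").findall(header) == ["RX"]
cols = re.compile(r"\s*([a-z]+)\s*").findall(header)
assert int(values.split()[cols.index('packets')]) == 2000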
def _get_version_info(self, daemon_type=None):
    """
    Returns a dict of ceph version info for the provided daemon type.

    If no daemon type is provided, version info is collected for all types
    and the resulting dict is keyed by daemon type, otherwise it is keyed
    by version (and contains only versions for that daemon type).
    """
    out = self.cli_cache['ceph_versions']
    if not out:
        return

    versions = {}
    s = FileSearcher()
    body = SearchDef(r"\s+\"ceph version (\S+) .+ (\S+) "
                     r"\(\S+\)\":\s+(\d+),?$")
    if daemon_type is None:
        # all/any - start matches any so no seq ending needed
        sd = SequenceSearchDef(start=SearchDef(r"^\s+\"(\S+)\":\s+{"),
                               body=body, tag='versions')
    else:
        start = SearchDef(r"^\s+\"({})\":\s+{{".format(daemon_type))
        sd = SequenceSearchDef(start=start, body=body,
                               end=SearchDef(r"^\s+\"\S+\":\s+{"),
                               tag='versions')

    s.add_search_term(sd, path=self.cli_cache['ceph_versions'])
    for section in s.search().find_sequence_sections(sd).values():
        _versions = {}
        for result in section:
            if result.tag == sd.start_tag:
                _daemon_type = result.get(1)
                versions[_daemon_type] = _versions
            elif result.tag == sd.body_tag:
                version = result.get(1)
                rname = result.get(2)
                amount = result.get(3)
                _versions[version] = {'release_name': rname,
                                      'count': int(amount)}

    # If a specific daemon_type was provided, only return versions for
    # that type, otherwise all.
    if daemon_type is not None:
        versions = versions.get(daemon_type)

    return versions
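# Hedged sketch of the `ceph versions` JSON body line the expression above
# parses (version, release name and count are invented):
import re

line = '        "ceph version 17.2.0 (abc) quincy (stable)": 3,'
m = re.match(r"\s+\"ceph version (\S+) .+ (\S+) \(\S+\)\":\s+(\d+),?$", line)
assert m.groups() == ("17.2.0", "quincy", "3")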
def _create_search_results(self, path, contents):
    with open(path, 'w') as fd:
        for line in contents:
            fd.write(line)

    s = FileSearcher()
    s.add_search_term(SearchDef(r'^(\S+) (\S+) .+', tag='all'), path)
    return s.search().find_by_tag('all')
def test_ordered_multiple(self):
    start0 = '2021-07-19 09:08:58.498000'
    end0 = '2021-07-19 09:09:58.498000'
    start1 = '2021-07-19 09:03:58.498000'
    end1 = '2021-07-19 09:04:58.498000'
    expected = {'0': {'duration': 60.0,
                      'start': start0,
                      'end': end0},
                '1': {'duration': 60.0,
                      'start': start1,
                      'end': end1}}
    with tempfile.NamedTemporaryFile(mode='w', delete=False) as ftmp:
        ftmp.write(SEQ_TEST_6)
        ftmp.close()
        s = FileSearcher()
        expr = r'^([0-9\-]+) (\S+) iteration:([0-9]+) start'
        s.add_search_term(SearchDef(expr, tag="eventX-start"),
                          path=ftmp.name)
        expr = r'^([0-9\-]+) (\S+) iteration:([0-9]+) end'
        s.add_search_term(SearchDef(expr, tag="eventX-end"),
                          path=ftmp.name)
        events = analytics.LogEventStats(s.search(), "eventX")
        os.remove(ftmp.name)
        events.run()
        top5 = events.get_top_n_events_sorted(5)
        self.assertEqual(top5, expected)
        stats = events.get_event_stats()
        expected = {'avg': 60.0,
                    'incomplete': 2,
                    'max': 60.0,
                    'min': 60.0,
                    'samples': 2,
                    'stdev': 0.0}
        self.assertEqual(stats, expected)
def test_filesearcher_error(self):
    s = FileSearcher()
    with mock.patch.object(SearchResult, '__init__') as mock_init:

        def fake_init(*args, **kwargs):
            raise EOFError("some error")

        mock_init.side_effect = fake_init
        path = os.path.join(HotSOSConfig.DATA_ROOT)
        s.add_search_term(SearchDef("."), path)
        s.search()
def _add_agent_searches(self, project, agent, data_source, expr_template):
    # the tag is shared by all three search types below so define it
    # upfront.
    tag = "{}.{}".format(project.name, agent)
    if project.exceptions:
        values = "(?:{})".format('|'.join(project.exceptions))
        expr = expr_template.format(values)
        hint = '( ERROR | Traceback)'
        self.searchobj.add_search_term(SearchDef(expr, tag=tag, hint=hint),
                                       data_source)

    warn_exprs = self._agent_warnings.get(project.name, [])
    if warn_exprs:
        values = "(?:{})".format('|'.join(warn_exprs))
        expr = expr_template.format(values)
        self.searchobj.add_search_term(SearchDef(expr, tag=tag,
                                                 hint='WARNING'),
                                       data_source)

    err_exprs = self._agent_errors.get(project.name, [])
    if err_exprs:
        expr = expr_template.format("(?:{})".format('|'.join(err_exprs)))
        sd = SearchDef(expr, tag=tag, hint='ERROR')
        self.searchobj.add_search_term(sd, data_source)
def test_sequence_searcher_multi_sequence(self):
    """
    Test scenario:
     * search containing multiple sequence definitions
     * data containing 2 results of each where one is incomplete
     * test that single incomplete result gets removed
    """
    with tempfile.NamedTemporaryFile(mode='w', delete=False) as ftmp:
        ftmp.write(MULTI_SEQ_TEST)
        ftmp.close()
        s = FileSearcher()
        sdA = SequenceSearchDef(start=SearchDef(r"^sectionA (\d+)"),
                                body=SearchDef(r"\d_\d"),
                                end=SearchDef(r"^section\S+ (\d+)"),
                                tag="seqA-search-test")
        sdB = SequenceSearchDef(start=SearchDef(r"^sectionB (\d+)"),
                                body=SearchDef(r"\d_\d"),
                                end=SearchDef(r"^section\S+ (\d+)"),
                                tag="seqB-search-test")
        s.add_search_term(sdA, path=ftmp.name)
        s.add_search_term(sdB, path=ftmp.name)
        results = s.search()
        sections = results.find_sequence_sections(sdA)
        self.assertEqual(len(sections), 1)
        sections = results.find_sequence_sections(sdB)
        self.assertEqual(len(sections), 2)
        os.remove(ftmp.name)
def _get_vcpu_info(self):
    nova_config = OpenstackConfig(
        os.path.join(HotSOSConfig.DATA_ROOT, "etc/nova/nova.conf"))
    vcpu_info = {}
    guests = []
    s = FileSearcher()
    if self.nova.instances:
        for i in self.nova.instances.values():
            guests.append(i.name)
            path = os.path.join(HotSOSConfig.DATA_ROOT, 'etc/libvirt/qemu',
                                "{}.xml".format(i.name))
            s.add_search_term(SearchDef(".+vcpus>([0-9]+)<.+", tag=i.name),
                              path)

        total_vcpus = 0
        results = s.search()
        for guest in guests:
            for r in results.find_by_tag(guest):
                vcpus = r.get(1)
                total_vcpus += int(vcpus)

        vcpu_info["used"] = total_vcpus
        sysinfo = SystemBase()
        if sysinfo.num_cpus is not None:
            total_cores = sysinfo.num_cpus
            vcpu_info["system-cores"] = total_cores

            pinset = nova_config.get("vcpu_pin_set",
                                     expand_to_list=True) or []
            pinset += nova_config.get("cpu_dedicated_set",
                                      expand_to_list=True) or []
            pinset += nova_config.get("cpu_shared_set",
                                      expand_to_list=True) or []
            if pinset:
                # if pinning is used, reduce total num of cores available
                # to those included in nova cpu sets.
                available_cores = len(set(pinset))
            else:
                available_cores = total_cores

            vcpu_info["available-cores"] = available_cores

            cpu = CPU()
            # put this here so that available cores value has context
            if cpu.smt is not None:
                vcpu_info["smt"] = cpu.smt

            factor = float(total_vcpus) / available_cores
            vcpu_info["overcommit-factor"] = round(factor, 2)

    return vcpu_info
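# Worked example of the overcommit calculation above (numbers invented):
# 64 vCPUs allocated to guests on 16 usable cores gives a factor of 4.0.
assert round(float(64) / 16, 2) == 4.0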
def test_sequence_searcher_overlapping_incomplete(self):
    with tempfile.NamedTemporaryFile(mode='w', delete=False) as ftmp:
        ftmp.write(SEQ_TEST_3)
        ftmp.close()
        s = FileSearcher()
        sd = SequenceSearchDef(
            start=SearchDef(r"^(a\S*) (start\S*) point\S*"),
            body=SearchDef(r"leads to"),
            end=SearchDef(r"^an (ending)$"),
            tag="seq-search-test3")
        s.add_search_term(sd, path=ftmp.name)
        results = s.search()
        sections = results.find_sequence_sections(sd)
        self.assertEqual(len(sections), 1)
        for section_id in sections:
            for r in sections[section_id]:
                if r.tag == sd.start_tag:
                    self.assertEqual(r.get(1), "another")
                elif r.tag == sd.end_tag:
                    self.assertEqual(r.get(1), "ending")

        os.remove(ftmp.name)
def test_sequence_searcher_multiple_sections(self):
    with tempfile.NamedTemporaryFile(mode='w', delete=False) as ftmp:
        ftmp.write(SEQ_TEST_5)
        ftmp.close()
        s = FileSearcher()
        sd = SequenceSearchDef(
            start=SearchDef(r"^(a\S*) (start\S*) point\S*"),
            body=SearchDef(r"value is (\S+)"),
            end=SearchDef(r"^$"),
            tag="seq-search-test5")
        s.add_search_term(sd, path=ftmp.name)
        results = s.search()
        sections = results.find_sequence_sections(sd)
        self.assertEqual(len(sections), 2)
        for section_id in sections:
            for r in sections[section_id]:
                if r.tag == sd.start_tag:
                    self.assertEqual(r.get(1), "another")
                elif r.tag == sd.body_tag:
                    self.assertTrue(r.get(1) in ["3", "4"])
                elif r.tag == sd.end_tag:
                    self.assertEqual(r.get(0), "")

        os.remove(ftmp.name)
def test_search_filter_invert_match(self):
    with tempfile.NamedTemporaryFile(mode='w', delete=False) as ftmp:
        ftmp.write(FILTER_TEST_1)
        ftmp.close()
        s = FileSearcher()
        fd = FilterDef(r" (ERROR)", invert_match=True)
        s.add_filter_term(fd, path=ftmp.name)
        sd = SearchDef(r".+ INFO (.+)")
        s.add_search_term(sd, path=ftmp.name)
        results = s.search().find_by_path(ftmp.name)
        self.assertEqual(len(results), 1)
        for r in results:
            self.assertEqual(r.get(1), "blah")

        os.remove(ftmp.name)
def process_events(self, event):
    ext_output = {}
    events = {}
    events_found = {}

    s = FileSearcher()
    for result in event.results:
        instance_id = result.get(1)
        event_id = result.get(3)
        events[event_id] = {'instance_id': instance_id,
                            'data_source': result.source}

        for stage in EXT_EVENT_META[event.name]['stages_keys']:
            expr = (r".+\[instance: {}\]\s+{}\s.*\s?event\s+{}-{}.? "
                    ".+".format(instance_id, stage, event.name, event_id))
            tag = "{}_{}_{}".format(instance_id, event_id, stage)
            sd = SearchDef(expr, tag, hint=event.name)
            s.add_search_term(sd, result.source)

    results = s.search()
    for event_id, event_info in events.items():
        instance_id = event_info['instance_id']
        data_source = event_info['data_source']
        stages = self.get_state_dict(event.name)
        for stage in stages:
            tag = "{}_{}_{}".format(instance_id, event_id, stage)
            r = results.find_by_tag(tag, path=data_source)
            if r:
                stages[stage] = True

        if all(stages.values()):
            result = 'succeeded'
        else:
            result = 'failed'

        if result not in ext_output:
            ext_output[result] = []

        info = {'port': event_id, 'instance': instance_id}
        ext_output[result].append(info)

    if ext_output:
        for result, entries in ext_output.items():
            events_found[result] = list(entries)

    return events_found
def timed_out_plugins(self):
    timeouts = []
    if not os.path.exists(os.path.join(HotSOSConfig.DATA_ROOT,
                                       'sos_logs')):
        return timeouts

    searcher = FileSearcher()
    path = os.path.join(HotSOSConfig.DATA_ROOT, 'sos_logs/ui.log')
    searcher.add_search_term(SearchDef(r".* Plugin (\S+) timed out.*",
                                       tag="timeouts"),
                             path=path)
    results = searcher.search()
    for r in results.find_by_tag("timeouts"):
        plugin = r.get(1)
        timeouts.append(plugin)

    return timeouts
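# Illustrative sos_logs/ui.log line of the kind the expression above is
# meant to catch (wording approximated, not verbatim sos output):
import re

line = "2022-01-01 00:00:00 INFO: Plugin networking timed out after 300s"
assert re.match(r".* Plugin (\S+) timed out.*", line).group(1) == "networking"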
def rss(self):
    """ Return memory RSS for a given daemon.

    NOTE: this assumes we have ps auxwwwm format.
    """
    if self._rss:
        return self._rss

    s = FileSearcher()
    # columns: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND
    sd = SearchDef(r"\S+\s+\d+\s+\S+\s+\S+\s+\d+\s+(\d+)\s+.+/ceph-{}\s+"
                   r".+--id\s+{}\s+.+".format(self.daemon_type, self.id))
    s.add_search_term(sd, path=self.cli_cache['ps'])
    rss = 0
    # we only expect one result
    for result in s.search().find_by_path(self.cli_cache['ps']):
        rss = int(int(result.get(1)) / 1024)
        break

    self._rss = "{}M".format(rss)
    return self._rss
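# Hypothetical `ps auxwwwm` row for a ceph-osd (field values invented)
# showing what the expression above extracts: RSS is column six, in kB.
import re

ps_line = ("ceph  1234  1.0  2.0  4000000 1048576 ?  Ssl  Jan01  10:00 "
           "/usr/bin/ceph-osd -f --cluster ceph --id 0 --setuser ceph")
expr = (r"\S+\s+\d+\s+\S+\s+\S+\s+\d+\s+(\d+)\s+.+/ceph-{}\s+"
        r".+--id\s+{}\s+.+".format("osd", 0))
assert int(re.match(expr, ps_line).group(1)) // 1024 == 1024  # i.e. 1024M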
def cluster_partition_handling_searchdef(self):
    return SearchDef(r"^\s*{cluster_partition_handling,([^}]*)}",
                     tag='cluster_partition_handling')
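# Example of the report line this expression targets (value illustrative):
import re

line = " {cluster_partition_handling,ignore},"
assert re.search(r"^\s*{cluster_partition_handling,([^}]*)}",
                 line).group(1) == "ignore"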
def test_filesearcher_logs(self):
    expected = {9891: '2022-02-09 22:50:18.131',
                9892: '2022-02-09 22:50:19.703'}
    logs_root = "var/log/neutron/"
    filepath = os.path.join(HotSOSConfig.DATA_ROOT, logs_root,
                            'neutron-openvswitch-agent.log.2.gz')
    globpath = os.path.join(HotSOSConfig.DATA_ROOT, logs_root,
                            'neutron-l3-agent.log')
    globpath_file1 = os.path.join(HotSOSConfig.DATA_ROOT, logs_root,
                                  'neutron-l3-agent.log')
    globpath_file2 = os.path.join(HotSOSConfig.DATA_ROOT, logs_root,
                                  'neutron-l3-agent.log.1.gz')

    s = FileSearcher()
    sd = SearchDef(r'^(\S+\s+[0-9:\.]+)\s+.+full sync.+', tag="T1")
    s.add_search_term(sd, filepath)
    sd = SearchDef(r'^(\S+\s+[0-9:\.]+)\s+.+ERROR.+', tag="T2")
    s.add_search_term(sd, filepath)
    sd = SearchDef((r'^(\S+\s+[0-9:\.]+)\s+.+ INFO .+ Router [0-9a-f\-]+'
                    '.+'), tag="T3")
    s.add_search_term(sd, globpath)
    # search for something that doesn't exist to test that code path
    sd = SearchDef(r'non-existant-pattern', tag="T4")
    s.add_search_term(sd, globpath)

    results = s.search()
    self.assertEqual(set(results.files), set([filepath, globpath]))
    self.assertEqual(len(results.find_by_path(filepath)), 1220)

    tag_results = results.find_by_tag("T1", path=filepath)
    self.assertEqual(len(tag_results), 2)
    for result in tag_results:
        ln = result.linenumber
        self.assertEqual(result.tag, "T1")
        self.assertEqual(result.get(1), expected[ln])

    tag_results = results.find_by_tag("T1")
    self.assertEqual(len(tag_results), 2)
    for result in tag_results:
        ln = result.linenumber
        self.assertEqual(result.tag, "T1")
        self.assertEqual(result.get(1), expected[ln])

    self.assertEqual(len(results.find_by_path(globpath_file1)), 1)
    self.assertEqual(len(results.find_by_path(globpath_file2)), 0)

    # these files have the same content so expect same result from both
    expected = {5380: '2022-02-10 16:09:22.641'}
    path_results = results.find_by_path(globpath_file1)
    for result in path_results:
        ln = result.linenumber
        self.assertEqual(result.tag, "T3")
        self.assertEqual(result.get(1), expected[ln])

    path_results = results.find_by_path(globpath_file2)
    for result in path_results:
        ln = result.linenumber
        self.assertEqual(result.tag, "T3")
        self.assertEqual(result.get(1), expected[ln])
def _get_interfaces(self, namespaces=False):
    """
    Get all interfaces in ip address show.

    @param namespaces: if set to True will get interfaces from all
    namespaces on the host.
    @return: list of NetworkPort objects for each interface found.
    """
    interfaces = []
    interfaces_raw = self.cache_load(namespaces=namespaces)
    if interfaces_raw:
        for iface in interfaces_raw:
            interfaces.append(NetworkPort(**iface))

        return interfaces

    interfaces_raw = []
    seq = SequenceSearchDef(start=SearchDef(IP_IFACE_NAME),
                            body=SearchDef([IP_IFACE_V4_ADDR,
                                            IP_IFACE_V6_ADDR,
                                            IP_IFACE_HW_ADDR,
                                            IP_IFACE_VXLAN_INFO]),
                            tag='ip_addr_show')
    search_obj = FileSearcher()
    if namespaces:
        for ns in self.cli.ip_netns():
            ns_name = ns.partition(" ")[0]
            ip_addr = self.cli.ns_ip_addr(namespace=ns_name)
            path = mktemp_dump('\n'.join(ip_addr))
            search_obj.add_search_term(seq, path)
    else:
        path = mktemp_dump('\n'.join(self.cli.ip_addr()))
        search_obj.add_search_term(seq, path)

    if not search_obj.paths:
        log.debug("no network info found (namespaces=%s)", namespaces)
        return []

    r = search_obj.search()
    for path in search_obj.paths:
        # we no longer need this file so can delete it
        os.unlink(path)
        sections = r.find_sequence_sections(seq, path).values()
        for section in sections:
            addrs = []
            encap_info = None
            hwaddr = None
            name = None
            state = None
            for result in section:
                if result.tag == seq.start_tag:
                    name = result.get(1)
                    state = result.get(2)
                elif result.tag == seq.body_tag:
                    if result.get(1) in ['inet', 'inet6']:
                        addrs.append(result.get(2))
                    elif result.get(1) in ['vxlan']:
                        encap_info = {result.get(1): {
                                          'id': result.get(2),
                                          'local_ip': result.get(3),
                                          'dev': result.get(4)}}
                    else:
                        hwaddr = result.get(2)

            interfaces_raw.append({'name': name, 'addresses': addrs,
                                   'hwaddr': hwaddr, 'state': state,
                                   'encap_info': encap_info})

    self.cache_save(interfaces_raw, namespaces=namespaces)
    for iface in interfaces_raw:
        interfaces.append(NetworkPort(**iface))

    return interfaces
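# For context (the IP_IFACE_* expressions are defined elsewhere in the
# codebase): the sequence walks `ip address show` style blocks such as the
# following invented example, where the start line yields the name and
# state and the body lines yield addresses, hardware address and vxlan
# encap details.
#
#   2: eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 ... state UP ...
#       link/ether aa:bb:cc:dd:ee:ff brd ff:ff:ff:ff:ff:ff
#       inet 10.0.0.10/24 brd 10.0.0.255 scope global eth0
#       inet6 fe80::1/64 scope link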
def _load_event_definitions(self):
    """ Load event search definitions from yaml.

    An event is identified using between one and two expressions. If it
    requires a start and end to be considered complete then these can be
    specified, otherwise we can match on a single line.
    Note that multi-line events can be overlapping hence why we don't use
    a SequenceSearchDef (we use core.analytics.LogEventStats).
    """
    plugin = YDefsLoader('events').load_plugin_defs()
    if not plugin:
        return

    group_name = self._yaml_defs_group
    log.debug("loading defs for subgroup=%s", group_name)
    group_defs = plugin.get(group_name)
    group = YDefsSection(group_name, group_defs)
    log.debug("sections=%s, events=%s",
              len(group.branch_sections),
              len(group.leaf_sections))

    for event in group.leaf_sections:
        results_passthrough = bool(event.passthrough_results)
        log.debug("event: %s", event.name)
        log.debug("input: %s (command=%s)", event.input.path,
                  event.input.command is not None)
        log.debug("passthrough: %s", results_passthrough)

        section_name = event.parent.name
        # this is hopefully unique enough to allow two events from
        # different sections to have the same name and not clobber each
        # others results.
        search_tag = "{}.{}".format(section_name, event.name)

        # if this is a multiline event (has a start and end), append
        # this to the tag so that it can be used with
        # core.analytics.LogEventStats.
        search_meta = {'searchdefs': [], 'datasource': None,
                       'passthrough_results': results_passthrough}

        if event.expr:
            hint = None
            if event.hint:
                hint = event.hint.value

            search_meta['searchdefs'].append(
                SearchDef(event.expr.value, tag=search_tag, hint=hint))
        elif event.start:
            if (event.body or
                    (event.end and not results_passthrough)):
                log.debug("event '%s' search is a sequence", event.name)
                sd_start = SearchDef(event.start.expr)

                sd_end = None
                # explicit end is optional for sequence definition
                if event.end:
                    sd_end = SearchDef(event.end.expr)

                sd_body = None
                if event.body:
                    sd_body = SearchDef(event.body.expr)

                # NOTE: we don't use hints here
                sequence_def = SequenceSearchDef(start=sd_start,
                                                 body=sd_body,
                                                 end=sd_end,
                                                 tag=search_tag)
                search_meta['searchdefs'].append(sequence_def)
                search_meta['is_sequence'] = True
            elif (results_passthrough and
                    (event.start and event.end)):
                # start and end required for core.analytics.LogEventStats
                search_meta['searchdefs'].append(
                    SearchDef(event.start.expr,
                              tag="{}-start".format(search_tag),
                              hint=event.start.hint))
                search_meta['searchdefs'].append(
                    SearchDef(event.end.expr,
                              tag="{}-end".format(search_tag),
                              hint=event.end.hint))
            else:
                log.debug("unexpected search definition passthrough=%s "
                          "body provided=%s, end provided=%s",
                          results_passthrough, event.body is not None,
                          event.end is not None)
        else:
            log.debug("invalid search definition for event '%s' in "
                      "section '%s'", event, event.parent.name)
            continue

        datasource = event.input.path
        if section_name not in self.__event_defs:
            self.__event_defs[section_name] = {}

        search_meta['datasource'] = datasource
        self.__event_defs[section_name][event.name] = search_meta
def _result(self):
    if self.expr:
        if self.cache.expr:
            results = self.cache.expr.results
            log.debug("check %s - using cached result=%s", self.name,
                      results)
        else:
            s = FileSearcher()
            s.add_search_term(SearchDef(self.expr.value, tag=self.name),
                              self.input.path)
            results = s.search()
            self.expr.cache.set('results', results)

            # The following aggregates results by group/index and stores
            # them in the property cache to make them accessible via
            # PropertyCacheRefResolver.
            results_by_idx = {}
            for item in results:
                for _result in item[1]:
                    for idx, value in enumerate(_result):
                        if idx not in results_by_idx:
                            results_by_idx[idx] = set()

                        results_by_idx[idx].add(value)

            for idx in results_by_idx:
                self.expr.cache.set('results_group_{}'.format(idx),
                                    list(results_by_idx[idx]))

            self.cache.set('expr', self.expr.cache)

        if not results:
            log.debug("check %s search has no matches so result=False",
                      self.name)
            return False

        results = results.find_by_tag(self.name)
        parameters = self.check_paramaters
        if parameters:
            result_age_hours = parameters.search_result_age_hours
            results = self.filter_by_age(results, result_age_hours)
            if results:
                period_hours = parameters.search_period_hours
                results = self.filter_by_period(results, period_hours,
                                                parameters.min_results)

            count = len(results)
            if count >= parameters.min_results:
                return True

            log.debug("check %s does not have enough matches (%s) to "
                      "satisfy min of %s", self.name, count,
                      parameters.min_results)
            return False

        log.debug("no check parameters provided")
        return len(results) > 0
    elif self.requires:
        if self.cache.requires:
            result = self.cache.requires.passes
            log.debug("check %s - using cached result=%s", self.name,
                      result)
        else:
            result = self.requires.passes
            self.cache.set('requires', self.requires.cache)

        return result
    else:
        raise Exception("no supported properties found in check "
                        "{}".format(self.name))