def _get_analyzers(self, reqstatus):
    """Yield one description dict per analyzer, based on its latest heartbeat.

    Analyzers whose computed status is not in *reqstatus* are skipped
    (an empty/None *reqstatus* disables the filtering).
    """
    # Do not take the control menu into account.
    # The expected behavior is yet to be determined.
    # One (max create_time, analyzerid) couple per analyzer.
    results = env.dataprovider.query(["max(heartbeat.create_time)", "heartbeat.analyzer(-1).analyzerid/group_by"])
    if not results:
        return

    # Build a single OR-criterion matching each analyzer's latest heartbeat.
    c = Criterion()
    for create_time, analyzerid in results:
        c |= Criterion("heartbeat.create_time", "==", create_time) & Criterion("heartbeat.analyzer(-1).analyzerid", "==", analyzerid)

    for heartbeat in env.dataprovider.get(c):
        heartbeat = heartbeat["heartbeat"]
        status, status_text = utils.get_analyzer_status_from_latest_heartbeat(
            heartbeat, self._heartbeat_error_margin
        )
        if reqstatus and status not in reqstatus:
            continue

        # Negative delta: how long ago the latest heartbeat was received.
        delta = heartbeat.get("create_time") - utils.timeutil.now()
        analyzerid = heartbeat["analyzer(-1).analyzerid"]

        # Links to the related listing/analysis views for this analyzer.
        heartbeat_listing = url_for("HeartbeatDataSearch.forensic", criteria=Criterion("heartbeat.analyzer(-1).analyzerid", "==", analyzerid), _default=None)
        alert_listing = url_for("AlertDataSearch.forensic", criteria=Criterion("alert.analyzer.analyzerid", "==", analyzerid), _default=None)
        heartbeat_analyze = url_for(".analyze", analyzerid=analyzerid)

        analyzer = heartbeat["analyzer(-1)"]
        node_name = analyzer["node.name"] or _("Node name n/a")
        osversion = analyzer["osversion"] or _("OS version n/a")
        ostype = analyzer["ostype"] or _("OS type n/a")

        yield {
            "id": analyzerid,
            "label": "%s - %s %s" % (node_name, ostype, osversion),
            "location": analyzer["node.location"] or _("Node location n/a"),
            "node": node_name,
            "name": analyzer["name"],
            "model": analyzer["model"],
            "class": analyzer["class"],
            "version": analyzer["version"],
            "latest_heartbeat": localization.format_timedelta(delta, add_direction=True),
            "status": status,
            "status_text": status_text,
            "links": [
                resource.HTMLNode("a", _("Alert listing"), href=alert_listing),
                resource.HTMLNode("a", _("Heartbeat listing"), href=heartbeat_listing),
                resource.HTMLNode("a", _("Heartbeat analysis"), href=heartbeat_analyze)
            ]
        }
def _get_analyzers(self):
    """Yield one description dict per known analyzer, filtered by the
    requested status parameter."""
    if "filter_path" in self.parameters:
        criteria = "%s == '%s'" % (self.parameters["filter_path"],
                                   utils.escape_criteria(self.parameters["filter_value"]))
    else:
        criteria = None

    for (analyzerid,) in env.idmef_db.getValues(["heartbeat.analyzer(-1).analyzerid/group_by"], criteria):
        analyzer, heartbeat = env.idmef_db.getAnalyzer(analyzerid)

        status, status_text = utils.get_analyzer_status_from_latest_heartbeat(
            heartbeat, self._heartbeat_error_margin)

        if self.parameters["status"] and status not in self.parameters["status"]:
            continue

        # Negative value: how long ago the latest heartbeat was received.
        age = float(heartbeat.get("create_time")) - time.time()

        heartbeat_listing = utils.create_link(
            view.getViewPath("HeartbeatListing"),
            {"heartbeat.analyzer(-1).analyzerid": analyzerid})

        alert_listing = utils.create_link(
            view.getViewPath("AlertListing"),
            {"analyzer_object_0": "alert.analyzer.analyzerid",
             "analyzer_operator_0": "=",
             "analyzer_value_0": analyzerid})

        heartbeat_analyze = utils.create_link(self.view_path + "/HeartbeatAnalyze",
                                              {"analyzerid": analyzerid})

        node_name = analyzer["node.name"] or _("Node name n/a")
        osversion = analyzer["osversion"] or _("OS version n/a")
        ostype = analyzer["ostype"] or _("OS type n/a")

        yield {
            "id": analyzerid,
            "label": "%s - %s %s" % (node_name, ostype, osversion),
            "location": analyzer["node.location"] or _("Node location n/a"),
            "node": node_name,
            "name": analyzer["name"],
            "model": analyzer["model"],
            "class": analyzer["class"],
            "version": analyzer["version"],
            "latest_heartbeat": localization.format_timedelta(age, add_direction=True),
            "status": status,
            "status_text": status_text,
            "links": [
                {"text": _("Alert listing"), "link": alert_listing},
                {"text": _("Heartbeat listing"), "link": heartbeat_listing},
                {"text": _("Heartbeat analysis"), "link": heartbeat_analyze,
                 "class": "widget-link", "title": _("Heartbeat analysis")},
            ],
        }
def ajax_groupby(self):
    """Serve grouped search results as a paginated grid AJAX response."""
    limit = int(env.request.parameters["limit"])
    page = int(env.request.parameters.get("page", 1))
    search = self._prepare(page, limit)
    step = search.get_step()
    results = search.get_result()
    resrows = []

    # We need to reorder results according to what is expected
    permutation = [
        search.get_index(f) for f in ["_aggregation"] + search.groupby
    ]

    for i, result in enumerate(results):
        # NOTE(review): assumes each result row has exactly
        # len(search.groupby) + 1 columns, otherwise permutation[idx] would
        # be out of range — confirm against _prepare()/get_result().
        values = [result[permutation[idx]] for idx in range(len(result))]
        cells = {}

        for idx, group in enumerate(search.groupby):
            # values[0] is the aggregation; group values start at index 1.
            label = values[idx + 1]
            if isinstance(label, datetime.datetime):
                label = label.strftime(step.unit_format)
            elif isinstance(label, datetime.timedelta):
                label = format_timedelta(label)

            link = search.get_groupby_link([group], [values[idx + 1]], step, cview='.forensic')
            cells[group] = resource.HTMLNode("a", label, href=link)

        # Aggregation cell drills down on the full groupby combination.
        link = search.get_groupby_link(search.groupby, values[1:], step, cview='.forensic')
        cells["_aggregation"] = resource.HTMLNode("a", values[0], href=link)
        resrows.append({"id": text_type(i), "cell": cells})

    # Cheap pagination heuristic: if the current page is full, report at
    # least one more page so the grid offers a "next" link.
    total = (page if len(resrows) < limit else page + 1) * limit
    return utils.viewhelpers.GridAjaxResponse(
        resrows, total).add_html_content(mainmenu.HTMLMainMenu(update=True))
def ajax_listing(self):
    """Serve the scheduled-job listing as a grid AJAX response, honoring
    the requested sort column (``sort_index``) and order (``sort_order``).

    Fix: the local previously named ``next`` shadowed the builtin and was
    reused for both a timedelta and a display string; renamed to
    ``next_run`` / ``remaining`` for clarity. Behavior is unchanged.
    """
    now = utils.timeutil.utcnow()
    sort_index = env.request.parameters.get("sort_index", "name")
    sort_order = env.request.parameters.get("sort_order", "asc")

    # One key function per sortable column; unknown columns fall back to "name".
    sort_func = {
        "name": lambda x: _(crontab.format(x.ext_type, x.name)).lower(),
        "user": lambda x: text_type(x.user) if x.user else _("SYSTEM"),
        "last": lambda x: x.base,
        # Disabled jobs sort last by mapping them to the maximum timedelta.
        "next": lambda x: x.next_schedule - now if x.enabled else datetime.timedelta.max,
    }
    sort_key = sort_func.get(sort_index, sort_func["name"])

    rows = []
    for i in sorted(crontab.list(), key=sort_key, reverse=(sort_order == "desc")):
        if not i.enabled:
            next_run = _("Disabled")
        else:
            remaining = i.next_schedule - now
            if remaining.total_seconds() < 0:
                # Schedule time already passed: the job is waiting to run.
                next_run = _("Pending")
            else:
                next_run = localization.format_timedelta(remaining, granularity="minute")

        if i.runcnt > 0:
            last = localization.format_timedelta(i.base - now, add_direction=True)
        else:
            last = _("n/a")

        # A recorded error overrides the "last run" cell with an error link.
        if i.error:
            last = resource.HTMLNode("a", _("Error"), _class="cronjob-error")

        rows.append({
            "id": i.id,
            "name": resource.HTMLNode("a", _(crontab.format(i.ext_type, i.name)), href=url_for(".edit", id=i.id)),
            "schedule": crontab.format_schedule(i.schedule),
            "user": text_type(i.user) if i.user else _("SYSTEM"),
            "last": last,
            "next": next_run,
            "error": i.error
        })

    return GridAjaxResponse(rows, len(rows))
def analyze(self, analyzerid):
    """Build an event timeline for one analyzer from its recent heartbeats
    and render the heartbeat-analysis template.

    :param analyzerid: identifier of the analyzer to inspect.
    :return: the rendered "templates/heartbeatanalyze.mak" template.
    """
    analyzer, heartbeat = self._get_analyzer(analyzerid)

    # Negative delta: time elapsed since the latest heartbeat.
    delta = heartbeat["create_time"] - utils.timeutil.now()
    analyzer.last_heartbeat_time = localization.format_timedelta(delta, add_direction=True)
    analyzer.status = None
    analyzer.events = []

    res = env.dataprovider.get(Criterion("heartbeat.analyzer(-1).analyzerid", "=", analyzerid), limit=self._heartbeat_count)

    prev = None
    total_interval = 0

    # Iterate from oldest heartbeat to newest
    for obj in reversed(res):
        cur = HeartbeatObject(obj["heartbeat"])
        # Skip the first heartbeat and any without a status or interval.
        if not (prev and cur.status and cur.interval):
            prev = cur
            continue

        total_interval += cur.interval

        event = None
        if cur.status == "starting":
            if prev.status == "exiting":
                event = utils.AttrObj(time=cur.time_str, value=_("Normal sensor start"), type="start")
            else:
                event = utils.AttrObj(time=cur.time_str, value=_("Unexpected sensor restart"), type="unexpected_restart")
        elif cur.status == "running":
            # Flag heartbeats arriving outside the declared interval
            # (beyond the configured error margin).
            delta = cur.time - prev.time
            if abs(delta.total_seconds() - cur.interval) > self._heartbeat_error_margin:
                delta = localization.format_timedelta(delta, granularity="second")
                event = utils.AttrObj(time=cur.time_str, value=_("Unexpected heartbeat interval: %(delta)s") % {'delta': delta}, type="abnormal_heartbeat_interval")
        elif cur.status == "exiting":
            event = utils.AttrObj(time=cur.time_str, value=_("Normal sensor stop"), type="normal_stop")

        if event:
            analyzer.events.append(event)
        prev = cur

    if prev:
        # "obj" still holds the last (newest) heartbeat from the loop above.
        analyzer.status, analyzer.status_meaning = \
            utils.get_analyzer_status_from_latest_heartbeat(obj["heartbeat"], self._heartbeat_error_margin)
        if analyzer.status == "missing":
            delta = utils.timeutil.now() - prev.time
            analyzer.events.append(utils.AttrObj(time=prev.time_str, value=_("Sensor is down since %s") % localization.format_timedelta(delta), type="down"))

    if not analyzer.status:
        analyzer.status, analyzer.status_meaning = "unknown", _("Unknown")

    if not analyzer.events:
        # NOTE(review): divides by the configured heartbeat count, not by the
        # number of heartbeats actually summed — the average may be
        # underestimated when fewer heartbeats are available. Confirm intent.
        delta = localization.format_timedelta(total_interval / self._heartbeat_count)
        analyzer.events.append(utils.AttrObj(
            time="",
            value=_("No anomaly in the last %(count)d heartbeats (one heartbeat every %(delta)s average)") % {'count': self._heartbeat_count, 'delta': delta},
            type="no_anomaly"
        ))

    return template.PrewikkaTemplate(__name__, "templates/heartbeatanalyze.mak").render(analyzer=analyzer)
def _get_analyzers(self, reqstatus):
    """Yield one description dict per analyzer, keeping only those whose
    status belongs to *reqstatus* (no filtering when *reqstatus* is empty)."""
    # Do not take the control menu into account.
    # The expected behavior is yet to be determined.
    for (analyzerid,) in env.dataprovider.query(["heartbeat.analyzer(-1).analyzerid/group_by"]):
        analyzer, heartbeat = self._get_analyzer(analyzerid)

        status, status_text = utils.get_analyzer_status_from_latest_heartbeat(
            heartbeat, self._heartbeat_error_margin)
        if reqstatus and status not in reqstatus:
            continue

        # Negative value: how long ago the latest heartbeat was received.
        age = float(heartbeat.get("create_time")) - time.time()

        hb_params = {"heartbeat.analyzer(-1).analyzerid": analyzerid}
        heartbeat_listing = url_for("HeartbeatListing.render", **hb_params)

        al_params = {"analyzer_object_0": "alert.analyzer.analyzerid",
                     "analyzer_operator_0": "=",
                     "analyzer_value_0": analyzerid}
        alert_listing = url_for("AlertListing.render", **al_params)

        heartbeat_analyze = url_for(".analyze", analyzerid=analyzerid)

        node_name = analyzer["node.name"] or _("Node name n/a")
        osversion = analyzer["osversion"] or _("OS version n/a")
        ostype = analyzer["ostype"] or _("OS type n/a")

        links = [
            {"text": _("Alert listing"), "link": alert_listing},
            {"text": _("Heartbeat listing"), "link": heartbeat_listing},
            {"text": _("Heartbeat analysis"), "link": heartbeat_analyze,
             "class": "widget-link", "title": _("Heartbeat analysis")},
        ]

        yield {
            "id": analyzerid,
            "label": "%s - %s %s" % (node_name, ostype, osversion),
            "location": analyzer["node.location"] or _("Node location n/a"),
            "node": node_name,
            "name": analyzer["name"],
            "model": analyzer["model"],
            "class": analyzer["class"],
            "version": analyzer["version"],
            "latest_heartbeat": localization.format_timedelta(age, add_direction=True),
            "status": status,
            "status_text": status_text,
            "links": links,
        }
def analyze(self, analyzerid):
    """Build an event timeline for one analyzer from its recent heartbeats
    and render the heartbeat-analysis template.

    Fix: the bare ``except:`` that guarded the lookup of the next (older)
    heartbeat also swallowed SystemExit/KeyboardInterrupt; narrowed to
    ``except Exception`` — any real lookup failure still ends the scan.

    :param analyzerid: identifier of the analyzer to inspect.
    :return: the rendered "templates/heartbeatanalyze.mak" template.
    """
    analyzer, heartbeat = self._get_analyzer(analyzerid)

    # Negative delta: time elapsed since the latest heartbeat.
    delta = float(heartbeat["create_time"]) - time.time()
    analyzer.last_heartbeat_time = localization.format_timedelta(
        delta, add_direction=True)
    analyzer.status = None
    analyzer.events = []

    res = env.dataprovider.get(
        Criterion("heartbeat.analyzer(-1).analyzerid", "=", analyzerid),
        limit=self._heartbeat_count)

    prev = None
    latest = True
    total_interval = 0

    # Results are newest-first: compare each heartbeat with the next (older) one.
    for idx, cur in enumerate(res):
        cur = cur["heartbeat"]
        cur_status, cur_interval, cur_time = cur.get(
            "additional_data('Analyzer status').data"
        )[0], cur["heartbeat_interval"], cur["create_time"]
        cur_time_str = localization.format_datetime(float(cur_time))

        try:
            prev = res[idx + 1]["heartbeat"]
            prev_status, prev_time = prev.get(
                "additional_data('Analyzer status').data"
            )[0], prev["create_time"]
        except Exception:
            # End of list or missing data on the older heartbeat: stop scanning.
            break

        if not cur_status or not cur_interval:
            continue

        total_interval += int(cur_interval)

        if latest:
            # First comparable heartbeat is the newest one: derive the
            # analyzer's current status from it.
            latest = False
            analyzer.status, analyzer.status_meaning = \
                utils.get_analyzer_status_from_latest_heartbeat(cur, self._heartbeat_error_margin)
            if analyzer.status == "missing":
                delta = time.time() - float(cur_time)
                analyzer.events.append(
                    utils.AttrObj(time=cur_time_str,
                                  value=_("Sensor is down since %s") % localization.format_timedelta(delta),
                                  type="down"))

        event = None
        if cur_status == "starting":
            if prev_status == "exiting":
                event = utils.AttrObj(time=cur_time_str, value=_("Normal sensor start"), type="start")
            else:
                event = utils.AttrObj(time=cur_time_str, value=_("Unexpected sensor restart"), type="unexpected_restart")
        elif cur_status == "running":
            # Deviation between the observed gap and the declared interval.
            delta = abs(int(cur_time) - int(prev_time) - int(cur_interval))
            if delta > self._heartbeat_error_margin:
                delta = localization.format_timedelta(delta, granularity="second")
                event = utils.AttrObj(
                    time=cur_time_str,
                    value=_("Unexpected heartbeat interval: %(delta)s") % {'delta': delta},
                    type="abnormal_heartbeat_interval")
        elif cur_status == "exiting":
            event = utils.AttrObj(time=cur_time_str, value=_("Normal sensor stop"), type="normal_stop")

        if event:
            analyzer.events.append(event)

    if not analyzer.status:
        analyzer.status, analyzer.status_meaning = "unknown", _("Unknown")

    if not analyzer.events:
        delta = localization.format_timedelta(total_interval / self._heartbeat_count)
        analyzer.events.append(
            utils.AttrObj(
                time="",
                value=_("No anomaly in the last %(count)d heartbeats (one heartbeat every %(delta)s average)") % {
                    'count': self._heartbeat_count,
                    'delta': delta
                },
                type="no_anomaly"))

    return template.PrewikkaTemplate(
        __name__, "templates/heartbeatanalyze.mak").render(analyzer=analyzer)
def render(self):
    """Group analyzers by node location and node, and expose the resulting
    structure to the dataset for template rendering.

    Fixes: replaced Python-2-only ``dict.has_key`` with the ``in`` operator;
    removed dead locals (unused ``analyzers``/``nodes`` dicts and the
    address-based ``node_key`` accumulation that was unconditionally
    overwritten below). Behavior is otherwise unchanged.
    """
    criteria = None
    if "filter_path" in self.parameters:
        criteria = "%s == '%s'" % (self.parameters["filter_path"],
                                   utils.escape_criteria(self.parameters["filter_value"]))

    locations = {}

    for (analyzerid,) in env.idmef_db.getValues(["heartbeat.analyzer(-1).analyzerid/group_by"], criteria):
        analyzer, heartbeat = env.idmef_db.getAnalyzer(analyzerid)

        parameters = {"analyzerid": analyzer["analyzerid"]}
        analyzer.alert_listing = utils.create_link(view.getViewPath("SensorAlertListing"), parameters)
        analyzer.heartbeat_listing = utils.create_link(view.getViewPath("SensorHeartbeatListing"), parameters)
        analyzer.heartbeat_analyze = utils.create_link(self.view_path + "/HeartbeatAnalyze", parameters)

        # Collect the analyzer node's addresses with their filter/host links.
        addresses = []
        for addr in analyzer["node.address(*).address"]:
            address = {}
            address["value"] = addr
            address["inline_filter"] = utils.create_link(self.view_path, {"filter_path": "heartbeat.analyzer(-1).node.address(*).address", "filter_value": addr})
            address["host_links"] = []

            for typ, linkname, link, widget in env.hookmgr.trigger("HOOK_LINK", addr):
                if typ == "host":
                    address["host_links"].append((linkname, link, widget))

            if "host" in env.url:
                for urlname, url in env.url["host"].items():
                    address["host_links"].append((urlname.capitalize(), url.replace("$host", addr), False))

            addresses.append(address)

        analyzer.model_inline_filter = utils.create_link(self.view_path, {"filter_path": "heartbeat.analyzer(-1).model", "filter_value": analyzer["model"]})

        analyzer.status, analyzer.status_meaning = \
            get_analyzer_status_from_latest_heartbeat(heartbeat, self._heartbeat_error_margin)

        # Negative delta: time elapsed since the latest heartbeat.
        delta = float(heartbeat.get("create_time")) - time.time()
        analyzer.last_heartbeat_time = localization.format_timedelta(delta, add_direction=True)

        node_location = analyzer["node.location"] or _("Node location n/a")
        node_name = analyzer.get("node.name") or _("Node name n/a")
        osversion = analyzer["osversion"] or _("OS version n/a")
        ostype = analyzer["ostype"] or _("OS type n/a")
        node_key = node_name + osversion + ostype

        if node_location not in locations:
            locations[node_location] = {"total": 1, "missing": 0, "unknown": 0,
                                        "offline": 0, "online": 0, "nodes": {}}
        else:
            locations[node_location]["total"] += 1

        if node_key not in locations[node_location]["nodes"]:
            locations[node_location]["nodes"][node_key] = {
                "total": 1, "missing": 0, "unknown": 0, "offline": 0, "online": 0,
                "analyzers": [],
                "node.name": node_name, "node.location": node_location,
                "ostype": ostype, "osversion": osversion,
                "node_addresses": addresses}
        else:
            locations[node_location]["nodes"][node_key]["total"] += 1

        # Per-status counters at both the location and node level.
        locations[node_location][analyzer.status] += 1
        locations[node_location]["nodes"][node_key][analyzer.status] += 1

        # Problematic analyzers first, so they are immediately visible.
        if analyzer.status in ["missing", "unknown"]:
            locations[node_location]["nodes"][node_key]["analyzers"].insert(0, analyzer)
        else:
            locations[node_location]["nodes"][node_key]["analyzers"].append(analyzer)

    self.dataset["locations"] = locations
def render(self):
    """Analyze the selected analyzer's recent heartbeats and expose the
    resulting event list to the dataset.

    Fix: the bare ``except:`` that guarded the lookup of the next (older)
    heartbeat also swallowed SystemExit/KeyboardInterrupt; narrowed to
    ``except Exception`` — any real lookup failure still ends the scan.
    """
    analyzerid = self.parameters["analyzerid"]
    analyzer, heartbeat = env.idmef_db.getAnalyzer(analyzerid)

    # Negative delta: time elapsed since the latest heartbeat.
    delta = float(heartbeat["create_time"]) - time.time()
    analyzer.last_heartbeat_time = localization.format_timedelta(delta, add_direction=True)
    analyzer.status = None
    analyzer.events = []

    idents = env.idmef_db.getHeartbeatIdents(criteria="heartbeat.analyzer(-1).analyzerid == %s" % analyzerid,
                                             limit=self._heartbeat_count)

    prev = None
    latest = True
    total_interval = 0

    # Idents are newest-first: compare each heartbeat with the next (older) one.
    for idx, ident in enumerate(idents):
        cur = env.idmef_db.getHeartbeat(ident)["heartbeat"]
        cur_status, cur_interval, cur_time = cur.get("additional_data('Analyzer status').data")[0], cur["heartbeat_interval"], cur["create_time"]
        cur_time_str = localization.format_datetime(float(cur_time))

        try:
            prev = env.idmef_db.getHeartbeat(idents[idx + 1])["heartbeat"]
            prev_status, prev_time = prev.get("additional_data('Analyzer status').data")[0], prev["create_time"]
        except Exception:
            # End of list or missing data on the older heartbeat: stop scanning.
            break

        if not cur_status or not cur_interval:
            continue

        total_interval += int(cur_interval)

        if latest:
            # First comparable heartbeat is the newest one: derive the
            # analyzer's current status from it.
            latest = False
            analyzer.status, analyzer.status_meaning = \
                get_analyzer_status_from_latest_heartbeat(cur, self._heartbeat_error_margin)
            if analyzer.status == "missing":
                delta = time.time() - float(cur_time)
                analyzer.events.append({"time": cur_time_str,
                                        "value": _("Sensor is down since %s") % localization.format_timedelta(delta),
                                        "type": "down"})

        event = None
        if cur_status == "starting":
            if prev_status == "exiting":
                event = {"time": cur_time_str, "value": _("Normal sensor start"), "type": "start"}
            else:
                event = {"time": cur_time_str, "value": _("Unexpected sensor restart"), "type": "unexpected_restart"}
        elif cur_status == "running":
            # Deviation between the observed gap and the declared interval.
            delta = abs(int(cur_time) - int(prev_time) - int(cur_interval))
            if delta > self._heartbeat_error_margin:
                delta = localization.format_timedelta(delta, granularity="second")
                event = {"time": cur_time_str,
                         "value": _("Unexpected heartbeat interval: %(delta)s") % {'delta': delta},
                         "type": "abnormal_heartbeat_interval"}
        elif cur_status == "exiting":
            event = {"time": cur_time_str, "value": _("Normal sensor stop"), "type": "normal_stop"}

        if event:
            analyzer.events.append(event)

    if not analyzer.status:
        analyzer.status, analyzer.status_meaning = "unknown", _("Unknown")

    if not analyzer.events:
        delta = localization.format_timedelta(total_interval / self._heartbeat_count)
        analyzer.events.append({"time": "",
                                "value": _("No anomaly in the last %(count)d heartbeats (one heartbeat every %(delta)s average)") % {'count': self._heartbeat_count, 'delta': delta},
                                "type": "no_anomaly"})

    self.dataset["analyzer"] = analyzer