def filterheader(positive: bool) -> FilterHeader:
    """Build a livestatus filter header selecting the user's starred objects.

    With positive=True only starred objects match; with positive=False the
    filter is inverted so only unstarred objects match. Depends on the
    enclosing scope's ``user`` (for the star list) and ``what`` ("host" or
    service context).
    """
    if positive:
        aand, oor, eq = "And", "Or", "="
    else:
        # Inverted filter: negate each comparison and swap the connectives
        aand, oor, eq = "Or", "And", "!="

    lines = []
    matched = 0
    if what == "host":
        # Host stars carry no ";" separator; skip service stars
        for star in user.stars:
            if ";" not in star:
                lines.append("Filter: host_name %s %s\n" % (eq, livestatus.lqencode(star)))
                matched += 1
    else:
        # Service stars are "host;service" pairs; skip plain host stars
        for star in user.stars:
            if ";" in star:
                host, svc = star.split(";")
                lines.append("Filter: host_name %s %s\n" % (eq, livestatus.lqencode(host)))
                lines.append("Filter: service_description %s %s\n" % (eq, livestatus.lqencode(svc)))
                lines.append("%s: 2\n" % aand)
                matched += 1

    if matched == 0:
        # No starred object and show only starred -> show nothing
        # (host_state can never be -4612, so nothing matches)
        if positive:
            return "Filter: host_state = -4612\n"
        # No starred object and show unstarred -> show everything
        return ""

    lines.append("%s: %d\n" % (oor, matched))
    return "".join(lines)
def find_host_services(
        host_name: str,
        service_description: str = "") -> Iterator[Tuple[str, str, Tuple[str, ...]]]:
    """Yield (service_description, check_command, metric_names) for the matching services.

    Queries livestatus for all services of ``host_name`` (optionally narrowed
    to one ``service_description``) and yields, per service, the set of known
    metric names collected from parsed perf data plus the RRD metrics column.
    Finally yields one pseudo-service "_HOST_" carrying the host's own check
    command and metrics, if the host has any.
    """
    if not host_name and not service_description:
        # optimization: avoid query with empty result
        return
    # TODO: site hint!

    # Also fetch host data with the *same* query. This saves one round trip. And head
    # host has at least one service
    query = "GET services\n" \
            "Columns: description check_command perf_data metrics host_check_command host_metrics \n"

    if host_name:
        query += "Filter: host_name = %s\n" % livestatus.lqencode(host_name)

    if service_description:
        query += "Filter: service_description = %s\n" % livestatus.lqencode(service_description)

    # host_check_command/host_metrics are re-bound by every loop iteration below;
    # the last row's values are used for the "_HOST_" entry after the loop.
    # Initialized to None so an empty result set yields no host entry.
    host_check_command, host_metrics = None, None
    for svc_desc, check_command, perf_data, rrd_metrics, \
            host_check_command, host_metrics in sites.live().query(query):
        # parse_perf_data may rewrite the check command (e.g. strip arguments)
        parsed_perf_data, check_command = parse_perf_data(perf_data, check_command)
        # Union of metric names found in perf data and in the RRD metrics column
        known_metrics = set([perf[0] for perf in parsed_perf_data] + rrd_metrics)

        yield svc_desc, check_command, tuple(known_metrics)

    if host_check_command:
        yield "_HOST_", host_check_command, tuple(host_metrics)
def get_livestatus_filters(self, livestatus_table: LivestatusTable,
                           used_filters: UsedFilters) -> LivestatusFilterHeaders:
    """Translate up to three 'tg:' filter entries into livestatus filter lines.

    Each entry is either "tag-group:tag-value" (filtered against the tags
    column) or, for pre-1.6 compatibility, a bare tag value (filtered against
    tag_values). Multiple filters are combined with And.
    """
    entries = used_filters.get(self.name, [])
    if len(entries) > 3:
        raise MKGeneralException("You can only set up to three 'tg:' filters")

    lines = []
    for entry in entries:
        if ":" in entry:
            group_id, tag_value = entry.split(":", 1)
            lines.append("Filter: tags = %s %s" %
                         (livestatus.lqencode(group_id), livestatus.lqencode(tag_value)))
        else:
            # Be compatible to pre 1.6 filtering for some time (no
            # tag-group:tag-value, but tag-value only)
            lines.append("Filter: tag_values >= %s" % livestatus.lqencode(entry))

    if len(lines) > 1:
        lines.append("And: %d" % len(lines))

    return "\n".join(lines)
def get_current_perfdata(host, service, dsname):
    """Return the current value of one perf data variable of a service.

    Queries livestatus for the service's perf_data column and extracts the
    first value of the variable named ``dsname``. Returns None when the
    variable is not present in the perf data.
    """
    perf_data = sites.live().query_value(
        "GET services\nFilter: host_name = %s\nFilter: description = %s\n"
        "Columns: perf_data" % (livestatus.lqencode(host), livestatus.lqencode(service)))

    for part in perf_data.split():
        # Split on the first "=" only: the value part may itself contain "="
        # characters, which would make a plain split() raise ValueError.
        name, rest = part.split("=", 1)
        if name == dsname:
            # perf data format is value;warn;crit;min;max - take the value
            return float(rest.split(";")[0])
    return None
def encode_label_for_livestatus(column: str, label: Label) -> str:
    """Encode a Label as a livestatus filter line against the given column.

    >>> encode_label_for_livestatus("labels", Label("key", "value", False))
    "Filter: labels = 'key' 'value'"
    """
    operator = "!=" if label.negate else "="
    return "Filter: {} {} {} {}".format(
        lqencode(column),
        operator,
        lqencode(quote_dict(label.id)),
        lqencode(quote_dict(label.value)),
    )
def get_logfile_lines(site, host_name, file_name):
    """Fetch the content of one logwatch log file of a host via livestatus.

    Spaces and backslashes in the file name are escaped for the livestatus
    column syntax. Returns None when the file does not exist, otherwise the
    decoded lines as a list of str.
    """
    if site:  # Honor site hint if available
        sites.live().set_only_sites([site])
    try:
        query = \
            "GET hosts\n" \
            "Columns: mk_logwatch_file:file:%s/%s\n" \
            "Filter: name = %s\n" % (livestatus.lqencode(host_name),
                                     livestatus.lqencode(
                                         file_name.replace('\\', '\\\\').replace(' ', '\\s')),
                                     livestatus.lqencode(host_name))
        file_content = sites.live().query_value(query)
    finally:
        # Reset the site restriction even when the query raises, so a failed
        # query cannot leak the site hint into subsequent queries.
        if site:
            sites.live().set_only_sites(None)

    if file_content is None:
        return None
    return [line.decode("utf-8") for line in file_content.splitlines()]
def _get_html_from_livestatus(
    site_id: SiteId,
    host_name: HostName,
    service_description: str,
) -> LivestatusRow:
    """Fetch the robotmk_last_log column of one service from the given site."""
    query = ("GET services\nColumns: robotmk_last_log\n"
             "Filter: host_name = %s\n"
             "Filter: service_description = %s\n" %
             (lqencode(host_name), lqencode(service_description)))

    with only_sites(site_id):
        return live().query_row(query)
def filter(self, value: FilterHTTPVariables) -> FilterHeader:
    """Build an address filter header from the HTTP variables.

    The second HTML variable toggles prefix matching (regex anchored at the
    start) versus exact equality. Depending on self._what the filter targets
    the primary host address or the ADDRESS_4/ADDRESS_6 custom variable.
    """
    raw_address = value.get(self.htmlvars[0])
    if not raw_address:
        return ""

    if value.get(self.htmlvars[1]) == "yes":
        # Prefix match: anchored regular expression
        op, address = "~", "^" + livestatus.lqencode(raw_address)
    else:
        op, address = "=", livestatus.lqencode(raw_address)

    if self._what == "primary":
        return "Filter: host_address %s %s\n" % (op, address)

    varname = "ADDRESS_4" if self._what == "ipv4" else "ADDRESS_6"
    return "Filter: host_custom_variables %s %s %s\n" % (op, varname, address)
def encode_label_for_livestatus(
    column: str,
    label_id: str,
    label_value: str,
) -> str:
    """Encode one label id/value pair as a livestatus equality filter line.

    >>> encode_label_for_livestatus("labels", "key", "value")
    "Filter: labels = 'key' 'value'"
    """
    return "Filter: {} = {} {}".format(
        lqencode(column),
        lqencode(quote_dict(label_id)),
        lqencode(quote_dict(label_value)),
    )
def get_crash_report_rows(self, only_sites: Optional[List[SiteId]],
                          filter_headers: str) -> List[Dict[str, str]]:
    """Fetch crash report rows, enriched with the crash files' contents.

    First queries the basic crash infos, then per crash report fetches the
    crash.info file (and for check crashes also agent_output and snmp_info)
    from the site the crash was reported on.
    """
    # First fetch the information that is needed to query for the dynamic columns (crash_info,
    # ...)
    crash_infos = self._get_crash_report_info(only_sites, filter_headers)
    if not crash_infos:
        return []

    rows = []
    for crash_info in crash_infos:
        # Path of the crash directory relative to the crash report base dir
        file_path = "/".join([crash_info["crash_type"], crash_info["crash_id"]])

        headers = ["crash_info"]
        columns = ["file:crash_info:%s/crash.info" % livestatus.lqencode(file_path)]

        if crash_info["crash_type"] == "check":
            # Check crashes carry additional debug files
            headers += ["agent_output", "snmp_info"]
            columns += [
                "file:agent_output:%s/agent_output" % livestatus.lqencode(file_path),
                "file:snmp_info:%s/snmp_info" % livestatus.lqencode(file_path),
            ]

        try:
            sites.live().set_prepend_site(False)
            # The crash files live only on the site the crash happened on
            sites.live().set_only_sites([SiteId(ensure_str(crash_info["site"]))])

            raw_row = sites.live().query_row(
                "GET crashreports\n"
                "Columns: %s\n"
                "Filter: id = %s" % (" ".join(columns),
                                     livestatus.lqencode(crash_info["crash_id"])))
        finally:
            # Always reset the connection state, even if the query fails
            sites.live().set_only_sites(None)
            sites.live().set_prepend_site(False)

        # Merge the fetched file contents into the crash info dict
        crash_info.update(dict(zip(headers, raw_row)))
        rows.append(crash_info)

    return rows
def _filter(self, value: FilterHTTPVariables) -> FilterHeader:
    """Render this filter's livestatus header line from the request variables."""
    encoded_value = livestatus.lqencode(value[self.request_vars[0]])
    negation = self._negate_symbol(value)
    return "Filter: %s %s%s %s\n" % (self.column, negation, self.op, encoded_value)
def lq_logic(filter_condition: str, values: List[str], join: str) -> str:
    """JOIN with (Or, And) FILTER_CONDITION the VALUES for a livestatus query"""
    lines = [u"%s %s\n" % (filter_condition, livestatus.lqencode(value)) for value in values]
    if len(values) > 1:
        # Only combine when there is more than one condition
        lines.append(u"%s: %d\n" % (join, len(values)))
    return u"".join(lines)
def _get_crash_report_row(self, crash_id: str, site_id: str) -> Optional[Dict[str, str]]:
    """Fetch the row of one crash report by id from one site, or None."""
    filter_header = "Filter: id = %s" % livestatus.lqencode(crash_id)
    rows = CrashReportsRowTable().get_crash_report_rows(
        only_sites=[config.SiteId(ensure_str(site_id))],
        filter_headers=filter_header)
    return rows[0] if rows else None
def filter(self, value: FilterHTTPVariables) -> FilterHeader:
    """Build the tag filter header from the numbered _grp/_op/_val variables."""
    headers = []
    column = livestatus.lqencode(self.object_type) + "_tags"

    # Do not restrict to a certain number, because we'd like to link to this
    # via an URL, e.g. from the virtual host tree snapin
    num = 0
    while True:
        prefix = "%s%d" % (self.var_prefix, num)
        if not value.get(prefix + "_grp"):
            break
        num += 1

        op = value.get(prefix + "_op")
        tag_group = config.tags.get_tag_group(value.get(prefix + "_grp", ""))
        tag = value.get(prefix + "_val", "")

        # Skip incomplete rows (unknown tag group or missing operator)
        if not tag_group or not op:
            continue

        headers.append(
            encode_label_for_livestatus(column, tag_group.id, tag, negate=op != "is"))

    return "\n".join(headers) + "\n" if headers else ""
def _fetch_data_for_hosts(self, hostnames: Set[HostName]) -> List[_MeshNode]:
    """Query livestatus for the topology data of the given hosts.

    Returns one dict per host with state, alias, icon and parent/child
    relations. An empty hostname set queries all hosts (no filter).
    """
    filter_lines = []
    if hostnames:
        filter_lines.extend(
            "Filter: host_name = %s" % livestatus.lqencode(hostname)
            for hostname in hostnames)
        filter_lines.append("Or: %d" % len(hostnames))

    columns = [
        "name", "state", "alias", "icon_image", "parents", "childs", "has_been_checked"
    ]
    query = "GET hosts\nColumns: %s\n%s" % (" ".join(columns), "\n".join(filter_lines))

    try:
        # Prepend the site id so the result rows carry their origin site
        sites.live().set_prepend_site(True)
        query_result = sites.live().query(query)
    finally:
        sites.live().set_prepend_site(False)

    return [{
        "site": str(row[0]),
        "name": str(row[1]),
        "state": int(row[2]),
        "alias": str(row[3]),
        "icon_image": str(row[4]),
        "outgoing": [str(parent) for parent in row[5]],
        "incoming": [str(child) for child in row[6]],
        "has_been_checked": bool(row[7]),
    } for row in query_result]
def _query_for_metrics_of_host(self, site_id, host_name, service_name):
    """Fetch perf data and states of one service of one host.

    Returns a dict with the service description, check command, raw perf
    data and host/service states of the first matching row, an empty dict
    when host or service name is missing, or None when nothing matched.
    """
    if not host_name or not service_name:
        return {}

    # lqencode both filter values: previously service_name was interpolated
    # unescaped, which is inconsistent with host_name and allows breaking
    # the query with embedded newlines.
    query = ("GET services\n"
             "Columns: description check_command service_perf_data host_state service_state\n"
             "Filter: host_name = %s\n"
             "Filter: service_description = %s\n" %
             (livestatus.lqencode(host_name), livestatus.lqencode(service_name)))

    try:
        rows = sites.live().query(query)
    except Exception:
        raise MKGeneralException(
            _("The query for the given metric, service and host names returned no data."
             ))

    # Only the first matching row is of interest
    for service_description, check_command, service_perf_data, host_state, svc_state in rows:
        return {
            "service_description": service_description,
            "check_command": check_command,
            "service_perf_data": service_perf_data,
            "host_state": host_state,
            "svc_state": svc_state,
        }
    return None
def lq_logic(filter_condition, values, join):
    # type: (Text, List[Text], Text) -> Text
    """JOIN with (Or, And) FILTER_CONDITION the VALUES for a livestatus query"""
    lines = [u"%s %s\n" % (filter_condition, livestatus.lqencode(value)) for value in values]
    if len(values) > 1:
        # A connective is only needed for more than one condition
        lines.append(u"%s: %d\n" % (join, len(values)))
    return u"".join(lines)
def _get_crash_report_row(self, crash_id, site_id):
    # type: (Text, Text) -> Optional[Dict[Text, Text]]
    """Fetch the row of one crash report by id from one site, or None."""
    # NOTE(review): bytes(site_id) relies on Python 2 semantics (bytes is
    # str there); on Python 3 bytes(str) raises TypeError. A newer variant
    # of this method uses ensure_str() instead - verify which runtime this
    # code path targets before changing it.
    rows = CrashReportsRowTable().get_crash_report_rows(
        only_sites=[config.SiteId(bytes(site_id))],
        filter_headers="Filter: id = %s" % livestatus.lqencode(crash_id))
    if not rows:
        return None
    return rows[0]
def make_command(spec, cmdtag):
    """Assemble a SCHEDULE_*_DOWNTIME external command string.

    Relies on the enclosing scope for the downtime parameters (down_from,
    down_to_not_none, fixed_and_recurring, duration, comment).
    """
    header = "SCHEDULE_%s_DOWNTIME;%s;" % (cmdtag, spec)
    # trigger id is always 0 here (the fixed ";0;" argument)
    arguments = "%d;%d;%d;0;%d;%s;" % (
        down_from,
        down_to_not_none,
        fixed_and_recurring,
        duration,
        config.user.id,
    )
    return header + arguments + livestatus.lqencode(comment)
def action(self, cmdtag, spec, row, row_index, num_rows):
    """Build the ADD_*_COMMENT command from the request, or None when not triggered.

    Raises MKUserError when the comment field was submitted empty.
    """
    if not html.request.var("_add_comment"):
        return None

    comment = html.request.get_unicode_input("_comment")
    if not comment:
        raise MKUserError("_comment", _("You need to supply a comment."))

    command = ("ADD_" + cmdtag + "_COMMENT;%s;1;%s" % (spec, config.user.id) +
               (";%s" % livestatus.lqencode(comment)))
    return command, _("<b>add a comment to</b>")
def address_families(family: str) -> FilterHeader:
    """Build a tag filter header for the requested IP address family.

    family is "both", or starts with "4"/"6", optionally with an "_only"
    suffix that additionally excludes the other family.
    """
    if family == "both":
        return lq_logic("Filter: tags =", ["ip-v4 ip-v4", "ip-v6 ip-v6"], "Or")

    if family[0] == "4":
        wanted = livestatus.lqencode("ip-v4")
    elif family[0] == "6":
        wanted = livestatus.lqencode("ip-v6")

    lines = ["Filter: tags = %s %s\n" % (wanted, wanted)]

    if family.endswith("_only"):
        # Exclusive mode: additionally filter out the other address family
        if family[0] == "4":
            excluded = livestatus.lqencode("ip-v6")
        elif family[0] == "6":
            excluded = livestatus.lqencode("ip-v4")
        lines.append("Filter: tags != %s %s\n" % (excluded, excluded))

    return "".join(lines)
def get_rrd_data(
    hostname: HostName,
    service_description: ServiceName,
    varname: MetricName,
    cf: ConsolidationFunctionName,
    fromtime: Timestamp,
    untiltime: Timestamp,
    max_entries: int = 400,
) -> TimeSeries:
    """Fetch RRD historic metrics data of a specific service, within the specified time range

    returns a TimeSeries object holding interval and data information

    Query to livestatus always returns if database is found, thus:
    - Values can be None when there is no data for a given timestamp
    - Reply from livestatus/rrdtool is always enough to describe the
      queried interval. That means, the returned bounds are always outside
      the queried interval.

    LEGEND
    O  timestamps of measurements
    |  query values, fromtime and untiltime
    x  returned start, no data contained
    v  returned data rows, includes end y

    --O---O---O---O---O---O---O---O
            |---------------|
          x---v---v---v---v---y
    """
    step = 1
    rpn = "%s.%s" % (varname, cf.lower())  # "MAX" -> "max"
    # Range spec for the rrddata column: start:end:step:max_entries
    point_range = ":".join(
        livestatus.lqencode(str(x)) for x in (fromtime, untiltime, step, max_entries))
    column = "rrddata:m1:%s:%s" % (rpn, point_range)

    lql = livestatus_lql([hostname], [column], service_description) + "OutputFormat: python\n"

    try:
        connection = livestatus.SingleSiteConnection(
            "unix:%s" % cmk.utils.paths.livestatus_unix_socket)
        response = connection.query_value(lql)
    except livestatus.MKLivestatusNotFoundError as e:
        # In debug mode surface the original error; otherwise wrap it
        if cmk.utils.debug.enabled():
            raise
        raise MKGeneralException("Cannot get historic metrics via Livestatus: %s" % e)

    if response is None:
        # The Nagios core has no rrddata column support
        raise MKGeneralException("Cannot retrieve historic data with Nagios Core")

    return TimeSeries(response)