def _table_query(
    properties, context, column_generator, table: str, infos: List[str]
) -> Tuple[List[str], LivestatusResponse]:
    filter_headers, only_sites = visuals.get_filter_headers(table, infos, context)
    columns = column_generator(properties, context)

    query = (
        f"GET {table}\n"
        f"Columns: {' '.join(columns)}\n"
        f"{filter_headers}"
    )

    with sites.only_sites(only_sites), sites.prepend_site():
        try:
            rows = sites.live().query(query)
        except MKTimeout:
            raise
        except Exception:
            raise MKGeneralException(_("The query returned no data."))

    return ["site"] + columns, rows
def _get_data(self, properties, context):
    mode_properties = properties["render_mode"][1]
    time_range = self._int_time_range_from_rangespec(mode_properties["time_range"])
    filter_headers, only_sites = get_filter_headers("log", self.filter_infos(), context)
    object_type_filter = self._get_object_type_filter(properties)

    query = ("GET log\n"
             "Columns: log_state host_name service_description log_type log_time\n"
             "Filter: class = %d\n"
             "Filter: log_time >= %f\n"
             "Filter: log_time <= %f\n"
             "%s"
             "%s" % (self.log_class, time_range[0], time_range[1], object_type_filter,
                     lqencode(filter_headers)))

    with sites.only_sites(only_sites):
        try:
            return sites.live().query(query)
        except MKTimeout:
            raise
        except Exception:
            raise MKGeneralException(_("The query returned no data."))
def _livestatus_get_labels(self, only_sites: List[str]) -> List[Dict[SiteId, _Labels]]:
    """Get labels for all sites that need an update and the user is authorized for"""
    query: str = ("GET services\n"
                  "Cache: reload\n"
                  "Columns: host_labels labels\n")

    with sites.prepend_site(), sites.only_sites(only_sites):
        rows = [(x[0], x[1], x[2]) for x in sites.live(user).query(query)]

    host_labels: Dict[SiteId, _Labels] = {}
    service_labels: Dict[SiteId, _Labels] = {}
    for row in rows:
        site_id = row[0]
        host_label = row[1]
        service_label = row[2]
        for key, value in host_label.items():
            host_labels.setdefault(site_id, {}).update({key: value})
        for key, value in service_label.items():
            service_labels.setdefault(site_id, {}).update({key: value})
    return [host_labels, service_labels]
def _get_hostnames_from_filters(self) -> Set[str]:
    # Determine hosts from filters
    filter_headers = self._get_filter_headers()
    query = "GET hosts\nColumns: name"
    if filter_headers:
        query += "\n%s" % filter_headers

    with sites.only_sites(html.request.var("site")):
        return {x[0] for x in sites.live().query(query)}
def _query_livestatus(
    self,
    only_sites: List[SiteId],
) -> List[Tuple[SiteId, Dict[str, str], Dict[str, str]]]:
    query: str = ("GET services\n"
                  "Cache: reload\n"
                  "Columns: host_labels labels\n")
    with sites.prepend_site(), sites.only_sites(only_sites):
        rows = [(x[0], x[1], x[2]) for x in sites.live(user).query(query)]
    return rows
def bi_livestatus_query(
    query: str,
    only_sites: Optional[List[SiteId]] = None,
    output_format: LivestatusOutputFormat = LivestatusOutputFormat.PYTHON,
) -> LivestatusResponse:
    with sites.output_format(output_format), sites.only_sites(only_sites), sites.prepend_site():
        try:
            sites.live().set_auth_domain("bi")
            return sites.live().query(query)
        finally:
            sites.live().set_auth_domain("read")
def _get_hostnames_from_filters(
    self, context: VisualContext, filters: List[Filter]
) -> Set[HostName]:
    filter_headers = "".join(get_livestatus_filter_headers(context, filters))
    query = "GET hosts\nColumns: name"
    if filter_headers:
        query += "\n%s" % filter_headers

    site = request.var("site")
    with sites.only_sites(None if site is None else SiteId(site)):
        return {HostName(x) for x in sites.live().query_column_unique(query)}
def fetch_rrd_data(site, host_name, service_description, entries, graph_recipe,
                   graph_data_range):
    start_time, end_time = graph_data_range["time_range"]
    step = graph_data_range["step"]
    point_range = ":".join(map(str, (start_time, end_time, max(1, step))))

    query = livestatus_query_for_rrd_data(host_name, service_description, entries,
                                          graph_recipe["consolidation_function"], point_range)

    with sites.only_sites(site):
        return zip(entries, sites.live().query_row(query))
def livestatus_query_bare(
    table: Literal["host", "service"],
    context: VisualContext,
    columns: List[str],
    cache: Optional[Literal["reload"]] = None,
) -> List[Dict[str, Any]]:
    """Return the given columns of the host or service table, filtered by context.

    Optionally forces a cache reload. Each result row is a dict that also carries the site."""
    if query := livestatus_query_bare_string(table, context, columns, cache):
        selected_sites = get_only_sites_from_context(context)
        res_columns = ["site"] + columns
        with sites.only_sites(selected_sites), sites.prepend_site():
            return [dict(zip(res_columns, row)) for row in sites.live().query(query)]
    return []
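# Hedged usage sketch for livestatus_query_bare() above: the "hostregex" context entry and
# the column names are illustrative assumptions, not taken from the snippet itself.
example_rows = livestatus_query_bare(
    "service",
    {"hostregex": {"host_regex": "^web"}},
    ["host_name", "description", "state"],
    cache="reload",
)
# Each entry would be a dict like {"site": ..., "host_name": ..., "description": ..., "state": ...}.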
def _get_html_from_livestatus(
    site_id: SiteId,
    host_name: HostName,
    service_description: str,
) -> LivestatusRow:
    query = (
        "GET services\nColumns: robotmk_last_log\nFilter: host_name = %s\nFilter: service_description = %s\n"
        % (lqencode(host_name), lqencode(service_description)))

    with only_sites(site_id):
        row = live().query_row(query)
    return row
def get_graph_data_from_livestatus(only_sites, host_name, service_description):
    columns = ["perf_data", "metrics", "check_command"]
    query = livestatus_lql([host_name], columns, service_description)
    what = "host" if service_description == "_HOST_" else "service"
    labels = ["site"] + ["%s_%s" % (what, col) for col in columns]

    with sites.only_sites(only_sites), sites.prepend_site():
        info = dict(zip(labels, sites.live().query_row(query)))

    info["host_name"] = host_name
    if what == "service":
        info["service_description"] = service_description
    return info
def _get_default_view_hostnames(self, max_nodes: int) -> Set[HostName]:
    """Returns all hosts without any parents"""
    query = "GET hosts\nColumns: name\nFilter: parents ="
    with sites.prepend_site(), sites.only_sites(html.request.var("site")):
        hosts = [(x[0], x[1]) for x in sites.live().query(query)]

    # If no explicit site is set and the number of initially displayed hosts
    # exceeds the auto growth range, only the hosts of the master site are shown
    if len(hosts) > max_nodes:
        hostnames = {x[1] for x in hosts if x[0] == config.omd_site()}
    else:
        hostnames = {x[1] for x in hosts}

    return hostnames
def fetch_rrd_data(site, host_name, service_description, entries, graph_recipe,
                   graph_data_range):
    start_time, end_time = graph_data_range["time_range"]

    step: Union[int, float, str] = graph_data_range["step"]
    # assumes str step is well formatted, colon separated step length & rrd point count
    if not isinstance(step, str):
        step = max(1, step)

    point_range = ":".join(map(str, (start_time, end_time, step)))

    lql_columns = list(rrd_columns(entries, graph_recipe["consolidation_function"], point_range))
    query = livestatus_lql([host_name], lql_columns, service_description)

    with sites.only_sites(site):
        return list(zip(entries, sites.live().query_row(query)))
def get_graph_data_from_livestatus(only_sites, host_name, service_description):
    columns = [u'perf_data', u'metrics', u'check_command']
    query = livestatus_lql([host_name], columns, service_description)
    what = 'host' if service_description == "_HOST_" else 'service'
    labels = [u"site"] + [u"%s_%s" % (what, col) for col in columns]

    with sites.only_sites(only_sites), sites.prepend_site():
        info = dict(zip(labels, sites.live().query_row(query)))

    info['host_name'] = host_name
    if what == 'service':
        info['service_description'] = service_description
    return info
def _query_livestatus(
    self,
    only_sites: List[SiteId],
) -> _LivestatusLabelResponse:
    with sites.prepend_site(), sites.only_sites(only_sites):
        service_rows = sites.live().query("GET services\n"
                                          "Cache: reload\n"
                                          "Columns: labels\n")
        host_rows = sites.live().query("GET hosts\n"
                                       "Cache: reload\n"
                                       "Columns: labels\n")
    return _LivestatusLabelResponse(host_rows, service_rows)
def _get_alias_of_host(site, host_name):
    query = ("GET hosts\n"
             "Cache: reload\n"
             "Columns: alias\n"
             "Filter: name = %s" % livestatus.lqencode(host_name))

    with sites.only_sites(site):
        try:
            return sites.live().query_value(query)
        except Exception as e:
            logger.warning("Could not determine alias of host %s on site %s: %s",
                           host_name, site, e)
            if config.debug:
                raise
            return host_name
def _get_stats(cls, context, settings):
    filter_headers, only_sites = visuals.get_filter_headers(
        table=cls._livestatus_table(), infos=settings["infos"], context=context
    )
    query = cls._stats_query() + "\n" + filter_headers
    try:
        if only_sites:
            with sites.only_sites(only_sites):
                result: List[int] = sites.live().query_row(query)
        else:
            result = sites.live().query_summed_stats(query)
    except MKLivestatusNotFoundError:
        result = []

    return cls._named_stats(result)
def bi_livestatus_query(
    query: str,
    only_sites: Optional[List[SiteId]] = None,
    output_format: LivestatusOutputFormat = LivestatusOutputFormat.PYTHON,
    fetch_full_data: bool = False,
) -> LivestatusResponse:
    with sites.output_format(output_format), sites.only_sites(only_sites), sites.prepend_site():
        try:
            auth_domain = "bi_fetch_full_data" if fetch_full_data else "bi"
            sites.live().set_auth_domain(auth_domain)
            return sites.live().query(query)
        finally:
            sites.live().set_auth_domain("read")
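# Hedged usage sketch for bi_livestatus_query() above: the query text is illustrative only.
# Because prepend_site() is active inside, every returned row starts with the site id.
bi_hosts = bi_livestatus_query(
    "GET hosts\nColumns: name state\n",
    only_sites=None,
    output_format=LivestatusOutputFormat.PYTHON,
    fetch_full_data=False,
)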
def _get_html_from_livestatus(
    report_type: str,
    site_id: SiteId,
    host_name: HostName,
    service_description: str,
) -> LivestatusRow:
    report_column: Literal["robotmk_last_log", "robotmk_last_error_log"] = (
        "robotmk_last_log" if report_type == "robotmk" else "robotmk_last_error_log")
    query = (
        "GET services\nColumns: %s\nFilter: host_name = %s\nFilter: service_description = %s\n"
        % (report_column, lqencode(host_name), lqencode(service_description)))

    with only_sites(site_id):
        row = live().query_row(query)
    return row
def _sorted_unique_lq(query: str, limit: int, value: str, params: Dict) -> Choices:
    """Livestatus query for a single column of unique elements, prepared as dropdown choices"""
    selected_sites = get_only_sites_from_context(params.get("context", {}))
    with sites.only_sites(selected_sites), sites.set_limit(limit):
        choices = [(h, h) for h in sorted(sites.live().query_column_unique(query),
                                          key=lambda h: h.lower())]

    if len(choices) > limit:
        choices.insert(0, (None, _("(Max suggestions reached, be more specific)")))

    if (value, value) not in choices and params["strict"] == "False":
        choices.insert(0, (value, value))  # User is allowed to enter anything they want
    return choices
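# Hedged usage sketch for _sorted_unique_lq() above: the query string and the params dict
# are illustrative assumptions about how a dropdown autocompleter might call this helper.
host_choices = _sorted_unique_lq(
    "GET hosts\nColumns: name\n",
    limit=100,
    value="",
    params={"context": {}, "strict": "False"},
)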
def _get_data(cls, properties, context):
    time_range = cls._int_time_range_from_rangespec(properties["time_range"])
    filter_headers, only_sites = get_filter_headers("log", cls.filter_infos(), context)
    query = ("GET log\n"
             "Columns: log_state host_name service_description log_type log_time\n"
             "Filter: class = %d\n"
             "Filter: log_time >= %f\n"
             "Filter: log_time <= %f\n"
             "Filter: log_type ~ %s .*\n"
             "%s" % (cls.log_class(), time_range[0], time_range[1],
                     lqencode(properties["log_target"].upper()), lqencode(filter_headers)))

    with sites.only_sites(only_sites):
        try:
            return sites.live().query(query)
        except MKTimeout:
            raise
        except Exception as _e:
            raise MKGeneralException(_("The query returned no data."))
def _query_for_metrics_of_host(self, host_name, site_id):
    if not host_name:
        return {}

    query = ("GET services\n"
             "Columns: description check_command metrics\n"
             "Filter: host_name = %s\n" % livestatus.lqencode(host_name))

    response = {}

    with sites.only_sites(site_id):
        rows = sites.live().query(query)

    for service_description, check_command, metrics in rows:
        response[service_description] = {
            "check_command": check_command,
            "metrics": self._get_metric_infos(metrics, check_command),
        }

    return response
def __call__(self, cls, properties, context):
    filter_headers, only_sites = visuals.get_filter_headers("log", ["host", "service"], context)
    columns = self.f(cls, properties, context)

    query = ("GET services\n"
             "Columns: %(cols)s\n"
             "%(filter)s" % {
                 "cols": " ".join(columns),
                 "filter": filter_headers,
             })

    with sites.only_sites(only_sites), sites.prepend_site():
        try:
            rows = sites.live().query(query)
        except MKTimeout:
            raise
        except Exception:
            raise MKGeneralException(_("The query returned no data."))

    return ['site'] + columns, rows
def __live_query_to_choices(
    query_callback: Callable[[MultiSiteConnection], Collection[LivestatusColumn]],
    limit: int,
    value: str,
    params: Dict,
) -> Choices:
    selected_sites = get_only_sites_from_context(params.get("context", {}))
    with sites.only_sites(selected_sites), sites.set_limit(limit):
        query_result = query_callback(sites.live())
        choices = [(h, h) for h in sorted(query_result, key=lambda h: h.lower())]

    if len(choices) > limit:
        choices.insert(0, (None, _("(Max suggestions reached, be more specific)")))

    if (value, value) not in choices and params["strict"] is False:
        choices.insert(0, (value, value))  # User is allowed to enter anything they want
    return choices
def _fetch_simple_number_data(self, properties, context):
    mode_properties = properties["render_mode"][1]
    time_range = self._int_time_range_from_rangespec(mode_properties["time_range"])
    filter_headers, only_sites = get_filter_headers("log", self.filter_infos(), context)
    object_type_filter = self._get_object_type_filter(properties)

    query = ("GET log\n"
             "Stats: log_type != \n"
             "Filter: class = %d\n"
             "Filter: log_time >= %f\n"
             "Filter: log_time <= %f\n"
             "%s"
             "%s" % (self.log_class, time_range[0], time_range[1], object_type_filter,
                     lqencode(filter_headers)))

    with sites.only_sites(only_sites):
        try:
            return sites.live().query_summed_stats(query)
        except livestatus.MKLivestatusNotFoundError:
            raise MKGeneralException(_("The query returned no data."))
def fetch_rrd_data(
    site: SiteId,
    host_name: HostName,
    service_description: ServiceName,
    metrics: set[MetricProperties],
    graph_recipe: GraphRecipe,
    graph_data_range: GraphDataRange,
) -> list[tuple[MetricProperties, TimeSeriesValues]]:
    start_time, end_time = graph_data_range["time_range"]

    step = graph_data_range["step"]
    # assumes str step is well formatted, colon separated step length & rrd point count
    if not isinstance(step, str):
        step = max(1, step)

    point_range = ":".join(map(str, (start_time, end_time, step)))

    lql_columns = list(rrd_columns(metrics, graph_recipe["consolidation_function"], point_range))
    query = livestatus_lql([host_name], lql_columns, service_description)

    with sites.only_sites(site):
        return list(zip(metrics, sites.live().query_row(query)))
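# Hedged illustration of the point_range string built in fetch_rrd_data() above: start, end
# and step are joined with ":" so they can be appended to the rrddata column request. The
# concrete timestamps and the 60-second step are made up.
assert ":".join(map(str, (1700000000, 1700003600, 60))) == "1700000000:1700003600:60"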
def _get_data(cls, properties, context, return_column_headers=True):
    time_range = cls.int_time_range_from_rangespec(properties["time_range"])
    c_headers = "ColumnHeaders: on\n" if return_column_headers else ""
    filter_headers, only_sites = get_filter_headers("log", ["host", "service"], context)
    metrics = {
        "CPU load": "load1",
        "CPU utilization": "util",
    }
    service_desc = properties["service"]

    query = (
        "GET services\n"
        "Columns: host_name host_state service_description service_state service_check_command service_metrics service_perf_data rrddata:v1:%(metric)s:%(start)s:%(end)s:%(step)s\n"
        # rrddata:m1:load1.max:%(start)s:%(end)s:%(step)s rrddata:m5:load5.max:%(start)s:%(end)s:%(step)s rrddata:m15:load15.max:%(start)s:%(end)s:%(step)s
        "%(column)s"
        "Filter: service_description ~~ %(service)s\n"
        "%(filter)s" % {
            "metric": metrics[service_desc],
            "start": time_range[0],
            "end": time_range[1],
            "step": 300,
            "service": service_desc,
            "column": c_headers,
            "filter": filter_headers,
        })

    with sites.only_sites(only_sites), sites.prepend_site():
        try:
            rows = sites.live().query(query)
        except MKTimeout:
            raise
        except Exception:
            raise MKGeneralException(_("The query returned no data."))

    if return_column_headers:
        return rows[0], rows[1:]
    return rows, ""
def _query_for_host_names(self, site_id):
    with sites.only_sites(site_id):
        return sites.live().query_column("GET hosts\nColumns: name\n")