def _create_scatter_elements(cls, properties, context):
    """Return the scatter-plot element dicts for the queried service rows.

    Each non-falsy historic value of the merged metric series becomes one
    dict carrying timestamp, rounded value, label, a link to the service
    view and a human-readable tooltip.
    """
    columns, data_rows = cls._get_data(properties, context)  # type: ignore[call-arg]
    metric_name = get_metric_info(properties["metric"], 0)[0]["title"]

    scatter_points = []
    for raw_row in data_rows:
        row_dict = dict(zip(columns, raw_row))
        curve = merge_multicol(row_dict, columns, properties)
        site, host = raw_row[:2]
        for timestamp, value in curve.time_data_pairs():
            # Zero / None samples are dropped, matching the other metric dashlets.
            if not value:
                continue
            scatter_points.append(
                {
                    "timestamp": timestamp,
                    "value": round(value, 3),
                    "tag": "scatter",
                    "label": host,
                    "url": cls._create_url_to_service_view(site, host, metric_name),
                    "tooltip": "%s on %s: %.2f" % (host, date_and_time(timestamp), value),
                }
            )
    return scatter_points
def create_data_for_single_metric(cls, properties, context):
    """Collect plot data and metric metadata for a single-metric dashlet.

    Returns a tuple ``(data, used_metrics)`` where ``data`` holds one dict
    per historic sample plus one "live" dict per service (tagged with a
    per-row id), and ``used_metrics`` lists ``(row_id, metric, d_row)``
    for every row whose perf data contains the requested metric.
    """
    columns, data_rows = cls._get_data(properties, context)

    data = []
    used_metrics = []
    for idx, raw_row in enumerate(data_rows):
        d_row = dict(zip(columns, raw_row))
        translated_metrics = translate_perf_data(
            d_row["service_perf_data"], d_row["service_check_command"])

        metric = translated_metrics.get(properties['metric'])
        # Skip services that do not expose the requested metric at all.
        if metric is None:
            continue

        series = merge_multicol(d_row, columns, properties)
        site = d_row['site']
        host = d_row["host_name"]
        svc_url = makeuri(
            request,
            [("view_name", "service"), ("site", site), ("host", host),
             ("service", d_row['service_description'])],
            filename="view.py",
        )
        row_id = "row_%d" % idx

        # Historic values (falsy samples are skipped).
        data.extend(
            {
                "tag": row_id,
                "timestamp": ts,
                "value": elem,
                "label": host,
            }
            for ts, elem in series.time_data_pairs()
            if elem
        )

        # Live value, rendered with the metric's own unit formatter.
        data.append({
            "tag": row_id,
            "timestamp": int(time.time()),
            "value": metric['value'],
            "formatted_value": metric['unit']['render'](metric['value']),
            "url": svc_url,
            "label": host,
        })
        used_metrics.append((row_id, metric, d_row))

    return data, used_metrics
def create_data_for_single_metric(
    properties,
    context: VisualContext,
    column_generator: Callable[[Any, VisualContext], List[str]],
) -> Tuple[List[Dict[str, Any]], List[Tuple[str, TranslatedMetric, Dict[str, LivestatusColumn]]]]:
    """Gather historic and live plot points for a single-metric dashlet.

    Returns ``(data, used_metrics)``: ``data`` is a flat list of sample
    dicts (historic samples plus one live sample per service, the latter
    flagged with ``"last_value": True``); ``used_metrics`` records
    ``(row_id, metric, d_row)`` for every matching service row.
    """
    # TODO: should return live value and historic values as two different elements, for better typing support.
    columns, data_rows = service_table_query(properties, context, column_generator)

    data = []
    used_metrics = []
    for idx, raw_row in enumerate(data_rows):
        d_row = dict(zip(columns, raw_row))
        translated_metrics = translate_perf_data(
            d_row["service_perf_data"], d_row["service_check_command"]
        )

        metric = translated_metrics.get(properties["metric"])
        # Service without the requested metric: nothing to plot for this row.
        if metric is None:
            continue

        series = merge_multicol(d_row, columns, properties)
        host = d_row["host_name"]
        row_id = "row_%d" % idx

        # Historic values; falsy samples are omitted.
        data.extend(
            {
                "tag": row_id,
                "timestamp": ts,
                "value": elem,
                "label": host,
            }
            for ts, elem in series.time_data_pairs()
            if elem
        )

        # Live value, marked so the frontend can distinguish it.
        data.append(
            {
                "tag": row_id,
                "last_value": True,
                "timestamp": int(time.time()),
                "value": metric["value"],
                "label": host,
                "url": create_service_view_url(d_row),
            }
        )
        used_metrics.append((row_id, metric, d_row))

    return data, used_metrics
def create_data_for_single_metric(properties, context, column_generator):
    """Collect plot points for a single-metric dashlet.

    Returns ``(data, used_metrics)``: ``data`` holds the historic samples
    and one live sample (``"last_value": True``) per service whose perf
    data contains the requested metric; ``used_metrics`` lists
    ``(row_id, metric, d_row)`` for those services.
    """
    columns, data_rows = service_table_query(properties, context, column_generator)

    data = []
    used_metrics = []
    for idx, raw_row in enumerate(data_rows):
        d_row = dict(zip(columns, raw_row))
        translated_metrics = translate_perf_data(
            d_row["service_perf_data"], d_row["service_check_command"])

        metric = translated_metrics.get(properties['metric'])
        # No such metric on this service — skip the row entirely.
        if metric is None:
            continue

        series = merge_multicol(d_row, columns, properties)
        host = d_row["host_name"]
        row_id = "row_%d" % idx

        # Historic values; falsy samples are omitted.
        data.extend(
            {
                "tag": row_id,
                "timestamp": ts,
                "value": elem,
                "label": host,
            }
            for ts, elem in series.time_data_pairs()
            if elem
        )

        # Live value, flagged for the frontend.
        data.append({
            "tag": row_id,
            "last_value": True,
            "timestamp": int(time.time()),
            "value": metric['value'],
            "label": host,
            "url": create_service_view_url(d_row),
        })
        used_metrics.append((row_id, metric, d_row))

    return data, used_metrics