def get_interface_metrics(managed_objects):
    from noc.sa.models.managedobject import ManagedObject

    # mo = self.object
    meric_map = {
        "load_in": "Interface | Load | In",
        "load_out": "Interface | Load | Out",
        "errors_in": "Interface | Errors | In",
        "errors_out": "Interface | Errors | Out",
    }
    if not isinstance(managed_objects, Iterable):
        managed_objects = [managed_objects]
    bi_map = {str(getattr(mo, "bi_id", mo)): mo for mo in managed_objects}
    query_interval = (
        ManagedObjectProfile.get_max_metrics_interval(
            set(
                mo.object_profile.id
                for mo in ManagedObject.objects.filter(bi_id__in=list(bi_map))
            )
        )
        * 1.5
    )
    from_date = datetime.datetime.now() - datetime.timedelta(seconds=max(query_interval, 3600))
    from_date = from_date.replace(microsecond=0)
    SQL = """SELECT
        managed_object,
        path[4] as iface,
        argMax(ts, ts),
        argMax(load_in, ts),
        argMax(load_out, ts),
        argMax(errors_in, ts),
        argMax(errors_out, ts)
      FROM interface
      WHERE
        date >= toDate('%s')
        AND ts >= toDateTime('%s')
        AND managed_object IN (%s)
      GROUP BY managed_object, iface
    """ % (
        from_date.date().isoformat(),
        from_date.isoformat(sep=" "),
        ", ".join(bi_map),
    )
    ch = ch_connection()
    mtable = []  # mo, iface, ts, load_in, load_out
    metric_map = defaultdict(dict)
    last_ts = {}  # mo -> ts
    try:
        for mo_bi_id, iface, ts, load_in, load_out, errors_in, errors_out in ch.execute(post=SQL):
            mo = bi_map.get(mo_bi_id)
            if mo:
                mtable += [[mo, iface, ts, load_in, load_out]]
                metric_map[mo][iface] = {
                    meric_map["load_in"]: int(load_in),
                    meric_map["load_out"]: int(load_out),
                    meric_map["errors_in"]: int(errors_in),
                    meric_map["errors_out"]: int(errors_out),
                }
                last_ts[mo] = max(ts, last_ts.get(mo, ts))
    except ClickhouseError:
        pass
    return metric_map, last_ts
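# A minimal usage sketch for the path-based get_interface_metrics() variant above.
# Illustrative only, not part of the original module: it assumes the surrounding
# module already imports the NOC models it uses, and the five-device limit and
# is_managed filter are arbitrary example choices.
def _example_interface_metrics():
    from noc.sa.models.managedobject import ManagedObject

    mos = list(ManagedObject.objects.filter(is_managed=True)[:5])
    metric_map, last_ts = get_interface_metrics(mos)
    for mo, ifaces in metric_map.items():
        for iface, values in ifaces.items():
            # values is keyed by the human-readable names from meric_map
            print(mo.name, iface, values.get("Interface | Load | In"), last_ts[mo])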
def get_ajax_data(self, **kwargs):
    # Parse query params
    query = self.decode_query(
        self.handler.get_argument("key")
    )  # type: List[Tuple[int, int, str]]
    # Get metrics
    from_ts = datetime.datetime.now() - datetime.timedelta(seconds=1800)
    from_ts = from_ts.replace(microsecond=0)
    interface_sql = """
      SELECT
        managed_object,
        path[4] AS iface,
        argMax(load_in, ts) AS load_in,
        argMax(load_out, ts) AS load_out,
        argMax(packets_in, ts) AS packets_in,
        argMax(packets_out, ts) AS packets_out,
        argMax(errors_in, ts) AS errors_in,
        argMax(errors_out, ts) AS errors_out
      FROM interface
      WHERE
        date >= toDate('%s')
        AND ts >= toDateTime('%s')
        AND (%s)
      GROUP BY managed_object, iface
    """ % (
        from_ts.date().isoformat(),
        from_ts.isoformat(sep=" "),
        " OR ".join(
            "(managed_object=%d AND path[4]='%s')" % (q[1], q[2].replace("'", "''"))
            for q in query
        ),
    )
    # Get data
    metrics = []  # type: List[Tuple[int, str, str, str, str, str]]
    ch = ch_connection()
    try:
        for (
            mo,
            iface,
            load_in,
            load_out,
            packets_in,
            packets_out,
            errors_in,
            errors_out,
        ) in ch.execute(post=interface_sql):
            if_hash = str(bi_hash(iface))
            metrics += [
                # (mo, if_hash, "speed", self.humanize_metric(speed)),
                (mo, if_hash, "load_in", self.humanize_metric(load_in)),
                (mo, if_hash, "load_out", self.humanize_metric(load_out)),
                (mo, if_hash, "packets_in", self.humanize_metric(packets_in)),
                (mo, if_hash, "packets_out", self.humanize_metric(packets_out)),
                (mo, if_hash, "errors_in", self.humanize_metric(errors_in)),
                (mo, if_hash, "errors_out", self.humanize_metric(errors_out)),
            ]
    except ClickhouseError:
        pass
    # Set defaults
    m_index = set()  # type: Set[Tuple[int, str]]
    for mo_bi_id, iface, _, _ in metrics:
        m_index.add((int(mo_bi_id), iface))
    interface_metrics = {
        "speed",
        "load_in",
        "load_out",
        "packets_in",
        "packets_out",
        "errors_in",
        "errors_out",
    }
    for _, mo_bi_id, iface in query:
        if (int(mo_bi_id), str(bi_hash(iface))) not in m_index:
            for metric in interface_metrics:
                metrics += [(str(mo_bi_id), str(bi_hash(iface)), metric, "-")]
    # managed object id -> bi id
    mo_map = {q[0]: q[1] for q in query}  # type: Dict[int, int]
    # Get interface statuses
    for doc in Interface._get_collection().find(
        {"$or": [{"managed_object": q[0], "name": q[2]} for q in query]},
        {
            "_id": 0,
            "managed_object": 1,
            "name": 1,
            "admin_status": 1,
            "oper_status": 1,
            "in_speed": 1,
            "out_speed": 1,
            "full_duplex": 1,
        },
    ):
        mo = str(mo_map[doc["managed_object"]])
        if_hash = str(bi_hash(doc["name"]))
        status = 0
        if "admin_status" in doc and doc["admin_status"]:
            status = 2 if doc["oper_status"] else 1
        duplex = "-"
        if "full_duplex" in doc:
            duplex = "Full" if doc["full_duplex"] else "Half"
        speed = "-"
        if "in_speed" in doc:
            speed = self.humanize_metric(doc["in_speed"] * 1000)
        metrics += [
            (mo, if_hash, "speed", speed),
            (mo, if_hash, "duplex", duplex),
            (mo, if_hash, "status", status),
        ]
    # Get current object statuses
    obj_statuses = ObjectStatus.get_statuses(list(mo_map))
    statuses = {str(mo_map[mo_id]): obj_statuses.get(mo_id, True) for mo_id in obj_statuses}
    return {"metrics": metrics, "statuses": list(statuses.items())}
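# Hedged sketch of the humanize_metric() helper used by get_ajax_data() above.
# The real implementation is not shown in this snippet; this only illustrates
# the assumed behaviour: compress a raw counter into a short string with a
# K/M/G suffix, and pass the "-" placeholder through unchanged.
def humanize_metric_sketch(value):
    # "-" is used as the "no data" placeholder elsewhere in get_ajax_data()
    if value in ("-", None):
        return "-"
    value = float(value)
    for suffix in ("", "K", "M", "G", "T"):
        if abs(value) < 1000:
            return "%.1f%s" % (value, suffix)
        value /= 1000.0
    return "%.1fP" % value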
def get_objects_metrics(managed_objects):
    """
    :param managed_objects:
    :return: Dictionary ManagedObject -> Path -> MetricName -> value
    """
    from noc.sa.models.managedobject import ManagedObject

    if not isinstance(managed_objects, Iterable):
        managed_objects = [managed_objects]
    # Object Metrics
    bi_map = {str(getattr(mo, "bi_id", mo)): mo for mo in managed_objects}
    query_interval = (
        ManagedObjectProfile.get_max_metrics_interval(
            set(
                mo.object_profile.id
                for mo in ManagedObject.objects.filter(bi_id__in=bi_map.keys())
            )
        )
        * 2
    )
    from_date = datetime.datetime.now() - datetime.timedelta(seconds=max(query_interval, 3600))
    from_date = from_date.replace(microsecond=0)
    # @todo Left Join
    object_profiles = set(
        mo.object_profile.id
        for mo in ManagedObject.objects.filter(bi_id__in=bi_map.keys())
    )
    msd = {ms.id: ms.table_name for ms in MetricScope.objects.filter()}
    mts = {
        str(mt.id): (msd[mt.scope.id], mt.field_name, mt.name)
        for mt in MetricType.objects.all()
    }
    mmm = set()
    op_fields_map = defaultdict(list)
    for op in ManagedObjectProfile.objects.filter(id__in=object_profiles):
        if not op.metrics:
            continue
        for mt in op.metrics:
            mmm.add(mts[mt["metric_type"]])
            op_fields_map[op.id] += [mts[mt["metric_type"]][1]]
    ch = ch_connection()
    mtable = []  # mo, ts, path, value
    metric_map = {}
    last_ts = {}  # mo -> ts
    for table, fields in itertools.groupby(sorted(mmm, key=lambda x: x[0]), key=lambda x: x[0]):
        fields = list(fields)
        SQL = """SELECT
            managed_object,
            argMax(ts, ts),
            arrayStringConcat(path) as path,
            %s
          FROM %s
          WHERE
            date >= toDate('%s')
            AND ts >= toDateTime('%s')
            AND managed_object IN (%s)
          GROUP BY managed_object, path
        """ % (
            ", ".join(["argMax(%s, ts) as %s" % (f[1], f[1]) for f in fields]),
            table,
            from_date.date().isoformat(),
            from_date.isoformat(sep=" "),
            ", ".join(bi_map),
        )
        try:
            for result in ch.execute(post=SQL):
                mo_bi_id, ts, path = result[:3]
                mo = bi_map.get(mo_bi_id)
                i = 0
                for r in result[3:]:
                    f_name = fields[i][2]
                    mtable += [[mo, ts, path, r]]
                    if mo not in metric_map:
                        metric_map[mo] = defaultdict(dict)
                    metric_map[mo][path][f_name] = r
                    last_ts[mo] = max(ts, last_ts.get(mo, ts))
                    i += 1
        except ClickhouseError:
            pass
    return metric_map, last_ts
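# Illustrative call of the path-based get_objects_metrics() above (not part of
# the module). The inner key is whatever arrayStringConcat(path) produced for
# the scope, so for object-level scopes it is typically an empty string.
def _example_objects_metrics():
    from noc.sa.models.managedobject import ManagedObject

    mo = ManagedObject.objects.filter(is_managed=True).first()
    metric_map, last_ts = get_objects_metrics(mo)
    for path, values in metric_map.get(mo, {}).items():
        print(mo.name, path or "<object>", values, last_ts.get(mo))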
def get_objects_metrics(
    managed_objects: Union[Iterable, int]
) -> Tuple[
    Dict["ManagedObject", Dict[str, Dict[str, int]]],
    Dict["ManagedObject", datetime.datetime],
]:
    """
    :param managed_objects:
    :return: Dictionary ManagedObject -> Path -> MetricName -> value
    """
    if not isinstance(managed_objects, Iterable):
        managed_objects = [managed_objects]
    # Object Metrics
    bi_map = {str(getattr(mo, "bi_id", mo)): mo for mo in managed_objects}
    query_interval = (
        ManagedObjectProfile.get_max_metrics_interval(
            set(
                mo.object_profile.id
                for mo in ManagedObject.objects.filter(bi_id__in=list(bi_map))
            )
        )
        * 2
    )
    from_date = datetime.datetime.now() - datetime.timedelta(seconds=max(query_interval, 3600))
    from_date = from_date.replace(microsecond=0)
    # @todo Left Join
    object_profiles = set(
        mo.object_profile.id
        for mo in ManagedObject.objects.filter(bi_id__in=list(bi_map))
    )
    msd: Dict[str, str] = {}  # Map ScopeID -> TableName
    labels_table = set()
    for ms in MetricScope.objects.filter():
        msd[ms.id] = ms.table_name
        if ms.labels:
            labels_table.add(ms.table_name)
    mts: Dict[str, Tuple[str, str, str]] = {
        str(mt.id): (msd[mt.scope.id], mt.field_name, mt.name)
        for mt in MetricType.objects.all()
    }  # Map MetricType ID -> table_name, column_name, MetricType Name
    mmm = set()
    op_fields_map: DefaultDict[str, List[str]] = defaultdict(list)
    for op in ManagedObjectProfile.objects.filter(id__in=object_profiles):
        if not op.metrics:
            continue
        for mt in op.metrics:
            mmm.add(mts[mt["metric_type"]])
            op_fields_map[op.id] += [mts[mt["metric_type"]][1]]
    ch = ch_connection()
    mtable = []  # mo, ts, labels, value
    metric_map = {}
    last_ts: Dict["ManagedObject", datetime.datetime] = {}  # mo -> ts
    for table, fields in itertools.groupby(sorted(mmm, key=lambda x: x[0]), key=lambda x: x[0]):
        fields = list(fields)
        SQL = """SELECT
            managed_object,
            argMax(ts, ts),
            %%s
            %s
          FROM %s
          WHERE
            date >= toDate('%s')
            AND ts >= toDateTime('%s')
            AND managed_object IN (%s)
          GROUP BY managed_object %%s
        """ % (
            ", ".join(["argMax(%s, ts) as %s" % (f[1], f[1]) for f in fields]),
            table,
            from_date.date().isoformat(),
            from_date.isoformat(sep=" "),
            ", ".join(bi_map),
        )
        if table in labels_table:
            # SQL = SQL % ("arrayStringConcat(labels, '|') as ll,", ", labels")
            SQL = SQL % (
                "arrayStringConcat(arrayMap(x -> splitByString('::', x)[-1], labels), '|') as labels,",
                ", labels",
            )
        else:
            SQL = SQL % ("", "")
        try:
            for result in ch.execute(post=SQL):
                if table in labels_table:
                    mo_bi_id, ts, labels = result[:3]
                    result = result[3:]
                else:
                    mo_bi_id, ts = result[:2]
                    labels, result = "", result[2:]
                mo = bi_map.get(mo_bi_id)
                i = 0
                for r in result:
                    f_name = fields[i][2]
                    mtable += [[mo, ts, labels, r]]
                    if mo not in metric_map:
                        metric_map[mo] = defaultdict(dict)
                    metric_map[mo][labels][f_name] = r
                    last_ts[mo] = max(ts, last_ts.get(mo, ts))
                    i += 1
        except ClickhouseError as e:
            print(e)
    return metric_map, last_ts
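# Sketch of the two-pass %-formatting used for the SQL template in the
# labels-aware get_objects_metrics() above: "%%s" survives the first
# substitution as "%s" and is only filled on the second pass, once we know
# whether the scope table carries labels. Column and table names here are
# illustrative.
template = "SELECT a, %%s %s FROM %s GROUP BY a %%s"
first_pass = template % ("argMax(load_in, ts) as load_in", "interface")
with_labels = first_pass % ("arrayStringConcat(labels, '|') as labels,", ", labels")
without_labels = first_pass % ("", "")
# with_labels    -> "SELECT a, arrayStringConcat(labels, '|') as labels, argMax(load_in, ts) as load_in FROM interface GROUP BY a , labels"
# without_labels -> "SELECT a,  argMax(load_in, ts) as load_in FROM interface GROUP BY a "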
def get_interface_metrics(
    managed_objects: Union[Iterable, int], meric_map: Optional[Dict[str, Any]] = None
) -> Tuple[
    Dict["ManagedObject", Dict[str, Dict[str, Union[float, int]]]],
    Dict["ManagedObject", datetime.datetime],
]:
    """
    :param managed_objects: ManagedObject list or bi_id list
    :param meric_map: For customization getting metrics
    :return: Dictionary ManagedObject -> Path -> MetricName -> value
    """
    # mo = self.object
    if not meric_map:
        meric_map = {
            "table_name": "interface",
            "map": {
                "load_in": "Interface | Load | In",
                "load_out": "Interface | Load | Out",
                "errors_in": "Interface | Errors | In",
                "errors_out": "Interface | Errors | Out",
            },
        }
    if not isinstance(managed_objects, Iterable):
        managed_objects = [managed_objects]
    bi_map: Dict[str, "ManagedObject"] = {
        str(getattr(mo, "bi_id", mo)): mo for mo in managed_objects
    }
    query_interval: float = (
        ManagedObjectProfile.get_max_metrics_interval(
            set(
                mo.object_profile.id
                for mo in ManagedObject.objects.filter(bi_id__in=list(bi_map))
            )
        )
        * 1.5
    )
    from_date = datetime.datetime.now() - datetime.timedelta(seconds=max(query_interval, 3600))
    from_date = from_date.replace(microsecond=0)
    SQL = """SELECT
        managed_object,
        argMax(ts, ts),
        splitByString('::', arrayFirst(x -> startsWith(x, 'noc::interface::'), labels))[-1] as iface,
        labels,
        %s
      FROM %s
      WHERE
        date >= toDate('%s')
        AND ts >= toDateTime('%s')
        AND managed_object IN (%s)
        AND NOT arrayExists(x -> startsWith(x, 'noc::unit::'), labels)
      GROUP BY managed_object, labels
    """ % (
        ", ".join(["argMax(%s, ts) as %s" % (f, f) for f in meric_map["map"].keys()]),
        meric_map["table_name"],
        from_date.date().isoformat(),
        from_date.isoformat(sep=" "),
        ", ".join(bi_map),
    )
    ch = ch_connection()
    metric_map: DefaultDict["ManagedObject", Dict[str, Dict[str, Union[int, float]]]] = defaultdict(dict)
    last_ts: Dict["ManagedObject", datetime.datetime] = {}  # mo -> ts
    metric_fields = list(meric_map["map"].keys())
    try:
        for result in ch.execute(post=SQL):
            mo_bi_id, ts, iface, labels = result[:4]
            labels = ast.literal_eval(labels)
            res = dict(zip(metric_fields, result[4:]))
            mo = bi_map.get(mo_bi_id)
            if len(labels) == 1 and metric_map[mo].get(iface):
                # If only interface metric
                continue
            metric_map[mo][iface] = defaultdict(dict)
            for field, value in res.items():
                metric_map[mo][iface][meric_map["map"].get(field)] = (
                    float(value) if is_float(value) else int(value)
                )
            last_ts[mo] = max(ts, last_ts.get(mo, ts))
    except ClickhouseError:
        pass
    return metric_map, last_ts
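# Illustrative override of the default meric_map for the labels-based
# get_interface_metrics() above (not part of the module). The keys of "map"
# must be real columns of the ClickHouse table named in "table_name"; the
# packets_* columns are assumed here based on the get_ajax_data() query earlier
# in this file, and the "Interface | Packets | ..." strings are only used as
# keys in the returned dictionary.
def _example_custom_interface_metrics(mos):
    custom_map = {
        "table_name": "interface",
        "map": {
            "load_in": "Interface | Load | In",
            "load_out": "Interface | Load | Out",
            "packets_in": "Interface | Packets | In",
            "packets_out": "Interface | Packets | Out",
        },
    }
    metric_map, last_ts = get_interface_metrics(mos, meric_map=custom_map)
    return metric_map, last_ts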