def fix():
    """Rebuild object uplinks for every network segment, continuing past failures."""
    # Estimated segment count only sizes the progress bar; exactness is not required.
    segment_count = NetworkSegment._get_collection().estimated_document_count()
    for segment in progressbar.progressbar(iter_ids_batch(), max_value=segment_count):
        try:
            topology = SegmentTopology(segment)
            ObjectData.update_uplinks(topology.iter_uplinks())
        except Exception as exc:
            # Best-effort migration: report the broken segment and keep going.
            print("[%s] %s" % (segment.name, exc))
def topology_rca_uplink(self, alarm, alarms, seen=None, ts=None):
    """
    Recursively assign topology root causes using uplink information.

    Starting from *alarm*, looks at neighbor/uplink objects of the alarm's
    managed object.  If every uplink of the object also has an alarm (no
    earlier than *ts*), the first uplink alarm inside the correlation window
    becomes the root and is recorded on ``alarm._trace_root``.  The walk then
    recurses into each neighboring alarm.

    :param alarm: Alarm to correlate
    :param alarms: Mapping of managed-object id -> alarm for the segment
    :param seen: Set of already-visited alarm ids (cycle guard), created on
                 first call and shared down the recursion
    :param ts: Reference timestamp; only neighbor alarms at or before it are
               considered (defaults to ``alarm.timestamp``)
    """
    def can_correlate(a1, a2):
        # A window of 0 (falsy) disables the check entirely; otherwise the
        # two alarms must be within topology_rca_window seconds of each other.
        return (
            not config.correlator.topology_rca_window
            or (a1.timestamp - a2.timestamp).total_seconds()
            <= config.correlator.topology_rca_window
        )

    ts = ts or alarm.timestamp
    seen = seen or set()
    self.print(">>> topology_rca(%s, %s)" % (alarm.id, "{%s}" % ", ".join(str(x) for x in seen)))
    if hasattr(alarm, "_trace_root"):
        self.print("<<< already correlated")
        return
    if alarm.id in seen:
        self.print("<<< already seen")
        return  # Already visited on this recursion path
    seen.add(alarm.id)
    o_id = alarm.managed_object.id
    # Collect neighbor objects: uplinks first, then any other linked neighbors
    neighbors = set()
    uplinks = []
    # NOTE(review): the keyword form get_by_id(object=o_id) differs from the
    # positional ObjectData.get_by_id(mo) used elsewhere in this file -- confirm
    # the method actually accepts an ``object`` keyword.
    ou = ObjectData.get_by_id(object=o_id)
    if ou and ou.uplinks:
        uplinks = ou.uplinks
        neighbors.update(uplinks)
    for du in ObjectData.get_neighbors(o_id):
        neighbors.add(du)
    if not neighbors:
        self.print("<<< no neighbors")
        return
    # Neighbor alarms no later than the reference timestamp
    na = {}
    for n in neighbors:
        a = alarms.get(n)
        if a and a.timestamp <= ts:
            na[n] = a
    self.print(" Neighbor alarms: %s" % ", ".join("%s%s (%s)" % ("U:" if x in uplinks else "", na[x], ManagedObject.get_by_id(x).name) for x in na))
    self.print(" Uplinks: %s" % ", ".join(ManagedObject.get_by_id(u).name for u in uplinks))
    if uplinks and len([na[o] for o in uplinks if o in na]) == len(uplinks):
        # All uplinks are faulty
        # uplinks are ordered according to path length
        # Correlate with first applicable
        for u in uplinks:
            a = na[u]
            if can_correlate(alarm, a):
                self.print("+++ SET ROOT %s -> %s" % (alarm.id, a.id))
                alarm._trace_root = a.id
                break
    # Recurse into neighbors' alarms so the whole cluster gets correlated
    for d in na:
        self.topology_rca_uplink(na[d], alarms, seen, ts)
    self.print("<<< done")
def update_uplinks(segment_id):
    """Recalculate and persist uplinks for a single network segment."""
    # Local imports keep this job callable without importing model modules eagerly.
    from noc.inv.models.networksegment import NetworkSegment
    from noc.sa.models.objectdata import ObjectData

    segment = NetworkSegment.get_by_id(segment_id)
    if not segment:
        logger.warning("Segment with id: %s does not exist" % segment_id)
        return
    topology = SegmentTopology(segment)
    ObjectData.update_uplinks(topology.iter_uplinks())
def check_segment_redundancy(alarm):
    """
    Reset lost_redundancy from segment when all redundant objects are up

    :param alarm:
    :return:
    """
    obj = alarm.managed_object
    segment = obj.segment
    # Only relevant for redundant segments currently flagged as degraded
    if not (segment.is_redundant and segment.lost_redundancy):
        return
    # The clearing object itself must be multi-homed
    if len(obj.data.uplinks) < 2:
        return
    member_ids = list(segment.managed_objects.values_list("id", flat=True))
    # Ids of other segment members that still have active alarms
    alarmed = []
    cursor = ActiveAlarm._get_collection().find(
        {"managed_object": {"$in": member_ids}},
        {"_id": 0, "managed_object": 1},
    )
    for doc in cursor:
        if doc["managed_object"] != obj.id:
            alarmed.append(doc["managed_object"])
    uplinks = ObjectData.uplinks_for_objects(alarmed)
    # If no alarmed member is multi-homed any more, redundancy is restored
    if not any(len(u) > 1 for u in uplinks.values()):
        logger.info("[%s] Redundancy recovered for %s", alarm.id, segment.name)
        segment.set_lost_redundancy(False)
def fix():
    """Recompute each segment's redundancy flag from its members' uplink counts."""
    # One pass over ObjectData gives object id -> uplink list
    uplink_map = {
        doc["_id"]: doc.get("uplinks", [])
        for doc in ObjectData._get_collection().find()
    }
    # Every touched segment gets an entry, defaulting to non-redundant
    redundancy = defaultdict(bool)
    for mo in ManagedObject.objects.all():
        multi_homed = len(uplink_map.get(mo.id, [])) > 1
        # A segment is redundant as soon as any member has 2+ uplinks
        redundancy[mo.segment] = redundancy[mo.segment] or multi_homed
    for segment in redundancy:
        segment.set_redundancy(redundancy[segment])
def get_downlinks(objects):
    """
    Return ids of objects outside *objects* whose uplinks are ALL inside it.

    These are the downstream objects that become unreachable when every
    object in *objects* is affected.
    """
    coll = ObjectData._get_collection()
    # Candidates: anything uplinked into the affected set, excluding members
    candidates = {
        doc["_id"]
        for doc in coll.find({"uplinks": {"$in": list(objects)}}, {"_id": 1})
        if doc["_id"] not in objects
    }
    if not candidates:
        return candidates
    # Keep only candidates with every uplink inside the affected set
    downlinks = set()
    for doc in coll.find(
        {"_id": {"$in": list(candidates)}},
        {"_id": 1, "uplinks": 1},
    ):
        if all(u in objects for u in doc["uplinks"]):
            downlinks.add(doc["_id"])
    return downlinks
def get_data(self, request, pool, obj_profile=None, **kwargs):
    """
    Build the per-pool "objects with problems" report dataset.

    Flags managed objects that failed profile detection, have no
    interfaces, no links, or no uplinks.

    :param request: HTTP request (used for per-user domain restriction)
    :param pool: Pool to report on
    :param obj_profile: Optional object profile filter
    :return: Report dataset (Name, Address, Profile, Platform, Segment, Problem)
    """
    problems = {}  # managed object id -> problem text
    # Build the queryset incrementally instead of issuing a second,
    # duplicated query for the non-superuser case as before.
    qs = ManagedObject.objects.filter(is_managed=True, pool=pool)
    if obj_profile:
        qs = qs.filter(object_profile=obj_profile)
    if not request.user.is_superuser:
        qs = qs.filter(administrative_domain__in=UserAccess.get_domains(request.user))
    mos = {mo.id: mo for mo in qs}
    mos_set = set(mos)
    # Objects still on the generic (unrecognized) SA profile
    for mo in mos:
        if mos[mo].profile.name == GENERIC_PROFILE:
            problems[mo] = _("Profile check failed")
    # Interface id -> managed object id, for link resolution below
    if_mo = {
        x["_id"]: x.get("managed_object")
        for x in Interface._get_collection().find({}, {"_id": 1, "managed_object": 1})
    }
    # BUGFIX: dict.itervalues() does not exist on Python 3 -- use .values()
    for mo in mos_set - set(problems) - set(if_mo.values()):
        problems[mo] = _("No interfaces")
    # Objects that appear on at least one link
    linked_mos = set()
    for d in Link._get_collection().find({}):
        for i in d["interfaces"]:
            linked_mos.add(if_mo.get(i))
    for mo in mos_set - set(problems) - linked_mos:
        problems[mo] = _("No links")
    # Objects with at least one calculated uplink
    uplinks = {}
    for d in ObjectData._get_collection().find():
        nu = len(d.get("uplinks", []))
        if nu:
            uplinks[d["_id"]] = nu
    for mo in mos_set - set(problems) - set(uplinks):
        problems[mo] = _("No uplinks")
    #
    data = []
    for mo_id in problems:
        if mo_id not in mos:
            continue
        mo = mos[mo_id]
        data += [[
            mo.name,
            mo.address,
            mo.profile.name,
            mo.platform.name if mo.platform else "",
            mo.segment.name if mo.segment else "",
            problems[mo_id],
        ]]
    data = sorted(data)
    return self.from_dataset(
        title=self.title,
        columns=["Name", "Address", "Profile", "Platform", "Segment", "Problem"],
        data=data,
        enumerate=True,
    )
def fix():
    """Recalculate uplinks for every network segment, showing progress."""
    segment_total = NetworkSegment._get_collection().estimated_document_count()
    # timeout(False) keeps the Mongo cursor alive for the long full scan
    segments = NetworkSegment.objects.timeout(False)
    for segment in progressbar.progressbar(segments, max_value=segment_total):
        topology = SegmentTopology(segment)
        ObjectData.update_uplinks(topology.iter_uplinks())
def api_report(
    self,
    request,
    reporttype=None,
    from_date=None,
    to_date=None,
    object_profile=None,
    filter_default=None,
    exclude_zero=True,
    interface_profile=None,
    selector=None,
    administrative_domain=None,
    columns=None,
    description=None,
    o_format=None,
    enable_autowidth=False,
    **kwargs,
):
    """
    Build the interface-load detail report with optional uplink columns and
    return it as CSV, zipped CSV or XLSX.

    :param from_date: Start date ("%d.%m.%Y"); defaults to 24h before now
    :param to_date: End date ("%d.%m.%Y"); defaults to from_date + 1 day
    :param exclude_zero: NOTE(review): see the hedged comment at the usage
        site below -- the check reads inverted relative to this name
    :param columns: Comma-separated list of column identifiers from ``cols``
    :param o_format: Output format: "csv", "csv_zip" or "xlsx"
    """
    def load(mo_ids):
        # Resolve, per managed object, which local interface leads to each
        # linked neighbor object (used to find the uplink interface name).
        # match = {"links.mo": {"$in": mo_ids}}
        match = {"int.managed_object": {"$in": mo_ids}}
        group = {
            "_id": "$_id",
            "links": {
                "$push": {
                    "iface_n": "$int.name",
                    # "iface_id": "$int._id",
                    # "iface_descr": "$int.description",
                    # "iface_speed": "$int.in_speed",
                    # "dis_method": "$discovery_method",
                    # "last_seen": "$last_seen",
                    "mo": "$int.managed_object",
                    "linked_obj": "$linked_objects",
                }
            },
        }
        # Aggregate over links joined with interfaces; read from a secondary
        # replica to keep load off the primary.
        value = (get_db()["noc.links"].with_options(
            read_preference=ReadPreference.SECONDARY_PREFERRED).aggregate(
                [
                    {"$unwind": "$interfaces"},
                    {
                        "$lookup": {
                            "from": "noc.interfaces",
                            "localField": "interfaces",
                            "foreignField": "_id",
                            "as": "int",
                        }
                    },
                    {"$match": match},
                    {"$group": group},
                ],
                allowDiskUse=True,
            ))
        res = defaultdict(dict)
        for v in value:
            if v["_id"]:
                for vv in v["links"]:
                    # Only point-to-point links (exactly two linked objects)
                    if len(vv["linked_obj"]) == 2:
                        mo = vv["mo"][0]
                        iface = vv["iface_n"]
                        for i in vv["linked_obj"]:
                            if mo != i:
                                # neighbor id -> local interface name
                                res[mo][i] = iface[0]
        return res

    def translate_row(row, cmap):
        # Project a full-width row onto the requested column order
        return [row[i] for i in cmap]

    def str_to_float(str):
        # NOTE: parameter shadows the builtin ``str``; kept as-is here.
        # Rounds to 3 decimal places.
        return float("{0:.3f}".format(float(str)))

    # Canonical column identifiers (order defines row layout below)
    cols = [
        "object_id",
        "object_name",
        "object_address",
        "object_platform",
        "object_adm_domain",
        "object_segment",
        "object_container",
        # "object_hostname",
        # "object_status",
        # "profile_name",
        # "object_profile",
        # "object_vendor",
        "iface_name",
        "iface_description",
        "iface_speed",
        "max_load_in",
        "max_load_in_time",
        "max_load_out",
        "max_load_out_time",
        "avg_load_in",
        "avg_load_out",
        "total_in",
        "total_out",
        "uplink_iface_name",
        "uplink_iface_description",
        "uplink_iface_speed",
        "uplink_max_load_in",
        "uplink_max_load_in_time",
        "uplink_max_load_out",
        "uplink_max_load_out_time",
        "uplink_avg_load_in",
        "uplink_avg_load_out",
        "uplink_total_in",
        "uplink_total_out",
    ]
    # Human-readable (translated) headers, parallel to ``cols``
    header_row = [
        "ID",
        _("OBJECT_NAME"),
        _("OBJECT_ADDRESS"),
        _("OBJECT_PLATFORM"),
        _("OBJECT_ADMDOMAIN"),
        _("OBJECT_SEGMENT"),
        _("CONTAINER_ADDRESS"),
        _("IFACE_NAME"),
        _("IFACE_DESCRIPTION"),
        _("IFACE_SPEED"),
        _("MAX_LOAD_IN, Mbps"),
        _("MAX_LOAD_IN_TIME"),
        _("MAX_LOAD_OUT, Mbps"),
        _("MAX_LOAD_OUT_TIME"),
        _("AVG_LOAD_IN, Mbps"),
        _("AVG_LOAD_OUT, Mbps"),
        _("TOTAL_IN, Mbyte"),
        _("TOTAL_OUT, Mbyte"),
        _("UPLINK_IFACE_NAME"),
        _("UPLINK_IFACE_DESCRIPTION"),
        _("UPLINK_IFACE_SPEED"),
        _("UPLINK_MAX_LOAD_IN, Mbps"),
        _("UPLINK_MAX_TIME_IN"),
        _("UPLINK_MAX_LOAD_OUT, Mbps"),
        _("UPLINK_MAX_TIME_OUT"),
        _("UPLINK_AVG_LOAD_IN, Mbps"),
        _("UPLINK_AVG_LOAD_OUT, Mbps"),
        _("UPLINK_TOTAL_IN, Mbyte"),
        _("UPLINK_TOTAL_OUT, Mbyte"),
    ]
    # Map requested column names to indices; unknown names are skipped
    if columns:
        cmap = []
        for c in columns.split(","):
            try:
                cmap += [cols.index(c)]
            except ValueError:
                continue
    else:
        cmap = list(range(len(cols)))
    # NOTE(review): these two lines raise AttributeError when ``columns`` is
    # None/empty -- presumably the UI always supplies columns; confirm.
    columns_order = columns.split(",")
    columns_filter = set(columns_order)
    r = [translate_row(header_row, cmap)]
    # Date Time Block: parse/default the report interval
    if not from_date:
        from_date = datetime.datetime.now() - datetime.timedelta(days=1)
    else:
        from_date = datetime.datetime.strptime(from_date, "%d.%m.%Y")
    if not to_date or from_date == to_date:
        to_date = from_date + datetime.timedelta(days=1)
    else:
        to_date = datetime.datetime.strptime(
            to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
    diff = to_date - from_date  # interval length, used for traffic totals
    # Load managed objects, narrowed by access rights and filters
    mos = ManagedObject.objects.filter(is_managed=True)
    if not request.user.is_superuser:
        mos = mos.filter(
            administrative_domain__in=UserAccess.get_domains(request.user))
    if selector:
        mos = mos.filter(
            ManagedObjectSelector.objects.get(id=int(selector)).Q)
    if administrative_domain:
        mos = mos.filter(
            administrative_domain__in=AdministrativeDomain.get_nested_ids(
                int(administrative_domain)))
    if object_profile:
        mos = mos.filter(object_profile=object_profile)
    if interface_profile:
        # Resolve the profile id to an object; its name is matched later
        interface_profile = InterfaceProfile.objects.filter(
            id=interface_profile).first()
    # Lightweight record type for the per-object attributes
    mo_attrs = namedtuple("MOATTRs", [c for c in cols if c.startswith("object")])
    containers_address = {}
    if "object_container" in columns_filter:
        containers_address = ReportContainerData(
            set(mos.values_list("id", flat=True)))
        containers_address = dict(list(containers_address.extract()))
    # bi_id -> MOATTRs tuple of display attributes
    moss = {}
    for row in mos.values_list("bi_id", "name", "address", "platform",
                               "administrative_domain__name", "segment", "id"):
        moss[row[0]] = mo_attrs(*[
            row[6],
            row[1],
            row[2],
            smart_text(Platform.get_by_id(row[3]) if row[3] else ""),
            row[4],
            smart_text(NetworkSegment.get_by_id(row[5])) if row[5] else "",
            containers_address.get(row[6], "") if containers_address and row[6] else "",
        ])
    # ClickHouse query for per-interface load metrics over the interval
    report_metric = ReportInterfaceMetrics(tuple(sorted(moss)), from_date,
                                           to_date, columns=None)
    report_metric.SELECT_QUERY_MAP = {
        (0, "managed_object", "id"): "managed_object",
        (1, "path", "iface_name"): "arrayStringConcat(path)",
        (
            2,
            "",
            "iface_description",
        ): "dictGetString('interfaceattributes','description' , (managed_object, arrayStringConcat(path)))",
        (
            3,
            "",
            "profile",
        ): "dictGetString('interfaceattributes', 'profile', (managed_object, arrayStringConcat(path)))",
        (
            4,
            "speed",
            "iface_speed",
        ): "dictGetUInt64('interfaceattributes', 'in_speed', (managed_object, arrayStringConcat(path)))",
        (5, "load_in_max", "load_in_max"): "divide(max(load_in),1048576)",
        (6, "load_out_max", "load_out_max"): "divide(max(load_out),1048576)",
        (7, "max_load_in_time", "max_load_in_time"): "argMax(ts,load_in)",
        (8, "max_load_out_time", "max_load_out_time"): "argMax(ts,load_out)",
        (9, "avg_load_in", "avg_load_in"): "divide(avg(load_in),1048576)",
        (10, "avg_load_out", "avg_load_out"): "divide(avg(load_out),1048576)",
    }
    # bi_id -> interface name -> metrics dict
    ifaces_metrics = defaultdict(dict)
    for row in report_metric.do_query():
        avg_in = str_to_float(row[9])
        avg_out = str_to_float(row[10])
        # Mbps average over the interval -> MBytes total (divide by 8 bits)
        total_in = avg_in * diff.total_seconds() / 8
        total_out = avg_out * diff.total_seconds() / 8
        ifaces_metrics[row[0]][row[1]] = {
            "description": row[2],
            "profile": row[3],
            "bandwidth": row[4],
            "max_load_in": str_to_float(row[5]),
            "max_load_out": str_to_float(row[6]),
            "max_load_in_time": row[7],
            "max_load_out_time": row[8],
            "avg_load_in": avg_in,
            "avg_load_out": avg_out,
            "total_in": float("{0:.1f}".format(total_in)),
            "total_out": float("{0:.1f}".format(total_out)),
        }
    # find uplinks
    links = {}
    # Only resolve uplinks when an uplink_* column (index > 17) is requested
    if cmap[-1] > 17:
        mos_id = list(mos.values_list("id", flat=True))
        uplinks = {obj: [] for obj in mos_id}
        for d in ObjectData._get_collection().find(
            {"_id": {"$in": mos_id}},
            {"_id": 1, "uplinks": 1},
        ):
            uplinks[d["_id"]] = d.get("uplinks", [])
        rld = load(mos_id)
        for mo in uplinks:
            for uplink in uplinks[mo]:
                # NOTE(review): rld is a defaultdict, so rld[mo] never raises,
                # but rld[mo][uplink] can KeyError if the uplink link was not
                # resolved by load(); confirm this cannot happen in practice.
                if rld[mo]:
                    if mo in links:
                        links[mo] += [rld[mo][uplink]]
                    else:
                        links[mo] = [rld[mo][uplink]]
    # Assemble data rows
    for mo_bi in ifaces_metrics:
        mo_id = moss[int(mo_bi)]
        mo_ids = getattr(mo_id, "object_id")
        for i in ifaces_metrics[mo_bi]:
            # NOTE(review): this reads inverted -- zero-load rows are skipped
            # only when exclude_zero is False, contradicting the parameter
            # name (default True never filters). Confirm intended behavior.
            if not exclude_zero:
                if (ifaces_metrics[mo_bi][i]["max_load_in"] == 0
                        and ifaces_metrics[mo_bi][i]["max_load_out"] == 0):
                    continue
            if description:
                # Substring filter on interface description
                if description not in ifaces_metrics[mo_bi][i]["description"]:
                    continue
            if interface_profile:
                # Substring filter on interface profile name
                if interface_profile.name not in ifaces_metrics[mo_bi][i]["profile"]:
                    continue
            # Full-width row; uplink columns (18..28) filled below if known
            row2 = [
                mo_ids,
                getattr(mo_id, "object_name"),
                getattr(mo_id, "object_address"),
                getattr(mo_id, "object_platform"),
                getattr(mo_id, "object_adm_domain"),
                getattr(mo_id, "object_segment"),
                getattr(mo_id, "object_container"),
                i,
                ifaces_metrics[mo_bi][i]["description"],
                ifaces_metrics[mo_bi][i]["bandwidth"],
                ifaces_metrics[mo_bi][i]["max_load_in"],
                ifaces_metrics[mo_bi][i]["max_load_in_time"],
                ifaces_metrics[mo_bi][i]["max_load_out"],
                ifaces_metrics[mo_bi][i]["max_load_out_time"],
                ifaces_metrics[mo_bi][i]["avg_load_in"],
                ifaces_metrics[mo_bi][i]["avg_load_out"],
                ifaces_metrics[mo_bi][i]["total_in"],
                ifaces_metrics[mo_bi][i]["total_out"],
                "",
                "",
                "",
                "",
                "",
                "",
                "",
                "",
                "",
                "",
                "",
            ]
            # ss tracks whether the row was already emitted with uplink data
            ss = True
            if mo_ids in links:
                for ifname_uplink in links[mo_ids]:
                    if ifname_uplink in ifaces_metrics[mo_bi]:
                        row2[18] = ifname_uplink
                        row2[19] = ifaces_metrics[mo_bi][ifname_uplink]["description"]
                        row2[20] = ifaces_metrics[mo_bi][ifname_uplink]["bandwidth"]
                        row2[21] = ifaces_metrics[mo_bi][ifname_uplink]["max_load_in"]
                        row2[22] = ifaces_metrics[mo_bi][ifname_uplink]["max_load_in_time"]
                        row2[23] = ifaces_metrics[mo_bi][ifname_uplink]["max_load_out"]
                        row2[24] = ifaces_metrics[mo_bi][ifname_uplink]["max_load_out_time"]
                        row2[25] = ifaces_metrics[mo_bi][ifname_uplink]["avg_load_in"]
                        row2[26] = ifaces_metrics[mo_bi][ifname_uplink]["avg_load_out"]
                        row2[27] = ifaces_metrics[mo_bi][ifname_uplink]["total_in"]
                        row2[28] = ifaces_metrics[mo_bi][ifname_uplink]["total_out"]
                        r += [translate_row(row2, cmap)]
                        ss = False
            if ss:
                # No uplink metrics found: emit the row with empty uplink cols
                r += [translate_row(row2, cmap)]
    filename = "metrics_detail_report_%s" % datetime.datetime.now().strftime("%Y%m%d")
    # Render in the requested output format
    if o_format == "csv":
        response = HttpResponse(content_type="text/csv")
        response["Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
        writer = csv.writer(response,
                            dialect="excel",
                            delimiter=",",
                            quoting=csv.QUOTE_MINIMAL)
        writer.writerows(r)
        return response
    elif o_format == "csv_zip":
        response = BytesIO()
        f = TextIOWrapper(TemporaryFile(mode="w+b"), encoding="utf-8")
        writer = csv.writer(f, dialect="excel", delimiter=";", quotechar='"')
        writer.writerows(r)
        f.seek(0)
        with ZipFile(response, "w", compression=ZIP_DEFLATED) as zf:
            zf.writestr("%s.csv" % filename, f.read())
            zf.filename = "%s.csv.zip" % filename
        # response = HttpResponse(content_type="text/csv")
        response.seek(0)
        response = HttpResponse(response.getvalue(),
                                content_type="application/zip")
        response["Content-Disposition"] = 'attachment; filename="%s.csv.zip"' % filename
        return response
    elif o_format == "xlsx":
        response = BytesIO()
        wb = xlsxwriter.Workbook(response)
        cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
        ws = wb.add_worksheet("Metrics")
        # Track widest cell text per header for the autowidth feature
        max_column_data_length = {}
        for rn, x in enumerate(r):
            for cn, c in enumerate(x):
                if rn and (r[0][cn] not in max_column_data_length
                           or len(str(c)) > max_column_data_length[r[0][cn]]):
                    max_column_data_length[r[0][cn]] = len(str(c))
                ws.write(rn, cn, c, cf1)
        ws.autofilter(0, 0, rn, cn)
        ws.freeze_panes(1, 0)
        for cn, c in enumerate(r[0]):
            # Set column width
            width = get_column_width(c)
            if enable_autowidth and width < max_column_data_length[c]:
                width = max_column_data_length[c]
            ws.set_column(cn, cn, width=width)
        wb.close()
        response.seek(0)
        response = HttpResponse(response.getvalue(),
                                content_type="application/vnd.ms-excel")
        response["Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
        response.close()
        return response
def handle(self, alarm, delta, trace=False, *args, **kwargs):
    """
    Replay topology RCA for an archived alarm and print the segment picture.

    Loads the archived alarm, collects alarms of the same segment within
    +/- *delta* seconds, prints a table of objects with their alarms, roots
    and first two uplinks, and (with *trace*) re-runs topology RCA on the
    collected alarms, dumping discovered roots.

    :param alarm: Sequence whose first element is the archived alarm id
    :param delta: Half-width of the time window, in seconds
    :param trace: When True, re-run RCA with verbose tracing
    """
    def nq(s):
        # Strip the "#suffix" qualifier from an object name
        return s.split("#", 1)[0]

    # Select RCA implementation per configuration (neighbor cache vs uplink)
    if config.fm.enable_rca_neighbor_cache:
        self.topology_rca = self.topology_rca_neighbor
    else:
        self.topology_rca = self.topology_rca_uplink
    try:
        a0 = ArchivedAlarm.objects.get(id=alarm[0])
    except ArchivedAlarm.DoesNotExist:
        self.die("Cannot find alarm")
    # Window [t0, t1] around the anchor alarm's timestamp
    t0 = a0.timestamp - datetime.timedelta(seconds=delta)
    t1 = a0.timestamp + datetime.timedelta(seconds=delta)
    # managed object id -> archived alarm inside the window
    alarms = {}
    mos = list(a0.managed_object.segment.managed_objects)
    for a in ArchivedAlarm.objects.filter(
            timestamp__gte=t0,
            timestamp__lte=t1,
            managed_object__in=[o.id for o in mos]):
        alarms[a.managed_object.id] = a
    # Enrich with roots
    # Get object segment data
    r = []
    for mo in mos:
        uplink1, uplink2 = "", ""
        d = ObjectData.get_by_id(mo)
        if d:
            # Resolve at most the first two uplinks to display names
            uplinks = [ManagedObject.get_by_id(u) for u in d.uplinks]
            uplinks = [u for u in uplinks if u]
            if uplinks:
                uplink1 = nq(uplinks.pop(0).name)
            if uplinks:
                uplink2 = nq(uplinks.pop(0).name)
        a = alarms.get(mo.id)
        r += [
            Record(
                timestamp=a.timestamp.strftime("%Y-%m-%d %H:%M:%S") if a else "",
                alarm_id=a.id if a else "",
                root_id=a.root if a and a.root else "",
                managed_object=nq(mo.name),
                address=mo.address,
                platform=mo.platform,
                uplink1=uplink1,
                uplink2=uplink2,
            )
        ]
    # Fixed-width table of the segment picture, ordered by timestamp
    MASK = "%19s | %24s | %24s | %16s | %15s | %20s | %16s | %16s"
    self.print(MASK % ("ts", "alarm", "root", "object", "address", "platform", "uplink1", "uplink2"))
    for x in sorted(r, key=operator.attrgetter("timestamp")):
        self.print(MASK % x)
    if trace:
        self.print("Time range: %s -- %s" % (t0, t1))
        self.print("Topology RCA Window: %s" %
                   ("%ss" % config.correlator.topology_rca_window
                    if config.correlator.topology_rca_window else "Disabled"))
        # alarm id -> alarm, for resolving rows back to alarms
        amap = dict((a.id, a) for a in six.itervalues(alarms))
        for x in sorted(r, key=operator.attrgetter("timestamp")):
            if not x.alarm_id:
                continue
            self.print("@@@ %s %s %s" % (x.timestamp, x.alarm_id, x.managed_object))
            self.topology_rca(amap[x.alarm_id], alarms)
        # Dump discovered roots (_trace_root is set by topology_rca)
        for a in amap:
            if hasattr(amap[a], "_trace_root"):
                self.print("%s -> %s" % (a, amap[a]._trace_root))
def fix():
    """Refresh the stored container/segment path for every managed object."""
    for managed_object in ManagedObject.objects.all():
        ObjectData.refresh_path(managed_object)
def get_data(self, request, pool=None, obj_profile=None, **kwargs): problems = {} # id -> problem mos = ManagedObject.objects.filter(is_managed=True, pool=pool) if not request.user.is_superuser: mos = mos.filter(administrative_domain__in=UserAccess.get_domains(request.user)) if obj_profile: # Get all managed objects mos = mos.filter(object_profile=obj_profile) mos = { mo[0]: (mo[1], mo[2], Profile.get_by_id(mo[3]), mo[4], mo[5]) for mo in mos.values_list("id", "name", "address", "profile", "platform", "segment") } mos_set = set(mos) # Get all managed objects with generic profile for mo in mos: if mos[mo][2] == GENERIC_PROFILE: problems[mo] = _("Profile check failed") # Get all managed objects without interfaces if_mo = dict( (x["_id"], x.get("managed_object")) for x in Interface._get_collection().find({}, {"_id": 1, "managed_object": 1}) ) for mo in mos_set - set(problems) - set(six.itervalues(if_mo)): problems[mo] = _("No interfaces") # Get all managed objects without links linked_mos = set() for d in Link._get_collection().find({}): for i in d["interfaces"]: linked_mos.add(if_mo.get(i)) for mo in mos_set - set(problems) - linked_mos: problems[mo] = _("No links") # Get all managed objects without uplinks uplinks = {} for d in ObjectData._get_collection().find(): nu = len(d.get("uplinks", [])) if nu: uplinks[d["_id"]] = nu for mo in mos_set - set(problems) - set(uplinks): problems[mo] = _("No uplinks") # data = [] for mo_id in problems: if mo_id not in mos: continue name, address, profile, platform, segment = mos[mo_id] data += [ [ name, address, profile.name, Platform.get_by_id(platform).name if platform else "", NetworkSegment.get_by_id(segment).name if segment else "", problems[mo_id], ] ] data = sorted(data) return self.from_dataset( title=self.title, columns=["Name", "Address", "Profile", "Platform", "Segment", "Problem"], data=data, enumerate=True, )