def object_mappings(self, controller, request, done):
    """
    Handle RPC object_mappings request.

    Builds an ObjectMappingsResponse for the activator bound to *controller*:
    trap-source -> object mappings, ping probe settings (with last known
    status) and active event ignore rules, then completes the RPC via
    done(controller, response=...). Unauthenticated streams are rejected
    with ERR_AUTH_REQUIRED.
    """
    if not controller.stream.is_authenticated:
        done(controller,
             error=Error(code=ERR_AUTH_REQUIRED,
                         text="Authentication required"))
        return
    logger.info("Object mappings requested")
    activator = self.get_controller_activator(controller)
    r = ObjectMappingsResponse()
    # Clients must refresh the filter after this many seconds
    r.expire = self.sae.config.getint("sae", "refresh_event_filter")
    # Build source filter and ping settings
    cursor = connection.cursor()
    # Objects of this activator with a trap source set; NOC.* internal
    # profiles are excluded (%% escapes % for the DB-API formatter)
    cursor.execute(
        """
        SELECT mo.id, mo.trap_source_ip, op.enable_ping,
               op.ping_interval, mo.collector_id
        FROM sa_managedobject mo
             JOIN sa_managedobjectprofile op
                  ON (mo.object_profile_id = op.id)
        WHERE mo.activator_id = %s
              AND mo.trap_source_ip IS NOT NULL
              AND mo.profile_name NOT LIKE 'NOC.%%'
        """,
        [activator.id])
    cdata = list(cursor)
    # Ids of objects that should be pinged (ping enabled, positive interval)
    pingable = [x[0] for x in cdata if x[2] and x[3] > 0]
    c = ObjectStatus._get_collection()
    statuses = {}
    # Fetch last known statuses in chunks of 500 ids.
    # NB: the generator-local `r` below shadows nothing -- generator
    # expressions have their own scope, the outer `r` (response) is untouched.
    while pingable:
        chunk, pingable = pingable[:500], pingable[500:]
        statuses.update(
            dict((r["object"], r["status"])
                 for r in c.find({"object": {
                     "$in": chunk
                 }}, {
                     "_id": 0,
                     "object": 1,
                     "status": 1
                 })))
    for mo_id, trap_source_ip, enable_ping, ping_interval, collector_id in cdata:
        # Objects handled by a dedicated collector are not mapped here
        if not collector_id:
            s = r.mappings.add()
            s.source = trap_source_ip
            s.object = str(mo_id)
        if enable_ping and ping_interval > 0:
            s = r.ping.add()
            s.address = trap_source_ip
            s.interval = ping_interval
            status = statuses.get(mo_id)
            if status is not None:
                # Seed the probe with the last known up/down state
                s.current_status = status
    # Build event filter
    for ir in IgnoreEventRules.objects.filter(is_active=True):
        i = r.ignore_rules.add()
        i.left_re = ir.left_re
        i.right_re = ir.right_re
    logger.info("Object mappings returned")
    done(controller, response=r)
def get_data(self):
    """
    Build the template context for a network segment view: contained
    managed objects (with status), child segments, VLANs and the
    service/subscriber summary.
    """
    # Service/subscriber totals for the segment itself
    summary = {
        "service": SummaryItem.items_to_dict(self.object.total_services),
        "subscriber": SummaryItem.items_to_dict(self.object.total_subscribers)
    }
    # Managed objects contained in this segment
    objects = [
        {
            "id": mo.id,
            "name": mo.name,
            "object": mo,
            "summary": ServiceSummary.get_object_summary(mo)
        }
        for mo in self.object.managed_objects.filter(is_managed=True)
    ]
    # Resolve per-object status: alarm / up / down
    mo_ids = [item["id"] for item in objects]
    alarmed = {
        doc["managed_object"]
        for doc in ActiveAlarm._get_collection().find(
            {"managed_object": {"$in": mo_ids}},
            {"_id": 0, "managed_object": 1})
    }
    known_status = ObjectStatus.get_statuses(mo_ids)
    for item in objects:
        # NOTE(review): mere presence in the status map (not its value)
        # selects up/alarm vs. down -- confirm against get_statuses() semantics
        if item["id"] not in known_status:
            item["status"] = "down"
        elif item["id"] in alarmed:
            item["status"] = "alarm"
        else:
            item["status"] = "up"
    # Direct child segments with their own summaries
    children = [
        {
            "id": ns.id,
            "name": ns.name,
            "object": ns,
            "summary": {
                "service": SummaryItem.items_to_dict(ns.total_services),
                "subscriber": SummaryItem.items_to_dict(ns.total_subscribers),
            }
        }
        for ns in NetworkSegment.objects.filter(parent=self.object.id)
    ]
    # VLANs are listed only for VLAN border segments
    if self.object.vlan_border:
        vlans = list(VLAN.objects.filter(segment=self.object.id).order_by("vlan"))
    else:
        vlans = []
    return {
        "object": self.object,
        "managed_objects": sorted(objects, key=operator.itemgetter("name")),
        "children": sorted(children, key=operator.itemgetter("name")),
        "parent": self.object.parent,
        "summary": summary,
        "vlans": vlans
    }
def api_objects_statuses(self, request, objects: List[int]):
    """
    Resolve a composite status code (OK/DOWN/ALARM/UNKNOWN, OR-ed with
    MAINTENANCE) for every requested object id.
    """
    def alarmed(obj_ids: List[int]) -> Set[int]:
        """Ids of objects having at least one active alarm."""
        found: Set[int] = set()
        coll = ActiveAlarm._get_collection()
        # Query in chunks of 500 ids to keep $in lists bounded
        for start in range(0, len(obj_ids), 500):
            chunk = obj_ids[start:start + 500]
            cursor = coll.aggregate(
                [
                    {"$match": {"managed_object": {"$in": chunk}}},
                    {"$group": {"_id": "$managed_object", "count": {"$sum": 1}}},
                ]
            )
            found.update(doc["_id"] for doc in cursor)
        return found

    def maintained(obj_ids: List[int]) -> Set[int]:
        """Ids of objects affected by a started, not completed maintenance."""
        now = datetime.datetime.now()
        wanted = set(obj_ids)
        hit: Set[int] = set()
        for doc in Maintenance._get_collection().find(
            {"is_completed": False, "start": {"$lte": now}},
            {"_id": 0, "affected_objects": 1},
        ):
            hit |= wanted & {ao["object"] for ao in doc["affected_objects"]}
        return hit

    # Everything starts as unknown; refine from collected status data
    result = {obj: self.ST_UNKNOWN for obj in objects}
    statuses = ObjectStatus.get_statuses(objects)
    with_alarms = alarmed(objects)
    in_maintenance = maintained(objects)
    for obj in statuses:
        if not statuses[obj]:
            result[obj] = self.ST_DOWN
        elif obj in with_alarms:
            result[obj] = self.ST_ALARM
        else:
            result[obj] = self.ST_OK
        # Maintenance is a flag on top of the base status
        if obj in in_maintenance:
            result[obj] |= self.ST_MAINTENANCE
    return result
def __init__(self, mos, avail_only=False, match=None):
    """
    :param mos:
    :type mos: ManagedObject.objects.filter()
    """
    # Object ids under report; optionally narrowed to currently-available ones
    self.mo_ids = list(mos.values_list("id", flat=True))
    if avail_only:
        availability = ObjectStatus.get_statuses(self.mo_ids)
        self.mo_ids = [mo_id for mo_id, up in availability.items() if up]
    # Distinct pools touched by the queryset
    self.mos_pools = [
        Pool.get_by_id(pool_id)
        for pool_id in set(mos.values_list("pool", flat=True))
    ]
    # Per-pool discovery schedule collection name template
    self.coll_name = "noc.schedules.discovery.%s"
    # @todo Good way for pipelines fill
    self.pipelines = {}
    self.match = match
def handler(self):
    """
    Decode and validate an object-status request, answer with a
    per-object availability flag (False when the status is unknown).
    """
    # Parse request body
    try:
        req = ujson.loads(self.request.body)
    except ValueError:
        return 400, "Cannot decode JSON"
    # Validate and normalize object ids
    try:
        req = Request.clean(req)
        objects = [int(o) for o in req["objects"]]
    except ValueError as e:
        return 400, "Bad request: %s" % e
    statuses = ObjectStatus.get_statuses(objects)
    payload = []
    for o in objects:
        payload.append({"id": str(o), "status": statuses.get(o, False)})
    return 200, {"statuses": payload}
def get_data(self, request, pool=None, obj_profile=None, selector=None,
             avail_status=None, profile_check_only=None,
             failed_scripts_only=None, filter_pending_links=None,
             filter_none_objects=None, filter_view_other=None, **kwargs):
    """
    Build the discovery-problems report dataset.

    Selects managed objects by pool/selector/profile, pulls box discovery
    job problems matching the requested filter and renders one row per
    (object, discovery method) pair.

    Fix: removed dead code -- `avail = ObjectStatus.get_statuses(mos_id)`
    (and the `mos_id` list feeding it) was computed but never read
    (availability is taken from `mo.get_status()` per row), and the string
    assigned to `discovery` was immediately shadowed by the loop variable.
    """
    data = []
    match = None
    # Map remote error codes to human-readable descriptions
    code_map = {
        "1": "Unknown error",
        "10000": "Unspecified CLI error",
        "10005": "Connection refused",
        "10001": "Authentication failed",
        "10002": "No super command defined",
        "10003": "No super privileges",
        "10004": "SSH Protocol error"
    }
    if not pool:
        # Default to the first configured pool
        pool = Pool.objects.filter()[0]
    data += [SectionRow(name="Report by %s" % pool.name)]
    if selector:
        mos = ManagedObject.objects.filter(selector.Q)
    else:
        mos = ManagedObject.objects.filter(pool=pool, is_managed=True)
    if not request.user.is_superuser:
        # Restrict to administrative domains the user may access
        mos = mos.filter(
            administrative_domain__in=UserAccess.get_domains(request.user))
    if obj_profile:
        mos = mos.filter(object_profile=obj_profile)
    if filter_view_other:
        # Only generic-profile objects whose object profile has ping enabled
        mnp_in = list(
            ManagedObjectProfile.objects.filter(enable_ping=False))
        mos = mos.filter(profile=Profile.objects.get(
            name=GENERIC_PROFILE)).exclude(object_profile__in=mnp_in)
    # Build the scheduler-job "problems" match expression
    if profile_check_only:
        match = {
            "$or": [{
                "job.problems.suggest_cli": {
                    "$exists": True
                }
            }, {
                "job.problems.suggest_snmp": {
                    "$exists": True
                }
            }, {
                "job.problems.profile.": {
                    "$regex": "Cannot detect profile"
                }
            }, {
                "job.problems.version.": {
                    "$regex": "Remote error code 1000[1234]"
                }
            }]
        }
    elif failed_scripts_only:
        match = {
            "$and": [{
                "job.problems": {
                    "$exists": "true",
                    "$ne": {}
                }
            }, {
                "job.problems.suggest_snmp": {
                    "$exists": False
                }
            }, {
                "job.problems.suggest_cli": {
                    "$exists": False
                }
            }]
        }
    elif filter_view_other:
        match = {"job.problems.suggest_snmp": {"$exists": False}}
    rdp = ReportDiscoveryProblem(mos, avail_only=avail_status, match=match)
    exclude_method = []
    if filter_pending_links:
        # Link discovery methods are expected to report pending problems
        exclude_method += ["lldp", "lacp", "cdp", "huawei_ndp"]
    for discovery in rdp:
        mo = ManagedObject.get_by_id(discovery["key"])
        for method in ifilterfalse(lambda x: x in exclude_method,
                                   discovery["job"][0]["problems"]):
            problem = discovery["job"][0]["problems"][method]
            if filter_none_objects and not problem:
                continue
            if isinstance(problem, dict) and "" in problem:
                # Unwrap {"": message} containers
                problem = problem.get("", "")
            if "Remote error code" in problem:
                # Replace trailing numeric code with its description
                problem = code_map.get(problem.split(" ")[-1], problem)
            if isinstance(problem, six.string_types):
                problem = problem.replace("\n", " ").replace("\r", " ")
            data += [(mo.name, mo.address, mo.profile.name,
                      mo.administrative_domain.name,
                      _("Yes") if mo.get_status() else _("No"),
                      discovery["st"].strftime("%d.%m.%Y %H:%M")
                      if "st" in discovery else "",
                      method, problem)]
    return self.from_dataset(title=self.title,
                             columns=[
                                 _("Managed Object"), _("Address"),
                                 _("Profile"), _("Administrative Domain"),
                                 _("Avail"), _("Last successful discovery"),
                                 _("Discovery"), _("Error")
                             ],
                             data=data)
def get_ajax_data(self, **kwargs):
    """
    Serve AJAX card data: last interface metrics (from ClickHouse),
    interface speed/duplex/admin-oper status (from mongo) and current
    object statuses for the (mo_id, mo_bi_id, iface_name) triples encoded
    in the "key" request argument.
    """
    # Parse query params
    query = self.decode_query(
        self.handler.get_argument("key")
    )  # type: List[Tuple[int, int, str]]
    # Get metrics for the last 30 minutes
    from_ts = datetime.datetime.now() - datetime.timedelta(seconds=1800)
    from_ts = from_ts.replace(microsecond=0)
    # argMax(value, ts) picks the most recent sample per interface;
    # single quotes in interface names are doubled for ClickHouse escaping
    interface_sql = """
    SELECT
        managed_object,
        path[4] AS iface,
        argMax(load_in, ts) AS load_in,
        argMax(load_out, ts) AS load_out,
        argMax(packets_in, ts) AS packets_in,
        argMax(packets_out, ts) AS packets_out,
        argMax(errors_in, ts) AS errors_in,
        argMax(errors_out, ts) AS errors_out
    FROM interface
    WHERE
        date >= toDate('%s')
        AND ts >= toDateTime('%s')
        AND (%s)
    GROUP BY managed_object, iface
    """ % (
        from_ts.date().isoformat(),
        from_ts.isoformat(sep=" "),
        " OR ".join(
            "(managed_object=%d AND path[4]='%s')"
            % (q[1], q[2].replace("'", "''"))
            for q in query
        ),
    )
    # Get data; each metric row is (mo_bi_id, iface_hash, metric_name, value)
    metrics = []  # type: List[Tuple[int, str, str, str, str, str]]
    ch = ch_connection()
    try:
        for (
            mo,
            iface,
            load_in,
            load_out,
            packets_in,
            packets_out,
            errors_in,
            errors_out,
        ) in ch.execute(post=interface_sql):
            # Interfaces are addressed by bi_hash of their name on the client
            if_hash = str(bi_hash(iface))
            metrics += [
                # (mo, if_hash, "speed", self.humanize_metric(speed)),
                (mo, if_hash, "load_in", self.humanize_metric(load_in)),
                (mo, if_hash, "load_out", self.humanize_metric(load_out)),
                (mo, if_hash, "packets_in", self.humanize_metric(packets_in)),
                (mo, if_hash, "packets_out", self.humanize_metric(packets_out)),
                (mo, if_hash, "errors_in", self.humanize_metric(errors_in)),
                (mo, if_hash, "errors_out", self.humanize_metric(errors_out)),
            ]
    except ClickhouseError:
        # Best-effort: render the card with "-" placeholders on CH failure
        pass
    # Set defaults for interfaces that produced no metrics
    m_index = set()  # type: Set[Tuple[int, str]]
    for mo_bi_id, iface, _, _ in metrics:
        m_index.add((int(mo_bi_id), iface))
    interface_metrics = {
        "speed",
        "load_in",
        "load_out",
        "packets_in",
        "packets_out",
        "errors_in",
        "errors_out",
    }
    for _, mo_bi_id, iface in query:
        if (int(mo_bi_id), str(bi_hash(iface))) not in m_index:
            for metric in interface_metrics:
                metrics += [(str(mo_bi_id), str(bi_hash(iface)), metric, "-")]
    # managed object id -> bi id
    mo_map = {q[0]: q[1] for q in query}  # type: Dict[int, int]
    # Get interface statuses
    for doc in Interface._get_collection().find(
        {"$or": [{"managed_object": q[0], "name": q[2]} for q in query]},
        {
            "_id": 0,
            "managed_object": 1,
            "name": 1,
            "admin_status": 1,
            "oper_status": 1,
            "in_speed": 1,
            "out_speed": 1,
            "full_duplex": 1,
        },
    ):
        mo = str(mo_map[doc["managed_object"]])
        if_hash = str(bi_hash(doc["name"]))
        # status: 0 = admin down, 1 = admin up / oper down, 2 = up
        status = 0
        if "admin_status" in doc and doc["admin_status"]:
            status = 2 if doc["oper_status"] else 1
        duplex = "-"
        if "full_duplex" in doc:
            duplex = "Full" if doc["full_duplex"] else "Half"
        speed = "-"
        if "in_speed" in doc:
            # in_speed is stored in kbit/s; humanize expects bit/s
            speed = self.humanize_metric(doc["in_speed"] * 1000)
        metrics += [
            (mo, if_hash, "speed", speed),
            (mo, if_hash, "duplex", duplex),
            (mo, if_hash, "status", status),
        ]
    # Get current object statuses, keyed by bi id on the client side
    obj_statuses = ObjectStatus.get_statuses(list(mo_map))
    statuses = {str(mo_map[mo_id]): obj_statuses.get(mo_id, True) for mo_id in obj_statuses}
    return {"metrics": metrics, "statuses": list(statuses.items())}
def api_report(
    self,
    request,
    o_format,
    is_managed=None,
    administrative_domain=None,
    selector=None,
    pool=None,
    segment=None,
    avail_status=False,
    columns=None,
    ids=None,
    detail_stat=None,
    enable_autowidth=False,
):
    """
    Render the managed-object detail report as CSV or XLSX.

    The selected *columns* (comma-separated names from `cols`) drive both
    which per-object lookups are prepared and which cells are emitted.
    Most lookups are pre-built iterators that are advanced in lock-step
    with the main queryset (both ordered by id), so column order and row
    order are load-bearing.
    """
    def row(row):
        # Normalize each cell to a csv/xlsx-safe string
        def qe(v):
            if v is None:
                return ""
            if isinstance(v, unicode):
                return v.encode("utf-8")
            elif isinstance(v, datetime.datetime):
                return v.strftime("%Y-%m-%d %H:%M:%S")
            elif not isinstance(v, str):
                return str(v)
            else:
                return v

        return [qe(x) for x in row]

    def translate_row(row, cmap):
        # Project a full row down to the requested columns
        return [row[i] for i in cmap]

    type_columns = ["Up/10G", "Up/1G", "Up/100M", "Up/10M", "Down/-", "-"]
    # Internal column names; index positions must match header_row and the
    # row([...]) construction below
    cols = [
        "id",
        "object_name",
        "object_address",
        "object_hostname",
        "object_status",
        "profile_name",
        "object_profile",
        "object_vendor",
        "object_platform",
        "object_attr_hwversion",
        "object_version",
        "object_attr_bootprom",
        "object_serial",
        "object_attr_patch",
        "auth_profile",
        "avail",
        "admin_domain",
        "container",
        "segment",
        "phys_interface_count",
        "link_count",
        "last_config_ts"
        # "discovery_problem"
        # "object_tags"
        # "sorted_tags"
        # "object_caps"
        # "interface_type_count"
    ]
    header_row = [
        "ID",
        "OBJECT_NAME",
        "OBJECT_ADDRESS",
        "OBJECT_HOSTNAME",
        "OBJECT_STATUS",
        "PROFILE_NAME",
        "OBJECT_PROFILE",
        "OBJECT_VENDOR",
        "OBJECT_PLATFORM",
        "OBJECT_HWVERSION",
        "OBJECT_VERSION",
        "OBJECT_BOOTPROM",
        "OBJECT_SERIAL",
        "OBJECT_ATTR_PATCH",
        "AUTH_PROFILE",
        "AVAIL",
        "ADMIN_DOMAIN",
        "CONTAINER",
        "SEGMENT",
        "PHYS_INTERFACE_COUNT",
        "LINK_COUNT",
        "LAST_CONFIG_TS",
    ]
    # "DISCOVERY_PROBLEM"
    # "ADM_PATH
    # "DISCOVERY_PROBLEM"
    # "OBJECT_TAGS"
    # "SORTED_TAGS"
    # "OBJECT_CAPS"
    # "INTERFACE_TYPE_COUNT"
    if columns:
        # Column map: indexes into cols/header_row, unknown names ignored
        cmap = []
        for c in columns.split(","):
            try:
                cmap += [cols.index(c)]
            except ValueError:
                continue
    else:
        cmap = list(range(len(cols)))
    r = [translate_row(header_row, cmap)]
    mos = self.get_report_object(request.user, is_managed,
                                 administrative_domain, selector, pool,
                                 segment, ids)
    columns_filter = set(columns.split(","))
    mos_id = tuple(mos.order_by("id").values_list("id", flat=True))
    mos_filter = None
    if detail_stat:
        # Restrict to ids matched by the detail-stat filter expression
        ref = ReportModelFilter()
        ids = list(six.itervalues(ref.proccessed(detail_stat)))
        mos_filter = set(mos_id).intersection(ids[0])
        mos_id = sorted(mos_filter)
    avail = {}
    if "avail" in columns_filter:
        avail = ObjectStatus.get_statuses(mos_id)
    # Per-object lookups, consumed in id order alongside the main queryset
    link_count = iter(ReportObjectLinkCount(mos_id))
    iface_count = iter(ReportObjectIfacesTypeStat(mos_id))
    if "container" in columns_filter:
        container_lookup = iter(ReportContainerData(mos_id))
    else:
        container_lookup = None
    if "object_serial" in columns_filter:
        container_serials = iter(ReportContainer(mos_id))
    else:
        container_serials = None
    if "interface_type_count" in columns_filter:
        iss = iter(ReportObjectIfacesStatusStat(mos_id))
    else:
        iss = None
    if "object_attr_patch" in columns_filter or "object_serial" in columns_filter:
        roa = iter(ReportObjectAttributes(mos_id))
    else:
        roa = None
    hn = iter(ReportObjectsHostname1(mos_id))
    rc = iter(ReportObjectConfig(mos_id))
    # ccc = iter(ReportObjectCaps(mos_id))
    # Optional trailing columns are appended to the header row in place
    if "adm_path" in columns_filter:
        ad_path = ReportAdPath()
        r[-1].extend([_("ADM_PATH1"), _("ADM_PATH1"), _("ADM_PATH1")])
    if "interface_type_count" in columns_filter:
        r[-1].extend(type_columns)
    if "object_caps" in columns_filter:
        object_caps = ReportObjectCaps(mos_id)
        caps_columns = list(six.itervalues(object_caps.ATTRS))
        ccc = iter(object_caps)
        r[-1].extend(caps_columns)
    if "object_tags" in columns_filter:
        r[-1].extend([_("OBJECT_TAGS")])
    if "sorted_tags" in columns_filter:
        # One column per distinct non-templated tag, sorted
        tags = set()
        for s in (ManagedObject.objects.filter().exclude(
                tags=None).values_list("tags", flat=True).distinct()):
            tags.update(set(s))
        tags_o = sorted([t for t in tags if "{" not in t])
        r[-1].extend(tags_o)
    if "discovery_problem" in columns.split(","):
        discovery_result = ReportDiscoveryResult(mos_id)
        discovery_result.safe_output = True
        discovery_result.unknown_value = ([""] * len(discovery_result.ATTRS), )
        dp_columns = discovery_result.ATTRS
        dp = iter(discovery_result)
        r[-1].extend(dp_columns)
    # NB: the loop variable `is_managed` shadows the parameter from here on
    for (
        mo_id,
        name,
        address,
        is_managed,
        sa_profile,
        o_profile,
        auth_profile,
        ad,
        m_segment,
        vendor,
        platform,
        version,
        tags,
    ) in (mos.values_list(
            "id",
            "name",
            "address",
            "is_managed",
            "profile",
            "object_profile__name",
            "auth_profile__name",
            "administrative_domain__name",
            "segment",
            "vendor",
            "platform",
            "version",
            "tags",
    ).order_by("id").iterator()):
        if (mos_filter and mo_id not in mos_filter) or not mos_id:
            continue
        if container_serials:
            mo_serials = next(container_serials)
        else:
            mo_serials = [{}]
        if container_lookup:
            mo_continer = next(container_lookup)
        else:
            mo_continer = ("", )
        if roa:
            serial, hw_ver, boot_prom, patch = next(roa)[0]  # noqa
        else:
            serial, hw_ver, boot_prom, patch = "", "", "", ""  # noqa
        r.append(
            translate_row(
                row([
                    mo_id,
                    name,
                    address,
                    next(hn)[0],
                    "managed" if is_managed else "unmanaged",
                    Profile.get_by_id(sa_profile),
                    o_profile,
                    Vendor.get_by_id(vendor) if vendor else "",
                    Platform.get_by_id(platform) if platform else "",
                    hw_ver,
                    Firmware.get_by_id(version) if version else "",
                    boot_prom,
                    # Serial
                    mo_serials[0].get("serial", "") or serial,
                    patch or "",
                    auth_profile,
                    _("Yes") if avail.get(mo_id, None) else _("No"),
                    ad,
                    mo_continer[0],
                    NetworkSegment.get_by_id(m_segment) if m_segment else "",
                    next(iface_count)[0],
                    next(link_count)[0],
                    next(rc)[0],
                ]),
                cmap,
            ))
        # Optional trailing cells, in the same order as the extended header
        if "adm_path" in columns_filter:
            r[-1].extend([ad] + list(ad_path[ad]))
        if "interface_type_count" in columns_filter:
            r[-1].extend(next(iss)[0])
        if "object_caps" in columns_filter:
            r[-1].extend(next(ccc)[0])
        if "object_tags" in columns_filter:
            r[-1].append(",".join(tags if tags else []))
        if "sorted_tags" in columns_filter:
            out_tags = [""] * len(tags_o)
            try:
                if tags:
                    for m in tags:
                        out_tags[tags_o.index(m)] = m
            except ValueError:
                logger.warning("Bad value for tag: %s", m)
            r[-1].extend(out_tags)
        if "discovery_problem" in columns_filter:
            r[-1].extend(next(dp)[0])
    filename = "mo_detail_report_%s" % datetime.datetime.now().strftime(
        "%Y%m%d")
    if o_format == "csv":
        response = HttpResponse(content_type="text/csv")
        response[
            "Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
        writer = csv.writer(response,
                            dialect="excel",
                            delimiter=";",
                            quotechar='"')
        writer.writerows(r)
        return response
    elif o_format == "xlsx":
        response = StringIO()
        wb = xlsxwriter.Workbook(response)
        cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
        ws = wb.add_worksheet("Objects")
        # Track widest cell text per header for optional auto-width
        max_column_data_length = {}
        for rn, x in enumerate(r):
            for cn, c in enumerate(x):
                if rn and (r[0][cn] not in max_column_data_length
                           or len(str(c)) > max_column_data_length[r[0][cn]]):
                    max_column_data_length[r[0][cn]] = len(str(c))
                ws.write(rn, cn, c, cf1)
        # for
        ws.autofilter(0, 0, rn, cn)
        ws.freeze_panes(1, 0)
        for cn, c in enumerate(r[0]):
            # Set column width
            width = get_column_width(c)
            if enable_autowidth and width < max_column_data_length[c]:
                width = max_column_data_length[c]
            ws.set_column(cn, cn, width=width)
        wb.close()
        response.seek(0)
        response = HttpResponse(response.getvalue(),
                                content_type="application/vnd.ms-excel")
        # response = HttpResponse(
        #     content_type="application/x-ms-excel")
        response[
            "Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
        response.close()
        return response
def api_objects_statuses(self, request, objects: List[int]):
    """
    Resolve a composite status code (OK/DOWN/ALARM/UNKNOWN, OR-ed with
    MAINTENANCE) for every requested object id.

    Fixes in the maintenance $lookup pipeline:
    * "stop": {"gte": now} lacked the "$" -- MongoDB treated it as an
      equality match against a literal {"gte": ...} subdocument, never true;
    * "let" values must be expressions ("$_id"), not literal strings;
    * the sub-pipeline $match referenced fields via the non-existent "m."
      alias and never used the $$maintenance variable, and nothing filtered
      on a non-empty "m" array, so expired/future maintenances also matched.
    """
    def get_alarms(objects: List[int]) -> Set[int]:
        """
        Returns a set of objects with alarms
        """
        alarms: Set[int] = set()
        coll = ActiveAlarm._get_collection()
        # Chunk ids to keep $in lists bounded
        while objects:
            chunk, objects = objects[:500], objects[500:]
            a = coll.aggregate([
                {
                    "$match": {
                        "managed_object": {
                            "$in": chunk
                        }
                    }
                },
                {
                    "$group": {
                        "_id": "$managed_object",
                        "count": {
                            "$sum": 1
                        }
                    }
                },
            ])
            alarms.update(d["_id"] for d in a)
        return alarms

    def get_maintenance(objects: List[int]) -> Set[int]:
        """
        Returns a set of objects currently in maintenance

        :param objects:
        :return:
        """
        now = datetime.datetime.now()
        so = set(objects)
        pipeline = [
            {"$match": {"affected_objects.object": {"$in": list(so)}}},
            {"$unwind": "$affected_objects"},
            {
                "$lookup": {
                    "from": "noc.maintenance",
                    "as": "m",
                    # NOTE(review): assumes the affected-objects document
                    # carries a `maintenance` reference field -- confirm
                    # against the AffectedObjects schema
                    "let": {"maintenance": "$maintenance"},
                    "pipeline": [
                        {
                            "$match": {
                                # Join on the maintenance id; looked-up
                                # fields are addressed directly, not via "m."
                                "$expr": {"$eq": ["$_id", "$$maintenance"]},
                                "is_completed": False,
                                "start": {"$lte": now},
                                "stop": {"$gte": now},
                            }
                        }
                    ],
                }
            },
            # Keep only objects whose maintenance window is currently active
            {"$match": {"m": {"$ne": []}}},
            {
                "$project": {
                    "_id": 0,
                    "object": "$affected_objects.object",
                }
            },
            {"$group": {"_id": "$object"}},
        ]
        return so & {
            x["_id"]
            for x in AffectedObjects._get_collection().aggregate(pipeline)
        }

    # Mark all as unknown, then refine from collected status data
    r = {o: self.ST_UNKNOWN for o in objects}
    sr = ObjectStatus.get_statuses(objects)
    sa = get_alarms(objects)
    mo = get_maintenance(objects)
    for o in sr:
        if sr[o]:
            # Check for alarms
            if o in sa:
                r[o] = self.ST_ALARM
            else:
                r[o] = self.ST_OK
        else:
            r[o] = self.ST_DOWN
        # Maintenance is flagged on top of the base status
        if o in mo:
            r[o] |= self.ST_MAINTENANCE
    return r
def api_objects_statuses(self, request, objects):
    """
    Resolve a composite status code (OK/DOWN/ALARM/UNKNOWN, OR-ed with
    MAINTENANCE) for every requested object id.
    """
    def get_alarms(objects):
        """
        Returns a set of objects with alarms
        """
        with_alarms = set()
        coll = ActiveAlarm._get_collection()
        # Query in chunks of 500 ids
        while objects:
            chunk, objects = objects[:500], objects[500:]
            cursor = coll.aggregate([
                {"$match": {"managed_object": {"$in": chunk}}},
                {"$group": {"_id": "$managed_object", "count": {"$sum": 1}}},
            ])
            with_alarms.update(d["_id"] for d in cursor)
        return with_alarms

    def get_maintenance(objects):
        """
        Returns a set of objects currently in maintenance
        :param objects:
        :return:
        """
        now = datetime.datetime.now()
        wanted = set(objects)
        in_maintenance = set()
        for doc in Maintenance._get_collection().find(
                {"is_completed": False, "start": {"$lte": now}},
                {"_id": 0, "affected_objects": 1}):
            affected = {ao["object"] for ao in doc["affected_objects"]}
            in_maintenance |= wanted & affected
        return in_maintenance

    # Start with every object unknown; refine from collected status data
    result = dict((o, self.ST_UNKNOWN) for o in objects)
    statuses = ObjectStatus.get_statuses(objects)
    alarmed = get_alarms(objects)
    maintained = get_maintenance(objects)
    for o in statuses:
        if not statuses[o]:
            result[o] = self.ST_DOWN
        elif o in alarmed:
            result[o] = self.ST_ALARM
        else:
            result[o] = self.ST_OK
        # Maintenance is flagged on top of the base status
        if o in maintained:
            result[o] |= self.ST_MAINTENANCE
    return result
def get_data(self, request, pool=None, obj_profile=None, available_only=False,
             **kwargs):
    """
    Build the "object problems" report: for each managed object in *pool*
    (optionally restricted by profile / availability / user access) report
    the first detected problem among: failed profile check, no interfaces,
    no links, no uplinks.
    """
    problems = {}  # id -> problem
    mos = ManagedObject.objects.filter(is_managed=True, pool=pool)
    if not request.user.is_superuser:
        # Restrict to administrative domains the user may access
        mos = mos.filter(
            administrative_domain__in=UserAccess.get_domains(request.user))
    if obj_profile:
        # Get all managed objects
        mos = mos.filter(object_profile=obj_profile)
    # id -> (name, address, Profile, platform id, segment id)
    mos = {
        mo[0]: (mo[1], mo[2], Profile.get_by_id(mo[3]), mo[4], mo[5])
        for mo in mos.values_list("id", "name", "address", "profile",
                                  "platform", "segment")
    }
    mos_set = set(mos)
    if available_only:
        statuses = ObjectStatus.get_statuses(list(mos_set))
        mos_set = {mo for mo in mos_set if statuses.get(mo)}
    # Get all managed objects with generic profile.
    # NOTE(review): iterates all of `mos` (not availability-filtered
    # mos_set) and compares a Profile instance with the GENERIC_PROFILE
    # name -- presumably Profile defines name-based equality; confirm.
    for mo in mos:
        if mos[mo][2] == GENERIC_PROFILE:
            problems[mo] = _("Profile check failed")
    # Get all managed objects without interfaces
    if_mo = {
        x["_id"]: x.get("managed_object")
        for x in Interface._get_collection().find({}, {
            "_id": 1,
            "managed_object": 1
        })
    }
    for mo in mos_set - set(problems) - set(if_mo.values()):
        problems[mo] = _("No interfaces")
    # Get all managed objects without links
    linked_mos = set()
    for d in Link._get_collection().find({}):
        for i in d["interfaces"]:
            # Resolve interface id back to its managed object
            linked_mos.add(if_mo.get(i))
    for mo in mos_set - set(problems) - linked_mos:
        problems[mo] = _("No links")
    # Get all managed objects without uplinks
    uplinks = {}
    for d in ObjectData._get_collection().find():
        nu = len(d.get("uplinks", []))
        if nu:
            uplinks[d["_id"]] = nu
    for mo in mos_set - set(problems) - set(uplinks):
        problems[mo] = _("No uplinks")
    #
    data = []
    for mo_id in problems:
        if mo_id not in mos:
            continue
        name, address, profile, platform, segment = mos[mo_id]
        data += [[
            name,
            address,
            profile.name,
            Platform.get_by_id(platform).name if platform else "",
            NetworkSegment.get_by_id(segment).name if segment else "",
            problems[mo_id],
        ]]
    data = sorted(data)
    return self.from_dataset(
        title=self.title,
        columns=[
            "Name", "Address", "Profile", "Platform", "Segment", "Problem"
        ],
        data=data,
        enumerate=True,
    )