def bulk_field_isinmaintenance(self, data):
    """
    Annotate each alarm row in *data* with an ``isInMaintenance`` flag.

    For active alarms (``status == "A"``) the flag is a simple membership
    test against the set of currently affected managed objects.  For
    cleared alarms, maintenance (start, stop) intervals are fetched per
    managed object with a single aggregation pipeline, and the flag is
    set when the alarm's lifetime [timestamp, clear_timestamp] overlaps
    any maintenance interval.

    :param data: list of alarm dicts; all rows are assumed to share the
                 same "status" value (only data[0] is inspected)
    :returns: the same list, mutated in place
    """
    if not data:
        return data
    if data[0]["status"] == "A":
        # Active alarms: membership in the currently-affected set is enough
        mtc = set(Maintenance.currently_affected())
        for x in data:
            x["isInMaintenance"] = x["managed_object"] in mtc
    else:
        # Cleared alarms: collect all maintenance intervals per object
        # in one round trip
        mos = [x["managed_object"] for x in data]
        pipeline = [
            {"$match": {"affected_objects.object": {"$in": mos}}},
            {"$unwind": "$affected_objects"},
            {
                "$project": {
                    "_id": 0,
                    "managed_object": "$affected_objects.object",
                    "interval": ["$start", "$stop"],
                }
            },
            {"$group": {"_id": "$managed_object", "intervals": {"$push": "$interval"}}},
        ]
        # mtc: managed_object id -> [[start, stop], ...]
        mtc = {
            x["_id"]: x["intervals"]
            for x in Maintenance._get_collection().aggregate(pipeline)
        }
        for x in data:
            if x["managed_object"] in mtc:
                # left = all starts, right = all stops
                # NOTE(review): bisect assumes both lists are sorted; $push
                # does not guarantee ordering of intervals — confirm the
                # intervals arrive sorted by start/stop.
                left, right = list(zip(*mtc[x["managed_object"]]))
                # Overlap test: the alarm interval overlaps some maintenance
                # interval iff its endpoints fall into different positions
                # relative to the maintenance boundaries.
                x["isInMaintenance"] = bisect.bisect(
                    right,
                    dateutil.parser.parse(x["timestamp"]).replace(tzinfo=None)
                ) != bisect.bisect(
                    left,
                    dateutil.parser.parse(x["clear_timestamp"]).replace(tzinfo=None)
                )
            else:
                x["isInMaintenance"] = False
    return data
def bulk_field_isinmaintenance(self, data):
    """
    Annotate each alarm row in *data* with an ``isInMaintenance`` flag.

    Same contract as the aggregate-based variant: active alarms are
    checked against the currently-affected set; cleared alarms are
    checked for overlap between [timestamp, clear_timestamp] and the
    maintenance intervals of their managed object.

    :param data: list of alarm dicts; all rows are assumed to share the
                 same "status" value (only data[0] is inspected)
    :returns: the same list, mutated in place
    """
    if not data:
        return data
    if data[0]["status"] == "A":
        # Active alarms: membership in the currently-affected set is enough
        mtc = set(Maintenance.currently_affected())
        for x in data:
            x["isInMaintenance"] = x["managed_object"] in mtc
    else:
        mos = set([x["managed_object"] for x in data])
        mtc = {}
        # NOTE(review): one AffectedObjects query plus one Maintenance
        # lookup per managed object (N+1 pattern) — the aggregation-based
        # variant of this method does the same in a single round trip.
        for mo in list(mos):
            interval = []
            for ao in AffectedObjects._get_collection().find(
                    {"affected_objects.object": {"$eq": mo}},
                    {"_id": 0, "maintenance": 1}):
                m = Maintenance.get_by_id(ao["maintenance"])
                interval += [(m.start, m.stop)]
            if interval:
                mtc[mo] = interval
        for x in data:
            if x["managed_object"] in mtc:
                # left = all starts, right = all stops
                # NOTE(review): bisect assumes sorted lists; the intervals
                # are collected in cursor order — confirm they are sorted.
                left, right = list(zip(*mtc[x["managed_object"]]))
                x["isInMaintenance"] = bisect.bisect(
                    right,
                    dateutil.parser.parse(x["timestamp"]).replace(
                        tzinfo=None)) != bisect.bisect(
                            left,
                            dateutil.parser.parse(
                                x["clear_timestamp"]).replace(tzinfo=None))
            else:
                x["isInMaintenance"] = False
    return data
def get_ajax_data(self, **kwargs):
    """
    Build the per-container object/status summary for the map card.

    Managed objects (restricted to the current user's administrative
    domains unless superuser) are grouped by container.  Each object is
    classified as maintenance / warning / error / good from its alarm
    severity, per-container counters are accumulated, and containers
    with geo coordinates become map markers.  Returns a dict with
    "objects" (markers) and "summary" (service glyph summary).
    """
    object_id = self.handler.get_argument("object_id")
    if self.current_user.is_superuser:
        moss = ManagedObject.objects.filter(is_managed=True)
    else:
        moss = ManagedObject.objects.filter(
            is_managed=True,
            administrative_domain__in=self.get_user_domains())
    objects = []
    objects_status = {
        "error": [],
        "warning": [],
        "good": [],
        "maintenance": []
    }
    sss = {"error": {}, "warning": {}, "good": {}, "maintenance": {}}
    services = defaultdict(list)
    try:
        object_root = Object.objects.filter(id=object_id).first()
    except ValidationError:
        # object_id is not a valid ObjectId — fall back to the full tree
        object_root = None
    if object_root:
        con = [str(c) for c in self.get_containers_by_root(object_root.id)]
        moss = moss.filter(container__in=con).order_by("container")
    else:
        moss = moss.exclude(container=None).order_by("container")
        con = list(moss.values_list("container", flat=True))
    mo_ids = list(moss.values_list("id", flat=True))
    # Getting Alarms severity dict MO: Severity @todo List alarms
    if not object_root:
        alarms = self.get_alarms_info(None, alarms_all=True)
    else:
        alarms = self.get_alarms_info(mo_ids)
    # Objects currently under maintenance
    maintenance = Maintenance.currently_affected()
    # Service summary per managed object
    if not object_root:
        services_map = self.get_objects_summary_met(mo_ids, info_all=True)
    else:
        services_map = self.get_objects_summary_met(mo_ids)
    # Container name plus flattened "<interface>.<attr>" data values
    # (e.g. "geopoint.x", "address.text")
    containers = {
        str(o["_id"]): (
            o["name"],
            {
                "%s.%s" % (item["interface"], item["attr"]): item["value"]
                for item in o.get("data", [])
            },
        )
        for o in Object.objects.filter(
            data__match={"interface": "geopoint"},
            id__in=con).read_preference(
                ReadPreference.SECONDARY_PREFERRED).fields(
                    id=1, name=1, data=1).as_pymongo()
    }
    # Main Loop. Get ManagedObject group by container
    for container, mol in itertools.groupby(
            moss.values_list("id", "name", "container").order_by("container"),
            key=lambda o: o[2]):
        name, data = containers.get(container, ("", {"geopoint": {}}))
        x = data.get("geopoint.x")
        y = data.get("geopoint.y")
        address = data.get("address.text", "")
        ss = {
            "objects": [],
            "total": 0,
            "error": 0,
            "warning": 0,
            "good": 0,
            "maintenance": 0
        }
        for mo_id, mo_name, container in mol:
            # Status by alarm severity
            status = "good"
            if mo_id in maintenance:
                status = "maintenance"
            elif 100 < alarms.get(mo_id, 0) <= 2000:
                status = "warning"
            elif alarms.get(mo_id, 0) > 2000:
                status = "error"
            objects_status[status] += [mo_id]
            ss[status] += 1
            ss["total"] += 1
            services_ss = [
                "%s-%s" % (sm, status)
                for sm in services_map.get(mo_id, [self.fake_service])
            ]
            ss["objects"] += [{
                "id": mo_id,
                "name": mo_name,
                "status": status,
                "services": services_ss
            }]
        if not x or not y:
            # Container without coordinates: counted but not drawn
            continue
        objects += [{
            "name": address or name,
            "id": str(container),
            "x": x if x > -168 else x + 360,  # For Chukotskiy AO
            "y": y,
            "objects": [],
            "total": 0,
            "error": 0,
            "warning": 0,
            "good": 0,
            "maintenance": 0,
        }]
        objects[-1].update(ss)
    # Direct service summaries per status bucket
    profiles = set()
    for r in ["error", "warning", "good", "maintenance"]:
        if not objects_status[r]:
            continue
        if not object_root and r == "good":
            m_services, m_subscribers = ServiceSummary.get_direct_summary(
                objects_status[r], summary_all=True)
        else:
            m_services, m_subscribers = ServiceSummary.get_direct_summary(
                objects_status[r])
        profiles |= set(m_services)
        sss[r] = m_services
    # Emit per-profile (status, count) pairs in fixed severity order
    for r in sorted(sss, key=lambda k: ("error", "warning", "good",
                                        "maintenance").index(k)):
        for p in profiles:
            services[p] += [(r, sss[r].get(p, None))]
    return {
        "objects": objects,
        "summary": self.f_glyph_summary({"service": services}),
    }
def get_ajax_data(self, **kwargs):
    """
    Build the alarm-map payload for the requested viewport.

    Reads viewport bounds (w/e/n/s), zoom and the "maintenance" flag from
    the request, collects active alarms (optionally excluding objects
    under maintenance), and returns alarm markers, a service/subscriber
    glyph summary, pop links and pop points for the visible layers.
    """
    def update_dict(d, s):
        # Merge summary counters from s into d
        for k in s:
            if k in d:
                d[k] += s[k]
            else:
                d[k] = s[k]

    zoom = int(self.handler.get_argument("z"))
    west = float(self.handler.get_argument("w"))
    east = float(self.handler.get_argument("e"))
    north = float(self.handler.get_argument("n"))
    south = float(self.handler.get_argument("s"))
    ms = int(self.handler.get_argument("maintenance"))
    active_layers = [
        l_r for l_r in self.get_pop_layers()
        if l_r.min_zoom <= zoom <= l_r.max_zoom
    ]
    alarms = []
    res = {}
    services = {}
    subscribers = {}
    t_data = defaultdict(list)
    # FIX: also fetch "address" — it is shown in the pop tooltip below
    mos = ManagedObject.objects.filter(is_managed=True).values(
        "id", "name", "address", "x", "y")
    if not self.current_user.is_superuser:
        mos = mos.filter(administrative_domain__in=UserAccess.get_domains(
            self.current_user))
    for mo in mos:
        # FIX: keep "id" inside the record (the original popped it),
        # it is needed later to deduplicate tooltip entries
        res[mo["id"]] = mo
    mos_id = list(res.keys())
    if ms == 0:
        # Filter out equipment under maintenance
        mos_id = list(set(res.keys()) - set(Maintenance.currently_affected()))
    for a in ActiveAlarm._get_collection().find(
        {"managed_object": {"$in": mos_id, "$exists": True}},
        {
            "_id": 1,
            "managed_object": 1,
            "direct_subscribers": 1,
            "direct_services": 1
        },
    ):
        s_sub, s_service = {}, {}
        if a.get("direct_subscribers"):
            s_sub = {
                dsub["profile"]: dsub["summary"]
                for dsub in a["direct_subscribers"]
            }
        if a.get("direct_services"):
            s_service = {
                dserv["profile"]: dserv["summary"]
                for dserv in a["direct_services"]
            }
        mo = res.get(a["managed_object"])
        if not mo:
            continue
        if mo["x"] and mo["y"]:
            w = ServiceSummary.get_weight({
                "subscriber": s_sub,
                "service": s_service
            })
            # @todo: Should we add the object's weight to summary?
            # @todo: Check west/south hemisphere
            if active_layers and west <= mo["x"] <= east and south <= mo[
                    "y"] <= north:
                t_data[mo["x"], mo["y"]] += [(mo, w)]
        else:
            w = 0
        alarms += [{
            "alarm_id": str(a.get("_id")),
            "managed_object": mo["name"],
            "x": mo["x"],
            "y": mo["y"],
            "w": max(w, 1),
        }]
        if s_service:
            update_dict(services, s_service)
        if s_sub:
            update_dict(subscribers, s_sub)
    links = None
    o_seen = set()
    points = None
    o_data = {}
    if t_data and active_layers:
        # Create lines: pop links whose geometry intersects the viewport
        # and touches at least one alarm point
        bbox = get_bbox(west, east, north, south)
        lines = []
        for d in ObjectConnection._get_collection().find(
            {
                "type": "pop_link",
                "layer": {"$in": [a_l.id for a_l in active_layers]},
                "line": {"$geoIntersects": {"$geometry": bbox}},
            },
            {"_id": 0, "connection": 1, "line": 1},
        ):
            for c in d["line"]["coordinates"]:
                if tuple(c) in t_data:
                    for c in d["line"]["coordinates"]:
                        tc = tuple(c)
                        o_data[tc] = t_data.get(tc, [])
                        o_seen.add(tc)
                    lines += [d["line"]]
                    break
        if lines:
            links = geojson.FeatureCollection(features=lines)
        # Create points
        points = []
        for x, y in o_data:
            # FIX: the records are dicts (unhashable), so deduplicate by
            # object id, then show the heaviest objects at this point.
            # The original keyed a dict by the record itself (TypeError)
            # and built the tooltip from the whole queryset.
            best = {}
            for mo, w in o_data[x, y]:
                if mo["id"] not in best:
                    best[mo["id"]] = (mo, w)
            top = sorted(best.values(), key=lambda mw: mw[1],
                         reverse=True)[:self.TOOLTIP_LIMIT]
            points += [
                geojson.Feature(
                    geometry=geojson.Point(coordinates=[x, y]),
                    properties={
                        "alarms": len(t_data[x, y]),
                        "objects": [{
                            "id": mo["id"],
                            "name": mo["name"],
                            "address": mo.get("address")
                        } for mo, w in top],
                    },
                )
            ]
        points = geojson.FeatureCollection(features=points)
    return {
        "alarms": alarms,
        "summary": self.f_glyph_summary({
            "service": services,
            "subscriber": subscribers
        }),
        "links": links,
        "pops": points,
    }
def get_ajax_data(self, **kwargs):
    """
    Build the per-container object/status summary for the object map card.

    Managed objects (restricted to the current user's administrative
    domains unless superuser) are grouped by container.  Each object is
    classified as maintenance / warning / error / good from its alarm
    severity, per-container counters are accumulated, and containers
    with geo coordinates become map markers.  Returns a dict with
    "objects" (markers) and "summary" (service glyph summary).
    """
    object_id = self.handler.get_argument("object_id")
    if self.current_user.is_superuser:
        moss = ManagedObject.objects.filter(is_managed=True)
    else:
        moss = ManagedObject.objects.filter(
            is_managed=True,
            administrative_domain__in=self.get_user_domains())
    objects = []
    objects_status = {
        "error": [],
        "warning": [],
        "good": [],
        "maintenance": []
    }
    sss = {"error": {}, "warning": {}, "good": {}, "maintenance": {}}
    services = defaultdict(list)
    try:
        object_root = Object.objects.filter(id=object_id).first()
    except ValidationError:
        # object_id is not a valid ObjectId — fall back to the full tree
        object_root = None
    if object_root:
        con = [str(c) for c in self.get_containers_by_root(object_root.id)]
        moss = moss.filter(container__in=con).order_by("container")
    else:
        moss = moss.exclude(container=None).order_by("container")
        con = list(moss.values_list("container", flat=True))
    mo_ids = list(moss.values_list("id", flat=True))
    # Alarm severity per managed object. @todo List alarms
    if not object_root:
        alarms = self.get_alarms_info(None, alarms_all=True)
    else:
        alarms = self.get_alarms_info(mo_ids)
    # Objects currently under maintenance
    maintenance = Maintenance.currently_affected()
    # Service summary per managed object
    if not object_root:
        services_map = self.get_objects_summary_met(mo_ids, info_all=True)
    else:
        services_map = self.get_objects_summary_met(mo_ids)
    # Container name and geo coordinates
    containers = {
        str(o["_id"]): (o["name"], o["data"])
        for o in Object.objects.filter(
            data__geopoint__exists=True,
            id__in=con,
            read_preference=ReadPreference.SECONDARY_PREFERRED,
        ).fields(id=1, name=1, data__geopoint__x=1,
                 data__geopoint__y=1).as_pymongo()
    }
    # Main Loop. Get ManagedObject group by container
    for container, mol in itertools.groupby(
            moss.values_list("id", "name", "container").order_by("container"),
            key=lambda o: o[2]):
        name, data = containers.get(container, ("", {"geopoint": {}}))
        x = data["geopoint"].get("x")
        y = data["geopoint"].get("y")
        ss = {
            "objects": [],
            "total": 0,
            "error": 0,
            "warning": 0,
            "good": 0,
            "maintenance": 0
        }
        for mo_id, mo_name, container in mol:
            # Status by alarm severity.
            # FIX: default to 0 — alarms.get(mo_id) returns None for objects
            # without alarms, which breaks the "<" comparison on Python 3
            # (the sibling implementation already uses .get(mo_id, 0)).
            severity = alarms.get(mo_id, 0)
            status = "good"
            if mo_id in maintenance:
                status = "maintenance"
            elif 100 < severity <= 2000:
                status = "warning"
            elif severity > 2000:
                status = "error"
            objects_status[status] += [mo_id]
            ss[status] += 1
            ss["total"] += 1
            services_ss = [
                "%s-%s" % (sm, status)
                for sm in services_map.get(mo_id, [self.fake_service])
            ]
            ss["objects"] += [{
                "id": mo_id,
                "name": mo_name,
                "status": status,
                "services": services_ss
            }]
        if not x or not y:
            # Container without coordinates: counted but not drawn
            continue
        objects += [{
            "name": name,
            "id": str(container),
            "x": x if x > -168 else x + 360,  # For Chukotskiy AO
            "y": y,
            "objects": [],
            "total": 0,
            "error": 0,
            "warning": 0,
            "good": 0,
            "maintenance": 0,
        }]
        objects[-1].update(ss)
    # Direct service summaries per status bucket
    profiles = set()
    for r in ["error", "warning", "good", "maintenance"]:
        if not objects_status[r]:
            continue
        if not object_root and r == "good":
            m_services, m_subscribers = ServiceSummary.get_direct_summary(
                objects_status[r], summary_all=True)
        else:
            m_services, m_subscribers = ServiceSummary.get_direct_summary(
                objects_status[r])
        profiles |= set(m_services)
        sss[r] = m_services
    # Emit per-profile (status, count) pairs in fixed severity order
    for r in sorted(sss, key=lambda k: ("error", "warning", "good",
                                        "maintenance").index(k)):
        for p in profiles:
            services[p] += [(r, sss[r].get(p, None))]
    return {
        "objects": objects,
        "summary": self.f_glyph_summary({"service": services}),
    }
def cleaned_query(self, q):
    """
    Prepare a raw request-parameter dict for use as an alarm queryset filter.

    Returns a copy of *q* with service parameters stripped, field values
    normalized through the per-field cleaners, and UI-level filters
    (maintenance, collapse, cleared_after, ...) translated into ORM
    lookups.  The input dict itself is left untouched.
    """
    q = q.copy()
    status = q.get("status", "A")
    # Drop service/UI parameters that are not queryset filters
    for p in self.ignored_params:
        q.pop(p, None)
    for p in (self.limit_param, self.page_param, self.start_param,
              self.format_param, self.sort_param, self.query_param,
              self.only_param):
        q.pop(p, None)
    # Normalize values through the per-field cleaners
    for p, v in list(q.items()):
        base = p.split("__")[0]
        if base in self.clean_fields:
            q[p] = self.clean_fields[base].clean(v)
    # Translate the maintenance mode into a managed_object filter
    mode = q.pop("maintenance", "hide")
    if mode == "hide":
        q["managed_object__nin"] = Maintenance.currently_affected()
    elif mode == "only":
        q["managed_object__in"] = Maintenance.currently_affected()
    if "administrative_domain" in q:
        q["adm_path"] = int(q.pop("administrative_domain"))
    if "segment" in q:
        q["segment_path"] = bson.ObjectId(q.pop("segment"))
    if "managedobjectselector" in q:
        sel_objects = SelectorCache.objects.filter(
            selector=q.pop("managedobjectselector")).values_list("object")
        if "managed_object__in" in q:
            q["managed_object__in"] = list(
                set(q["managed_object__in"]).intersection(sel_objects))
        else:
            q["managed_object__in"] = sel_objects
    if "cleared_after" in q:
        delta = datetime.timedelta(seconds=int(q.pop("cleared_after")))
        q["clear_timestamp__gte"] = datetime.datetime.now() - delta
    if "wait_tt" in q:
        del q["wait_tt"]
        q["wait_tt__exists"] = True
        q["wait_ts__exists"] = False
    if q.pop("collapse", "0") != "0":
        q["root__exists"] = False
    # Archived view defaults to a bounded time window
    if status == "C" and not any(
            k in q for k in ("timestamp__gte", "timestamp__lte",
                             "escalation_tt__contains", "managed_object")):
        q["timestamp__gte"] = datetime.datetime.now() - self.DEFAULT_ARCH_ALARM
    return q
def instance_to_dict(self, o, fields=None):
    """
    Serialize an alarm document *o* into a flat dict for the UI grid.

    :param o: Active or archived alarm document (status "A" or "C")
    :param fields: optional iterable of keys; when given, only those
                   keys are returned (KeyError on unknown names)
    :returns: dict of grid columns
    """
    s = AlarmSeverity.get_severity(o.severity)
    # Count events linked to this alarm in both event collections
    n_events = (ActiveEvent.objects.filter(alarms=o.id).count() +
                ArchivedEvent.objects.filter(alarms=o.id).count())
    mtc = o.managed_object.id in Maintenance.currently_affected()
    if o.status == "C":
        # For archived alarms: was the object under maintenance during
        # the alarm's lifetime?
        # NOTE(review): the filter uses start__lte=clear_timestamp together
        # with stop__lte=timestamp — an interval-overlap test would
        # normally be stop__gte=timestamp; confirm against Maintenance
        # query semantics.
        mtc = Maintenance.objects.filter(
            start__lte=o.clear_timestamp,
            stop__lte=o.timestamp,
            affected_objects__in=[
                MaintenanceObject(object=o.managed_object)
            ]).count() > 0
    d = {
        "id": str(o.id),
        "status": o.status,
        "managed_object": o.managed_object.id,
        "managed_object__label": o.managed_object.name,
        "administrative_domain": o.managed_object.administrative_domain_id,
        "administrative_domain__label":
        o.managed_object.administrative_domain.name,
        "severity": o.severity,
        "severity__label": s.name,
        "alarm_class": str(o.alarm_class.id),
        "alarm_class__label": o.alarm_class.name,
        "timestamp": self.to_json(o.timestamp),
        "subject": o.subject,
        "events": n_events,
        "duration": o.duration,
        # clear_timestamp only exists on archived alarms
        "clear_timestamp":
        self.to_json(o.clear_timestamp) if o.status == "C" else None,
        "row_class": s.style.css_class_name,
        "segment__label": o.managed_object.segment.name,
        "segment": str(o.managed_object.segment.id),
        "location_1":
        self.location(o.managed_object.container.id)[0]
        if o.managed_object.container else "",
        "location_2":
        self.location(o.managed_object.container.id)[1]
        if o.managed_object.container else "",
        "escalation_tt": o.escalation_tt,
        "escalation_error": o.escalation_error,
        "platform":
        o.managed_object.platform.name if o.managed_object.platform else "",
        "address": o.managed_object.address,
        "isInMaintenance": mtc,
        "summary": self.f_glyph_summary({
            "subscriber": SummaryItem.items_to_dict(o.total_subscribers),
            "service": SummaryItem.items_to_dict(o.total_services)
        }),
        "total_objects": sum(x.summary for x in o.total_objects)
    }
    if fields:
        # Restrict the payload to the requested columns
        d = dict((k, d[k]) for k in fields)
    return d
def get_ajax_data(self, **kwargs):
    """
    Build the alarm-map payload for the requested viewport.

    Reads viewport bounds (w/e/n/s), zoom and the "maintenance" flag from
    the request, collects active alarms through the ORM (optionally
    excluding objects under maintenance), and returns alarm markers, a
    service/subscriber glyph summary, pop links and pop points for the
    active layers.
    """
    def update_dict(d, s):
        # Merge summary counters from s into d
        for k in s:
            if k in d:
                d[k] += s[k]
            else:
                d[k] = s[k]

    zoom = int(self.handler.get_argument("z"))
    west = float(self.handler.get_argument("w"))
    east = float(self.handler.get_argument("e"))
    north = float(self.handler.get_argument("n"))
    south = float(self.handler.get_argument("s"))
    ms = int(self.handler.get_argument("maintenance"))
    active_layers = [
        l for l in self.get_pop_layers()
        if l.min_zoom <= zoom <= l.max_zoom
    ]
    alarms = []
    services = {}
    subscribers = {}
    t_data = defaultdict(list)
    if self.current_user.is_superuser:
        qs = ActiveAlarm.objects.all()
    else:
        qs = ActiveAlarm.objects.filter(
            adm_path__in=self.get_user_domains())
    if ms == 0:
        # Filter out equipment under maintenance
        qs = qs.filter(
            managed_object__nin=Maintenance.currently_affected())
    for a in qs.only("id", "managed_object", "direct_subscribers",
                     "direct_services"):
        s_sub, s_service = {}, {}
        if a.direct_subscribers:
            s_sub = SummaryItem.items_to_dict(a.direct_subscribers)
        if a.direct_services:
            s_service = SummaryItem.items_to_dict(a.direct_services)
        mo = a.managed_object
        if not mo:
            continue
        if mo.x and mo.y:
            w = ServiceSummary.get_weight({
                "subscriber": s_sub,
                "service": s_service
            })
            # @todo: Should we add the object's weight to summary?
            # @todo: Check west/south hemisphere
            if active_layers and west <= mo.x <= east and south <= mo.y <= north:
                t_data[mo.x, mo.y] += [(mo, w)]
        else:
            w = 0
        alarms += [{
            "alarm_id": str(a.id),
            "managed_object": mo.name,
            "x": mo.x,
            "y": mo.y,
            "w": max(w, 1),
        }]
        if s_service:
            update_dict(services, s_service)
        if s_sub:
            update_dict(subscribers, s_sub)
    links = None
    o_seen = set()
    points = None
    o_data = {}
    if t_data and active_layers:
        # Create lines: pop links whose geometry intersects the viewport
        # and touches at least one alarm point
        bbox = get_bbox(west, east, north, south)
        lines = []
        for d in ObjectConnection._get_collection().find(
            {
                "type": "pop_link",
                "layer": {"$in": [l.id for l in active_layers]},
                "line": {"$geoIntersects": {"$geometry": bbox}},
            },
            {"_id": 0, "connection": 1, "line": 1},
        ):
            for c in d["line"]["coordinates"]:
                if tuple(c) in t_data:
                    # Keep the whole polyline once any vertex has alarms
                    for c in d["line"]["coordinates"]:
                        tc = tuple(c)
                        o_data[tc] = t_data.get(tc, [])
                        o_seen.add(tuple(c))
                    lines += [d["line"]]
                    break
        if lines:
            links = geojson.FeatureCollection(features=lines)
        # Create points
        points = []
        for x, y in o_data:
            # Deduplicate objects at this point, keep the heaviest ones
            mos = {}
            for mo, w in o_data[x, y]:
                if mo not in mos:
                    mos[mo] = w
            mos = sorted(mos, key=lambda z: mos[z],
                         reverse=True)[:self.TOOLTIP_LIMIT]
            points += [
                geojson.Feature(
                    geometry=geojson.Point(coordinates=[x, y]),
                    properties={
                        "alarms": len(t_data[x, y]),
                        "objects": [{
                            "id": mo.id,
                            "name": mo.name,
                            "address": mo.address
                        } for mo in mos],
                    },
                )
            ]
        points = geojson.FeatureCollection(features=points)
    return {
        "alarms": alarms,
        "summary": self.f_glyph_summary({
            "service": services,
            "subscriber": subscribers
        }),
        "links": links,
        "pops": points,
    }
def api_report(
    self,
    request,
    from_date,
    to_date,
    o_format,
    min_duration=0,
    max_duration=0,
    min_objects=0,
    min_subscribers=0,
    segment=None,
    administrative_domain=None,
    selector=None,
    ex_selector=None,
    columns=None,
    source="both",
    alarm_class=None,
    subscribers=None,
    enable_autowidth=False,
):
    """
    Build the alarm report and return it as a CSV or XLSX HTTP response.

    Alarms are taken from the archived and/or active collections
    (controlled by *source*), filtered by time window, duration, segment,
    administrative domain, selectors and summary thresholds, then
    flattened into rows with container/segment path columns.

    :param from_date: period start, "%d.%m.%Y"
    :param to_date: period end, "%d.%m.%Y" (inclusive; one day is added)
    :param o_format: "csv" or "xlsx"
    :param columns: comma-separated column names to emit
        NOTE(review): several checks below call columns.split(",")
        unconditionally — a None *columns* would raise AttributeError;
        confirm callers always pass a string.
    :param source: "archive", "active" or "both"
    :returns: django HttpResponse with the rendered report
    """
    def row(row, container_path, segment_path):
        # Quote/normalize a raw value list and pad the path columns to
        # the fixed report depth
        def qe(v):
            if v is None:
                return ""
            # NOTE(review): `unicode` is a Python 2 name — on Python 3
            # this raises NameError when reached; the newer copy of this
            # report uses smart_text instead.
            if isinstance(v, unicode):
                return v.encode("utf-8")
            elif isinstance(v, datetime.datetime):
                return v.strftime("%Y-%m-%d %H:%M:%S")
            elif not isinstance(v, str):
                return str(v)
            else:
                return v

        r = [qe(x) for x in row]
        if len(container_path) < self.CONTAINER_PATH_DEPTH:
            container_path += [""] * (self.CONTAINER_PATH_DEPTH -
                                      len(container_path))
        else:
            container_path = container_path[:self.CONTAINER_PATH_DEPTH]
        if len(segment_path) < self.SEGMENT_PATH_DEPTH:
            segment_path += [""] * (self.SEGMENT_PATH_DEPTH -
                                    len(segment_path))
        else:
            segment_path = segment_path[:self.SEGMENT_PATH_DEPTH]
        return r + container_path + segment_path

    def translate_row(row, cmap):
        # Keep only the requested columns, in cols order
        return [row[i] for i in cmap]

    cols = ([
        "id",
        "root_id",
        "from_ts",
        "to_ts",
        "duration_sec",
        "object_name",
        "object_address",
        "object_hostname",
        "object_profile",
        "object_admdomain",
        "object_platform",
        "object_version",
        "alarm_class",
        "alarm_subject",
        "maintenance",
        "objects",
        "subscribers",
        "tt",
        "escalation_ts",
        "location",
        "container_address",
    ] + ["container_%d" % i for i in range(self.CONTAINER_PATH_DEPTH)] +
            ["segment_%d" % i for i in range(self.SEGMENT_PATH_DEPTH)])
    header_row = (
        [
            "ID",
            _("ROOT_ID"),
            _("FROM_TS"),
            _("TO_TS"),
            _("DURATION_SEC"),
            _("OBJECT_NAME"),
            _("OBJECT_ADDRESS"),
            _("OBJECT_HOSTNAME"),
            _("OBJECT_PROFILE"),
            _("OBJECT_ADMDOMAIN"),
            _("OBJECT_PLATFORM"),
            _("OBJECT_VERSION"),
            _("ALARM_CLASS"),
            _("ALARM_SUBJECT"),
            _("MAINTENANCE"),
            _("OBJECTS"),
            _("SUBSCRIBERS"),
            _("TT"),
            _("ESCALATION_TS"),
            _("LOCATION"),
            _("CONTAINER_ADDRESS"),
        ] + ["CONTAINER_%d" % i for i in range(self.CONTAINER_PATH_DEPTH)] +
        ["SEGMENT_%d" % i for i in range(self.SEGMENT_PATH_DEPTH)])
    if columns:
        cmap = []
        for c in columns.split(","):
            try:
                cmap += [cols.index(c)]
            except ValueError:
                # Unknown column names are silently skipped
                continue
    else:
        cmap = list(range(len(cols)))
    subscribers_profile = self.default_subscribers_profile
    if subscribers:
        subscribers_profile = set(
            SubscriberProfile.objects.filter(
                id__in=subscribers.split(",")).scalar("id"))
    r = [translate_row(header_row, cmap)]
    # fd: exclusive upper bound — to_date plus one day
    fd = datetime.datetime.strptime(
        to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
    match = {
        "timestamp": {
            "$gte": datetime.datetime.strptime(from_date, "%d.%m.%Y"),
            "$lte": fd
        }
    }
    match_duration = {"duration": {"$gte": min_duration}}
    if max_duration:
        match_duration = {
            "duration": {
                "$gte": min_duration,
                "$lte": max_duration
            }
        }
    mos = ManagedObject.objects.filter(is_managed=True)
    if segment:
        try:
            match["segment_path"] = bson.ObjectId(segment)
        except bson.errors.InvalidId:
            pass
    ads = []
    if administrative_domain:
        if administrative_domain.isdigit():
            administrative_domain = [int(administrative_domain)]
            ads = AdministrativeDomain.get_nested_ids(
                administrative_domain[0])
    if not request.user.is_superuser:
        # Restrict to the requesting user's administrative domains
        user_ads = UserAccess.get_domains(request.user)
        if administrative_domain and ads:
            if administrative_domain[0] not in user_ads:
                ads = list(set(ads) & set(user_ads))
            else:
                ads = administrative_domain
        else:
            ads = user_ads
    if ads:
        mos = mos.filter(administrative_domain__in=ads)
    if selector:
        selector = ManagedObjectSelector.get_by_id(int(selector))
        mos = mos.filter(selector.Q)
    if ex_selector:
        ex_selector = ManagedObjectSelector.get_by_id(int(ex_selector))
        mos = mos.exclude(ex_selector.Q)
    # Working if Administrative domain set
    if ads:
        try:
            match["adm_path"] = {"$in": ads}
            # @todo More 2 level hierarhy
        except bson.errors.InvalidId:
            pass
    mos_id = list(mos.order_by("id").values_list("id", flat=True))
    mo_hostname = {}
    maintenance = []
    if mos_id and (selector or ex_selector):
        match["managed_object"] = {"$in": mos_id}
    if "maintenance" in columns.split(","):
        maintenance = Maintenance.currently_affected()
    if "object_hostname" in columns.split(","):
        mo_hostname = ReportObjectsHostname1(sync_ids=mos_id)
        mo_hostname = mo_hostname.get_dictionary()
    moss = ReportAlarmObjects(mos_id).get_all()
    # container_lookup = ReportContainer(mos_id)
    container_lookup = None
    subject = "alarm_subject" in columns
    loc = AlarmApplication([])
    if source in ["archive", "both"]:
        # Archived Alarms
        for a in (ArchivedAlarm._get_collection().with_options(
                read_preference=ReadPreference.SECONDARY_PREFERRED
        ).aggregate([
            {
                "$match": match
            },
            {
                "$addFields": {
                    "duration": {
                        "$divide": [
                            {
                                "$subtract":
                                ["$clear_timestamp", "$timestamp"]
                            },
                            1000,
                        ]
                    }
                }
            },
            {
                "$match": match_duration
            },
            # {"$sort": {"timestamp": 1}}
        ])):
            if int(a["managed_object"]) not in moss:
                continue
            dt = a["clear_timestamp"] - a["timestamp"]
            duration = int(dt.total_seconds())
            total_objects = sum(ss["summary"] for ss in a["total_objects"])
            if min_objects and total_objects < min_objects:
                continue
            total_subscribers = sum(
                ss["summary"] for ss in a["total_subscribers"]
                if subscribers_profile
                and ss["profile"] in subscribers_profile)
            if min_subscribers and total_subscribers < min_subscribers:
                continue
            # NOTE(review): this membership test compares the literal
            # "segment_" against full column names like "segment_0" —
            # it looks like a prefix check was intended; confirm.
            if "segment_" in columns.split(
                    ",") or "container_" in columns.split(","):
                path = ObjectPath.get_path(a["managed_object"])
                if path:
                    segment_path = [
                        NetworkSegment.get_by_id(s).name
                        for s in path.segment_path
                        if NetworkSegment.get_by_id(s)
                    ]
                    container_path = [
                        Object.get_by_id(s).name
                        for s in path.container_path if Object.get_by_id(s)
                    ]
                else:
                    segment_path = []
                    container_path = []
            else:
                segment_path = []
                container_path = []
            r += [
                translate_row(
                    row(
                        [
                            str(a["_id"]),
                            str(a["root"]) if a.get("root") else "",
                            a["timestamp"],
                            a["clear_timestamp"],
                            str(duration),
                            moss[a["managed_object"]][0],
                            moss[a["managed_object"]][1],
                            mo_hostname.get(a["managed_object"], ""),
                            Profile.get_by_id(
                                moss[a["managed_object"]][3]).name
                            if moss[a["managed_object"]][5] else "",
                            moss[a["managed_object"]][6],
                            Platform.get_by_id(
                                moss[a["managed_object"]][9])
                            if moss[a["managed_object"]][9] else "",
                            Firmware.get_by_id(
                                moss[a["managed_object"]][10])
                            if moss[a["managed_object"]][10] else "",
                            AlarmClass.get_by_id(a["alarm_class"]).name,
                            ArchivedAlarm.objects.get(
                                id=a["_id"]).subject if subject else "",
                            # NOTE(review): maintenance column is always
                            # empty for archived alarms (the active branch
                            # emits Yes/No)
                            "",
                            total_objects,
                            total_subscribers,
                            a.get("escalation_tt"),
                            a.get("escalation_ts"),
                            ", ".join(l for l in (
                                loc.location(moss[a["managed_object"]][5]
                                             ) if moss[a["managed_object"]]
                                [5] is not None else "") if l),
                            container_lookup[a["managed_object"]].get(
                                "text", "") if container_lookup else "",
                        ],
                        container_path,
                        segment_path,
                    ),
                    cmap,
                )
            ]
    # Active Alarms
    if source in ["active", "both"]:
        for a in (ActiveAlarm._get_collection().with_options(
                read_preference=ReadPreference.SECONDARY_PREFERRED).
                  aggregate([
                      {
                          "$match": match
                      },
                      {
                          "$addFields": {
                              "duration": {
                                  "$divide": [{
                                      "$subtract": [fd, "$timestamp"]
                                  }, 1000]
                              }
                          }
                      },
                      {
                          "$match": match_duration
                      },
                      # {"$sort": {"timestamp": 1}}
                  ])):
            # Active alarms have no clear time; duration is up to fd
            dt = fd - a["timestamp"]
            duration = int(dt.total_seconds())
            total_objects = sum(ss["summary"] for ss in a["total_objects"])
            if min_objects and total_objects < min_objects:
                continue
            total_subscribers = sum(
                ss["summary"] for ss in a["total_subscribers"]
                if subscribers_profile
                and ss["profile"] in subscribers_profile)
            if min_subscribers and total_subscribers < min_subscribers:
                continue
            if "segment_" in columns.split(
                    ",") or "container_" in columns.split(","):
                path = ObjectPath.get_path(a["managed_object"])
                if path:
                    segment_path = [
                        NetworkSegment.get_by_id(s).name
                        for s in path.segment_path
                        if NetworkSegment.get_by_id(s)
                    ]
                    container_path = [
                        Object.get_by_id(s).name
                        for s in path.container_path if Object.get_by_id(s)
                    ]
                else:
                    segment_path = []
                    container_path = []
            else:
                segment_path = []
                container_path = []
            r += [
                translate_row(
                    row(
                        [
                            str(a["_id"]),
                            str(a["root"]) if a.get("root") else "",
                            a["timestamp"],
                            # a["clear_timestamp"],
                            "",
                            str(duration),
                            moss[a["managed_object"]][0],
                            moss[a["managed_object"]][1],
                            mo_hostname.get(a["managed_object"], ""),
                            # NOTE(review): the archived branch emits
                            # Profile...name here; this one emits the
                            # Profile object itself — confirm intent.
                            Profile.get_by_id(moss[a["managed_object"]][3])
                            if moss[a["managed_object"]][5] else "",
                            moss[a["managed_object"]][6],
                            Platform.get_by_id(
                                moss[a["managed_object"]][9])
                            if moss[a["managed_object"]][9] else "",
                            Firmware.get_by_id(
                                moss[a["managed_object"]][10])
                            if moss[a["managed_object"]][10] else "",
                            AlarmClass.get_by_id(a["alarm_class"]).name,
                            ActiveAlarm.objects.get(
                                id=a["_id"]).subject if subject else None,
                            "Yes" if a["managed_object"] in maintenance
                            else "No",
                            total_objects,
                            total_subscribers,
                            a.get("escalation_tt"),
                            a.get("escalation_ts"),
                            ", ".join(l for l in (
                                loc.location(moss[a["managed_object"]][5]
                                             ) if moss[a["managed_object"]]
                                [5] is not None else "") if l),
                            container_lookup[a["managed_object"]].get(
                                "text", "") if container_lookup else "",
                        ],
                        container_path,
                        segment_path,
                    ),
                    cmap,
                )
            ]
    if o_format == "csv":
        response = HttpResponse(content_type="text/csv")
        response[
            "Content-Disposition"] = 'attachment; filename="alarms.csv"'
        writer = csv.writer(response)
        writer.writerows(r)
        return response
    elif o_format == "xlsx":
        response = StringIO()
        wb = xlsxwriter.Workbook(response)
        cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
        ws = wb.add_worksheet("Alarms")
        # Track the widest cell per column for autowidth
        max_column_data_length = {}
        for rn, x in enumerate(r):
            for cn, c in enumerate(x):
                if rn and (r[0][cn] not in max_column_data_length
                           or len(str(c)) >
                           max_column_data_length[r[0][cn]]):
                    max_column_data_length[r[0][cn]] = len(str(c))
                ws.write(rn, cn, c, cf1)
        ws.autofilter(0, 0, rn, cn)
        ws.freeze_panes(1, 0)
        for cn, c in enumerate(r[0]):
            # Set column width
            width = get_column_width(c)
            if enable_autowidth and width < max_column_data_length[c]:
                width = max_column_data_length[c]
            ws.set_column(cn, cn, width=width)
        wb.close()
        response.seek(0)
        response = HttpResponse(response.getvalue(),
                                content_type="application/vnd.ms-excel")
        response[
            "Content-Disposition"] = 'attachment; filename="alarms.xlsx"'
        response.close()
        return response
def cleaned_query(self, q):
    """
    Prepare a raw request-parameter dict for use as an alarm queryset filter.

    Returns a copy of *q* with service parameters stripped, numbered
    "__in" parameters folded together, field values normalized, advanced
    filters applied, and UI-level filters (maintenance, collapse, ...)
    translated into ORM lookups.  The input dict itself is left untouched.
    """
    q = q.copy()
    status = q.get("status", "A")
    # Drop service/UI parameters that are not queryset filters
    for p in self.ignored_params:
        q.pop(p, None)
    for p in (
            self.limit_param,
            self.page_param,
            self.start_param,
            self.format_param,
            self.sort_param,
            self.query_param,
            self.only_param,
    ):
        q.pop(p, None)
    # Fold numbered "<field>__N__in" parameters into one "<field>__in"
    # list (ExtJS cannot repeat the same parameter name in a query)
    for p in list(q):
        if p.endswith("__in"):
            m = self.rx_oper_splitter.match(p)
            if m:
                field = m.group("field") + "__in"
                if field in q:
                    q[field] += [q[p]]
                else:
                    q[field] = [q[p]]
                del q[p]
    # Normalize values through the per-field cleaners
    for p in list(q):
        base = p.split("__")[0]
        if base in self.clean_fields:
            q[p] = self.clean_fields[base].clean(q[p])
    # Collect and apply advanced filters
    for p in self.advanced_filter_params:
        collected = []
        for key in list(q):
            if key.startswith(p):
                collected += [q.pop(key)]
        if collected:
            af = self.advanced_filter(self.advanced_filter_params[p],
                                      collected)
            if "__raw__" in q and "__raw__" in af:
                # Merge raw queries instead of overwriting
                q["__raw__"].update(af.pop("__raw__"))
            q.update(af)
    # Maintenance filter applies to active alarms only
    mode = q.pop("maintenance", "hide")
    if mode == "hide" and status == "A":
        q["managed_object__nin"] = Maintenance.currently_affected()
    elif mode == "only" and status == "A":
        q["managed_object__in"] = Maintenance.currently_affected()
    if "administrative_domain" in q:
        adm = q.pop("administrative_domain")
        if adm != "_root_":
            q["adm_path"] = int(adm)
    if "administrative_domain__in" in q:
        adms = q.pop("administrative_domain__in")
        if "_root_" not in adms:
            q["adm_path__in"] = adms
    if "segment" in q:
        seg = q.pop("segment")
        if seg != "_root_":
            q["segment_path"] = bson.ObjectId(seg)
    if "managedobjectselector" in q:
        sel_objects = SelectorCache.objects.filter(
            selector=q.pop("managedobjectselector")).values_list("object")
        if "managed_object__in" in q:
            q["managed_object__in"] = list(
                set(q["managed_object__in"]).intersection(sel_objects))
        else:
            q["managed_object__in"] = sel_objects
    if "cleared_after" in q:
        delta = datetime.timedelta(seconds=int(q.pop("cleared_after")))
        q["clear_timestamp__gte"] = datetime.datetime.now() - delta
    if "wait_tt" in q:
        del q["wait_tt"]
        q["wait_tt__exists"] = True
        q["wait_ts__exists"] = False
    if q.pop("collapse", "0") != "0":
        q["root__exists"] = False
    # Archived view defaults to a bounded time window
    if status == "C" and not any(
            k in q for k in ("timestamp__gte", "timestamp__lte",
                             "escalation_tt__contains", "managed_object")):
        q["timestamp__gte"] = datetime.datetime.now() - self.DEFAULT_ARCH_ALARM
    return q
def api_report(
    self,
    request,
    from_date,
    to_date,
    o_format,
    min_duration=0,
    max_duration=0,
    min_objects=0,
    min_subscribers=0,
    segment=None,
    administrative_domain=None,
    selector=None,
    ex_selector=None,
    columns=None,
    source="both",
    alarm_class=None,
    subscribers=None,
    enable_autowidth=False,
):
    """
    Build the "alarm detail" report and return it as an HTTP response.

    Collects archived and/or active alarms between *from_date* and
    *to_date* (``%d.%m.%Y``), filtered by duration, object/subscriber
    summaries, segment, administrative domain and selectors, and renders
    the rows as CSV, zipped CSV or XLSX depending on *o_format*.

    :param request: HTTP request; non-superusers are restricted to their
        administrative domains
    :param from_date: start date string, "%d.%m.%Y"
    :param to_date: end date string, "%d.%m.%Y" (inclusive: one day added)
    :param o_format: "csv", "csv_zip" or "xlsx" (forced to "csv_zip" for
        source == "long_archive")
    :param columns: comma-separated column names; None selects all columns
        for the header, but see NOTE(review) below — several later checks
        call ``columns.split`` unconditionally
    :param source: "archive", "active", "both" or "long_archive"
    :param alarm_class: accepted but unused in this implementation
    :returns: HttpResponse with the rendered report, or
        HttpResponse/HttpResponseBadRequest on permission/limit errors
    """
    def row(row, container_path, segment_path):
        # Render one report row: stringify values, then pad/trim the
        # container and segment path columns to the configured depths.
        def qe(v):
            # Normalize a single cell to text; datetimes get a fixed format.
            if v is None:
                return ""
            if isinstance(v, str):
                return smart_text(v)
            elif isinstance(v, datetime.datetime):
                return v.strftime("%Y-%m-%d %H:%M:%S")
            elif not isinstance(v, str):
                return smart_text(v)
            else:
                # NOTE(review): unreachable — every str returns above
                return v

        r = [qe(x) for x in row]
        if len(container_path) < self.CONTAINER_PATH_DEPTH:
            container_path += [""] * (self.CONTAINER_PATH_DEPTH - len(container_path))
        else:
            container_path = container_path[:self.CONTAINER_PATH_DEPTH]
        if len(segment_path) < self.SEGMENT_PATH_DEPTH:
            segment_path += [""] * (self.SEGMENT_PATH_DEPTH - len(segment_path))
        else:
            segment_path = segment_path[:self.SEGMENT_PATH_DEPTH]
        return r + container_path + segment_path

    def translate_row(row, cmap):
        # Project a full row down to the user-selected columns.
        return [row[i] for i in cmap]

    # Canonical column order; row() must emit values in exactly this order.
    cols = ([
        "id",
        "root_id",
        "from_ts",
        "to_ts",
        "duration_sec",
        "object_name",
        "object_address",
        "object_hostname",
        "object_profile",
        "object_admdomain",
        "object_platform",
        "object_version",
        "alarm_class",
        "alarm_subject",
        "maintenance",
        "objects",
        "subscribers",
        "tt",
        "escalation_ts",
        "location",
        "container_address",
    ] + ["container_%d" % i for i in range(self.CONTAINER_PATH_DEPTH)]
      + ["segment_%d" % i for i in range(self.SEGMENT_PATH_DEPTH)])
    header_row = (
        [
            "ID",
            _("ROOT_ID"),
            _("FROM_TS"),
            _("TO_TS"),
            _("DURATION_SEC"),
            _("OBJECT_NAME"),
            _("OBJECT_ADDRESS"),
            _("OBJECT_HOSTNAME"),
            _("OBJECT_PROFILE"),
            _("OBJECT_ADMDOMAIN"),
            _("OBJECT_PLATFORM"),
            _("OBJECT_VERSION"),
            _("ALARM_CLASS"),
            _("ALARM_SUBJECT"),
            _("MAINTENANCE"),
            _("OBJECTS"),
            _("SUBSCRIBERS"),
            _("TT"),
            _("ESCALATION_TS"),
            _("LOCATION"),
            _("CONTAINER_ADDRESS"),
        ] + ["CONTAINER_%d" % i for i in range(self.CONTAINER_PATH_DEPTH)]
          + ["SEGMENT_%d" % i for i in range(self.SEGMENT_PATH_DEPTH)])
    # cmap maps selected column names to indexes in the canonical order;
    # unknown names are silently skipped
    if columns:
        cmap = []
        for c in columns.split(","):
            try:
                cmap += [cols.index(c)]
            except ValueError:
                continue
    else:
        cmap = list(range(len(cols)))
    subscribers_profile = self.default_subscribers_profile
    if subscribers:
        subscribers_profile = set(
            SubscriberProfile.objects.filter(
                id__in=subscribers.split(",")).scalar("id"))
    r = [translate_row(header_row, cmap)]
    # fd is the exclusive upper bound: end of the to_date day
    fd = datetime.datetime.strptime(
        to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
    match = {
        "timestamp": {
            "$gte": datetime.datetime.strptime(from_date, "%d.%m.%Y"),
            "$lte": fd
        }
    }
    # Duration filter applied as a second $match after $addFields computes
    # "duration" in seconds inside the aggregation pipeline
    match_duration = {"duration": {"$gte": min_duration}}
    if max_duration:
        match_duration = {
            "duration": {
                "$gte": min_duration,
                "$lte": max_duration
            }
        }
    mos = ManagedObject.objects.filter(is_managed=True)
    if segment:
        try:
            match["segment_path"] = bson.ObjectId(segment)
        except bson.errors.InvalidId:
            pass
    ads = []
    if administrative_domain:
        if administrative_domain.isdigit():
            # Expand the domain to itself plus all nested domains
            administrative_domain = [int(administrative_domain)]
            ads = AdministrativeDomain.get_nested_ids(
                administrative_domain[0])
    if not request.user.is_superuser:
        # Restrict to domains the user may access; reject if the
        # requested domain set and the user's set do not overlap
        user_ads = UserAccess.get_domains(request.user)
        if administrative_domain and ads:
            if administrative_domain[0] not in user_ads:
                ads = list(set(ads) & set(user_ads))
                if not ads:
                    return HttpResponse(
                        "<html><body>Permission denied: Invalid Administrative Domain</html></body>"
                    )
        else:
            ads = user_ads
    if ads:
        mos = mos.filter(administrative_domain__in=ads)
    if selector:
        selector = ManagedObjectSelector.get_by_id(int(selector))
        mos = mos.filter(selector.Q)
    if ex_selector:
        ex_selector = ManagedObjectSelector.get_by_id(int(ex_selector))
        mos = mos.exclude(ex_selector.Q)
    # Working if Administrative domain set
    if ads:
        try:
            match["adm_path"] = {"$in": ads}
            # @todo More 2 level hierarhy
            # NOTE(review): a dict assignment cannot raise InvalidId;
            # this try/except looks like a leftover — confirm
        except bson.errors.InvalidId:
            pass
    mos_id = list(mos.order_by("id").values_list("id", flat=True))
    mo_hostname = {}
    maintenance = []
    # Only constrain the pipeline by object ids when a selector narrowed
    # the set; otherwise adm_path/segment_path filters are sufficient
    if mos_id and (selector or ex_selector):
        match["managed_object"] = {"$in": mos_id}
    # NOTE(review): columns.split(",") raises AttributeError when columns
    # is None — the cmap branch above handles None, these checks do not;
    # confirm callers always pass a columns string
    if "maintenance" in columns.split(","):
        maintenance = Maintenance.currently_affected()
    if "object_hostname" in columns.split(","):
        mo_hostname = ReportObjectsHostname1(sync_ids=mos_id)
        mo_hostname = mo_hostname.get_dictionary()
    # moss: managed_object id -> tuple of cached object attributes,
    # accessed positionally below (0=name, 1=address, 3/5/6/9/10 = profile
    # id, location/flag, adm. domain, platform id, firmware id —
    # presumably; verify against ReportAlarmObjects)
    moss = ReportAlarmObjects(mos_id).get_all()
    # container_lookup = ReportContainer(mos_id)
    container_lookup = None
    subject = "alarm_subject" in columns
    loc = AlarmApplication([])
    if source in ["archive", "both"]:
        # Archived Alarms
        for a in (ArchivedAlarm._get_collection().with_options(
                read_preference=ReadPreference.SECONDARY_PREFERRED
        ).aggregate([
            {
                "$match": match
            },
            # duration = (clear_timestamp - timestamp); $subtract on dates
            # yields milliseconds, hence the divide by 1000
            {
                "$addFields": {
                    "duration": {
                        "$divide": [
                            {
                                "$subtract": ["$clear_timestamp", "$timestamp"]
                            },
                            1000,
                        ]
                    }
                }
            },
            {
                "$match": match_duration
            },
            # {"$sort": {"timestamp": 1}}
        ])):
            # Skip alarms on objects outside the cached attribute map
            if int(a["managed_object"]) not in moss:
                continue
            dt = a["clear_timestamp"] - a["timestamp"]
            duration = int(dt.total_seconds())
            total_objects = sum(ss["summary"] for ss in a["total_objects"])
            if min_objects and total_objects < min_objects:
                continue
            total_subscribers = sum(
                ss["summary"] for ss in a["total_subscribers"]
                if subscribers_profile and ss["profile"] in subscribers_profile)
            if min_subscribers and total_subscribers < min_subscribers:
                continue
            # NOTE(review): this tests literal membership of "segment_" /
            # "container_" in the selected columns; actual column names are
            # "segment_0", "container_0", ... so this is only True when the
            # caller passes the bare prefix — confirm intended
            if "segment_" in columns.split(
                    ",") or "container_" in columns.split(","):
                path = ObjectPath.get_path(a["managed_object"])
                if path:
                    segment_path = [
                        NetworkSegment.get_by_id(s).name
                        for s in path.segment_path
                        if NetworkSegment.get_by_id(s)
                    ]
                    container_path = [
                        Object.get_by_id(s).name
                        for s in path.container_path
                        if Object.get_by_id(s)
                    ]
                else:
                    segment_path = []
                    container_path = []
            else:
                segment_path = []
                container_path = []
            r += [
                translate_row(
                    row(
                        [
                            smart_text(a["_id"]),
                            smart_text(a["root"]) if a.get("root") else "",
                            a["timestamp"],
                            a["clear_timestamp"],
                            smart_text(duration),
                            moss[a["managed_object"]][0],
                            moss[a["managed_object"]][1],
                            smart_text(
                                mo_hostname.get(a["managed_object"], "")),
                            Profile.get_by_id(
                                moss[a["managed_object"]][3]).name
                            if moss[a["managed_object"]][5] else "",
                            moss[a["managed_object"]][6],
                            Platform.get_by_id(
                                moss[a["managed_object"]][9])
                            if moss[a["managed_object"]][9] else "",
                            smart_text(
                                Firmware.get_by_id(
                                    moss[a["managed_object"]][10]).version)
                            if moss[a["managed_object"]][10] else "",
                            AlarmClass.get_by_id(a["alarm_class"]).name,
                            # Full document fetch per row just for the
                            # subject — only when the column was requested
                            ArchivedAlarm.objects.get(
                                id=a["_id"]).subject if subject else "",
                            # maintenance column is always empty for
                            # archived alarms (active rows emit Yes/No)
                            "",
                            total_objects,
                            total_subscribers,
                            a.get("escalation_tt"),
                            a.get("escalation_ts"),
                            ", ".join(ll for ll in (
                                loc.location(moss[a["managed_object"]][5]
                                             ) if moss[a["managed_object"]]
                                [5] is not None else "") if ll),
                            container_lookup[a["managed_object"]].get(
                                "text", "") if container_lookup else "",
                        ],
                        container_path,
                        segment_path,
                    ),
                    cmap,
                )
            ]
    # Active Alarms
    if source in ["active", "both"]:
        datenow = datetime.datetime.now()
        for a in (ActiveAlarm._get_collection().with_options(
                read_preference=ReadPreference.SECONDARY_PREFERRED).
                  aggregate([
                      {
                          "$match": match
                      },
                      # Open alarms: duration measured up to the report's
                      # upper bound fd, not up to "now"
                      {
                          "$addFields": {
                              "duration": {
                                  "$divide": [{
                                      "$subtract": [fd, "$timestamp"]
                                  }, 1000]
                              }
                          }
                      },
                      {
                          "$match": match_duration
                      },
                      # {"$sort": {"timestamp": 1}}
                  ])):
            # NOTE(review): unlike the archive branch, there is no
            # "managed_object in moss" guard here — a KeyError is possible
            # if the cache misses an object; confirm
            dt = datenow - a["timestamp"]
            duration = int(dt.total_seconds())
            total_objects = sum(ss["summary"] for ss in a["total_objects"])
            if min_objects and total_objects < min_objects:
                continue
            total_subscribers = sum(
                ss["summary"] for ss in a["total_subscribers"]
                if subscribers_profile and ss["profile"] in subscribers_profile)
            if min_subscribers and total_subscribers < min_subscribers:
                continue
            # Same literal-prefix membership test as the archive branch —
            # see NOTE(review) above
            if "segment_" in columns.split(
                    ",") or "container_" in columns.split(","):
                path = ObjectPath.get_path(a["managed_object"])
                if path:
                    segment_path = [
                        NetworkSegment.get_by_id(s).name
                        for s in path.segment_path
                        if NetworkSegment.get_by_id(s)
                    ]
                    container_path = [
                        Object.get_by_id(s).name
                        for s in path.container_path
                        if Object.get_by_id(s)
                    ]
                else:
                    segment_path = []
                    container_path = []
            else:
                segment_path = []
                container_path = []
            r += [
                translate_row(
                    row(
                        [
                            smart_text(a["_id"]),
                            smart_text(a["root"]) if a.get("root") else "",
                            a["timestamp"],
                            # a["clear_timestamp"],
                            "",
                            smart_text(duration),
                            moss[a["managed_object"]][0],
                            moss[a["managed_object"]][1],
                            smart_text(
                                mo_hostname.get(a["managed_object"], "")),
                            # NOTE(review): archive branch emits
                            # Profile...name here; this emits the Profile
                            # object itself — likely a missing ".name"
                            Profile.get_by_id(moss[a["managed_object"]][3])
                            if moss[a["managed_object"]][5] else "",
                            moss[a["managed_object"]][6],
                            Platform.get_by_id(
                                moss[a["managed_object"]][9])
                            if moss[a["managed_object"]][9] else "",
                            smart_text(
                                Firmware.get_by_id(
                                    moss[a["managed_object"]][10]).version)
                            if moss[a["managed_object"]][10] else "",
                            AlarmClass.get_by_id(a["alarm_class"]).name,
                            ActiveAlarm.objects.get(
                                id=a["_id"]).subject if subject else None,
                            # maintenance list is only populated when the
                            # "maintenance" column was requested
                            "Yes" if a["managed_object"] in maintenance
                            else "No",
                            total_objects,
                            total_subscribers,
                            a.get("escalation_tt"),
                            a.get("escalation_ts"),
                            ", ".join(ll for ll in (
                                loc.location(moss[a["managed_object"]][5]
                                             ) if moss[a["managed_object"]]
                                [5] is not None else "") if ll),
                            container_lookup[a["managed_object"]].get(
                                "text", "") if container_lookup else "",
                        ],
                        container_path,
                        segment_path,
                    ),
                    cmap,
                )
            ]
    if source in ["long_archive"]:
        # Long archive: bypass Mongo entirely and query ClickHouse with a
        # fixed column set; output format is forced to zipped CSV
        o_format = "csv_zip"
        columns = [
            "ALARM_ID",
            "MO_ID",
            "OBJECT_PROFILE",
            "VENDOR",
            "PLATFORM",
            "VERSION",
            "OPEN_TIMESTAMP",
            "CLOSE_TIMESTAMP",
            "LOCATION",
            "",
            "POOL",
            "ADM_DOMAIN",
            "MO_NAME",
            "IP",
            "ESCALATION_TT",
            "DURATION",
            "SEVERITY",
            "REBOOTS",
        ]
        from noc.core.clickhouse.connect import connection

        ch = connection()
        fd = datetime.datetime.strptime(from_date, "%d.%m.%Y")
        td = datetime.datetime.strptime(
            to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
        # Hard cap: ~13 months of data per request
        if td - fd > datetime.timedelta(days=390):
            return HttpResponseBadRequest(
                _("Report more than 1 year not allowed. If nedeed - request it from Administrator"
                  ))
        ac = AlarmClass.objects.get(
            name="NOC | Managed Object | Ping Failed")
        # One extra SELECT column (and header) per subscriber profile
        subs = ", ".join(
            "subscribers.summary[indexOf(subscribers.profile, '%s')] as `%s`"
            % (sp.bi_id, sp.name)
            for sp in SubscriberProfile.objects.filter().order_by("name"))
        if subs:
            columns += [
                sp.name
                for sp in SubscriberProfile.objects.filter().order_by("name")
            ]
        r = ch.execute(LONG_ARCHIVE_QUERY % (
            ", %s" % subs if subs else "",
            fd.date().isoformat(),
            td.date().isoformat(),
            ac.bi_id,
        ))
    filename = "alarms.csv"
    if o_format == "csv":
        response = HttpResponse(content_type="text/csv")
        response[
            "Content-Disposition"] = 'attachment; filename="%s"' % filename
        writer = csv.writer(response)
        writer.writerows(r)
        return response
    elif o_format == "csv_zip":
        response = BytesIO()
        # NOTE(review): in the non-long_archive path, "columns" here is
        # still the raw comma-separated string (or None), so writerow()
        # writes it char-by-char — confirm csv_zip is only used with
        # source == "long_archive"
        f = TextIOWrapper(TemporaryFile(mode="w+b"), encoding="utf-8")
        writer = csv.writer(f, dialect="excel", delimiter=";", quotechar='"')
        writer.writerow(columns)
        writer.writerows(r)
        f.seek(0)
        with ZipFile(response, "w", compression=ZIP_DEFLATED) as zf:
            zf.writestr(filename, f.read())
            zf.filename = "%s.zip" % filename
        # response = HttpResponse(content_type="text/csv")
        response.seek(0)
        response = HttpResponse(response.getvalue(),
                                content_type="application/zip")
        response[
            "Content-Disposition"] = 'attachment; filename="%s.zip"' % filename
        return response
    elif o_format == "xlsx":
        response = BytesIO()
        wb = xlsxwriter.Workbook(response)
        cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
        ws = wb.add_worksheet("Alarms")
        # Track the widest cell per header name for optional autowidth
        max_column_data_length = {}
        for rn, x in enumerate(r):
            for cn, c in enumerate(x):
                if rn and (r[0][cn] not in max_column_data_length
                           or len(str(c)) > max_column_data_length[r[0][cn]]):
                    max_column_data_length[r[0][cn]] = len(str(c))
                ws.write(rn, cn, c, cf1)
        # NOTE(review): rn/cn leak from the loop; with header-only data
        # (single row) autofilter still gets valid 0-based bounds
        ws.autofilter(0, 0, rn, cn)
        ws.freeze_panes(1, 0)
        for cn, c in enumerate(r[0]):
            # Set column width
            width = get_column_width(c)
            if enable_autowidth and width < max_column_data_length[c]:
                width = max_column_data_length[c]
            ws.set_column(cn, cn, width=width)
        wb.close()
        response.seek(0)
        response = HttpResponse(response.getvalue(),
                                content_type="application/vnd.ms-excel")
        response[
            "Content-Disposition"] = 'attachment; filename="alarms.xlsx"'
        response.close()
        return response