def clean(self, force=False):
    # Archive
    super().clean()
    # Clean
    if force:
        print("Clean ArchivedAlarm collection before %s" % self.clean_ts)
        ArchivedAlarm._get_collection().remove({"clear_timestamp": {"$lte": self.clean_ts}})
def iter_archived_items(self):
    for d in ArchivedAlarm._get_collection().find(
        {"clear_timestamp": {"$lte": self.clean_ts}}, no_cursor_timeout=True
    ):
        yield d
@classmethod
def get_start(cls):
    d = ArchivedAlarm._get_collection().find_one(
        {}, {"_id": 0, "timestamp": 1}, sort=[("timestamp", 1)]
    )
    if not d:
        return None
    return d.get("timestamp")
def handle_clean(self, options, events):
    before = options.get("before")
    if before:
        before = datetime.datetime.strptime(before, "%Y-%m-%d")
    else:
        self.print("Before is not set, use default")
        before = datetime.datetime.now() - DEFAULT_CLEAN
    force = options.get("force")
    aa = ActiveAlarm._get_collection()
    ah = ArchivedAlarm._get_collection()
    ae = ActiveEvent._get_collection()
    event_ts = ae.find_one({"timestamp": {"$lte": before}}, limit=1, sort=[("timestamp", 1)])
    if not event_ts:
        self.die("No events before %s" % before)
    event_ts = event_ts["timestamp"]
    print("[%s] Cleaned before %s ... \n" % ("events", before), end="")
    bulk = []
    window = CLEAN_WINDOW
    while event_ts < before:
        refer_event_ids = []
        for e in [aa, ah]:
            for ee in e.find(
                {"timestamp": {"$gte": event_ts, "$lte": event_ts + CLEAN_WINDOW}},
                {"opening_event": 1, "closing_event": 1},
            ):
                if "opening_event" in ee:
                    refer_event_ids += [ee["opening_event"]]
                if "closing_event" in ee:
                    refer_event_ids += [ee["closing_event"]]
        try:
            clear_qs = {
                "timestamp": {"$gte": event_ts, "$lte": event_ts + CLEAN_WINDOW},
                "_id": {"$nin": refer_event_ids},
            }
            self.print(
                "Interval: %s, %s; Count: %d"
                % (event_ts, event_ts + CLEAN_WINDOW, ae.count_documents(clear_qs))
            )
            bulk += [DeleteMany(clear_qs)]
            event_ts += window
            if window != CLEAN_WINDOW:
                window = CLEAN_WINDOW
        except DocumentTooLarge:
            window = window // 2
            if window < datetime.timedelta(hours=1):
                self.die("Too many events for delete in interval %s" % window)
            event_ts -= window
    if force:
        self.print("All data before %s will be removed from active events\n" % before)
        for i in reversed(range(1, 10)):
            self.print("%d\n" % i)
            time.sleep(1)
        ae.bulk_write(bulk)
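# A minimal, self-contained sketch of the adaptive-window delete pattern that
# handle_clean() above relies on: walk the time range window by window, and when the
# delete filter grows too large to encode (pymongo raises DocumentTooLarge once the
# filter exceeds the BSON size limit), halve the window and retry. Unlike the code
# above, this variant issues one bulk_write per window; all names here are
# illustrative, not part of the codebase.
import datetime

from pymongo import DeleteMany
from pymongo.errors import DocumentTooLarge


def clean_range(coll, start, stop, window=datetime.timedelta(hours=4), keep_ids=None):
    """Delete documents in [start, stop) window by window, shrinking on overflow."""
    keep_ids = keep_ids or []
    ts = start
    while ts < stop:
        qs = {
            "timestamp": {"$gte": ts, "$lt": ts + window},
            "_id": {"$nin": keep_ids},
        }
        try:
            # bulk_write encodes the filter; a huge $nin list can push the
            # command over the 16 MB BSON limit and raise DocumentTooLarge
            coll.bulk_write([DeleteMany(qs)])
            ts += window
        except DocumentTooLarge:
            window //= 2  # narrow the window and retry the same interval
            if window < datetime.timedelta(minutes=30):
                raise RuntimeError("Delete window shrunk below safe minimum")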
def iter_data(self):
    if self.use_archive:
        coll = [
            self._archive_db.get_collection(coll_name)
            for coll_name in self.find_archived_collections(self.start, self.stop)
        ]
    else:
        coll = [ArchivedAlarm._get_collection()]
    for c in coll:
        for d in c.find(
            {"clear_timestamp": {"$gt": self.start, "$lte": self.stop}},
            no_cursor_timeout=True,
        ).sort("clear_timestamp"):
            yield d
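# iter_data() above fans out over sharded archive collections when use_archive is set.
# find_archived_collections() is not shown in this module; a plausible sketch, assuming
# a hypothetical monthly sharding scheme with collections named "alarms.y<YYYY>m<MM>":
import datetime


def find_archived_collections(start, stop):
    """Yield archive collection names covering [start, stop] (hypothetical naming)."""
    d = datetime.date(start.year, start.month, 1)
    while d <= stop.date():
        yield "alarms.y%04dm%02d" % (d.year, d.month)
        # advance to the first day of the next month
        d = (d.replace(day=28) + datetime.timedelta(days=4)).replace(day=1)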
def escalate(alarm_id, escalation_id, escalation_delay, *args, **kwargs):
    def log(message, *args):
        msg = message % args
        logger.info("[%s] %s", alarm_id, msg)
        alarm.log_message(msg, to_save=True)

    def summary_to_list(summary, model):
        r = []
        for k in summary:
            p = model.get_by_id(k.profile)
            if not p or getattr(p, "show_in_summary", True) is False:
                continue
            r += [
                {
                    "profile": p.name,
                    "summary": k.summary,
                    "order": (getattr(p, "display_order", 100), -k.summary),
                }
            ]
        return sorted(r, key=operator.itemgetter("order"))

    logger.info("[%s] Performing escalations", alarm_id)
    alarm = get_alarm(alarm_id)
    if alarm is None:
        logger.info("[%s] Missing alarm, skipping", alarm_id)
        metrics["escalation_missed_alarm"] += 1
        return
    if alarm.status == "C":
        logger.info("[%s] Alarm is closed, skipping", alarm_id)
        metrics["escalation_already_closed"] += 1
        return
    if alarm.root:
        log("Alarm is not root cause, skipping")
        metrics["escalation_alarm_is_not_root"] += 1
        return
    #
    escalation = AlarmEscalation.get_by_id(escalation_id)
    if not escalation:
        log("Escalation %s is not found, skipping", escalation_id)
        metrics["escalation_not_found"] += 1
        return
    if alarm.managed_object.tt_system:
        sample = alarm.managed_object.tt_system.telemetry_sample
    else:
        sample = PARENT_SAMPLE
    with Span(client="escalator", sample=sample) as ctx:
        alarm.set_escalation_context()
        # Evaluate escalation chain
        mo = alarm.managed_object
        for a in escalation.escalations:
            if a.delay != escalation_delay:
                continue  # Try other type
            # Check administrative domain
            if a.administrative_domain and a.administrative_domain.id not in alarm.adm_path:
                continue
            # Check severity
            if a.min_severity and alarm.severity < a.min_severity:
                continue
            # Check selector
            if a.selector and not SelectorCache.is_in_selector(mo, a.selector):
                continue
            # Check time pattern
            if a.time_pattern and not a.time_pattern.match(alarm.timestamp):
                continue
            # Render escalation message
            if not a.template:
                log("No escalation template, skipping")
                continue
            # Check global limits
            # @todo: Move into escalator service
            # @todo: Process per-ttsystem limits
            ets = datetime.datetime.now() - datetime.timedelta(seconds=config.escalator.ets)
            ae = ActiveAlarm._get_collection().count_documents({"escalation_ts": {"$gte": ets}})
            ae += ArchivedAlarm._get_collection().count_documents({"escalation_ts": {"$gte": ets}})
            if ae >= config.escalator.tt_escalation_limit:
                logger.error(
                    "Escalation limit exceeded (%s/%s). Skipping",
                    ae,
                    config.escalator.tt_escalation_limit,
                )
                metrics["escalation_throttled"] += 1
                alarm.set_escalation_error(
                    "Escalation limit exceeded (%s/%s). Skipping"
                    % (ae, config.escalator.tt_escalation_limit)
                )
                return
            # Check whether consequences have escalations
            cons_escalated = sorted(alarm.iter_escalated(), key=operator.attrgetter("timestamp"))
            affected_objects = sorted(alarm.iter_affected(), key=operator.attrgetter("name"))
            #
            segment = alarm.managed_object.segment
            if segment.is_redundant:
                uplinks = alarm.managed_object.data.uplinks
                lost_redundancy = len(uplinks) > 1
                affected_subscribers = summary_to_list(segment.total_subscribers, SubscriberProfile)
                affected_services = summary_to_list(segment.total_services, ServiceProfile)
            else:
                lost_redundancy = False
                affected_subscribers = []
                affected_services = []
            #
            ctx = {
                "alarm": alarm,
                "affected_objects": affected_objects,
                "cons_escalated": cons_escalated,
                "total_objects": summary_to_list(alarm.total_objects, ManagedObjectProfile),
                "total_subscribers": summary_to_list(alarm.total_subscribers, SubscriberProfile),
                "total_services": summary_to_list(alarm.total_services, ServiceProfile),
                "tt": None,
                "lost_redundancy": lost_redundancy,
                "affected_subscribers": affected_subscribers,
                "affected_services": affected_services,
            }
            # Escalate to TT
            if a.create_tt and mo.can_escalate():
                tt_id = None
                if alarm.escalation_tt:
                    log("Already escalated with TT #%s", alarm.escalation_tt)
                else:
                    pre_reason = escalation.get_pre_reason(mo.tt_system)
                    active_maintenance = Maintenance.get_object_maintenance(mo)
                    if active_maintenance:
                        for m in active_maintenance:
                            log(
                                "Object is under maintenance: %s (%s-%s)",
                                m.subject,
                                m.start,
                                m.stop,
                            )
                            metrics["escalation_stop_on_maintenance"] += 1
                    elif pre_reason is not None:
                        subject = a.template.render_subject(**ctx)
                        body = a.template.render_body(**ctx)
                        logger.debug(
                            "[%s] Escalation message:\nSubject: %s\n%s", alarm_id, subject, body
                        )
                        log("Creating TT in system %s", mo.tt_system.name)
                        tts = mo.tt_system.get_system()
                        try:
                            try:
                                tt_id = tts.create_tt(
                                    queue=mo.tt_queue,
                                    obj=mo.tt_system_id,
                                    reason=pre_reason,
                                    subject=subject,
                                    body=body,
                                    login="******",
                                    timestamp=alarm.timestamp,
                                )
                            except TemporaryTTError as e:
                                metrics["escalation_tt_retry"] += 1
                                log("Temporary error detected. Retry after %ss", RETRY_TIMEOUT)
                                mo.tt_system.register_failure()
                                Job.retry_after(get_next_retry(), str(e))
                            ctx["tt"] = "%s:%s" % (mo.tt_system.name, tt_id)
                            alarm.escalate(
                                ctx["tt"],
                                close_tt=a.close_tt,
                                wait_tt=ctx["tt"] if a.wait_tt else None,
                            )
                            if tts.promote_group_tt and a.promote_group_tt:
                                # Create group TT
                                log("Promoting to group tt")
                                gtt = tts.create_group_tt(tt_id, alarm.timestamp)
                                # Append affected objects
                                for ao in alarm.iter_affected():
                                    if ao.can_escalate(True):
                                        if ao.tt_system == mo.tt_system:
                                            log("Appending object %s to group tt %s", ao.name, gtt)
                                            try:
                                                tts.add_to_group_tt(gtt, ao.tt_system_id)
                                            except TTError as e:
                                                alarm.set_escalation_error(
                                                    "[%s] %s" % (mo.tt_system.name, e)
                                                )
                                        else:
                                            log(
                                                "Cannot append object %s to group tt %s: Belongs to other TT system",
                                                ao.name,
                                                gtt,
                                            )
                                    else:
                                        log(
                                            "Cannot append object %s to group tt %s: Escalations are disabled",
                                            ao.name,
                                            gtt,
                                        )
                            metrics["escalation_tt_create"] += 1
                        except TTError as e:
                            log("Failed to create TT: %s", e)
                            metrics["escalation_tt_fail"] += 1
                            alarm.log_message("Failed to escalate: %s" % e, to_save=True)
                            alarm.set_escalation_error("[%s] %s" % (mo.tt_system.name, e))
                    else:
                        log("Cannot find pre reason")
                        metrics["escalation_tt_fail"] += 1
                if tt_id and cons_escalated:
                    # Notify consequences
                    for ca in cons_escalated:
                        c_tt_name, c_tt_id = ca.escalation_tt.split(":")
                        cts = TTSystem.get_by_name(c_tt_name)
                        if cts:
                            tts = cts.get_system()
                            try:
                                log("Appending comment to TT %s", tt_id)
                                tts.add_comment(
                                    c_tt_id, body="Covered by TT %s" % tt_id, login="******"
                                )
                                metrics["escalation_tt_comment"] += 1
                            except NotImplementedError:
                                log(
                                    "Cannot add comment to %s: Feature not implemented",
                                    ca.escalation_tt,
                                )
                                metrics["escalation_tt_comment_fail"] += 1
                            except TTError as e:
                                log("Failed to add comment to %s: %s", ca.escalation_tt, e)
                                metrics["escalation_tt_comment_fail"] += 1
                        else:
                            log("Failed to add comment to %s: Invalid TT system", ca.escalation_tt)
                            metrics["escalation_tt_comment_fail"] += 1
            # Send notification
            if a.notification_group and mo.can_notify():
                subject = a.template.render_subject(**ctx)
                body = a.template.render_body(**ctx)
                logger.debug(
                    "[%s] Notification message:\nSubject: %s\n%s", alarm_id, subject, body
                )
                log("Sending notification to group %s", a.notification_group.name)
                a.notification_group.notify(subject, body)
                alarm.set_clear_notification(a.notification_group, a.clear_template)
                metrics["escalation_notify"] += 1
            #
            if a.stop_processing:
                logger.debug("Stopping processing")
                break
    nalarm = get_alarm(alarm_id)
    if nalarm and nalarm.status == "C":
        nalarm.log_message("Alarm has been closed during escalation. Try to deescalate")
        logger.info(
            "[%s] Alarm has been closed during escalation. Try to deescalate", alarm.id
        )
        metrics["escalation_closed_while_escalated"] += 1
        if tt_id and not nalarm.escalation_tt:
            nalarm.escalation_ts = datetime.datetime.now()
            nalarm.escalation_tt = "%s:%s" % (mo.tt_system.name, tt_id)
            nalarm.save()
        if not nalarm.escalation_close_ts and not nalarm.escalation_close_error:
            notify_close(
                alarm_id=alarm_id,
                tt_id=nalarm.escalation_tt,
                subject="Closing",
                body="Closing",
                notification_group_id=alarm.clear_notification_group.id
                if alarm.clear_notification_group
                else None,
                close_tt=alarm.close_tt,
            )
    elif nalarm and nalarm.status == "A" and not nalarm.escalation_tt and tt_id:
        logger.error("[%s] Alarm without escalation TT: %s", alarm.id, tt_id)
    logger.info("[%s] Escalations loop end", alarm_id)
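# The global TT rate limit inside escalate() counts alarms escalated within the last
# config.escalator.ets seconds across both the active and archived collections, and
# skips escalation once the count reaches config.escalator.tt_escalation_limit. A
# standalone sketch of the same check; the collection handles and thresholds are
# passed in here rather than read from config:
import datetime


def escalation_throttled(active_coll, archived_coll, ets_seconds, limit):
    """Return True when the recent-escalation count has reached the TT limit."""
    since = datetime.datetime.now() - datetime.timedelta(seconds=ets_seconds)
    n = active_coll.count_documents({"escalation_ts": {"$gte": since}})
    n += archived_coll.count_documents({"escalation_ts": {"$gte": since}})
    return n >= limit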
def api_report(
    self,
    request,
    from_date,
    to_date,
    o_format,
    administrative_domain=None,
    columns=None,
    source="both",
    alarm_class=None,
    enable_autowidth=False,
):
    def row(row):
        def qe(v):
            if v is None:
                return ""
            if isinstance(v, str):
                return smart_text(v)
            elif isinstance(v, datetime.datetime):
                return v.strftime("%Y-%m-%d %H:%M:%S")
            elif not isinstance(v, str):
                return smart_text(v)
            else:
                return v

        return [qe(x) for x in row]

    def translate_row(row, cmap):
        return [row[i] for i in cmap]

    cols = [
        "id",
        "alarm_class",
        "alarm_from_ts",
        "alarm_to_ts",
        "alarm_tt",
        "object_name",
        "object_address",
        "object_admdomain",
        "log_timestamp",
        "log_source",
        "log_message",
        # "tt",
        # "escalation_ts",
    ]
    header_row = [
        "ID",
        _("ALARM_CLASS"),
        _("ALARM_FROM_TS"),
        _("ALARM_TO_TS"),
        _("ALARM_TT"),
        _("OBJECT_NAME"),
        _("OBJECT_ADDRESS"),
        _("OBJECT_ADMDOMAIN"),
        _("LOG_TIMESTAMP"),
        _("LOG_SOURCE"),
        _("LOG_MESSAGE"),
    ]
    if columns:
        cmap = []
        for c in columns.split(","):
            try:
                cmap += [cols.index(c)]
            except ValueError:
                continue
    else:
        cmap = list(range(len(cols)))
    r = [translate_row(header_row, cmap)]
    fd = datetime.datetime.strptime(to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
    match = {
        "timestamp": {"$gte": datetime.datetime.strptime(from_date, "%d.%m.%Y"), "$lte": fd}
    }
    mos = ManagedObject.objects.filter()
    ads = []
    if administrative_domain:
        if administrative_domain.isdigit():
            administrative_domain = [int(administrative_domain)]
            ads = AdministrativeDomain.get_nested_ids(administrative_domain[0])
    if not request.user.is_superuser:
        user_ads = UserAccess.get_domains(request.user)
        if administrative_domain and ads:
            if administrative_domain[0] not in user_ads:
                ads = list(set(ads) & set(user_ads))
                if not ads:
                    return HttpResponse(
                        "<html><body>Permission denied: Invalid Administrative Domain</body></html>"
                    )
        else:
            ads = user_ads
    if ads:
        mos = mos.filter(administrative_domain__in=ads)
    # Working if Administrative domain is set
    if ads:
        try:
            match["adm_path"] = {"$in": ads}
            # @todo More than 2 levels of hierarchy
        except bson.errors.InvalidId:
            pass
    addr_map = {mo[0]: (mo[1], mo[2]) for mo in mos.values_list("id", "name", "address")}
    # Active Alarms
    coll = ActiveAlarm._get_collection()
    for aa in coll.aggregate(
        [
            {"$match": match},
            {"$unwind": "$log"},
            {"$match": {"log.source": {"$exists": True, "$ne": None}}},
            {
                "$project": {
                    "timestamp": 1,
                    "managed_object": 1,
                    "alarm_class": 1,
                    "escalation_tt": 1,
                    "adm_path": 1,
                    "log": 1,
                }
            },
            {"$sort": {"_id": 1, "log.timestamp": 1}},
        ]
    ):
        r += [
            translate_row(
                row(
                    [
                        smart_text(aa["_id"]),
                        AlarmClass.get_by_id(aa["alarm_class"]).name,
                        aa["timestamp"],
                        "",
                        aa.get("escalation_tt", ""),
                        addr_map[aa["managed_object"]][0],
                        addr_map[aa["managed_object"]][1],
                        AdministrativeDomain.get_by_id(aa["adm_path"][-1]).name,
                        aa["log"]["timestamp"],
                        aa["log"]["source"],
                        aa["log"]["message"],
                    ]
                ),
                cmap,
            )
        ]
    # Archived Alarms
    coll = ArchivedAlarm._get_collection()
    for aa in coll.aggregate(
        [
            {"$match": match},
            {"$unwind": "$log"},
            {"$match": {"log.source": {"$exists": True}}},
            {
                "$project": {
                    "timestamp": 1,
                    "clear_timestamp": 1,
                    "managed_object": 1,
                    "alarm_class": 1,
                    "escalation_tt": 1,
                    "adm_path": 1,
                    "log": 1,
                }
            },
            {"$sort": {"_id": 1, "log.timestamp": 1}},
        ]
    ):
        r += [
            translate_row(
                row(
                    [
                        smart_text(aa["_id"]),
                        AlarmClass.get_by_id(aa["alarm_class"]).name,
                        aa["timestamp"],
                        aa["clear_timestamp"],
                        aa.get("escalation_tt", ""),
                        addr_map[aa["managed_object"]][0],
                        addr_map[aa["managed_object"]][1],
                        AdministrativeDomain.get_by_id(aa["adm_path"][-1]).name,
                        aa["log"]["timestamp"],
                        aa["log"]["source"],
                        aa["log"]["message"],
                    ]
                ),
                cmap,
            )
        ]
    filename = "alarm_comments.csv"
    if o_format == "csv":
        response = HttpResponse(content_type="text/csv")
        response["Content-Disposition"] = 'attachment; filename="%s"' % filename
        writer = csv.writer(response)
        writer.writerows(r)
        return response
    elif o_format == "csv_zip":
        response = BytesIO()
        f = TextIOWrapper(TemporaryFile(mode="w+b"), encoding="utf-8")
        writer = csv.writer(f, dialect="excel", delimiter=";", quotechar='"')
        writer.writerow(columns)
        writer.writerows(r)
        f.seek(0)
        with ZipFile(response, "w", compression=ZIP_DEFLATED) as zf:
            zf.writestr(filename, f.read())
            zf.filename = "%s.zip" % filename
        # response = HttpResponse(content_type="text/csv")
        response.seek(0)
        response = HttpResponse(response.getvalue(), content_type="application/zip")
        response["Content-Disposition"] = 'attachment; filename="%s.zip"' % filename
        return response
    elif o_format == "xlsx":
        response = BytesIO()
        wb = xlsxwriter.Workbook(response)
        cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
        ws = wb.add_worksheet("Alarms")
        max_column_data_length = {}
        for rn, x in enumerate(r):
            for cn, c in enumerate(x):
                if rn and (
                    r[0][cn] not in max_column_data_length
                    or len(str(c)) > max_column_data_length[r[0][cn]]
                ):
                    max_column_data_length[r[0][cn]] = len(str(c))
                ws.write(rn, cn, c, cf1)
        ws.autofilter(0, 0, rn, cn)
        ws.freeze_panes(1, 0)
        for cn, c in enumerate(r[0]):
            # Set column width
            width = get_column_width(c)
            if enable_autowidth and width < max_column_data_length[c]:
                width = max_column_data_length[c]
            ws.set_column(cn, cn, width=width)
        wb.close()
        response.seek(0)
        response = HttpResponse(response.getvalue(), content_type="application/vnd.ms-excel")
        response["Content-Disposition"] = 'attachment; filename="alarm_comments.xlsx"'
        response.close()
        return response
def api_report(
    self,
    request,
    from_date,
    to_date,
    o_format,
    min_duration=0,
    max_duration=0,
    min_objects=0,
    min_subscribers=0,
    segment=None,
    administrative_domain=None,
    selector=None,
    ex_selector=None,
    columns=None,
    source="both",
    alarm_class=None,
    subscribers=None,
    enable_autowidth=False,
):
    def row(row, container_path, segment_path):
        def qe(v):
            if v is None:
                return ""
            if isinstance(v, unicode):
                return v.encode("utf-8")
            elif isinstance(v, datetime.datetime):
                return v.strftime("%Y-%m-%d %H:%M:%S")
            elif not isinstance(v, str):
                return str(v)
            else:
                return v

        r = [qe(x) for x in row]
        if len(container_path) < self.CONTAINER_PATH_DEPTH:
            container_path += [""] * (self.CONTAINER_PATH_DEPTH - len(container_path))
        else:
            container_path = container_path[: self.CONTAINER_PATH_DEPTH]
        if len(segment_path) < self.SEGMENT_PATH_DEPTH:
            segment_path += [""] * (self.SEGMENT_PATH_DEPTH - len(segment_path))
        else:
            segment_path = segment_path[: self.SEGMENT_PATH_DEPTH]
        return r + container_path + segment_path

    def translate_row(row, cmap):
        return [row[i] for i in cmap]

    cols = (
        [
            "id",
            "root_id",
            "from_ts",
            "to_ts",
            "duration_sec",
            "object_name",
            "object_address",
            "object_hostname",
            "object_profile",
            "object_admdomain",
            "object_platform",
            "object_version",
            "alarm_class",
            "alarm_subject",
            "maintenance",
            "objects",
            "subscribers",
            "tt",
            "escalation_ts",
            "location",
            "container_address",
        ]
        + ["container_%d" % i for i in range(self.CONTAINER_PATH_DEPTH)]
        + ["segment_%d" % i for i in range(self.SEGMENT_PATH_DEPTH)]
    )
    header_row = (
        [
            "ID",
            _("ROOT_ID"),
            _("FROM_TS"),
            _("TO_TS"),
            _("DURATION_SEC"),
            _("OBJECT_NAME"),
            _("OBJECT_ADDRESS"),
            _("OBJECT_HOSTNAME"),
            _("OBJECT_PROFILE"),
            _("OBJECT_ADMDOMAIN"),
            _("OBJECT_PLATFORM"),
            _("OBJECT_VERSION"),
            _("ALARM_CLASS"),
            _("ALARM_SUBJECT"),
            _("MAINTENANCE"),
            _("OBJECTS"),
            _("SUBSCRIBERS"),
            _("TT"),
            _("ESCALATION_TS"),
            _("LOCATION"),
            _("CONTAINER_ADDRESS"),
        ]
        + ["CONTAINER_%d" % i for i in range(self.CONTAINER_PATH_DEPTH)]
        + ["SEGMENT_%d" % i for i in range(self.SEGMENT_PATH_DEPTH)]
    )
    if columns:
        cmap = []
        for c in columns.split(","):
            try:
                cmap += [cols.index(c)]
            except ValueError:
                continue
    else:
        cmap = list(range(len(cols)))
    subscribers_profile = self.default_subscribers_profile
    if subscribers:
        subscribers_profile = set(
            SubscriberProfile.objects.filter(id__in=subscribers.split(",")).scalar("id")
        )
    r = [translate_row(header_row, cmap)]
    fd = datetime.datetime.strptime(to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
    match = {
        "timestamp": {"$gte": datetime.datetime.strptime(from_date, "%d.%m.%Y"), "$lte": fd}
    }
    match_duration = {"duration": {"$gte": min_duration}}
    if max_duration:
        match_duration = {"duration": {"$gte": min_duration, "$lte": max_duration}}
    mos = ManagedObject.objects.filter(is_managed=True)
    if segment:
        try:
            match["segment_path"] = bson.ObjectId(segment)
        except bson.errors.InvalidId:
            pass
    ads = []
    if administrative_domain:
        if administrative_domain.isdigit():
            administrative_domain = [int(administrative_domain)]
            ads = AdministrativeDomain.get_nested_ids(administrative_domain[0])
    if not request.user.is_superuser:
        user_ads = UserAccess.get_domains(request.user)
        if administrative_domain and ads:
            if administrative_domain[0] not in user_ads:
                ads = list(set(ads) & set(user_ads))
            else:
                ads = administrative_domain
        else:
            ads = user_ads
    if ads:
        mos = mos.filter(administrative_domain__in=ads)
    if selector:
        selector = ManagedObjectSelector.get_by_id(int(selector))
        mos = mos.filter(selector.Q)
    if ex_selector:
        ex_selector = ManagedObjectSelector.get_by_id(int(ex_selector))
        mos = mos.exclude(ex_selector.Q)
    # Working if Administrative domain is set
    if ads:
        try:
            match["adm_path"] = {"$in": ads}
            # @todo More than 2 levels of hierarchy
        except bson.errors.InvalidId:
            pass
    mos_id = list(mos.order_by("id").values_list("id", flat=True))
    mo_hostname = {}
    maintenance = []
    if mos_id and (selector or ex_selector):
        match["managed_object"] = {"$in": mos_id}
    if "maintenance" in columns.split(","):
        maintenance = Maintenance.currently_affected()
    if "object_hostname" in columns.split(","):
        mo_hostname = ReportObjectsHostname1(sync_ids=mos_id)
        mo_hostname = mo_hostname.get_dictionary()
    moss = ReportAlarmObjects(mos_id).get_all()
    # container_lookup = ReportContainer(mos_id)
    container_lookup = None
    subject = "alarm_subject" in columns
    loc = AlarmApplication([])
    if source in ["archive", "both"]:
        # Archived Alarms
        for a in (
            ArchivedAlarm._get_collection()
            .with_options(read_preference=ReadPreference.SECONDARY_PREFERRED)
            .aggregate(
                [
                    {"$match": match},
                    {
                        "$addFields": {
                            "duration": {
                                "$divide": [
                                    {"$subtract": ["$clear_timestamp", "$timestamp"]},
                                    1000,
                                ]
                            }
                        }
                    },
                    {"$match": match_duration},
                    # {"$sort": {"timestamp": 1}}
                ]
            )
        ):
            if int(a["managed_object"]) not in moss:
                continue
            dt = a["clear_timestamp"] - a["timestamp"]
            duration = int(dt.total_seconds())
            total_objects = sum(ss["summary"] for ss in a["total_objects"])
            if min_objects and total_objects < min_objects:
                continue
            total_subscribers = sum(
                ss["summary"]
                for ss in a["total_subscribers"]
                if subscribers_profile and ss["profile"] in subscribers_profile
            )
            if min_subscribers and total_subscribers < min_subscribers:
                continue
            if "segment_" in columns.split(",") or "container_" in columns.split(","):
                path = ObjectPath.get_path(a["managed_object"])
                if path:
                    segment_path = [
                        NetworkSegment.get_by_id(s).name
                        for s in path.segment_path
                        if NetworkSegment.get_by_id(s)
                    ]
                    container_path = [
                        Object.get_by_id(s).name
                        for s in path.container_path
                        if Object.get_by_id(s)
                    ]
                else:
                    segment_path = []
                    container_path = []
            else:
                segment_path = []
                container_path = []
            r += [
                translate_row(
                    row(
                        [
                            str(a["_id"]),
                            str(a["root"]) if a.get("root") else "",
                            a["timestamp"],
                            a["clear_timestamp"],
                            str(duration),
                            moss[a["managed_object"]][0],
                            moss[a["managed_object"]][1],
                            mo_hostname.get(a["managed_object"], ""),
                            Profile.get_by_id(moss[a["managed_object"]][3]).name
                            if moss[a["managed_object"]][5]
                            else "",
                            moss[a["managed_object"]][6],
                            Platform.get_by_id(moss[a["managed_object"]][9])
                            if moss[a["managed_object"]][9]
                            else "",
                            Firmware.get_by_id(moss[a["managed_object"]][10])
                            if moss[a["managed_object"]][10]
                            else "",
                            AlarmClass.get_by_id(a["alarm_class"]).name,
                            ArchivedAlarm.objects.get(id=a["_id"]).subject if subject else "",
                            "",
                            total_objects,
                            total_subscribers,
                            a.get("escalation_tt"),
                            a.get("escalation_ts"),
                            ", ".join(
                                l
                                for l in (
                                    loc.location(moss[a["managed_object"]][5])
                                    if moss[a["managed_object"]][5] is not None
                                    else ""
                                )
                                if l
                            ),
                            container_lookup[a["managed_object"]].get("text", "")
                            if container_lookup
                            else "",
                        ],
                        container_path,
                        segment_path,
                    ),
                    cmap,
                )
            ]
    # Active Alarms
    if source in ["active", "both"]:
        for a in (
            ActiveAlarm._get_collection()
            .with_options(read_preference=ReadPreference.SECONDARY_PREFERRED)
            .aggregate(
                [
                    {"$match": match},
                    {
                        "$addFields": {
                            "duration": {"$divide": [{"$subtract": [fd, "$timestamp"]}, 1000]}
                        }
                    },
                    {"$match": match_duration},
                    # {"$sort": {"timestamp": 1}}
                ]
            )
        ):
            dt = fd - a["timestamp"]
            duration = int(dt.total_seconds())
            total_objects = sum(ss["summary"] for ss in a["total_objects"])
            if min_objects and total_objects < min_objects:
                continue
            total_subscribers = sum(
                ss["summary"]
                for ss in a["total_subscribers"]
                if subscribers_profile and ss["profile"] in subscribers_profile
            )
            if min_subscribers and total_subscribers < min_subscribers:
                continue
            if "segment_" in columns.split(",") or "container_" in columns.split(","):
                path = ObjectPath.get_path(a["managed_object"])
                if path:
                    segment_path = [
                        NetworkSegment.get_by_id(s).name
                        for s in path.segment_path
                        if NetworkSegment.get_by_id(s)
                    ]
                    container_path = [
                        Object.get_by_id(s).name
                        for s in path.container_path
                        if Object.get_by_id(s)
                    ]
                else:
                    segment_path = []
                    container_path = []
            else:
                segment_path = []
                container_path = []
            r += [
                translate_row(
                    row(
                        [
                            str(a["_id"]),
                            str(a["root"]) if a.get("root") else "",
                            a["timestamp"],
                            # a["clear_timestamp"],
                            "",
                            str(duration),
                            moss[a["managed_object"]][0],
                            moss[a["managed_object"]][1],
                            mo_hostname.get(a["managed_object"], ""),
                            Profile.get_by_id(moss[a["managed_object"]][3])
                            if moss[a["managed_object"]][5]
                            else "",
                            moss[a["managed_object"]][6],
                            Platform.get_by_id(moss[a["managed_object"]][9])
                            if moss[a["managed_object"]][9]
                            else "",
                            Firmware.get_by_id(moss[a["managed_object"]][10])
                            if moss[a["managed_object"]][10]
                            else "",
                            AlarmClass.get_by_id(a["alarm_class"]).name,
                            ActiveAlarm.objects.get(id=a["_id"]).subject if subject else None,
                            "Yes" if a["managed_object"] in maintenance else "No",
                            total_objects,
                            total_subscribers,
                            a.get("escalation_tt"),
                            a.get("escalation_ts"),
                            ", ".join(
                                l
                                for l in (
                                    loc.location(moss[a["managed_object"]][5])
                                    if moss[a["managed_object"]][5] is not None
                                    else ""
                                )
                                if l
                            ),
                            container_lookup[a["managed_object"]].get("text", "")
                            if container_lookup
                            else "",
                        ],
                        container_path,
                        segment_path,
                    ),
                    cmap,
                )
            ]
    if o_format == "csv":
        response = HttpResponse(content_type="text/csv")
        response["Content-Disposition"] = 'attachment; filename="alarms.csv"'
        writer = csv.writer(response)
        writer.writerows(r)
        return response
    elif o_format == "xlsx":
        response = StringIO()
        wb = xlsxwriter.Workbook(response)
        cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
        ws = wb.add_worksheet("Alarms")
        max_column_data_length = {}
        for rn, x in enumerate(r):
            for cn, c in enumerate(x):
                if rn and (
                    r[0][cn] not in max_column_data_length
                    or len(str(c)) > max_column_data_length[r[0][cn]]
                ):
                    max_column_data_length[r[0][cn]] = len(str(c))
                ws.write(rn, cn, c, cf1)
        ws.autofilter(0, 0, rn, cn)
        ws.freeze_panes(1, 0)
        for cn, c in enumerate(r[0]):
            # Set column width
            width = get_column_width(c)
            if enable_autowidth and width < max_column_data_length[c]:
                width = max_column_data_length[c]
            ws.set_column(cn, cn, width=width)
        wb.close()
        response.seek(0)
        response = HttpResponse(response.getvalue(), content_type="application/vnd.ms-excel")
        response["Content-Disposition"] = 'attachment; filename="alarms.xlsx"'
        response.close()
        return response
def api_report(
    self,
    request,
    from_date,
    to_date,
    o_format,
    min_duration=0,
    max_duration=0,
    min_objects=0,
    min_subscribers=0,
    segment=None,
    administrative_domain=None,
    selector=None,
    ex_selector=None,
    columns=None,
    source="both",
    alarm_class=None,
    subscribers=None,
    enable_autowidth=False,
):
    def row(row, container_path, segment_path):
        def qe(v):
            if v is None:
                return ""
            if isinstance(v, str):
                return smart_text(v)
            elif isinstance(v, datetime.datetime):
                return v.strftime("%Y-%m-%d %H:%M:%S")
            elif not isinstance(v, str):
                return smart_text(v)
            else:
                return v

        r = [qe(x) for x in row]
        if len(container_path) < self.CONTAINER_PATH_DEPTH:
            container_path += [""] * (self.CONTAINER_PATH_DEPTH - len(container_path))
        else:
            container_path = container_path[: self.CONTAINER_PATH_DEPTH]
        if len(segment_path) < self.SEGMENT_PATH_DEPTH:
            segment_path += [""] * (self.SEGMENT_PATH_DEPTH - len(segment_path))
        else:
            segment_path = segment_path[: self.SEGMENT_PATH_DEPTH]
        return r + container_path + segment_path

    def translate_row(row, cmap):
        return [row[i] for i in cmap]

    cols = (
        [
            "id",
            "root_id",
            "from_ts",
            "to_ts",
            "duration_sec",
            "object_name",
            "object_address",
            "object_hostname",
            "object_profile",
            "object_admdomain",
            "object_platform",
            "object_version",
            "alarm_class",
            "alarm_subject",
            "maintenance",
            "objects",
            "subscribers",
            "tt",
            "escalation_ts",
            "location",
            "container_address",
        ]
        + ["container_%d" % i for i in range(self.CONTAINER_PATH_DEPTH)]
        + ["segment_%d" % i for i in range(self.SEGMENT_PATH_DEPTH)]
    )
    header_row = (
        [
            "ID",
            _("ROOT_ID"),
            _("FROM_TS"),
            _("TO_TS"),
            _("DURATION_SEC"),
            _("OBJECT_NAME"),
            _("OBJECT_ADDRESS"),
            _("OBJECT_HOSTNAME"),
            _("OBJECT_PROFILE"),
            _("OBJECT_ADMDOMAIN"),
            _("OBJECT_PLATFORM"),
            _("OBJECT_VERSION"),
            _("ALARM_CLASS"),
            _("ALARM_SUBJECT"),
            _("MAINTENANCE"),
            _("OBJECTS"),
            _("SUBSCRIBERS"),
            _("TT"),
            _("ESCALATION_TS"),
            _("LOCATION"),
            _("CONTAINER_ADDRESS"),
        ]
        + ["CONTAINER_%d" % i for i in range(self.CONTAINER_PATH_DEPTH)]
        + ["SEGMENT_%d" % i for i in range(self.SEGMENT_PATH_DEPTH)]
    )
    if columns:
        cmap = []
        for c in columns.split(","):
            try:
                cmap += [cols.index(c)]
            except ValueError:
                continue
    else:
        cmap = list(range(len(cols)))
    subscribers_profile = self.default_subscribers_profile
    if subscribers:
        subscribers_profile = set(
            SubscriberProfile.objects.filter(id__in=subscribers.split(",")).scalar("id")
        )
    r = [translate_row(header_row, cmap)]
    fd = datetime.datetime.strptime(to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
    match = {
        "timestamp": {"$gte": datetime.datetime.strptime(from_date, "%d.%m.%Y"), "$lte": fd}
    }
    match_duration = {"duration": {"$gte": min_duration}}
    if max_duration:
        match_duration = {"duration": {"$gte": min_duration, "$lte": max_duration}}
    mos = ManagedObject.objects.filter(is_managed=True)
    if segment:
        try:
            match["segment_path"] = bson.ObjectId(segment)
        except bson.errors.InvalidId:
            pass
    ads = []
    if administrative_domain:
        if administrative_domain.isdigit():
            administrative_domain = [int(administrative_domain)]
            ads = AdministrativeDomain.get_nested_ids(administrative_domain[0])
    if not request.user.is_superuser:
        user_ads = UserAccess.get_domains(request.user)
        if administrative_domain and ads:
            if administrative_domain[0] not in user_ads:
                ads = list(set(ads) & set(user_ads))
                if not ads:
                    return HttpResponse(
                        "<html><body>Permission denied: Invalid Administrative Domain</body></html>"
                    )
        else:
            ads = user_ads
    if ads:
        mos = mos.filter(administrative_domain__in=ads)
    if selector:
        selector = ManagedObjectSelector.get_by_id(int(selector))
        mos = mos.filter(selector.Q)
    if ex_selector:
        ex_selector = ManagedObjectSelector.get_by_id(int(ex_selector))
        mos = mos.exclude(ex_selector.Q)
    # Working if Administrative domain is set
    if ads:
        try:
            match["adm_path"] = {"$in": ads}
            # @todo More than 2 levels of hierarchy
        except bson.errors.InvalidId:
            pass
    mos_id = list(mos.order_by("id").values_list("id", flat=True))
    mo_hostname = {}
    maintenance = []
    if mos_id and (selector or ex_selector):
        match["managed_object"] = {"$in": mos_id}
    if "maintenance" in columns.split(","):
        maintenance = Maintenance.currently_affected()
    if "object_hostname" in columns.split(","):
        mo_hostname = ReportObjectsHostname1(sync_ids=mos_id)
        mo_hostname = mo_hostname.get_dictionary()
    moss = ReportAlarmObjects(mos_id).get_all()
    # container_lookup = ReportContainer(mos_id)
    container_lookup = None
    subject = "alarm_subject" in columns
    loc = AlarmApplication([])
    if source in ["archive", "both"]:
        # Archived Alarms
        for a in (
            ArchivedAlarm._get_collection()
            .with_options(read_preference=ReadPreference.SECONDARY_PREFERRED)
            .aggregate(
                [
                    {"$match": match},
                    {
                        "$addFields": {
                            "duration": {
                                "$divide": [
                                    {"$subtract": ["$clear_timestamp", "$timestamp"]},
                                    1000,
                                ]
                            }
                        }
                    },
                    {"$match": match_duration},
                    # {"$sort": {"timestamp": 1}}
                ]
            )
        ):
            if int(a["managed_object"]) not in moss:
                continue
            dt = a["clear_timestamp"] - a["timestamp"]
            duration = int(dt.total_seconds())
            total_objects = sum(ss["summary"] for ss in a["total_objects"])
            if min_objects and total_objects < min_objects:
                continue
            total_subscribers = sum(
                ss["summary"]
                for ss in a["total_subscribers"]
                if subscribers_profile and ss["profile"] in subscribers_profile
            )
            if min_subscribers and total_subscribers < min_subscribers:
                continue
            if "segment_" in columns.split(",") or "container_" in columns.split(","):
                path = ObjectPath.get_path(a["managed_object"])
                if path:
                    segment_path = [
                        NetworkSegment.get_by_id(s).name
                        for s in path.segment_path
                        if NetworkSegment.get_by_id(s)
                    ]
                    container_path = [
                        Object.get_by_id(s).name
                        for s in path.container_path
                        if Object.get_by_id(s)
                    ]
                else:
                    segment_path = []
                    container_path = []
            else:
                segment_path = []
                container_path = []
            r += [
                translate_row(
                    row(
                        [
                            smart_text(a["_id"]),
                            smart_text(a["root"]) if a.get("root") else "",
                            a["timestamp"],
                            a["clear_timestamp"],
                            smart_text(duration),
                            moss[a["managed_object"]][0],
                            moss[a["managed_object"]][1],
                            smart_text(mo_hostname.get(a["managed_object"], "")),
                            Profile.get_by_id(moss[a["managed_object"]][3]).name
                            if moss[a["managed_object"]][5]
                            else "",
                            moss[a["managed_object"]][6],
                            Platform.get_by_id(moss[a["managed_object"]][9])
                            if moss[a["managed_object"]][9]
                            else "",
                            smart_text(Firmware.get_by_id(moss[a["managed_object"]][10]).version)
                            if moss[a["managed_object"]][10]
                            else "",
                            AlarmClass.get_by_id(a["alarm_class"]).name,
                            ArchivedAlarm.objects.get(id=a["_id"]).subject if subject else "",
                            "",
                            total_objects,
                            total_subscribers,
                            a.get("escalation_tt"),
                            a.get("escalation_ts"),
                            ", ".join(
                                ll
                                for ll in (
                                    loc.location(moss[a["managed_object"]][5])
                                    if moss[a["managed_object"]][5] is not None
                                    else ""
                                )
                                if ll
                            ),
                            container_lookup[a["managed_object"]].get("text", "")
                            if container_lookup
                            else "",
                        ],
                        container_path,
                        segment_path,
                    ),
                    cmap,
                )
            ]
    # Active Alarms
    if source in ["active", "both"]:
        datenow = datetime.datetime.now()
        for a in (
            ActiveAlarm._get_collection()
            .with_options(read_preference=ReadPreference.SECONDARY_PREFERRED)
            .aggregate(
                [
                    {"$match": match},
                    {
                        "$addFields": {
                            "duration": {"$divide": [{"$subtract": [fd, "$timestamp"]}, 1000]}
                        }
                    },
                    {"$match": match_duration},
                    # {"$sort": {"timestamp": 1}}
                ]
            )
        ):
            dt = datenow - a["timestamp"]
            duration = int(dt.total_seconds())
            total_objects = sum(ss["summary"] for ss in a["total_objects"])
            if min_objects and total_objects < min_objects:
                continue
            total_subscribers = sum(
                ss["summary"]
                for ss in a["total_subscribers"]
                if subscribers_profile and ss["profile"] in subscribers_profile
            )
            if min_subscribers and total_subscribers < min_subscribers:
                continue
            if "segment_" in columns.split(",") or "container_" in columns.split(","):
                path = ObjectPath.get_path(a["managed_object"])
                if path:
                    segment_path = [
                        NetworkSegment.get_by_id(s).name
                        for s in path.segment_path
                        if NetworkSegment.get_by_id(s)
                    ]
                    container_path = [
                        Object.get_by_id(s).name
                        for s in path.container_path
                        if Object.get_by_id(s)
                    ]
                else:
                    segment_path = []
                    container_path = []
            else:
                segment_path = []
                container_path = []
            r += [
                translate_row(
                    row(
                        [
                            smart_text(a["_id"]),
                            smart_text(a["root"]) if a.get("root") else "",
                            a["timestamp"],
                            # a["clear_timestamp"],
                            "",
                            smart_text(duration),
                            moss[a["managed_object"]][0],
                            moss[a["managed_object"]][1],
                            smart_text(mo_hostname.get(a["managed_object"], "")),
                            Profile.get_by_id(moss[a["managed_object"]][3])
                            if moss[a["managed_object"]][5]
                            else "",
                            moss[a["managed_object"]][6],
                            Platform.get_by_id(moss[a["managed_object"]][9])
                            if moss[a["managed_object"]][9]
                            else "",
                            smart_text(Firmware.get_by_id(moss[a["managed_object"]][10]).version)
                            if moss[a["managed_object"]][10]
                            else "",
                            AlarmClass.get_by_id(a["alarm_class"]).name,
                            ActiveAlarm.objects.get(id=a["_id"]).subject if subject else None,
                            "Yes" if a["managed_object"] in maintenance else "No",
                            total_objects,
                            total_subscribers,
                            a.get("escalation_tt"),
                            a.get("escalation_ts"),
                            ", ".join(
                                ll
                                for ll in (
                                    loc.location(moss[a["managed_object"]][5])
                                    if moss[a["managed_object"]][5] is not None
                                    else ""
                                )
                                if ll
                            ),
                            container_lookup[a["managed_object"]].get("text", "")
                            if container_lookup
                            else "",
                        ],
                        container_path,
                        segment_path,
                    ),
                    cmap,
                )
            ]
    if source in ["long_archive"]:
        o_format = "csv_zip"
        columns = [
            "ALARM_ID",
            "MO_ID",
            "OBJECT_PROFILE",
            "VENDOR",
            "PLATFORM",
            "VERSION",
            "OPEN_TIMESTAMP",
            "CLOSE_TIMESTAMP",
            "LOCATION",
            "",
            "POOL",
            "ADM_DOMAIN",
            "MO_NAME",
            "IP",
            "ESCALATION_TT",
            "DURATION",
            "SEVERITY",
            "REBOOTS",
        ]
        from noc.core.clickhouse.connect import connection

        ch = connection()
        fd = datetime.datetime.strptime(from_date, "%d.%m.%Y")
        td = datetime.datetime.strptime(to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
        if td - fd > datetime.timedelta(days=390):
            return HttpResponseBadRequest(
                _("Report more than 1 year not allowed. If needed, request it from the Administrator")
            )
        ac = AlarmClass.objects.get(name="NOC | Managed Object | Ping Failed")
        subs = ", ".join(
            "subscribers.summary[indexOf(subscribers.profile, '%s')] as `%s`" % (sp.bi_id, sp.name)
            for sp in SubscriberProfile.objects.filter().order_by("name")
        )
        if subs:
            columns += [sp.name for sp in SubscriberProfile.objects.filter().order_by("name")]
        r = ch.execute(
            LONG_ARCHIVE_QUERY
            % (
                ", %s" % subs if subs else "",
                fd.date().isoformat(),
                td.date().isoformat(),
                ac.bi_id,
            )
        )
    filename = "alarms.csv"
    if o_format == "csv":
        response = HttpResponse(content_type="text/csv")
        response["Content-Disposition"] = 'attachment; filename="%s"' % filename
        writer = csv.writer(response)
        writer.writerows(r)
        return response
    elif o_format == "csv_zip":
        response = BytesIO()
        f = TextIOWrapper(TemporaryFile(mode="w+b"), encoding="utf-8")
        writer = csv.writer(f, dialect="excel", delimiter=";", quotechar='"')
        writer.writerow(columns)
        writer.writerows(r)
        f.seek(0)
        with ZipFile(response, "w", compression=ZIP_DEFLATED) as zf:
            zf.writestr(filename, f.read())
            zf.filename = "%s.zip" % filename
        # response = HttpResponse(content_type="text/csv")
        response.seek(0)
        response = HttpResponse(response.getvalue(), content_type="application/zip")
        response["Content-Disposition"] = 'attachment; filename="%s.zip"' % filename
        return response
    elif o_format == "xlsx":
        response = BytesIO()
        wb = xlsxwriter.Workbook(response)
        cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
        ws = wb.add_worksheet("Alarms")
        max_column_data_length = {}
        for rn, x in enumerate(r):
            for cn, c in enumerate(x):
                if rn and (
                    r[0][cn] not in max_column_data_length
                    or len(str(c)) > max_column_data_length[r[0][cn]]
                ):
                    max_column_data_length[r[0][cn]] = len(str(c))
                ws.write(rn, cn, c, cf1)
        ws.autofilter(0, 0, rn, cn)
        ws.freeze_panes(1, 0)
        for cn, c in enumerate(r[0]):
            # Set column width
            width = get_column_width(c)
            if enable_autowidth and width < max_column_data_length[c]:
                width = max_column_data_length[c]
            ws.set_column(cn, cn, width=width)
        wb.close()
        response.seek(0)
        response = HttpResponse(response.getvalue(), content_type="application/vnd.ms-excel")
        response["Content-Disposition"] = 'attachment; filename="alarms.xlsx"'
        response.close()
        return response