def get_data(self, **kwargs):
    """
    Build report data: the number of events per SNMP trap OID among
    events of the "Unknown | SNMP Trap" event class, most frequent first.

    :returns: dataset with columns (OID, Name, Count)
    """
    columns = [
        "OID",
        "Name",
        TableColumn("Count", format="integer", align="right", total="sum"),
    ]
    c = EventClass.objects.filter(name="Unknown | SNMP Trap").first()
    if c is None:
        # Event class is not present in this installation --
        # return an empty report instead of crashing on c.id
        return self.from_dataset(title=self.title, columns=columns, data=[])
    # Group events of this class by their trap OID and count occurrences
    pipeline = [
        {"$match": {"event_class": c.id}},
        {"$project": {"vars": 1}},
        {"$group": {"_id": "$vars.trap_oid", "count": {"$sum": 1}}},
    ]
    oids = ActiveEvent._get_collection().aggregate(pipeline)
    data = [(e["_id"], MIB.get_name(e["_id"]), e["count"]) for e in oids]
    # Sort by count, descending
    data.sort(key=lambda row: row[2], reverse=True)
    return self.from_dataset(title=self.title, columns=columns, data=data)
def get_data(self, **kwargs):
    """
    Build report data: count trap OIDs found in the vars of
    "Unknown | SNMP Trap" events and keep only those whose resolved MIB
    name still looks unclassified (matches self.rx_unclassified).

    :returns: dataset with columns (OID, Name, Count)
    """
    # TODO: original note (translated from Russian): rework via aggregate;
    # the function counts OIDs in event vars and checks whether they
    # have been recognized.
    columns = [
        "OID",
        "Name",
        TableColumn("Count", format="integer", align="right", total="sum"),
    ]
    c = EventClass.objects.filter(name="Unknown | SNMP Trap").first()
    if c is None:
        # Event class is not present -- nothing to report
        return self.from_dataset(title=self.title, columns=columns, data=[])
    pipeline = [
        {"$match": {"event_class": c.id}},
        {"$project": {"vars": 1}},
        {"$group": {"_id": "$vars.trap_oid", "count": {"$sum": 1}}},
    ]
    oids = ActiveEvent._get_collection().aggregate(pipeline)
    counted = [(e["_id"], MIB.get_name(e["_id"]), e["count"]) for e in oids]
    # NOTE: removed leftover Python 2 debug statement `print d`
    # (a SyntaxError on Python 3, which this file otherwise targets)
    data = [
        (oid, name, count)
        for oid, name, count in counted
        if self.rx_unclassified.search(name)
    ]
    return self.from_dataset(title=self.title, columns=columns, data=data)
def handle_clean(self, options, events):
    """
    Remove old active events in CLEAN_WINDOW-sized batches, preserving
    any event referenced as the opening/closing event of an alarm.

    :param options: dict with optional keys:
        "before" -- "%Y-%m-%d" date string; events with timestamp up to
                    this moment are candidates (default: now - DEFAULT_CLEAN)
        "force"  -- actually execute the deletion (after a countdown);
                    otherwise only the per-interval counts are printed
    :param events: unused here; kept for the command's handler signature
    """
    before = options.get("before")
    if before:
        # BUG FIX: the original parsed the date but discarded the result,
        # leaving `before` a string and breaking timestamp comparisons below
        before = datetime.datetime.strptime(before, "%Y-%m-%d")
    else:
        self.print("Before is not set, use default")
        before = datetime.datetime.now() - DEFAULT_CLEAN
    force = options.get("force")
    aa = ActiveAlarm._get_collection()
    ah = ArchivedAlarm._get_collection()
    ae = ActiveEvent._get_collection()
    # Oldest event in range; None when there is nothing to clean
    first = ae.find_one(
        {"timestamp": {"$lte": before}}, limit=1, sort=[("timestamp", 1)]
    )
    if first is None:
        self.print("Nothing to clean")
        return
    event_ts = first["timestamp"]
    print("[%s] Cleaned before %s ... \n" % ("events", before), end="")
    bulk = []
    window = CLEAN_WINDOW
    while event_ts < before:
        # Events referenced by alarms in this window must be kept
        refer_event_ids = []
        for coll in (aa, ah):
            for doc in coll.find(
                {"timestamp": {"$gte": event_ts, "$lte": event_ts + CLEAN_WINDOW}},
                {"opening_event": 1, "closing_event": 1},
            ):
                if "opening_event" in doc:
                    refer_event_ids += [doc["opening_event"]]
                if "closing_event" in doc:
                    refer_event_ids += [doc["closing_event"]]
        try:
            clear_qs = {
                "timestamp": {"$gte": event_ts, "$lte": event_ts + CLEAN_WINDOW},
                "_id": {"$nin": refer_event_ids},
            }
            self.print(
                "Interval: %s, %s; Count: %d"
                % (event_ts, event_ts + CLEAN_WINDOW, ae.count(clear_qs))
            )
            bulk += [DeleteMany(clear_qs)]
            event_ts += window
            if window != CLEAN_WINDOW:
                # Back to the normal step after a successful shrunken pass
                window = CLEAN_WINDOW
        except DocumentTooLarge:
            # Too many excluded ids for one query document: halve the
            # window and retry (original step-back behavior preserved)
            window = window // 2
            if window < datetime.timedelta(hours=1):
                self.die("Too many events for delete in interval %s" % window)
            event_ts -= window
    if force:
        self.print("All data before %s from active events will be Remove..\n" % before)
        # Countdown to give the operator a chance to abort (Ctrl+C)
        for i in reversed(range(1, 10)):
            self.print("%d\n" % i)
            time.sleep(1)
        # pymongo raises InvalidOperation on an empty bulk_write list
        if bulk:
            ae.bulk_write(bulk)