Example #1
def get_segment(name):
    ns = NetworkSegment(name=name,
                        parent=get_root_segment(),
                        profile=get_segment_profile())
    ns.save()
    return ns
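
A minimal usage sketch, assuming this helper sits in a pytest module next to get_root_segment() and get_segment_profile(); the test below is hypothetical and not taken from the project:

# Hypothetical test exercising the helper above (assumes the same module-level fixtures).
def test_get_segment_creates_named_segment():
    ns = get_segment("test-segment")
    assert ns.name == "test-segment"      # the requested name is passed through
    assert ns.profile is not None         # profile comes from get_segment_profile()
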
Example #2
    def api_report(
        self,
        request,
        from_date,
        to_date,
        o_format,
        min_duration=0,
        max_duration=0,
        min_objects=0,
        min_subscribers=0,
        segment=None,
        administrative_domain=None,
        selector=None,
        ex_selector=None,
        columns=None,
        source="both",
        alarm_class=None,
        subscribers=None,
        enable_autowidth=False,
    ):
        def row(row, container_path, segment_path):
            def qe(v):
                if v is None:
                    return ""
                if isinstance(v, unicode):
                    return v.encode("utf-8")
                elif isinstance(v, datetime.datetime):
                    return v.strftime("%Y-%m-%d %H:%M:%S")
                elif not isinstance(v, str):
                    return str(v)
                else:
                    return v

            r = [qe(x) for x in row]
            if len(container_path) < self.CONTAINER_PATH_DEPTH:
                container_path += [""] * (self.CONTAINER_PATH_DEPTH -
                                          len(container_path))
            else:
                container_path = container_path[:self.CONTAINER_PATH_DEPTH]
            if len(segment_path) < self.SEGMENT_PATH_DEPTH:
                segment_path += [""] * (self.SEGMENT_PATH_DEPTH -
                                        len(segment_path))
            else:
                segment_path = segment_path[:self.SEGMENT_PATH_DEPTH]
            return r + container_path + segment_path

        def translate_row(row, cmap):
            return [row[i] for i in cmap]

        cols = ([
            "id",
            "root_id",
            "from_ts",
            "to_ts",
            "duration_sec",
            "object_name",
            "object_address",
            "object_hostname",
            "object_profile",
            "object_admdomain",
            "object_platform",
            "object_version",
            "alarm_class",
            "alarm_subject",
            "maintenance",
            "objects",
            "subscribers",
            "tt",
            "escalation_ts",
            "location",
            "container_address",
        ] + ["container_%d" % i for i in range(self.CONTAINER_PATH_DEPTH)] +
                ["segment_%d" % i for i in range(self.SEGMENT_PATH_DEPTH)])

        header_row = (
            [
                "ID",
                _("ROOT_ID"),
                _("FROM_TS"),
                _("TO_TS"),
                _("DURATION_SEC"),
                _("OBJECT_NAME"),
                _("OBJECT_ADDRESS"),
                _("OBJECT_HOSTNAME"),
                _("OBJECT_PROFILE"),
                _("OBJECT_ADMDOMAIN"),
                _("OBJECT_PLATFORM"),
                _("OBJECT_VERSION"),
                _("ALARM_CLASS"),
                _("ALARM_SUBJECT"),
                _("MAINTENANCE"),
                _("OBJECTS"),
                _("SUBSCRIBERS"),
                _("TT"),
                _("ESCALATION_TS"),
                _("LOCATION"),
                _("CONTAINER_ADDRESS"),
            ] + ["CONTAINER_%d" % i
                 for i in range(self.CONTAINER_PATH_DEPTH)] +
            ["SEGMENT_%d" % i for i in range(self.SEGMENT_PATH_DEPTH)])

        if columns:
            cmap = []
            for c in columns.split(","):
                try:
                    cmap += [cols.index(c)]
                except ValueError:
                    continue
        else:
            cmap = list(range(len(cols)))
        subscribers_profile = self.default_subscribers_profile
        if subscribers:
            subscribers_profile = set(
                SubscriberProfile.objects.filter(
                    id__in=subscribers.split(",")).scalar("id"))
        r = [translate_row(header_row, cmap)]
        fd = datetime.datetime.strptime(
            to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
        match = {
            "timestamp": {
                "$gte": datetime.datetime.strptime(from_date, "%d.%m.%Y"),
                "$lte": fd
            }
        }

        match_duration = {"duration": {"$gte": min_duration}}
        if max_duration:
            match_duration = {
                "duration": {
                    "$gte": min_duration,
                    "$lte": max_duration
                }
            }
        mos = ManagedObject.objects.filter(is_managed=True)

        if segment:
            try:
                match["segment_path"] = bson.ObjectId(segment)
            except bson.errors.InvalidId:
                pass

        ads = []
        if administrative_domain:
            if administrative_domain.isdigit():
                administrative_domain = [int(administrative_domain)]
                ads = AdministrativeDomain.get_nested_ids(
                    administrative_domain[0])

        if not request.user.is_superuser:
            user_ads = UserAccess.get_domains(request.user)
            if administrative_domain and ads:
                if administrative_domain[0] not in user_ads:
                    ads = list(set(ads) & set(user_ads))
                else:
                    ads = administrative_domain
            else:
                ads = user_ads
        if ads:
            mos = mos.filter(administrative_domain__in=ads)
        if selector:
            selector = ManagedObjectSelector.get_by_id(int(selector))
            mos = mos.filter(selector.Q)
        if ex_selector:
            ex_selector = ManagedObjectSelector.get_by_id(int(ex_selector))
            mos = mos.exclude(ex_selector.Q)

        # Working if Administrative domain set
        if ads:
            try:
                match["adm_path"] = {"$in": ads}
                # @todo More 2 level hierarhy
            except bson.errors.InvalidId:
                pass

        mos_id = list(mos.order_by("id").values_list("id", flat=True))
        mo_hostname = {}
        maintenance = []
        if mos_id and (selector or ex_selector):
            match["managed_object"] = {"$in": mos_id}
        if "maintenance" in columns.split(","):
            maintenance = Maintenance.currently_affected()
        if "object_hostname" in columns.split(","):
            mo_hostname = ReportObjectsHostname1(sync_ids=mos_id)
            mo_hostname = mo_hostname.get_dictionary()
        moss = ReportAlarmObjects(mos_id).get_all()
        # container_lookup = ReportContainer(mos_id)
        container_lookup = None
        subject = "alarm_subject" in columns
        loc = AlarmApplication([])
        if source in ["archive", "both"]:
            # Archived Alarms
            for a in (ArchivedAlarm._get_collection().with_options(
                    read_preference=ReadPreference.SECONDARY_PREFERRED
            ).aggregate([
                {
                    "$match": match
                },
                {
                    "$addFields": {
                        "duration": {
                            "$divide": [
                                {
                                    "$subtract":
                                    ["$clear_timestamp", "$timestamp"]
                                },
                                1000,
                            ]
                        }
                    }
                },
                {
                    "$match": match_duration
                },
                    # {"$sort": {"timestamp": 1}}
            ])):
                if int(a["managed_object"]) not in moss:
                    continue
                dt = a["clear_timestamp"] - a["timestamp"]
                duration = int(dt.total_seconds())
                total_objects = sum(ss["summary"] for ss in a["total_objects"])
                if min_objects and total_objects < min_objects:
                    continue
                total_subscribers = sum(
                    ss["summary"] for ss in a["total_subscribers"]
                    if subscribers_profile
                    and ss["profile"] in subscribers_profile)
                if min_subscribers and total_subscribers < min_subscribers:
                    continue
                if "segment_" in columns.split(
                        ",") or "container_" in columns.split(","):
                    path = ObjectPath.get_path(a["managed_object"])
                    if path:
                        segment_path = [
                            NetworkSegment.get_by_id(s).name
                            for s in path.segment_path
                            if NetworkSegment.get_by_id(s)
                        ]
                        container_path = [
                            Object.get_by_id(s).name
                            for s in path.container_path if Object.get_by_id(s)
                        ]
                    else:
                        segment_path = []
                        container_path = []
                else:
                    segment_path = []
                    container_path = []
                r += [
                    translate_row(
                        row(
                            [
                                str(a["_id"]),
                                str(a["root"]) if a.get("root") else "",
                                a["timestamp"],
                                a["clear_timestamp"],
                                str(duration),
                                moss[a["managed_object"]][0],
                                moss[a["managed_object"]][1],
                                mo_hostname.get(a["managed_object"], ""),
                                Profile.get_by_id(
                                    moss[a["managed_object"]][3]).name
                                if moss[a["managed_object"]][5] else "",
                                moss[a["managed_object"]][6],
                                Platform.get_by_id(
                                    moss[a["managed_object"]][9])
                                if moss[a["managed_object"]][9] else "",
                                Firmware.get_by_id(
                                    moss[a["managed_object"]][10])
                                if moss[a["managed_object"]][10] else "",
                                AlarmClass.get_by_id(a["alarm_class"]).name,
                                ArchivedAlarm.objects.get(
                                    id=a["_id"]).subject if subject else "",
                                "",
                                total_objects,
                                total_subscribers,
                                a.get("escalation_tt"),
                                a.get("escalation_ts"),
                                ", ".join(l for l in (
                                    loc.location(moss[a["managed_object"]][5]
                                                 ) if moss[a["managed_object"]]
                                    [5] is not None else "") if l),
                                container_lookup[a["managed_object"]].get(
                                    "text", "") if container_lookup else "",
                            ],
                            container_path,
                            segment_path,
                        ),
                        cmap,
                    )
                ]
        # Active Alarms
        if source in ["active", "both"]:
            for a in (ActiveAlarm._get_collection().with_options(
                    read_preference=ReadPreference.SECONDARY_PREFERRED).
                      aggregate([
                          {
                              "$match": match
                          },
                          {
                              "$addFields": {
                                  "duration": {
                                      "$divide": [{
                                          "$subtract": [fd, "$timestamp"]
                                      }, 1000]
                                  }
                              }
                          },
                          {
                              "$match": match_duration
                          },
                          # {"$sort": {"timestamp": 1}}
                      ])):
                dt = fd - a["timestamp"]
                duration = int(dt.total_seconds())
                total_objects = sum(ss["summary"] for ss in a["total_objects"])
                if min_objects and total_objects < min_objects:
                    continue
                total_subscribers = sum(
                    ss["summary"] for ss in a["total_subscribers"]
                    if subscribers_profile
                    and ss["profile"] in subscribers_profile)
                if min_subscribers and total_subscribers < min_subscribers:
                    continue
                if "segment_" in columns.split(
                        ",") or "container_" in columns.split(","):
                    path = ObjectPath.get_path(a["managed_object"])
                    if path:
                        segment_path = [
                            NetworkSegment.get_by_id(s).name
                            for s in path.segment_path
                            if NetworkSegment.get_by_id(s)
                        ]
                        container_path = [
                            Object.get_by_id(s).name
                            for s in path.container_path if Object.get_by_id(s)
                        ]
                    else:
                        segment_path = []
                        container_path = []
                else:
                    segment_path = []
                    container_path = []
                r += [
                    translate_row(
                        row(
                            [
                                str(a["_id"]),
                                str(a["root"]) if a.get("root") else "",
                                a["timestamp"],
                                # a["clear_timestamp"],
                                "",
                                str(duration),
                                moss[a["managed_object"]][0],
                                moss[a["managed_object"]][1],
                                mo_hostname.get(a["managed_object"], ""),
                                Profile.get_by_id(moss[a["managed_object"]][3])
                                if moss[a["managed_object"]][5] else "",
                                moss[a["managed_object"]][6],
                                Platform.get_by_id(
                                    moss[a["managed_object"]][9])
                                if moss[a["managed_object"]][9] else "",
                                Firmware.get_by_id(
                                    moss[a["managed_object"]][10])
                                if moss[a["managed_object"]][10] else "",
                                AlarmClass.get_by_id(a["alarm_class"]).name,
                                ActiveAlarm.objects.get(
                                    id=a["_id"]).subject if subject else None,
                                "Yes" if a["managed_object"] in maintenance
                                else "No",
                                total_objects,
                                total_subscribers,
                                a.get("escalation_tt"),
                                a.get("escalation_ts"),
                                ", ".join(l for l in (
                                    loc.location(moss[a["managed_object"]][5]
                                                 ) if moss[a["managed_object"]]
                                    [5] is not None else "") if l),
                                container_lookup[a["managed_object"]].get(
                                    "text", "") if container_lookup else "",
                            ],
                            container_path,
                            segment_path,
                        ),
                        cmap,
                    )
                ]

        if o_format == "csv":
            response = HttpResponse(content_type="text/csv")
            response[
                "Content-Disposition"] = 'attachment; filename="alarms.csv"'
            writer = csv.writer(response)
            writer.writerows(r)
            return response
        elif o_format == "xlsx":
            response = StringIO()
            wb = xlsxwriter.Workbook(response)
            cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
            ws = wb.add_worksheet("Alarms")
            max_column_data_length = {}
            for rn, x in enumerate(r):
                for cn, c in enumerate(x):
                    if rn and (r[0][cn] not in max_column_data_length or
                               len(str(c)) > max_column_data_length[r[0][cn]]):
                        max_column_data_length[r[0][cn]] = len(str(c))
                    ws.write(rn, cn, c, cf1)
            ws.autofilter(0, 0, rn, cn)
            ws.freeze_panes(1, 0)
            for cn, c in enumerate(r[0]):
                # Set column width
                width = get_column_width(c)
                if enable_autowidth and width < max_column_data_length[c]:
                    width = max_column_data_length[c]
                ws.set_column(cn, cn, width=width)
            wb.close()
            response.seek(0)
            response = HttpResponse(response.getvalue(),
                                    content_type="application/vnd.ms-excel")
            response[
                "Content-Disposition"] = 'attachment; filename="alarms.xlsx"'
            response.close()
            return response
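
The column handling in this report follows a generic pattern: a fixed cols list names every available column, the caller's comma-separated columns string is mapped onto indices (cmap), and each assembled row is filtered through translate_row. A self-contained sketch of just that selection step, with hypothetical column names:

# Standalone sketch of the cols/cmap/translate_row column-selection pattern used above.
def build_cmap(all_cols, requested):
    # Map a comma-separated column list onto indices of all_cols, ignoring unknown names.
    if not requested:
        return list(range(len(all_cols)))
    cmap = []
    for name in requested.split(","):
        try:
            cmap.append(all_cols.index(name))
        except ValueError:
            continue  # unknown columns are silently skipped, as in the report code
    return cmap

def translate_row(row, cmap):
    return [row[i] for i in cmap]

cols = ["id", "object_name", "duration_sec"]
cmap = build_cmap(cols, "object_name,duration_sec")
print(translate_row(["42", "sw-01", "300"], cmap))  # -> ['sw-01', '300']
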
Example #3
def test_base_trial():
    attacker = NetworkSegment(name="attacker")
    target = NetworkSegment(name="target")
    policy = BaseBioSegPolicy(attacker, target)
    with pytest.raises(NotImplementedError):
        policy.trial()
Example #4
def get_root_segment():
    ns = NetworkSegment(name="Discovery Tests", profile=get_segment_profile())
    ns.save()
    return ns
Example #5
 def api_alarm(self, request, id):
     alarm = get_alarm(id)
     if not alarm:
         return self.response_not_found()
     user = request.user
     d = self.instance_to_dict(alarm)
     d["body"] = alarm.body
     d["symptoms"] = alarm.alarm_class.symptoms
     d["probable_causes"] = alarm.alarm_class.probable_causes
     d["recommended_actions"] = alarm.alarm_class.recommended_actions
     d["vars"] = sorted(alarm.vars.items())
     d["status"] = alarm.status
     d["status__label"] = {"A": "Active", "C": "Cleared"}[alarm.status]
     # Managed object properties
     mo = alarm.managed_object
     d["managed_object_address"] = mo.address
     d["managed_object_profile"] = mo.profile.name
     d["managed_object_platform"] = mo.platform.name if mo.platform else ""
     d["managed_object_version"] = mo.version.version if mo.version else ""
     d["segment"] = mo.segment.name
     d["segment_id"] = str(mo.segment.id)
     d["segment_path"] = " | ".join(
         NetworkSegment.get_by_id(p).name
         for p in NetworkSegment.get_path(mo.segment))
     if mo.container:
         cp = []
         c = mo.container.id
         while c:
             try:
                 o = Object.objects.get(id=c)
                 if o.container:
                     cp.insert(0, o.name)
                 c = o.container.id if o.container else None
             except DoesNotExist:
                 break
         d["container_path"] = " | ".join(cp)
         if not self.location(mo.container.id)[0]:
             d["address_path"] = None
         else:
             d["address_path"] = ", ".join(self.location(mo.container.id))
     d["tags"] = mo.labels
     # Log
     if alarm.log:
         d["log"] = [{
             "timestamp": self.to_json(ll.timestamp),
             "from_status": ll.from_status,
             "to_status": ll.to_status,
             "source": getattr(ll, "source", ""),
             "message": ll.message,
         } for ll in alarm.log]
     # Events
     events = []
     for ec in ActiveEvent, ArchivedEvent:
         for e in ec.objects.filter(alarms=alarm.id):
             events += [{
                 "id": str(e.id),
                 "event_class": str(e.event_class.id),
                 "event_class__label": e.event_class.name,
                 "timestamp": self.to_json(e.timestamp),
                 "status": e.status,
                 "managed_object": e.managed_object.id,
                 "managed_object__label": e.managed_object.name,
                 "subject": e.subject,
             }]
     if events:
         d["events"] = events
     # Alarms
     children = self.get_nested_alarms(alarm)
     if children:
         d["alarms"] = {"expanded": True, "children": children}
     # Subscribers
     if alarm.status == "A":
         d["subscribers"] = self.get_alarm_subscribers(alarm)
         d["is_subscribed"] = user in alarm.subscribers
     # Apply plugins
     plugins = []
     acp = alarm.alarm_class.plugins or []
     acp += [self.diagnostic_plugin]
     for p in acp:
         if p.name in self.plugins:
             plugin = self.plugins[p.name]
             dd = plugin.get_data(alarm, p.config)
             if "plugins" in dd:
                 plugins += dd["plugins"]
                 del dd["plugins"]
             d.update(dd)
     if plugins:
         d["plugins"] = plugins
     return d
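
The container_path block in the middle of this handler walks parent links upward from the object's container, prepending each name and stopping at the top of the chain (only objects that themselves have a container get recorded). A standalone sketch of the same walk over plain dict data, purely illustrative and independent of the project's Object model:

# Illustrative parent-chain walk: containers maps id -> (name, parent_id).
containers = {
    3: ("Rack 12", 2),
    2: ("Room 101", 1),
    1: ("HQ", None),
}

def container_path(start_id):
    path = []
    c = start_id
    while c is not None:
        name, parent = containers.get(c, (None, None))
        if name is None:
            break                      # broken chain, mirrors the DoesNotExist break above
        if parent is not None:
            path.insert(0, name)       # only objects that have a parent are recorded
        c = parent
    return " | ".join(path)

print(container_path(3))  # -> 'Room 101 | Rack 12'
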
Example #6
def test_keep_trial():
    attacker = NetworkSegment(name="attacker")
    target = NetworkSegment(name="target")
    policy = KeepBioSegPolicy(attacker, target)
    assert policy.trial() == "keep"
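
Examples #3 and #6 together pin down the expected policy contract: the base class's trial() must raise NotImplementedError, while the "keep" policy returns the literal string "keep". A hypothetical minimal pair consistent with both tests (a sketch of the tested interface, not the project's actual implementation):

# Minimal policy pair that would satisfy test_base_trial and test_keep_trial above.
class BaseBioSegPolicy(object):
    def __init__(self, attacker, target):
        self.attacker = attacker      # NetworkSegment acting as attacker
        self.target = target          # NetworkSegment acting as target

    def trial(self):
        raise NotImplementedError

class KeepBioSegPolicy(BaseBioSegPolicy):
    def trial(self):
        return "keep"
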
Example #7
 def clean(self):
     super(VLAN, self).clean()
     self.segment = NetworkSegment.get_border_segment(self.segment)
     if self.translation_rule and not self.parent:
         self.translation_rule = None
Example #8
File: views.py Project: 0pt1on/noc
    def api_report(
        self,
        request,
        o_format,
        is_managed=None,
        administrative_domain=None,
        selector=None,
        pool=None,
        segment=None,
        avail_status=False,
        columns=None,
        ids=None,
        enable_autowidth=False,
    ):
        def row(row):
            def qe(v):
                if v is None:
                    return ""
                if isinstance(v, unicode):
                    return v.encode("utf-8")
                elif isinstance(v, datetime.datetime):
                    return v.strftime("%Y-%m-%d %H:%M:%S")
                elif not isinstance(v, str):
                    return str(v)
                else:
                    return v

            return [qe(x) for x in row]

        def translate_row(row, cmap):
            return [row[i] for i in cmap]

        type_columns = ["Up/10G", "Up/1G", "Up/100M", "Down/-", "-"]

        cols = [
            "object1_admin_domain",
            # "id",
            "object1_name",
            "object1_address",
            "object1_platform",
            "object1_segment",
            "object1_tags",
            "object1_iface",
            "object1_descr",
            "object1_speed",
            "object2_admin_domain",
            "object2_name",
            "object2_address",
            "object2_platform",
            "object2_segment",
            "object2_tags",
            "object2_iface",
            "object2_descr",
            "object2_speed",
            "link_proto",
            "last_seen",
        ]

        header_row = [
            "OBJECT1_ADMIN_DOMAIN",
            "OBJECT1_NAME",
            "OBJECT1_ADDRESS",
            "OBJECT1_PLATFORM",
            "OBJECT1_SEGMENT",
            "OBJECT1_TAGS",
            "OBJECT1_IFACE",
            "OBJECT1_DESCR",
            "OBJECT1_SPEED",
            "OBJECT2_ADMIN_DOMAIN",
            "OBJECT2_NAME",
            "OBJECT2_ADDRESS",
            "OBJECT2_PLATFORM",
            "OBJECT2_SEGMENT",
            "OBJECT2_TAGS",
            "OBJECT2_IFACE",
            "OBJECT2_DESCR",
            "OBJECT2_SPEED",
            "LINK_PROTO",
            "LAST_SEEN",
        ]

        if columns:
            cmap = []
            for c in columns.split(","):
                try:
                    cmap += [cols.index(c)]
                except ValueError:
                    continue
        else:
            cmap = list(range(len(cols)))
        r = [translate_row(header_row, cmap)]
        if "interface_type_count" in columns.split(","):
            r[-1].extend(type_columns)
        # self.logger.info(r)
        # self.logger.info("---------------------------------")
        # print("-----------%s------------%s" % (administrative_domain, columns))

        p = Pool.get_by_name(pool or "default")
        mos = ManagedObject.objects.filter()
        if request.user.is_superuser and not administrative_domain and not selector and not segment:
            mos = ManagedObject.objects.filter(pool=p)
        if ids:
            mos = ManagedObject.objects.filter(id__in=[ids])
        if is_managed is not None:
            mos = ManagedObject.objects.filter(is_managed=is_managed)
        if pool:
            mos = mos.filter(pool=p)
        if not request.user.is_superuser:
            mos = mos.filter(
                administrative_domain__in=UserAccess.get_domains(request.user))
        if administrative_domain:
            ads = AdministrativeDomain.get_nested_ids(
                int(administrative_domain))
            mos = mos.filter(administrative_domain__in=ads)
        if selector:
            selector = ManagedObjectSelector.get_by_id(int(selector))
            mos = mos.filter(selector.Q)
        if segment:
            segment = NetworkSegment.objects.filter(id=segment).first()
            if segment:
                mos = mos.filter(segment__in=segment.get_nested_ids())
        mos_id = list(mos.values_list("id", flat=True))

        rld = ReportLinksDetail(mos_id)
        mo_resolv = dict((mo[0], mo[1:])
                         for mo in ManagedObject.objects.filter().values_list(
                             "id",
                             "administrative_domain__name",
                             "name",
                             "address",
                             "segment",
                             "platform",
                             "tags",
                         ))

        for link in rld.out:
            if len(rld.out[link]) != 2:
                # Multilink or bad link
                continue
            s1, s2 = rld.out[link]
            seg1, seg2 = None, None
            if "object1_segment" in columns.split(
                    ",") or "object2_segment" in columns.split(","):
                seg1, seg2 = mo_resolv[s1["mo"][0]][3], mo_resolv[s2["mo"]
                                                                  [0]][3]
            plat1, plat2 = None, None
            if "object1_platform" in columns.split(
                    ",") or "object2_platform" in columns.split(","):
                plat1, plat2 = mo_resolv[s1["mo"][0]][4], mo_resolv[s2["mo"]
                                                                    [0]][4]
            r += [
                translate_row(
                    row([
                        mo_resolv[s1["mo"][0]][0],
                        mo_resolv[s1["mo"][0]][1],
                        mo_resolv[s1["mo"][0]][2],
                        "" if not plat1 else Platform.get_by_id(plat1),
                        "" if not seg1 else NetworkSegment.get_by_id(seg1),
                        ";".join(mo_resolv[s1["mo"][0]][5]),
                        s1["iface_n"][0],
                        s1.get("iface_descr")[0]
                        if s1.get("iface_descr") else "",
                        s1.get("iface_speed")[0]
                        if s1.get("iface_speed") else 0,
                        mo_resolv[s2["mo"][0]][0],
                        mo_resolv[s2["mo"][0]][1],
                        mo_resolv[s2["mo"][0]][2],
                        "" if not plat2 else Platform.get_by_id(plat2),
                        "" if not seg2 else NetworkSegment.get_by_id(seg2),
                        ";".join(mo_resolv[s2["mo"][0]][5]),
                        s2["iface_n"][0],
                        s2.get("iface_descr")[0]
                        if s2.get("iface_descr") else "",
                        s2.get("iface_speed")[0]
                        if s2.get("iface_speed") else 0,
                        s2.get("dis_method", ""),
                        s2.get("last_seen", ""),
                    ]),
                    cmap,
                )
            ]
        filename = "links_detail_report_%s" % datetime.datetime.now().strftime(
            "%Y%m%d")
        if o_format == "csv":
            response = HttpResponse(content_type="text/csv")
            response[
                "Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
            writer = csv.writer(response,
                                dialect="excel",
                                delimiter=",",
                                quoting=csv.QUOTE_MINIMAL)
            writer.writerows(r)
            return response
        elif o_format == "xlsx":
            response = StringIO()
            wb = xlsxwriter.Workbook(response)
            cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
            ws = wb.add_worksheet("Objects")
            max_column_data_length = {}
            for rn, x in enumerate(r):
                for cn, c in enumerate(x):
                    if rn and (r[0][cn] not in max_column_data_length or
                               len(str(c)) > max_column_data_length[r[0][cn]]):
                        max_column_data_length[r[0][cn]] = len(str(c))
                    ws.write(rn, cn, c, cf1)
            # for
            ws.autofilter(0, 0, rn, cn)
            ws.freeze_panes(1, 0)
            for cn, c in enumerate(r[0]):
                # Set column width
                width = get_column_width(c)
                if enable_autowidth and width < max_column_data_length[c]:
                    width = max_column_data_length[c]
                ws.set_column(cn, cn, width=width)
            wb.close()
            response.seek(0)
            response = HttpResponse(response.getvalue(),
                                    content_type="application/vnd.ms-excel")
            # response = HttpResponse(
            #     content_type="application/x-ms-excel")
            response[
                "Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
            response.close()
            return response
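
The xlsx branch keeps a per-column maximum of the stringified cell lengths and, when enable_autowidth is set, widens any column whose data exceeds the default width returned by get_column_width (a project helper). A trimmed, self-contained sketch of that bookkeeping, with a plain default width standing in for the helper:

# Self-contained sketch of the in-memory xlsx export with auto-width columns used above.
import io
import xlsxwriter

def write_xlsx(rows, default_width=15):
    # rows[0] is the header row; returns the finished workbook as bytes.
    buf = io.BytesIO()
    wb = xlsxwriter.Workbook(buf)
    ws = wb.add_worksheet("Report")
    border = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
    longest = {}                                    # header name -> longest data cell
    for rn, row in enumerate(rows):
        for cn, cell in enumerate(row):
            if rn and len(str(cell)) > longest.get(rows[0][cn], 0):
                longest[rows[0][cn]] = len(str(cell))
            ws.write(rn, cn, cell, border)
    ws.autofilter(0, 0, rn, cn)
    ws.freeze_panes(1, 0)
    for cn, header in enumerate(rows[0]):
        ws.set_column(cn, cn, width=max(default_width, longest.get(header, 0)))
    wb.close()
    buf.seek(0)
    return buf.getvalue()
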
Example #9
    def _refresh_object(cls, managed_object):
        from noc.sa.models.managedobject import ManagedObject
        from noc.inv.models.networksegment import NetworkSegment

        def to_dict(v):
            return {r["profile"]: r["summary"] for r in v}

        def to_list(v):
            return [{"profile": k, "summary": v[k]} for k in sorted(v)]

        if hasattr(managed_object, "id"):
            managed_object = managed_object.id
        coll = ServiceSummary._get_collection()
        bulk = []
        # Get existing summary
        old_summary = {
            x["interface"]: x
            for x in coll.find(
                {"managed_object": managed_object},
                {
                    "_id": 1,
                    "interface": 1,
                    "service": 1,
                    "subscriber": 1
                },
                comment=
                "[servicesummary._refresh_object] Refresh summary of services for managed object",
            ) if "interface" in x
        }
        # Get actual summary
        new_summary = ServiceSummary.build_summary_for_object(managed_object)
        # Merge summaries
        for iface in old_summary:
            if iface not in new_summary:
                # Stale, delete
                bulk += [DeleteOne({"_id": old_summary[iface]["_id"]})]
                continue
            oi = old_summary[iface]
            old_services = to_dict(oi["service"])
            old_subs = to_dict(oi["subscriber"])
            ni = new_summary[iface]
            if old_services != ni["service"] or old_subs != ni["subscriber"]:
                # Changed, update
                bulk += [
                    UpdateOne(
                        {"_id": oi["_id"]},
                        {
                            "$set": {
                                "service": to_list(ni["service"]),
                                "subscriber": to_list(ni["subscriber"]),
                            }
                        },
                    )
                ]
            # Mark as processed
            del new_summary[iface]
        # Process new items
        bulk += [
            InsertOne({
                "managed_object": managed_object,
                "interface": iface,
                "service": to_list(new_summary[iface]["service"]),
                "subscriber": to_list(new_summary[iface]["subscriber"]),
            }) for iface in new_summary
        ]
        if bulk:
            logger.info("Committing changes to database")
            try:
                r = coll.bulk_write(bulk, ordered=False)
                logger.info("Database has been synced")
                logger.info("Modify: %d, Deleted: %d", r.modified_count,
                            r.deleted_count)
            except BulkWriteError as e:
                logger.error("Bulk write error: '%s'", e.details)
                logger.error("Stopping check")
        mo = ManagedObject.get_by_id(managed_object)
        NetworkSegment.update_summary(mo.segment)
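
The refresh works as a diff between the stored summaries and the freshly built ones: stale interfaces become DeleteOne, changed ones become UpdateOne, leftovers in the new summary become InsertOne, and everything is applied in a single unordered bulk_write. A generic sketch of that diff-to-bulk step, with a hypothetical "payload" field standing in for the service/subscriber lists:

# Generic old-vs-new diff producing a pymongo bulk request list, as in _refresh_object above.
from pymongo import DeleteOne, InsertOne, UpdateOne

def diff_to_bulk(old, new):
    # old/new map a key (e.g. interface id) to a document with "_id" and "payload" fields.
    bulk = []
    for key, doc in old.items():
        if key not in new:
            bulk.append(DeleteOne({"_id": doc["_id"]}))                          # stale, delete
        elif doc["payload"] != new[key]["payload"]:
            bulk.append(UpdateOne({"_id": doc["_id"]},
                                  {"$set": {"payload": new[key]["payload"]}}))   # changed, update
    for key, doc in new.items():
        if key not in old:
            bulk.append(InsertOne({"key": key, "payload": doc["payload"]}))      # new item
    return bulk
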
Example #10
    def get_data(self, request, pool=None, obj_profile=None, **kwargs):
        problems = {}  # id -> problem

        mos = ManagedObject.objects.filter(is_managed=True, pool=pool)
        if not request.user.is_superuser:
            mos = mos.filter(administrative_domain__in=UserAccess.get_domains(request.user))
        if obj_profile:
            # Get all managed objects
            mos = mos.filter(object_profile=obj_profile)
        mos = {
            mo[0]: (mo[1], mo[2], Profile.get_by_id(mo[3]), mo[4], mo[5])
            for mo in mos.values_list("id", "name", "address", "profile", "platform", "segment")
        }
        mos_set = set(mos)
        # Get all managed objects with generic profile
        for mo in mos:
            if mos[mo][2] == GENERIC_PROFILE:
                problems[mo] = _("Profile check failed")
        # Get all managed objects without interfaces
        if_mo = dict(
            (x["_id"], x.get("managed_object"))
            for x in Interface._get_collection().find({}, {"_id": 1, "managed_object": 1})
        )
        for mo in mos_set - set(problems) - set(six.itervalues(if_mo)):
            problems[mo] = _("No interfaces")
        # Get all managed objects without links
        linked_mos = set()
        for d in Link._get_collection().find({}):
            for i in d["interfaces"]:
                linked_mos.add(if_mo.get(i))
        for mo in mos_set - set(problems) - linked_mos:
            problems[mo] = _("No links")
        # Get all managed objects without uplinks
        uplinks = {}
        for d in ObjectData._get_collection().find():
            nu = len(d.get("uplinks", []))
            if nu:
                uplinks[d["_id"]] = nu
        for mo in mos_set - set(problems) - set(uplinks):
            problems[mo] = _("No uplinks")
        #
        data = []
        for mo_id in problems:
            if mo_id not in mos:
                continue
            name, address, profile, platform, segment = mos[mo_id]
            data += [
                [
                    name,
                    address,
                    profile.name,
                    Platform.get_by_id(platform).name if platform else "",
                    NetworkSegment.get_by_id(segment).name if segment else "",
                    problems[mo_id],
                ]
            ]
        data = sorted(data)
        return self.from_dataset(
            title=self.title,
            columns=["Name", "Address", "Profile", "Platform", "Segment", "Problem"],
            data=data,
            enumerate=True,
        )
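
Each check in this report only looks at objects that have not already been flagged, so the subtraction order doubles as a problem priority: profile check first, then interfaces, links, and uplinks. A stripped-down sketch of that ordering with hypothetical check sets:

# First failed check wins: later checks never overwrite an earlier problem label.
def assign_problems(all_ids, checks):
    # checks is an ordered list of (label, ids_that_pass_the_check).
    problems = {}
    for label, passing in checks:
        for obj_id in all_ids - set(problems) - passing:
            problems[obj_id] = label
    return problems

print(assign_problems(
    {1, 2, 3, 4},
    [("No interfaces", {1, 2, 3}), ("No links", {1, 2}), ("No uplinks", {1})],
))  # -> {4: 'No interfaces', 3: 'No links', 2: 'No uplinks'}
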
Example #11
    def api_report(self,
                   request,
                   reporttype=None,
                   from_date=None,
                   to_date=None,
                   object_profile=None,
                   filter_default=None,
                   exclude_zero=None,
                   interface_profile=None,
                   selector=None,
                   administrative_domain=None,
                   columns=None,
                   o_format=None,
                   enable_autowidth=False,
                   **kwargs):
        def translate_row(row, cmap):
            return [row[i] for i in cmap]

        map_table = {
            "load_interfaces": r"/Interface\s\|\sLoad\s\|\s[In|Out]/",
            "load_cpu": r"/[CPU|Memory]\s\|\sUsage/",
            "errors": r"/Interface\s\|\s[Errors|Discards]\s\|\s[In|Out]/",
            "ping": r"/Ping\s\|\sRTT/",
        }
        cols = [
            "id",
            "object_name",
            "object_address",
            "object_platform",
            "object_adm_domain",
            "object_segment",
            "object_container",
            # "object_hostname",
            # "object_status",
            # "profile_name",
            # "object_profile",
            # "object_vendor",
            "iface_name",
            "iface_description",
            "iface_speed",
            "load_in",
            "load_in_p",
            "load_out",
            "load_out_p",
            "errors_in",
            "errors_out",
            "slot",
            "cpu_usage",
            "memory_usage",
            "ping_rtt",
            "ping_attempts",
            "interface_flap",
            "interface_load_url",
        ]

        header_row = [
            "ID",
            "OBJECT_NAME",
            "OBJECT_ADDRESS",
            "OBJECT_PLATFORM",
            "OBJECT_ADM_DOMAIN",
            "OBJECT_SEGMENT",
            "OBJECT_CONTAINER",
            "IFACE_NAME",
            "IFACE_DESCRIPTION",
            "IFACE_SPEED",
            "LOAD_IN",
            "LOAD_IN_P",
            "LOAD_OUT",
            "LOAD_OUT_P",
            "ERRORS_IN",
            "ERRORS_OUT",
            "CPU_USAGE",
            "MEMORY_USAGE",
            "PING_RTT",
            "PING_ATTEMPTS",
            "INTERFACE_FLAP",
            "INTERFACE_LOAD_URL",
        ]

        if columns:
            cmap = []
            for c in columns.split(","):
                try:
                    cmap += [cols.index(c)]
                except ValueError:
                    continue
        else:
            cmap = list(range(len(cols)))
        columns_order = columns.split(",")
        columns_filter = set(columns_order)
        r = [translate_row(header_row, cmap)]
        object_columns = [c for c in columns_order if c.startswith("object")]

        # Date Time Block
        if not from_date:
            from_date = datetime.datetime.now() - datetime.timedelta(days=1)
        else:
            from_date = datetime.datetime.strptime(from_date, "%d.%m.%Y")
        if not to_date or from_date == to_date:
            to_date = from_date + datetime.timedelta(days=1)
        else:
            to_date = datetime.datetime.strptime(
                to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
        # interval = (to_date - from_date).days
        ts_from_date = time.mktime(from_date.timetuple())
        ts_to_date = time.mktime(to_date.timetuple())

        # Load managed objects
        mos = ManagedObject.objects.filter(is_managed=True)
        if not request.user.is_superuser:
            mos = mos.filter(
                administrative_domain__in=UserAccess.get_domains(request.user))
        if selector:
            mos = mos.filter(
                ManagedObjectSelector.objects.get(id=int(selector)).Q)
        if administrative_domain:
            mos = mos.filter(
                administrative_domain__in=AdministrativeDomain.get_nested_ids(
                    int(administrative_domain)))
        if object_profile:
            mos = mos.filter(object_profile=object_profile)
        # iface_dict = {}

        d_url = {
            "path": "/ui/grafana/dashboard/script/report.js",
            "rname": map_table[reporttype],
            "from": str(int(ts_from_date * 1000)),
            "to": str(int(ts_to_date * 1000)),
            # o.name.replace("#", "%23")
            "biid": "",
            "oname": "",
            "iname": "",
        }

        report_map = {
            "load_interfaces": {
                "url": "%(path)s?title=interface&biid=%(biid)s"
                "&obj=%(oname)s&iface=%(iname)s&from=%(from)s&to=%(to)s",
                "q_group": ["interface"],
                "q_select": {
                    (0, "managed_object", "id"): "managed_object",
                    (1, "path", "iface_name"): "arrayStringConcat(path)",
                },
            },
            "errors": {
                "url":
                """%(path)s?title=errors&biid=%(biid)s&obj=%(oname)s&iface=%(iname)s&from=%(from)s&to=%(to)s""",
                "q_group": ["interface"],
            },
            "load_cpu": {
                "url":
                """%(path)s?title=cpu&biid=%(biid)s&obj=%(oname)s&from=%(from)s&to=%(to)s""",
                "q_select": {
                    (0, "managed_object", "id"): "managed_object",
                    (1, "path", "slot"): "arrayStringConcat(path)",
                },
            },
            "ping": {
                "url":
                """%(path)s?title=ping&biid=%(biid)s&obj=%(oname)s&from=%(from)s&to=%(to)s""",
                "q_select": {
                    (0, "managed_object", "id"): "managed_object"
                },
            },
        }

        query_map = {
            # "iface_description": ('', 'iface_description', "''"),
            "iface_description": (
                "",
                "iface_description",
                "dictGetString('interfaceattributes','description' , (managed_object, arrayStringConcat(path)))",
            ),
            "iface_speed": (
                "speed",
                "iface_speed",
                "if(max(speed) = 0, dictGetUInt64('interfaceattributes', 'in_speed', "
                "(managed_object, arrayStringConcat(path))), max(speed))",
            ),
            "load_in":
            ("load_in", "l_in", "round(quantile(0.90)(load_in), 0)"),
            "load_in_p": (
                "load_in",
                "l_in_p",
                "replaceOne(toString(round(quantile(0.90)(load_in) / "
                "if(max(speed) = 0, dictGetUInt64('interfaceattributes', 'in_speed', "
                "(managed_object, arrayStringConcat(path))), max(speed)), 4) * 100), '.', ',')",
            ),
            "load_out":
            ("load_out", "l_out", "round(quantile(0.90)(load_out), 0)"),
            "load_out_p": (
                "load_out",
                "l_out_p",
                "replaceOne(toString(round(quantile(0.90)(load_out) / "
                "if(max(speed) = 0, dictGetUInt64('interfaceattributes', 'in_speed', "
                "(managed_object, arrayStringConcat(path))), max(speed)), 4) * 100), '.', ',')",
            ),
            "errors_in": ("errors_in", "err_in", "quantile(0.90)(errors_in)"),
            "errors_out":
            ("errors_out", "err_out", "quantile(0.90)(errors_out)"),
            "cpu_usage": ("usage", "cpu_usage", "quantile(0.90)(usage)"),
            "ping_rtt":
            ("rtt", "ping_rtt", "round(quantile(0.90)(rtt) / 1000, 2)"),
            "ping_attempts": ("attempts", "ping_attempts", "avg(attempts)"),
        }
        query_fields = []
        for c in report_map[reporttype]["q_select"]:
            query_fields += [c[2]]
        field_shift = len(query_fields)  # deny replacing field
        for c in columns.split(","):
            if c not in query_map:
                continue
            field, alias, func = query_map[c]
            report_map[reporttype]["q_select"][(columns_order.index(c) +
                                                field_shift, field,
                                                alias)] = func
            query_fields += [c]
        metrics_attrs = namedtuple("METRICSATTRs", query_fields)

        mo_attrs = namedtuple("MOATTRs",
                              [c for c in cols if c.startswith("object")])
        containers_address = {}
        if "object_container" in columns_filter:
            containers_address = ReportContainerData(
                set(mos.values_list("id", flat=True)))
            containers_address = dict(list(containers_address.extract()))
        moss = {}
        for row in mos.values_list("bi_id", "name", "address", "platform",
                                   "administrative_domain__name", "segment",
                                   "id"):
            moss[row[0]] = mo_attrs(*[
                row[1],
                row[2],
                str(Platform.get_by_id(row[3]) if row[3] else ""),
                row[4],
                str(NetworkSegment.get_by_id(row[5])) if row[5] else "",
                containers_address.
                get(row[6], "") if containers_address and row[6] else "",
            ])
        url = report_map[reporttype].get("url", "")
        report_metric = self.metric_source[reporttype](tuple(sorted(moss)),
                                                       from_date,
                                                       to_date,
                                                       columns=None)
        report_metric.SELECT_QUERY_MAP = report_map[reporttype]["q_select"]
        if exclude_zero and reporttype == "load_interfaces":
            report_metric.CUSTOM_FILTER["having"] += [
                "max(load_in) != 0 AND max(load_out) != 0"
            ]
        if interface_profile:
            interface_profile = InterfaceProfile.objects.filter(
                id=interface_profile).first()
            report_metric.CUSTOM_FILTER["having"] += [
                "dictGetString('interfaceattributes', 'profile', "
                "(managed_object, arrayStringConcat(path))) = '%s'" %
                interface_profile.name
            ]
        # OBJECT_PLATFORM, ADMIN_DOMAIN, SEGMENT, OBJECT_HOSTNAME
        for row in report_metric.do_query():
            mm = metrics_attrs(*row)
            mo = moss[int(mm.id)]
            res = []
            for y in columns_order:
                if y in object_columns:
                    res += [getattr(mo, y)]
                else:
                    res += [getattr(mm, y)]
            if "interface_load_url" in columns_filter:
                d_url["biid"] = mm.id
                d_url["oname"] = mo[2].replace("#", "%23")
                # res += [url % d_url, interval]
                res.insert(columns_order.index("interface_load_url"),
                           url % d_url)
            r += [res]
        filename = "metrics_detail_report_%s" % datetime.datetime.now(
        ).strftime("%Y%m%d")
        if o_format == "csv":
            response = HttpResponse(content_type="text/csv")
            response[
                "Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
            writer = csv.writer(response,
                                dialect="excel",
                                delimiter=",",
                                quoting=csv.QUOTE_MINIMAL)
            writer.writerows(r)
            return response
        elif o_format == "xlsx":
            response = StringIO()
            wb = xlsxwriter.Workbook(response)
            cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
            ws = wb.add_worksheet("Alarms")
            max_column_data_length = {}
            for rn, x in enumerate(r):
                for cn, c in enumerate(x):
                    if rn and (r[0][cn] not in max_column_data_length or
                               len(str(c)) > max_column_data_length[r[0][cn]]):
                        max_column_data_length[r[0][cn]] = len(str(c))
                    ws.write(rn, cn, c, cf1)
            ws.autofilter(0, 0, rn, cn)
            ws.freeze_panes(1, 0)
            for cn, c in enumerate(r[0]):
                # Set column width
                width = get_column_width(c)
                if enable_autowidth and width < max_column_data_length[c]:
                    width = max_column_data_length[c]
                ws.set_column(cn, cn, width=width)
            wb.close()
            response.seek(0)
            response = HttpResponse(response.getvalue(),
                                    content_type="application/vnd.ms-excel")
            response[
                "Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
            response.close()
            return response
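
The interface_load_url column is produced by %-formatting one of the report_map URL templates against the d_url dict, after filling in the object identifiers (with "#" escaped for the query string). A tiny, self-contained sketch of that templating step with hypothetical values:

# %(key)s templating of the Grafana dashboard link, as used for interface_load_url above.
d_url = {
    "path": "/ui/grafana/dashboard/script/report.js",
    "biid": "12345",
    "oname": "sw-01#core".replace("#", "%23"),
    "iname": "Gi0/1",
    "from": "1609459200000",
    "to": "1609545600000",
}
url = ("%(path)s?title=interface&biid=%(biid)s"
       "&obj=%(oname)s&iface=%(iname)s&from=%(from)s&to=%(to)s")
print(url % d_url)
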
Example #12
 def q_seg(id):
     ns = NetworkSegment.get_by_id(id)
     if ns:
         return ns.name
     return str(id)
Example #13
    def api_report(
        self,
        request,
        o_format,
        is_managed=None,
        administrative_domain=None,
        selector=None,
        pool=None,
        segment=None,
        avail_status=False,
        columns=None,
        ids=None,
        detail_stat=None,
        enable_autowidth=False,
    ):
        def row(row):
            def qe(v):
                if v is None:
                    return ""
                if isinstance(v, six.text_type):
                    return v.encode("utf-8")
                elif isinstance(v, datetime.datetime):
                    return v.strftime("%Y-%m-%d %H:%M:%S")
                elif not isinstance(v, str):
                    return str(v)
                else:
                    return v

            return [qe(x) for x in row]

        def translate_row(row, cmap):
            return [row[i] for i in cmap]

        type_columns = ["Up/10G", "Up/1G", "Up/100M", "Up/10M", "Down/-", "-"]
        cols = [
            "id",
            "object_name",
            "object_address",
            "object_hostname",
            "object_status",
            "profile_name",
            "object_profile",
            "object_vendor",
            "object_platform",
            "object_attr_hwversion",
            "object_version",
            "object_attr_bootprom",
            "object_serial",
            "object_attr_patch",
            "auth_profile",
            "avail",
            "admin_domain",
            "container",
            "segment",
            "phys_interface_count",
            "link_count",
            "last_config_ts"
            # "discovery_problem"
            # "object_tags"
            # "sorted_tags"
            # "object_caps"
            # "interface_type_count"
        ]

        header_row = [
            "ID",
            "OBJECT_NAME",
            "OBJECT_ADDRESS",
            "OBJECT_HOSTNAME",
            "OBJECT_STATUS",
            "PROFILE_NAME",
            "OBJECT_PROFILE",
            "OBJECT_VENDOR",
            "OBJECT_PLATFORM",
            "OBJECT_HWVERSION",
            "OBJECT_VERSION",
            "OBJECT_BOOTPROM",
            "OBJECT_SERIAL",
            "OBJECT_ATTR_PATCH",
            "AUTH_PROFILE",
            "AVAIL",
            "ADMIN_DOMAIN",
            "CONTAINER",
            "SEGMENT",
            "PHYS_INTERFACE_COUNT",
            "LINK_COUNT",
            "LAST_CONFIG_TS",
        ]
        # "DISCOVERY_PROBLEM"
        # "ADM_PATH
        # "DISCOVERY_PROBLEM"
        # "OBJECT_TAGS"
        # "SORTED_TAGS"
        # "OBJECT_CAPS"
        # "INTERFACE_TYPE_COUNT"

        if columns:
            cmap = []
            for c in columns.split(","):
                try:
                    cmap += [cols.index(c)]
                except ValueError:
                    continue
        else:
            cmap = list(range(len(cols)))
        r = [translate_row(header_row, cmap)]
        mos = self.get_report_object(
            request.user, is_managed, administrative_domain, selector, pool, segment, ids
        )
        columns_filter = set(columns.split(","))
        mos_id = tuple(mos.order_by("id").values_list("id", flat=True))
        mos_filter = None
        if detail_stat:
            ref = ReportModelFilter()
            ids = list(six.itervalues(ref.proccessed(detail_stat)))
            mos_filter = set(mos_id).intersection(ids[0])
            mos_id = sorted(mos_filter)
        avail = {}
        if "avail" in columns_filter:
            avail = ObjectStatus.get_statuses(mos_id)
        link_count = iter(ReportObjectLinkCount(mos_id))
        iface_count = iter(ReportObjectIfacesTypeStat(mos_id))
        if "container" in columns_filter:
            container_lookup = iter(ReportContainerData(mos_id))
        else:
            container_lookup = None
        if "object_serial" in columns_filter:
            container_serials = iter(ReportContainer(mos_id))
        else:
            container_serials = None
        if "interface_type_count" in columns_filter:
            iss = iter(ReportObjectIfacesStatusStat(mos_id))
        else:
            iss = None
        if "object_attr_patch" in columns_filter or "object_serial" in columns_filter:
            roa = iter(ReportObjectAttributes(mos_id))
        else:
            roa = None
        hn = iter(ReportObjectsHostname1(mos_id))
        rc = iter(ReportObjectConfig(mos_id))
        # ccc = iter(ReportObjectCaps(mos_id))
        if "adm_path" in columns_filter:
            ad_path = ReportAdPath()
            r[-1].extend([_("ADM_PATH1"), _("ADM_PATH1"), _("ADM_PATH1")])
        if "interface_type_count" in columns_filter:
            r[-1].extend(type_columns)
        if "object_caps" in columns_filter:
            object_caps = ReportObjectCaps(mos_id)
            caps_columns = list(six.itervalues(object_caps.ATTRS))
            ccc = iter(object_caps)
            r[-1].extend(caps_columns)
        if "object_tags" in columns_filter:
            r[-1].extend([_("OBJECT_TAGS")])
        if "sorted_tags" in columns_filter:
            tags = set()
            for s in (
                ManagedObject.objects.filter()
                .exclude(tags=None)
                .values_list("tags", flat=True)
                .distinct()
            ):
                tags.update(set(s))
            tags_o = sorted([t for t in tags if "{" not in t])
            r[-1].extend(tags_o)
        if "discovery_problem" in columns.split(","):
            discovery_result = ReportDiscoveryResult(mos_id)
            discovery_result.safe_output = True
            discovery_result.unknown_value = ([""] * len(discovery_result.ATTRS),)
            dp_columns = discovery_result.ATTRS
            dp = iter(discovery_result)
            r[-1].extend(dp_columns)
        for (
            mo_id,
            name,
            address,
            is_managed,
            sa_profile,
            o_profile,
            auth_profile,
            ad,
            m_segment,
            vendor,
            platform,
            version,
            tags,
        ) in (
            mos.values_list(
                "id",
                "name",
                "address",
                "is_managed",
                "profile",
                "object_profile__name",
                "auth_profile__name",
                "administrative_domain__name",
                "segment",
                "vendor",
                "platform",
                "version",
                "tags",
            )
            .order_by("id")
            .iterator()
        ):
            if (mos_filter and mo_id not in mos_filter) or not mos_id:
                continue
            if container_serials:
                mo_serials = next(container_serials)
            else:
                mo_serials = [{}]
            if container_lookup:
                mo_continer = next(container_lookup)
            else:
                mo_continer = ("",)
            if roa:
                serial, hw_ver, boot_prom, patch = next(roa)[0]  # noqa
            else:
                serial, hw_ver, boot_prom, patch = "", "", "", ""  # noqa
            r.append(
                translate_row(
                    row(
                        [
                            mo_id,
                            name,
                            address,
                            next(hn)[0],
                            "managed" if is_managed else "unmanaged",
                            Profile.get_by_id(sa_profile),
                            o_profile,
                            Vendor.get_by_id(vendor) if vendor else "",
                            Platform.get_by_id(platform) if platform else "",
                            hw_ver,
                            Firmware.get_by_id(version) if version else "",
                            boot_prom,
                            # Serial
                            mo_serials[0].get("serial", "") or serial,
                            patch or "",
                            auth_profile,
                            _("Yes") if avail.get(mo_id, None) else _("No"),
                            ad,
                            mo_container[0],
                            NetworkSegment.get_by_id(m_segment) if m_segment else "",
                            next(iface_count)[0],
                            next(link_count)[0],
                            next(rc)[0],
                        ]
                    ),
                    cmap,
                )
            )
            if "adm_path" in columns_filter:
                r[-1].extend([ad] + list(ad_path[ad]))
            if "interface_type_count" in columns_filter:
                r[-1].extend(next(iss)[0])
            if "object_caps" in columns_filter:
                r[-1].extend(next(ccc)[0])
            if "object_tags" in columns_filter:
                r[-1].append(",".join(tags if tags else []))
            if "sorted_tags" in columns_filter:
                out_tags = [""] * len(tags_o)
                try:
                    if tags:
                        for m in tags:
                            out_tags[tags_o.index(m)] = m
                except ValueError:
                    logger.warning("Bad value for tag: %s", m)
                r[-1].extend(out_tags)
            if "discovery_problem" in columns_filter:
                r[-1].extend(next(dp)[0])
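        # Serialize the assembled rows: CSV, or XLSX with optional column autowidth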
        filename = "mo_detail_report_%s" % datetime.datetime.now().strftime("%Y%m%d")
        if o_format == "csv":
            response = HttpResponse(content_type="text/csv")
            response["Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
            writer = csv.writer(response, dialect="excel", delimiter=";", quotechar='"')
            writer.writerows(r)
            return response
        elif o_format == "xlsx":
            response = StringIO()
            wb = xlsxwriter.Workbook(response)
            cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
            ws = wb.add_worksheet("Objects")
            max_column_data_length = {}
            for rn, x in enumerate(r):
                for cn, c in enumerate(x):
                    if rn and (
                        r[0][cn] not in max_column_data_length
                        or len(str(c)) > max_column_data_length[r[0][cn]]
                    ):
                        max_column_data_length[r[0][cn]] = len(str(c))
                    ws.write(rn, cn, c, cf1)
            # for
            ws.autofilter(0, 0, rn, cn)
            ws.freeze_panes(1, 0)
            for cn, c in enumerate(r[0]):
                # Set column width
                width = get_column_width(c)
                if enable_autowidth and width < max_column_data_length[c]:
                    width = max_column_data_length[c]
                ws.set_column(cn, cn, width=width)
            wb.close()
            response.seek(0)
            response = HttpResponse(response.getvalue(), content_type="application/vnd.ms-excel")
            # response = HttpResponse(
            #     content_type="application/x-ms-excel")
            response["Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
            response.close()
            return response
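
The XLSX branch above widens each column to the longest value it holds when enable_autowidth is set. A minimal, self-contained sketch of that pattern, assuming only xlsxwriter; the helper name fit_columns and the default width of 15 are illustrative, not part of the project:

import io

import xlsxwriter


def fit_columns(rows, sheet_name="Objects", default_width=15):
    # Write rows into an in-memory workbook and widen every column to its
    # longest cell (header included), mirroring the autowidth logic above.
    output = io.BytesIO()
    workbook = xlsxwriter.Workbook(output)
    worksheet = workbook.add_worksheet(sheet_name)
    widths = {}
    for rn, row in enumerate(rows):
        for cn, cell in enumerate(row):
            widths[cn] = max(widths.get(cn, default_width), len(str(cell)))
            worksheet.write(rn, cn, cell)
    for cn, width in widths.items():
        worksheet.set_column(cn, cn, width=width)
    workbook.close()
    output.seek(0)
    return output.getvalue()

fit_columns(rows) returns the finished .xlsx bytes, ready to wrap in an HttpResponse the way the report does.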
Example #14
0
def fix():
    for ns in NetworkSegment.objects.all():
        NetworkSegment.update_summary(ns)
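
Example #14 rebuilds the summary for every NetworkSegment in a single pass. A hedged variant with progress logging; the logger and the import path are additions for illustration, while NetworkSegment.update_summary is called exactly as above:

import logging

from noc.inv.models.networksegment import NetworkSegment  # import path assumed

logger = logging.getLogger(__name__)


def fix():
    # Recalculate the summary for every segment, logging progress every
    # 100 segments so long runs stay observable.
    n = 0
    for n, ns in enumerate(NetworkSegment.objects.all(), start=1):
        NetworkSegment.update_summary(ns)
        if n % 100 == 0:
            logger.info("Updated summaries for %d segments", n)
    logger.info("Done: %d segments updated", n)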
Example #15
0
File: views.py Project: nbashev/noc
    def api_report(
        self,
        request,
        from_date,
        to_date,
        o_format,
        min_duration=0,
        max_duration=0,
        min_objects=0,
        min_subscribers=0,
        segment=None,
        administrative_domain=None,
        selector=None,
        ex_selector=None,
        columns=None,
        source="both",
        alarm_class=None,
        subscribers=None,
        enable_autowidth=False,
    ):
        def row(row, container_path, segment_path):
            def qe(v):
                if v is None:
                    return ""
                if isinstance(v, datetime.datetime):
                    return v.strftime("%Y-%m-%d %H:%M:%S")
                return smart_text(v)

            r = [qe(x) for x in row]
            if len(container_path) < self.CONTAINER_PATH_DEPTH:
                container_path += [""] * (self.CONTAINER_PATH_DEPTH -
                                          len(container_path))
            else:
                container_path = container_path[:self.CONTAINER_PATH_DEPTH]
            if len(segment_path) < self.SEGMENT_PATH_DEPTH:
                segment_path += [""] * (self.SEGMENT_PATH_DEPTH -
                                        len(segment_path))
            else:
                segment_path = segment_path[:self.SEGMENT_PATH_DEPTH]
            return r + container_path + segment_path

        def translate_row(row, cmap):
            return [row[i] for i in cmap]

        cols = ([
            "id",
            "root_id",
            "from_ts",
            "to_ts",
            "duration_sec",
            "object_name",
            "object_address",
            "object_hostname",
            "object_profile",
            "object_admdomain",
            "object_platform",
            "object_version",
            "alarm_class",
            "alarm_subject",
            "maintenance",
            "objects",
            "subscribers",
            "tt",
            "escalation_ts",
            "location",
            "container_address",
        ] + ["container_%d" % i for i in range(self.CONTAINER_PATH_DEPTH)] +
                ["segment_%d" % i for i in range(self.SEGMENT_PATH_DEPTH)])

        header_row = (
            [
                "ID",
                _("ROOT_ID"),
                _("FROM_TS"),
                _("TO_TS"),
                _("DURATION_SEC"),
                _("OBJECT_NAME"),
                _("OBJECT_ADDRESS"),
                _("OBJECT_HOSTNAME"),
                _("OBJECT_PROFILE"),
                _("OBJECT_ADMDOMAIN"),
                _("OBJECT_PLATFORM"),
                _("OBJECT_VERSION"),
                _("ALARM_CLASS"),
                _("ALARM_SUBJECT"),
                _("MAINTENANCE"),
                _("OBJECTS"),
                _("SUBSCRIBERS"),
                _("TT"),
                _("ESCALATION_TS"),
                _("LOCATION"),
                _("CONTAINER_ADDRESS"),
            ] + ["CONTAINER_%d" % i
                 for i in range(self.CONTAINER_PATH_DEPTH)] +
            ["SEGMENT_%d" % i for i in range(self.SEGMENT_PATH_DEPTH)])

        if columns:
            cmap = []
            for c in columns.split(","):
                try:
                    cmap += [cols.index(c)]
                except ValueError:
                    continue
        else:
            cmap = list(range(len(cols)))
        subscribers_profile = self.default_subscribers_profile
        if subscribers:
            subscribers_profile = set(
                SubscriberProfile.objects.filter(
                    id__in=subscribers.split(",")).scalar("id"))
        r = [translate_row(header_row, cmap)]
        fd = datetime.datetime.strptime(
            to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
        match = {
            "timestamp": {
                "$gte": datetime.datetime.strptime(from_date, "%d.%m.%Y"),
                "$lte": fd
            }
        }

        match_duration = {"duration": {"$gte": min_duration}}
        if max_duration:
            match_duration = {
                "duration": {
                    "$gte": min_duration,
                    "$lte": max_duration
                }
            }
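        # Narrow the alarm match and the managed-object queryset by segment, administrative domain, selectors and user access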
        mos = ManagedObject.objects.filter(is_managed=True)

        if segment:
            try:
                match["segment_path"] = bson.ObjectId(segment)
            except bson.errors.InvalidId:
                pass

        ads = []
        if administrative_domain:
            if administrative_domain.isdigit():
                administrative_domain = [int(administrative_domain)]
                ads = AdministrativeDomain.get_nested_ids(
                    administrative_domain[0])

        if not request.user.is_superuser:
            user_ads = UserAccess.get_domains(request.user)
            if administrative_domain and ads:
                if administrative_domain[0] not in user_ads:
                    ads = list(set(ads) & set(user_ads))
                    if not ads:
                        return HttpResponse(
                            "<html><body>Permission denied: Invalid Administrative Domain</html></body>"
                        )
            else:
                ads = user_ads
        if ads:
            mos = mos.filter(administrative_domain__in=ads)
        if selector:
            selector = ManagedObjectSelector.get_by_id(int(selector))
            mos = mos.filter(selector.Q)
        if ex_selector:
            ex_selector = ManagedObjectSelector.get_by_id(int(ex_selector))
            mos = mos.exclude(ex_selector.Q)

        # Working if Administrative domain set
        if ads:
            try:
                match["adm_path"] = {"$in": ads}
                # @todo Support more than 2 levels of hierarchy
            except bson.errors.InvalidId:
                pass

        mos_id = list(mos.order_by("id").values_list("id", flat=True))
        mo_hostname = {}
        maintenance = []
        if mos_id and (selector or ex_selector):
            match["managed_object"] = {"$in": mos_id}
        if "maintenance" in columns.split(","):
            maintenance = Maintenance.currently_affected()
        if "object_hostname" in columns.split(","):
            mo_hostname = ReportObjectsHostname1(sync_ids=mos_id)
            mo_hostname = mo_hostname.get_dictionary()
        moss = ReportAlarmObjects(mos_id).get_all()
        # container_lookup = ReportContainer(mos_id)
        container_lookup = None
        subject = "alarm_subject" in columns
        loc = AlarmApplication([])
        if source in ["archive", "both"]:
            # Archived Alarms
            for a in (ArchivedAlarm._get_collection().with_options(
                    read_preference=ReadPreference.SECONDARY_PREFERRED
            ).aggregate([
                {
                    "$match": match
                },
                {
                    "$addFields": {
                        "duration": {
                            "$divide": [
                                {
                                    "$subtract":
                                    ["$clear_timestamp", "$timestamp"]
                                },
                                1000,
                            ]
                        }
                    }
                },
                {
                    "$match": match_duration
                },
                    # {"$sort": {"timestamp": 1}}
            ])):
                if int(a["managed_object"]) not in moss:
                    continue
                dt = a["clear_timestamp"] - a["timestamp"]
                duration = int(dt.total_seconds())
                total_objects = sum(ss["summary"] for ss in a["total_objects"])
                if min_objects and total_objects < min_objects:
                    continue
                total_subscribers = sum(
                    ss["summary"] for ss in a["total_subscribers"]
                    if subscribers_profile
                    and ss["profile"] in subscribers_profile)
                if min_subscribers and total_subscribers < min_subscribers:
                    continue
                if "segment_" in columns.split(
                        ",") or "container_" in columns.split(","):
                    path = ObjectPath.get_path(a["managed_object"])
                    if path:
                        segment_path = [
                            NetworkSegment.get_by_id(s).name
                            for s in path.segment_path
                            if NetworkSegment.get_by_id(s)
                        ]
                        container_path = [
                            Object.get_by_id(s).name
                            for s in path.container_path if Object.get_by_id(s)
                        ]
                    else:
                        segment_path = []
                        container_path = []
                else:
                    segment_path = []
                    container_path = []
                r += [
                    translate_row(
                        row(
                            [
                                smart_text(a["_id"]),
                                smart_text(a["root"]) if a.get("root") else "",
                                a["timestamp"],
                                a["clear_timestamp"],
                                smart_text(duration),
                                moss[a["managed_object"]][0],
                                moss[a["managed_object"]][1],
                                smart_text(
                                    mo_hostname.get(a["managed_object"], "")),
                                Profile.get_by_id(
                                    moss[a["managed_object"]][3]).name
                                if moss[a["managed_object"]][5] else "",
                                moss[a["managed_object"]][6],
                                Platform.get_by_id(
                                    moss[a["managed_object"]][9])
                                if moss[a["managed_object"]][9] else "",
                                smart_text(
                                    Firmware.get_by_id(
                                        moss[a["managed_object"]][10]).version)
                                if moss[a["managed_object"]][10] else "",
                                AlarmClass.get_by_id(a["alarm_class"]).name,
                                ArchivedAlarm.objects.get(
                                    id=a["_id"]).subject if subject else "",
                                "",
                                total_objects,
                                total_subscribers,
                                a.get("escalation_tt"),
                                a.get("escalation_ts"),
                                ", ".join(ll for ll in (
                                    loc.location(moss[a["managed_object"]][5]
                                                 ) if moss[a["managed_object"]]
                                    [5] is not None else "") if ll),
                                container_lookup[a["managed_object"]].get(
                                    "text", "") if container_lookup else "",
                            ],
                            container_path,
                            segment_path,
                        ),
                        cmap,
                    )
                ]
        # Active Alarms
        if source in ["active", "both"]:
            datenow = datetime.datetime.now()
            for a in (ActiveAlarm._get_collection().with_options(
                    read_preference=ReadPreference.SECONDARY_PREFERRED).
                      aggregate([
                          {
                              "$match": match
                          },
                          {
                              "$addFields": {
                                  "duration": {
                                      "$divide": [{
                                          "$subtract": [fd, "$timestamp"]
                                      }, 1000]
                                  }
                              }
                          },
                          {
                              "$match": match_duration
                          },
                          # {"$sort": {"timestamp": 1}}
                      ])):
                dt = datenow - a["timestamp"]
                duration = int(dt.total_seconds())
                total_objects = sum(ss["summary"] for ss in a["total_objects"])
                if min_objects and total_objects < min_objects:
                    continue
                total_subscribers = sum(
                    ss["summary"] for ss in a["total_subscribers"]
                    if subscribers_profile
                    and ss["profile"] in subscribers_profile)
                if min_subscribers and total_subscribers < min_subscribers:
                    continue
                if "segment_" in columns.split(
                        ",") or "container_" in columns.split(","):
                    path = ObjectPath.get_path(a["managed_object"])
                    if path:
                        segment_path = [
                            NetworkSegment.get_by_id(s).name
                            for s in path.segment_path
                            if NetworkSegment.get_by_id(s)
                        ]
                        container_path = [
                            Object.get_by_id(s).name
                            for s in path.container_path if Object.get_by_id(s)
                        ]
                    else:
                        segment_path = []
                        container_path = []
                else:
                    segment_path = []
                    container_path = []
                r += [
                    translate_row(
                        row(
                            [
                                smart_text(a["_id"]),
                                smart_text(a["root"]) if a.get("root") else "",
                                a["timestamp"],
                                # a["clear_timestamp"],
                                "",
                                smart_text(duration),
                                moss[a["managed_object"]][0],
                                moss[a["managed_object"]][1],
                                smart_text(
                                    mo_hostname.get(a["managed_object"], "")),
                                Profile.get_by_id(moss[a["managed_object"]][3])
                                if moss[a["managed_object"]][5] else "",
                                moss[a["managed_object"]][6],
                                Platform.get_by_id(
                                    moss[a["managed_object"]][9])
                                if moss[a["managed_object"]][9] else "",
                                smart_text(
                                    Firmware.get_by_id(
                                        moss[a["managed_object"]][10]).version)
                                if moss[a["managed_object"]][10] else "",
                                AlarmClass.get_by_id(a["alarm_class"]).name,
                                ActiveAlarm.objects.get(
                                    id=a["_id"]).subject if subject else None,
                                "Yes" if a["managed_object"] in maintenance
                                else "No",
                                total_objects,
                                total_subscribers,
                                a.get("escalation_tt"),
                                a.get("escalation_ts"),
                                ", ".join(ll for ll in (
                                    loc.location(moss[a["managed_object"]][5]
                                                 ) if moss[a["managed_object"]]
                                    [5] is not None else "") if ll),
                                container_lookup[a["managed_object"]].get(
                                    "text", "") if container_lookup else "",
                            ],
                            container_path,
                            segment_path,
                        ),
                        cmap,
                    )
                ]
        if source in ["long_archive"]:
            o_format = "csv_zip"
            columns = [
                "ALARM_ID",
                "MO_ID",
                "OBJECT_PROFILE",
                "VENDOR",
                "PLATFORM",
                "VERSION",
                "OPEN_TIMESTAMP",
                "CLOSE_TIMESTAMP",
                "LOCATION",
                "",
                "POOL",
                "ADM_DOMAIN",
                "MO_NAME",
                "IP",
                "ESCALATION_TT",
                "DURATION",
                "SEVERITY",
                "REBOOTS",
            ]
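            # The long-archive report is built straight from ClickHouse via LONG_ARCHIVE_QUERY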
            from noc.core.clickhouse.connect import connection

            ch = connection()
            fd = datetime.datetime.strptime(from_date, "%d.%m.%Y")
            td = datetime.datetime.strptime(
                to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
            if td - fd > datetime.timedelta(days=390):
                return HttpResponseBadRequest(
                    _("Reports spanning more than 1 year are not allowed. If needed, request it from the Administrator")
                )
            ac = AlarmClass.objects.get(
                name="NOC | Managed Object | Ping Failed")
            subs = ", ".join(
                "subscribers.summary[indexOf(subscribers.profile, '%s')] as `%s`"
                % (sp.bi_id, sp.name)
                for sp in SubscriberProfile.objects.filter().order_by("name"))
            if subs:
                columns += [
                    sp.name for sp in
                    SubscriberProfile.objects.filter().order_by("name")
                ]
            r = ch.execute(LONG_ARCHIVE_QUERY % (
                ", %s" % subs if subs else "",
                fd.date().isoformat(),
                td.date().isoformat(),
                ac.bi_id,
            ))

        filename = "alarms.csv"
        if o_format == "csv":
            response = HttpResponse(content_type="text/csv")
            response["Content-Disposition"] = 'attachment; filename="%s"' % filename
            writer = csv.writer(response)
            writer.writerows(r)
            return response
        elif o_format == "csv_zip":
            response = BytesIO()
            f = TextIOWrapper(TemporaryFile(mode="w+b"), encoding="utf-8")
            writer = csv.writer(f,
                                dialect="excel",
                                delimiter=";",
                                quotechar='"')
            writer.writerow(columns)
            writer.writerows(r)
            f.seek(0)
            with ZipFile(response, "w", compression=ZIP_DEFLATED) as zf:
                zf.writestr(filename, f.read())
                zf.filename = "%s.zip" % filename
            # response = HttpResponse(content_type="text/csv")
            response.seek(0)
            response = HttpResponse(response.getvalue(),
                                    content_type="application/zip")
            response["Content-Disposition"] = 'attachment; filename="%s.zip"' % filename
            return response
        elif o_format == "xlsx":
            response = BytesIO()
            wb = xlsxwriter.Workbook(response)
            cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
            ws = wb.add_worksheet("Alarms")
            max_column_data_length = {}
            for rn, x in enumerate(r):
                for cn, c in enumerate(x):
                    if rn and (r[0][cn] not in max_column_data_length or
                               len(str(c)) > max_column_data_length[r[0][cn]]):
                        max_column_data_length[r[0][cn]] = len(str(c))
                    ws.write(rn, cn, c, cf1)
            ws.autofilter(0, 0, rn, cn)
            ws.freeze_panes(1, 0)
            for cn, c in enumerate(r[0]):
                # Set column width
                width = get_column_width(c)
                if enable_autowidth and width < max_column_data_length[c]:
                    width = max_column_data_length[c]
                ws.set_column(cn, cn, width=width)
            wb.close()
            response.seek(0)
            response = HttpResponse(response.getvalue(),
                                    content_type="application/vnd.ms-excel")
            response["Content-Disposition"] = 'attachment; filename="alarms.xlsx"'
            response.close()
            return response
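
Both alarm branches compute duration inside the aggregation pipeline: $subtract on two date fields yields milliseconds, and $divide by 1000 converts that to seconds before the duration match is applied. A standalone sketch of the same pipeline with pymongo; the connection string and collection name are assumptions for illustration:

import datetime

from pymongo import MongoClient

client = MongoClient("mongodb://localhost:27017")  # assumed connection
alarms = client["noc"]["noc.alarms.archived"]      # assumed collection name

pipeline = [
    {"$match": {"timestamp": {"$gte": datetime.datetime(2024, 1, 1),
                              "$lte": datetime.datetime(2024, 2, 1)}}},
    # Date subtraction yields milliseconds; divide by 1000 for seconds
    {"$addFields": {"duration": {"$divide": [
        {"$subtract": ["$clear_timestamp", "$timestamp"]}, 1000]}}},
    # Keep alarms that lasted at least five minutes
    {"$match": {"duration": {"$gte": 300}}},
]
for doc in alarms.aggregate(pipeline):
    print(doc["_id"], int(doc["duration"]))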