Example no. 1
 def get_data(self, request, pool=None, obj_profile=None, **kwargs):
     data = []
     if pool:
         pool = Pool.get_by_id(pool)
     else:
         pool = Pool.get_by_name("default")
     # Get all managed objects
     mos = ManagedObject.objects.filter(is_managed=True, pool=pool)
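     # Non-superusers only see objects within their administrative domains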
     if not request.user.is_superuser:
         mos = ManagedObject.objects.filter(
             is_managed=True,
             pool=pool,
             administrative_domain__in=UserAccess.get_domains(request.user),
         )
     if obj_profile:
         mos = mos.filter(object_profile=obj_profile)
     columns = (_("Managed Object"), _("Address"), _("Object"),
                _("Capabilities"))
     for mo in mos:
         caps = mo.get_caps()
         data += [(mo.name, mo.address, _("Main"), ";".join(caps))]
         for i in Interface.objects.filter(managed_object=mo):
             if i.type == "SVI":
                 continue
             data += [(mo.name, mo.address, i.name,
                       ";".join(i.enabled_protocols))]
     return self.from_dataset(title=self.title, columns=columns, data=data)
Example no. 2
    def get_data(self, request, repo="config", days=1, **kwargs):
        data = []
        baseline = datetime.datetime.now() - datetime.timedelta(days=days)
        if repo == "config":
            mos = ManagedObject.objects.filter()
            if not request.user.is_superuser:
                mos = mos.filter(administrative_domain__in=UserAccess.get_domains(request.user))
            mos = dict(mos.values_list("id", "name"))
            config_db = get_db()["noc.gridvcs.config.files"].with_options(
                read_preference=ReadPreference.SECONDARY_PREFERRED)
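            # Aggregate the latest config change timestamp per object since the baseline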
            pipeline = [{"$match": {"ts": {"$gte": baseline}}},
                        {"$group": {"_id": "$object", "last_ts": {"$max": "$ts"}}},
                        {"$sort": {"_id": 1}}]
            for value in config_db.aggregate(pipeline):
                if value["_id"] not in mos:
                    continue
                data += [(mos[value["_id"]], value["last_ts"])]

        else:
            oc = Object.get_object_class(repo)
            data = [(o, o.last_modified) for o in
                    oc.objects.filter(last_modified__gte=baseline).order_by("-last_modified")]
        return self.from_dataset(
            title="%s: %s in %d days" % (self.title, repo, days),
            columns=[
                "Object",
                TableColumn(_("Last Changed"), format="datetime")],
            data=data,
            enumerate=True)
Example no. 3
 def get_report_object(self,
                       user=None,
                       is_managed=None,
                       adm=None,
                       selector=None,
                       pool=None,
                       segment=None,
                       ids=None):
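     # Build a ManagedObject queryset, narrowing it by the supplied filters and the user's access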
     mos = ManagedObject.objects.filter()
     if user.is_superuser and not adm and not selector and not segment:
         mos = ManagedObject.objects.filter()
     if ids:
         mos = ManagedObject.objects.filter(id__in=[ids])
     if is_managed is not None:
         mos = ManagedObject.objects.filter(is_managed=is_managed)
     if pool:
         p = Pool.get_by_name(pool or "default")
         mos = mos.filter(pool=p)
     if not user.is_superuser:
         mos = mos.filter(
             administrative_domain__in=UserAccess.get_domains(user))
     if adm:
         ads = AdministrativeDomain.get_nested_ids(int(adm))
         mos = mos.filter(administrative_domain__in=ads)
     if selector:
         selector = ManagedObjectSelector.get_by_id(int(selector))
         mos = mos.filter(selector.Q)
     if segment:
         segment = NetworkSegment.objects.filter(id=segment).first()
         if segment:
             mos = mos.filter(segment__in=segment.get_nested_ids())
     return mos
Example no. 4
 def transform_query(cls, query, user):
     """
     Transform query, possibly applying access restrictions
     :param query:
     :param user:
     :return: query dict or None if access denied
     """
     if not user or user.is_superuser:
         return query  # No restrictions
     if query["datasource"].startswith("dictionaries."):
         return query  # To dictionaries
     # Get user domains
     domains = UserAccess.get_domains(user)
     # Resolve domains against dict
     mos_bi = [
         int(bi_id) for bi_id in ManagedObject.objects.filter(
             administrative_domain__in=domains).values_list("bi_id",
                                                            flat=True)
     ]
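     # Limit the BI query to managed objects within the user's administrative domains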
     filter = query.get("filter", {})
     dl = len(mos_bi)
     if not dl:
         return None
     elif dl == 1:
         q = {"$eq": [{"$field": "managed_object"}, mos_bi]}
     else:
         q = {"$in": [{"$field": "managed_object"}, mos_bi]}
     if filter:
         query["filter"] = {"$and": [query["filter"], q]}
     else:
         query["filter"] = q
     return query
Example no. 5
    def queryset(self, request, query=None):
        """
        Filter records for lookup
        """
        qs = super().queryset(request)
        if not request.user.is_superuser:
            user_ads = UserAccess.get_domains(request.user)
            qs = qs.filter(Q(administrative_domain=[]) | Q(administrative_domain__in=user_ads))
        if query and self.query_fields:
            q = qs.filter(self.get_Q(request, query))
            if q:
                return q
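            # No direct match: search managed objects and return the maintenances affecting them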
            sq = ManagedObject.get_search_Q(query)
            if sq:
                obj = ManagedObject.objects.filter(sq)
            else:
                obj = ManagedObject.objects.filter(name__contains=query)
            if obj:
                mos = obj.values_list("id", flat=True)
                ao = AffectedObjects.objects.filter(affected_objects__object__in=mos).values_list(
                    "maintenance"
                )
                return ao
            return qs.filter(type=None)
        else:
            return qs
Example no. 6
 def get_data(self,
              request,
              interval,
              from_date=None,
              to_date=None,
              **kwargs):
     interval = int(interval)
     if not from_date:
         interval = 1
     if interval:
         ts = datetime.datetime.now() - datetime.timedelta(days=interval)
         q = {"timestamp": {"$gte": ts}}
     else:
         t0 = datetime.datetime.strptime(from_date, "%d.%m.%Y")
         if not to_date:
             t1 = datetime.datetime.now()
         else:
             t1 = datetime.datetime.strptime(
                 to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
         q = {"timestamp": {"$gte": t0, "$lte": t1}}
     q["escalation_tt"] = {"$exists": True}
     if not request.user.is_superuser:
         q["adm_path"] = {"$in": UserAccess.get_domains(request.user)}
     data = []
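     # Collect escalated alarms from both the active and archived collections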
     for ac in (ActiveAlarm, ArchivedAlarm):
         for d in ac._get_collection().find(q):
             mo = ManagedObject.get_by_id(d["managed_object"])
             if not mo:
                 continue
             data += [(
                 d["timestamp"].strftime("%Y-%m-%d %H:%M:%S"),
                 d["escalation_ts"].strftime("%Y-%m-%d %H:%M:%S"),
                 mo.name.split("#", 1)[0],
                 mo.address,
                 mo.platform,
                 mo.segment.name,
                 d["escalation_tt"],
                 sum(ss["summary"] for ss in d["total_objects"]),
                 sum(ss["summary"] for ss in d["total_subscribers"]),
             )]
     data = sorted(data, key=operator.itemgetter(0))
     return self.from_dataset(
         title=self.title,
         columns=[
             _("Timestamp"),
             _("Escalation Timestamp"),
             _("Managed Object"),
             _("Address"),
             _("Platform"),
             _("Segment"),
             _("TT"),
             _("Objects"),
             _("Subscribers"),
         ],
         data=data,
         enumerate=True,
     )
Example no. 7
    def get_data(self, request, pool=None, selector=None, report_type=None, **kwargs):

        data = []
        columns, columns_desr = [], []
        r_map = [
            (_("Not Available"), "2is1.3isp1.3is1"),
            (_("Failed to guess CLI credentials"), "2is1.3isp0.2isp1"),
            (_("Failed to guess SNMP community"), "2is1.3isp1.3is2.1isp1")
        ]
        for x, y in r_map:
            columns += [y]
            columns_desr += [x]

        mos = ManagedObject.objects.filter()
        if pool:
            mos = mos.filter(pool=pool)
            data += [SectionRow(name=pool.name)]
        if not request.user.is_superuser:
            mos = mos.filter(administrative_domain__in=UserAccess.get_domains(request.user))
        mos = list(mos.values_list("id", flat=True).order_by("id"))
        mos_s = set(mos)
        report = ReportModelFilter()
        result = report.proccessed(",".join(columns))

        mo_hostname = ReportObjectsHostname1(sync_ids=mos)
        mo_hostname = mo_hostname.get_dictionary()
        d_result = ReportDiscoveryResult(sync_ids=mos)
        d_result = d_result.get_dictionary()
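        # Emit a row for every object matching each problem category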
        for col in columns:
            for mo_id in result[col.strip()].intersection(mos_s):
                mo = ManagedObject.get_by_id(mo_id)
                problem = self.decode_problem(d_result.get(mo_id))
                if not problem and mo_id not in d_result:
                    problem = "Discovery disabled"
                data += [(
                    mo.name,
                    mo.address,
                    mo.administrative_domain.name,
                    mo.profile.name,
                    mo_hostname.get(mo.id, ""),
                    mo.auth_profile if mo.auth_profile else "",
                    mo.auth_profile.user if mo.auth_profile else mo.user,
                    mo.auth_profile.snmp_ro if mo.auth_profile else mo.snmp_ro,
                    _("No") if not mo.get_status() else _("Yes"),
                    columns_desr[columns.index(col)],
                    problem
                )]

        return self.from_dataset(
            title=self.title,
            columns=[
                _("Managed Object"), _("Address"), _("Administrative Domain"), _("Profile"), _("Hostname"),
                _("Auth Profile"), _("Username"), _("SNMP Community"),
                _("Avail"), _("Error"), _("Error Detail")
            ],
            data=data)
Example no. 8
    def get_data(self, request, sel=None):

        qs = ManagedObject.objects
        if not request.user.is_superuser:
            qs = ManagedObject.objects.filter(
                administrative_domain__in=UserAccess.get_domains(request.user))

        # Get all managed objects by selector
        mos_list = qs.filter(sel.Q)

        columns = [
            _("Managed Objects"),
            _("Address"),
            _("Vendor"),
            _("Platform"),
            _("HW Revision"),
            _("SW Version"),
            _("Serial"),
        ]
        data = []

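        # For each managed object, look up linked inventory objects to report asset serials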
        for mo in mos_list:
            q = Object._get_collection().count_documents(
                {"data.management.managed_object": {
                    "$in": [mo.id]
                }})
            if q == 0:
                data += [[
                    mo.name,
                    mo.address,
                    mo.vendor or None,
                    mo.platform.full_name if mo.platform else None,
                    mo.get_attr("HW version") or None,
                    mo.version.version if mo.version else None,
                    mo.get_attr("Serial Number") or None,
                ]]
            else:
                for x in Object._get_collection().find(
                    {"data.management.managed_object": {
                        "$in": [mo.id]
                    }}):
                    data += [[
                        x["name"],
                        mo.address,
                        mo.vendor or None,
                        mo.platform.full_name if mo.platform else None,
                        mo.get_attr("HW version") or None,
                        mo.version.version if mo.version else None,
                        x["data"]["asset"]["serial"],
                    ]]

        return self.from_dataset(title=self.title,
                                 columns=columns,
                                 data=data,
                                 enumerate=True)
Example no. 9
    def has_repo_config_access(self, user, obj):
        """
        Check user has access to object

        :param user: User instance
        :param obj: ManagedObject instance
        :return: True if user has access, False otherwise
        """
        if user.is_superuser:
            return True
        return ManagedObject.objects.filter(id=obj.id).filter(UserAccess.Q(user)).exists()
Example no. 10
File: views.py Project: 0pt1on/noc
    def get_data(self,
                 request,
                 pool=None,
                 int_profile=None,
                 mop=None,
                 avail_status=None,
                 **kwargs):
        data = []
        mos = ManagedObject.objects.filter(is_managed=True)

        # % fixme remove.
        if not pool and request.user.is_superuser:
            pool = Pool.get_by_name("STAGEMO")
        if pool:
            mos = mos.filter(pool=pool)
        if not request.user.is_superuser:
            mos = mos.filter(
                administrative_domain__in=UserAccess.get_domains(request.user))
        if mop:
            mos = mos.filter(object_profile=mop)
        mos_ids = mos.values_list("id", flat=True)

        iface = Interface.objects.filter(managed_object__in=mos,
                                         profile=int_profile,
                                         type="physical").values_list(
                                             "managed_object", "name")
        res = []
        n = 0
        # Interface._get_collection()
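        # Process object ids in chunks of 10000 to keep the ObjectFact query bounded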
        while mos_ids[n:n + 10000]:
            mos_ids_f = mos_ids[n:n + 10000]
            s_iface = {"%d.%s" % (mo.id, name) for mo, name in iface}
            of = ObjectFact.objects.filter(
                object__in=mos_ids_f,
                cls="subinterface",
                attrs__traffic_control_broadcast=False)
            a_f = {".".join((str(o.object.id), o.attrs["name"])) for o in of}
            res.extend(a_f.intersection(s_iface))
            n += 10000
        for s in res:
            mo, iface = s.split(".")
            mo = ManagedObject.get_by_id(mo)
            data += [(mo.name, mo.address, mo.profile.name, iface)]

        return self.from_dataset(
            title=self.title,
            columns=[
                _("Managed Object"),
                _("Address"),
                _("SA Profile"),
                _("Interface")
            ],
            data=data,
        )
Example no. 11
File: views.py Project: nbashev/noc
 def queryset(self, request, query=None):
     """
     Filter records for lookup
     """
     status = request.GET.get("status", "A")
     if status not in self.model_map:
         raise Exception("Invalid status")
     model = self.model_map[status]
     if request.user.is_superuser:
         return model.objects.filter().read_preference(ReadPreference.SECONDARY_PREFERRED).all()
     else:
         return model.objects.filter(
             adm_path__in=UserAccess.get_domains(request.user),
         ).read_preference(ReadPreference.SECONDARY_PREFERRED)
Example no. 12
 def search(cls, handler, query):
     q = Q(name__icontains=query)
     sq = ManagedObject.get_search_Q(query)
     if sq:
         q |= sq
     if not handler.current_user.is_superuser:
         q &= UserAccess.Q(handler.current_user)
     r = []
     for mo in ManagedObject.objects.filter(q):
         r += [{
             "scope": "managedobject",
             "id": mo.id,
             "label": "%s (%s) [%s]" % (mo.name, mo.address, mo.platform),
         }]
     return r
Example no. 13
File: views.py Project: nbashev/noc
 def queryset(self, request, query=None):
     """
     Filter records for lookup
     """
     self.logger.info("Queryset %s" % query)
     if self.managed_filter:
         q = d_Q(is_managed=True)
     else:
         q = d_Q()
     if not request.user.is_superuser:
         q &= UserAccess.Q(request.user)
     if query:
         sq = ManagedObject.get_search_Q(query)
         if sq:
             q &= sq
         else:
             q &= d_Q(name__contains=query)
     return self.model.objects.filter(q)
Example no. 14
 def transform_query(cls, query, user):
     if not user or user.is_superuser:
         return query  # No restrictions
     # Get user domains
     domains = UserAccess.get_domains(user)
     # Resolve domains against dict
     domain_ids = [x.bi_id for x in AdministrativeDomainM.objects.filter(id__in=domains)]
     filter = query.get("filter", {})
     dl = len(domain_ids)
     if not dl:
         return None
     elif dl == 1:
         q = {"$eq": [{"$field": "administrative_domain"}, domain_ids[0]]}
     else:
         q = {"$in": [{"$field": "administrative_domain"}, domain_ids]}
     if filter:
         query["filter"] = {"$and": [query["filter"], q]}
     else:
         query["filter"] = q
     return query
Example no. 15
    def get_data(self,
                 request,
                 pool=None,
                 obj_profile=None,
                 selector=None,
                 avail_status=None,
                 profile_check_only=None,
                 failed_scripts_only=None,
                 filter_pending_links=None,
                 filter_none_objects=None,
                 filter_view_other=None,
                 **kwargs):
        data = []
        match = None
        code_map = {
            "1": "Unknown error",
            "10000": "Unspecified CLI error",
            "10005": "Connection refused",
            "10001": "Authentication failed",
            "10002": "No super command defined",
            "10003": "No super privileges",
            "10004": "SSH Protocol error"
        }

        if not pool:
            pool = Pool.objects.filter()[0]
        data += [SectionRow(name="Report by %s" % pool.name)]
        if selector:
            mos = ManagedObject.objects.filter(selector.Q)
        else:
            mos = ManagedObject.objects.filter(pool=pool, is_managed=True)

        if not request.user.is_superuser:
            mos = mos.filter(
                administrative_domain__in=UserAccess.get_domains(request.user))
        if obj_profile:
            mos = mos.filter(object_profile=obj_profile)
        if filter_view_other:
            mnp_in = list(
                ManagedObjectProfile.objects.filter(enable_ping=False))
            mos = mos.filter(profile=Profile.objects.get(
                name=GENERIC_PROFILE)).exclude(object_profile__in=mnp_in)
        discovery = "noc.services.discovery.jobs.box.job.BoxDiscoveryJob"
        mos_id = list(mos.values_list("id", flat=True))
        if avail_status:
            avail = ObjectStatus.get_statuses(mos_id)

        if profile_check_only:
            match = {
                "$or": [
                    {"job.problems.suggest_cli": {"$exists": True}},
                    {"job.problems.suggest_snmp": {"$exists": True}},
                    {"job.problems.profile.": {"$regex": "Cannot detect profile"}},
                    {"job.problems.version.": {"$regex": "Remote error code 1000[1234]"}},
                ]
            }
        elif failed_scripts_only:
            match = {
                "$and": [
                    {"job.problems": {"$exists": "true", "$ne": {}}},
                    {"job.problems.suggest_snmp": {"$exists": False}},
                    {"job.problems.suggest_cli": {"$exists": False}},
                ]
            }
        elif filter_view_other:
            match = {"job.problems.suggest_snmp": {"$exists": False}}

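        # Fetch discovery problems for the selected objects, using the match filter built above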
        rdp = ReportDiscoveryProblem(mos, avail_only=avail_status, match=match)
        exclude_method = []
        if filter_pending_links:
            exclude_method += ["lldp", "lacp", "cdp", "huawei_ndp"]

        for discovery in rdp:
            mo = ManagedObject.get_by_id(discovery["key"])
            for method in ifilterfalse(lambda x: x in exclude_method,
                                       discovery["job"][0]["problems"]):
                problem = discovery["job"][0]["problems"][method]
                if filter_none_objects and not problem:
                    continue
                if isinstance(problem, dict) and "" in problem:
                    problem = problem.get("", "")
                if "Remote error code" in problem:
                    problem = code_map.get(problem.split(" ")[-1], problem)
                if isinstance(problem, six.string_types):
                    problem = problem.replace("\n", " ").replace("\r", " ")

                data += [(mo.name, mo.address,
                          mo.profile.name, mo.administrative_domain.name,
                          _("Yes") if mo.get_status() else _("No"),
                          discovery["st"].strftime("%d.%m.%Y %H:%M")
                          if "st" in discovery else "", method, problem)]

        return self.from_dataset(title=self.title,
                                 columns=[
                                     _("Managed Object"),
                                     _("Address"),
                                     _("Profile"),
                                     _("Administrative Domain"),
                                     _("Avail"),
                                     _("Last successful discovery"),
                                     _("Discovery"),
                                     _("Error")
                                 ],
                                 data=data)
Example no. 16
    def get_data(self, request, report_type=None, **kwargs):
        wr = ("", "")
        # wr_and = ("", "",)
        wr_and2 = ("", "")
        platform = {
            str(p["_id"]): p["name"]
            for p in Platform.objects.all().as_pymongo().scalar("id", "name")
        }
        version = {
            str(p["_id"]): p["version"]
            for p in Firmware.objects.all().as_pymongo().scalar(
                "id", "version")
        }
        profile = {
            str(p["_id"]): p["name"]
            for p in Profile.objects.all().as_pymongo().scalar("id", "name")
        }

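        # Non-superusers get a WHERE/AND clause limiting rows to their administrative domains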
        if not request.user.is_superuser:
            ad = tuple(UserAccess.get_domains(request.user))
            wr = ("WHERE administrative_domain_id in ", ad)
            # wr_and = ("AND sam.administrative_domain_id in ", ad)
            wr_and2 = ("AND administrative_domain_id in ", ad)
            if len(ad) == 1:
                wr = ("WHERE administrative_domain_id in (%s)" % ad, "")
                # wr_and = ("AND sam.administrative_domain_id in (%s)" % ad, "")
                wr_and2 = ("AND administrative_domain_id in (%s)" % ad, "")
        # By Profile
        if report_type == "profile":
            columns = [_("Profile")]
            query = ("""SELECT profile,COUNT(*) FROM sa_managedobject
                    %s%s GROUP BY 1 ORDER BY 2 DESC""" % wr)
        # By Administrative Domain
        elif report_type == "domain":
            columns = [_("Administrative Domain")]
            query = ("""SELECT a.name,COUNT(*)
                  FROM sa_managedobject o JOIN sa_administrativedomain a ON (o.administrative_domain_id=a.id)
                  %s%s
                  GROUP BY 1
                  ORDER BY 2 DESC""" % wr)
        # By Profile and Administrative Domains
        elif report_type == "domain-profile":
            columns = [_("Administrative Domain"), _("Profile")]
            query = ("""SELECT d.name,profile,COUNT(*)
                    FROM sa_managedobject o JOIN sa_administrativedomain d ON (o.administrative_domain_id=d.id)
                    %s%s
                    GROUP BY 1,2
                    """ % wr)
        # By tags
        elif report_type == "tag":
            columns = [_("Tag")]
            query = ("""
              SELECT t.tag, COUNT(*)
              FROM (
                SELECT unnest(tags) AS tag
                FROM sa_managedobject
                WHERE
                  tags IS NOT NULL
                  %s%s
                  AND array_length(tags, 1) > 0
                ) t
              GROUP BY 1
              ORDER BY 2 DESC;
            """ % wr_and2)
        elif report_type == "platform":
            columns = [_("Platform"), _("Profile")]
            query = ("""select sam.profile, sam.platform, COUNT(platform)
                    from sa_managedobject sam %s%s group by 1,2 order by count(platform) desc;"""
                     % wr)

        elif report_type == "version":
            columns = [_("Profile"), _("Version")]
            query = ("""select sam.profile, sam.version, COUNT(version)
                    from sa_managedobject sam %s%s group by 1,2 order by count(version) desc;"""
                     % wr)

        else:
            raise Exception("Invalid report type: %s" % report_type)
        for r, t in report_types:
            if r == report_type:
                title = self.title + ": " + t
                break
        columns += [
            TableColumn(_("Quantity"),
                        align="right",
                        total="sum",
                        format="integer")
        ]

        cursor = self.cursor()
        cursor.execute(query, ())
        data = []
        for c in cursor.fetchall():
            if report_type == "profile":
                data += [(profile.get(c[0]), c[1])]
            elif report_type == "domain-profile":
                data += [(c[0], profile.get(c[1]), c[2])]
            elif report_type == "platform":
                data += [(profile.get(c[0]), platform.get(c[1]), c[2])]
            elif report_type == "version":
                data += [(profile.get(c[0]), version.get(c[1]), c[2])]
            else:
                data += [c]

        return self.from_dataset(title=title,
                                 columns=columns,
                                 data=data,
                                 enumerate=True)
Example no. 17
 def get_user_domains(self):
     return UserAccess.get_domains(self.current_user)
Example no. 18
    def api_report(
        self,
        request,
        o_format,
        is_managed=None,
        administrative_domain=None,
        selector=None,
        pool=None,
        segment=None,
        avail_status=False,
        columns=None,
        ids=None,
        enable_autowidth=False,
    ):
        def row(row):
            def qe(v):
                if v is None:
                    return ""
                if isinstance(v, str):
                    return smart_text(v)
                elif isinstance(v, datetime.datetime):
                    return v.strftime("%Y-%m-%d %H:%M:%S")
                elif not isinstance(v, str):
                    return str(v)
                else:
                    return v

            return [qe(x) for x in row]

        def translate_row(row, cmap):
            return [row[i] for i in cmap]

        type_columns = ["Up/10G", "Up/1G", "Up/100M", "Down/-", "-"]

        cols = [
            "object1_admin_domain",
            # "id",
            "object1_name",
            "object1_address",
            "object1_platform",
            "object1_segment",
            "object1_tags",
            "object1_iface",
            "object1_descr",
            "object1_speed",
            "object2_admin_domain",
            "object2_name",
            "object2_address",
            "object2_platform",
            "object2_segment",
            "object2_tags",
            "object2_iface",
            "object2_descr",
            "object2_speed",
            "link_proto",
            "last_seen",
        ]

        header_row = [
            "OBJECT1_ADMIN_DOMAIN",
            "OBJECT1_NAME",
            "OBJECT1_ADDRESS",
            "OBJECT1_PLATFORM",
            "OBJECT1_SEGMENT",
            "OBJECT1_TAGS",
            "OBJECT1_IFACE",
            "OBJECT1_DESCR",
            "OBJECT1_SPEED",
            "OBJECT2_ADMIN_DOMAIN",
            "OBJECT2_NAME",
            "OBJECT2_ADDRESS",
            "OBJECT2_PLATFORM",
            "OBJECT2_SEGMENT",
            "OBJECT2_TAGS",
            "OBJECT2_IFACE",
            "OBJECT2_DESCR",
            "OBJECT2_SPEED",
            "LINK_PROTO",
            "LAST_SEEN",
        ]

        if columns:
            cmap = []
            for c in columns.split(","):
                try:
                    cmap += [cols.index(c)]
                except ValueError:
                    continue
        else:
            cmap = list(range(len(cols)))
        r = [translate_row(header_row, cmap)]
        if "interface_type_count" in columns.split(","):
            r[-1].extend(type_columns)
        # self.logger.info(r)
        # self.logger.info("---------------------------------")
        # print("-----------%s------------%s" % (administrative_domain, columns))

        p = Pool.get_by_name(pool or "default")
        mos = ManagedObject.objects.filter()
        if request.user.is_superuser and not administrative_domain and not selector and not segment:
            mos = ManagedObject.objects.filter(pool=p)
        if ids:
            mos = ManagedObject.objects.filter(id__in=[ids])
        if is_managed is not None:
            mos = ManagedObject.objects.filter(is_managed=is_managed)
        if pool:
            mos = mos.filter(pool=p)
        if not request.user.is_superuser:
            mos = mos.filter(
                administrative_domain__in=UserAccess.get_domains(request.user))
        if administrative_domain:
            ads = AdministrativeDomain.get_nested_ids(
                int(administrative_domain))
            mos = mos.filter(administrative_domain__in=ads)
        if selector:
            selector = ManagedObjectSelector.get_by_id(int(selector))
            mos = mos.filter(selector.Q)
        if segment:
            segment = NetworkSegment.objects.filter(id=segment).first()
            if segment:
                mos = mos.filter(segment__in=segment.get_nested_ids())
        mos_id = list(mos.values_list("id", flat=True))

        rld = ReportLinksDetail(mos_id)
        mo_resolv = {
            mo[0]: mo[1:]
            for mo in ManagedObject.objects.filter().values_list(
                "id",
                "administrative_domain__name",
                "name",
                "address",
                "segment",
                "platform",
                "labels",
            )
        }

        for link in rld.out:
            if len(rld.out[link]) != 2:
                # Multilink or bad link
                continue
            s1, s2 = rld.out[link]
            seg1, seg2 = None, None
            if "object1_segment" in columns.split(
                    ",") or "object2_segment" in columns.split(","):
                seg1, seg2 = mo_resolv[s1["mo"][0]][3], mo_resolv[s2["mo"]
                                                                  [0]][3]
            plat1, plat2 = None, None
            if "object1_platform" in columns.split(
                    ",") or "object2_platform" in columns.split(","):
                plat1, plat2 = mo_resolv[s1["mo"][0]][4], mo_resolv[s2["mo"]
                                                                    [0]][4]
            r += [
                translate_row(
                    row([
                        mo_resolv[s1["mo"][0]][0],
                        mo_resolv[s1["mo"][0]][1],
                        mo_resolv[s1["mo"][0]][2],
                        "" if not plat1 else Platform.get_by_id(plat1),
                        "" if not seg1 else NetworkSegment.get_by_id(seg1),
                        ";".join(mo_resolv[s1["mo"][0]][5] or []),
                        s1["iface_n"][0],
                        s1.get("iface_descr")[0]
                        if s1.get("iface_descr") else "",
                        s1.get("iface_speed")[0]
                        if s1.get("iface_speed") else 0,
                        mo_resolv[s2["mo"][0]][0],
                        mo_resolv[s2["mo"][0]][1],
                        mo_resolv[s2["mo"][0]][2],
                        "" if not plat2 else Platform.get_by_id(plat2),
                        "" if not seg2 else NetworkSegment.get_by_id(seg2),
                        ";".join(mo_resolv[s2["mo"][0]][5] or []),
                        s2["iface_n"][0],
                        s2.get("iface_descr")[0]
                        if s2.get("iface_descr") else "",
                        s2.get("iface_speed")[0]
                        if s2.get("iface_speed") else 0,
                        s2.get("dis_method", ""),
                        s2.get("last_seen", ""),
                    ]),
                    cmap,
                )
            ]
        filename = "links_detail_report_%s" % datetime.datetime.now().strftime(
            "%Y%m%d")
        if o_format == "csv":
            response = HttpResponse(content_type="text/csv")
            response[
                "Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
            writer = csv.writer(response,
                                dialect="excel",
                                delimiter=",",
                                quoting=csv.QUOTE_MINIMAL)
            writer.writerows(r)
            return response
        elif o_format == "csv_zip":
            response = BytesIO()
            f = TextIOWrapper(TemporaryFile(mode="w+b"), encoding="utf-8")
            writer = csv.writer(f,
                                dialect="excel",
                                delimiter=";",
                                quotechar='"')
            writer.writerows(r)
            f.seek(0)
            with ZipFile(response, "w", compression=ZIP_DEFLATED) as zf:
                zf.writestr("%s.csv" % filename, f.read())
                zf.filename = "%s.csv.zip" % filename
            # response = HttpResponse(content_type="text/csv")
            response.seek(0)
            response = HttpResponse(response.getvalue(),
                                    content_type="application/zip")
            response[
                "Content-Disposition"] = 'attachment; filename="%s.csv.zip"' % filename
            return response
        elif o_format == "xlsx":
            response = BytesIO()
            wb = xlsxwriter.Workbook(response)
            cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
            ws = wb.add_worksheet("Objects")
            max_column_data_length = {}
            for rn, x in enumerate(r):
                for cn, c in enumerate(x):
                    if rn and (r[0][cn] not in max_column_data_length or
                               len(str(c)) > max_column_data_length[r[0][cn]]):
                        max_column_data_length[r[0][cn]] = len(str(c))
                    ws.write(rn, cn, c, cf1)
            ws.autofilter(0, 0, rn, cn)
            ws.freeze_panes(1, 0)
            for cn, c in enumerate(r[0]):
                # Set column width
                width = get_column_width(c)
                if enable_autowidth and width < max_column_data_length[c]:
                    width = max_column_data_length[c]
                ws.set_column(cn, cn, width=width)
            wb.close()
            response.seek(0)
            response = HttpResponse(response.getvalue(),
                                    content_type="application/vnd.ms-excel")
            # response = HttpResponse(
            #     content_type="application/x-ms-excel")
            response[
                "Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
            response.close()
            return response
Example no. 19
File: views.py Project: nbashev/noc
    def api_report(
        self,
        request,
        from_date,
        to_date,
        o_format,
        administrative_domain=None,
        columns=None,
        source="both",
        alarm_class=None,
        enable_autowidth=False,
    ):
        def row(row):
            def qe(v):
                if v is None:
                    return ""
                if isinstance(v, str):
                    return smart_text(v)
                elif isinstance(v, datetime.datetime):
                    return v.strftime("%Y-%m-%d %H:%M:%S")
                elif not isinstance(v, str):
                    return smart_text(v)
                else:
                    return v

            return [qe(x) for x in row]

        def translate_row(row, cmap):
            return [row[i] for i in cmap]

        cols = [
            "id",
            "alarm_class",
            "alarm_from_ts",
            "alarm_to_ts",
            "alarm_tt",
            "object_name",
            "object_address",
            "object_admdomain",
            "log_timestamp",
            "log_source",
            "log_message",
            # "tt",
            # "escalation_ts",
        ]

        header_row = [
            "ID",
            _("ALARM_CLASS"),
            _("ALARM_FROM_TS"),
            _("ALARM_TO_TS"),
            _("ALARM_TT"),
            _("OBJECT_NAME"),
            _("OBJECT_ADDRESS"),
            _("OBJECT_ADMDOMAIN"),
            _("LOG_TIMESTAMP"),
            _("LOG_SOURCE"),
            _("LOG_MESSAGE"),
        ]

        if columns:
            cmap = []
            for c in columns.split(","):
                try:
                    cmap += [cols.index(c)]
                except ValueError:
                    continue
        else:
            cmap = list(range(len(cols)))

        r = [translate_row(header_row, cmap)]
        fd = datetime.datetime.strptime(
            to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
        match = {
            "timestamp": {
                "$gte": datetime.datetime.strptime(from_date, "%d.%m.%Y"),
                "$lte": fd
            }
        }
        mos = ManagedObject.objects.filter()

        ads = []
        if administrative_domain:
            if administrative_domain.isdigit():
                administrative_domain = [int(administrative_domain)]
                ads = AdministrativeDomain.get_nested_ids(
                    administrative_domain[0])

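        # Intersect the requested administrative domain with the domains available to the user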
        if not request.user.is_superuser:
            user_ads = UserAccess.get_domains(request.user)
            if administrative_domain and ads:
                if administrative_domain[0] not in user_ads:
                    ads = list(set(ads) & set(user_ads))
                    if not ads:
                        return HttpResponse(
                            "<html><body>Permission denied: Invalid Administrative Domain</body></html>"
                        )
            else:
                ads = user_ads
        if ads:
            mos = mos.filter(administrative_domain__in=ads)

        # Working if Administrative domain set
        if ads:
            try:
                match["adm_path"] = {"$in": ads}
                # @todo More 2 level hierarhy
            except bson.errors.InvalidId:
                pass

        addr_map = {
            mo[0]: (mo[1], mo[2])
            for mo in mos.values_list("id", "name", "address")
        }

        # Active Alarms
        coll = ActiveAlarm._get_collection()
        for aa in coll.aggregate([
            {
                "$match": match
            },
            {
                "$unwind": "$log"
            },
            {
                "$match": {
                    "log.source": {
                        "$exists": True,
                        "$ne": None
                    }
                }
            },
            {
                "$project": {
                    "timestamp": 1,
                    "managed_object": 1,
                    "alarm_class": 1,
                    "escalation_tt": 1,
                    "adm_path": 1,
                    "log": 1,
                }
            },
            {
                "$sort": {
                    "_id": 1,
                    "log.timestamp": 1
                }
            },
        ]):
            r += [
                translate_row(
                    row([
                        smart_text(aa["_id"]),
                        AlarmClass.get_by_id(aa["alarm_class"]).name,
                        aa["timestamp"],
                        "",
                        aa.get("escalation_tt", ""),
                        addr_map[aa["managed_object"]][0],
                        addr_map[aa["managed_object"]][1],
                        AdministrativeDomain.get_by_id(
                            aa["adm_path"][-1]).name,
                        aa["log"]["timestamp"],
                        aa["log"]["source"],
                        aa["log"]["message"],
                    ]),
                    cmap,
                )
            ]
        # Archived Alarms
        coll = ArchivedAlarm._get_collection()
        for aa in coll.aggregate([
            {
                "$match": match
            },
            {
                "$unwind": "$log"
            },
            {
                "$match": {
                    "log.source": {
                        "$exists": True
                    }
                }
            },
            {
                "$project": {
                    "timestamp": 1,
                    "clear_timestamp": 1,
                    "managed_object": 1,
                    "alarm_class": 1,
                    "escalation_tt": 1,
                    "adm_path": 1,
                    "log": 1,
                }
            },
            {
                "$sort": {
                    "_id": 1,
                    "log.timestamp": 1
                }
            },
        ]):
            r += [
                translate_row(
                    row([
                        smart_text(aa["_id"]),
                        AlarmClass.get_by_id(aa["alarm_class"]).name,
                        aa["timestamp"],
                        aa["clear_timestamp"],
                        aa.get("escalation_tt", ""),
                        addr_map[aa["managed_object"]][0],
                        addr_map[aa["managed_object"]][1],
                        AdministrativeDomain.get_by_id(
                            aa["adm_path"][-1]).name,
                        aa["log"]["timestamp"],
                        aa["log"]["source"],
                        aa["log"]["message"],
                    ]),
                    cmap,
                )
            ]
        filename = "alarm_comments.csv"
        if o_format == "csv":
            response = HttpResponse(content_type="text/csv")
            response[
                "Content-Disposition"] = 'attachment; filename="%s"' % filename
            writer = csv.writer(response)
            writer.writerows(r)
            return response
        elif o_format == "csv_zip":
            response = BytesIO()
            f = TextIOWrapper(TemporaryFile(mode="w+b"), encoding="utf-8")
            writer = csv.writer(f,
                                dialect="excel",
                                delimiter=";",
                                quotechar='"')
            writer.writerow(columns)
            writer.writerows(r)
            f.seek(0)
            with ZipFile(response, "w", compression=ZIP_DEFLATED) as zf:
                zf.writestr(filename, f.read())
                zf.filename = "%s.zip" % filename
            # response = HttpResponse(content_type="text/csv")
            response.seek(0)
            response = HttpResponse(response.getvalue(),
                                    content_type="application/zip")
            response[
                "Content-Disposition"] = 'attachment; filename="%s.zip"' % filename
            return response
        elif o_format == "xlsx":
            response = BytesIO()
            wb = xlsxwriter.Workbook(response)
            cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
            ws = wb.add_worksheet("Alarms")
            max_column_data_length = {}
            for rn, x in enumerate(r):
                for cn, c in enumerate(x):
                    if rn and (r[0][cn] not in max_column_data_length or
                               len(str(c)) > max_column_data_length[r[0][cn]]):
                        max_column_data_length[r[0][cn]] = len(str(c))
                    ws.write(rn, cn, c, cf1)
            ws.autofilter(0, 0, rn, cn)
            ws.freeze_panes(1, 0)
            for cn, c in enumerate(r[0]):
                # Set column width
                width = get_column_width(c)
                if enable_autowidth and width < max_column_data_length[c]:
                    width = max_column_data_length[c]
                ws.set_column(cn, cn, width=width)
            wb.close()
            response.seek(0)
            response = HttpResponse(response.getvalue(),
                                    content_type="application/vnd.ms-excel")
            response[
                "Content-Disposition"] = 'attachment; filename="alarm_comments.xlsx"'
            response.close()
            return response
Example no. 20
    def get_data(self,
                 request,
                 duration,
                 from_date=None,
                 to_date=None,
                 **kwargs):
        now = datetime.datetime.now()
        if not from_date:
            duration = 1
        if int(duration):
            self.logger.info("Use duration\n")
            d = datetime.timedelta(seconds=int(duration))
            b = now - d
            q = Q(start__gte=b) | Q(stop__gte=b) | Q(stop__exists=False)
        else:
            b = datetime.datetime.strptime(from_date, "%d.%m.%Y")
            q = Q(start__gte=b) | Q(stop__gte=b) | Q(stop__exists=False)
            if to_date:
                if from_date == to_date:
                    t1 = datetime.datetime.strptime(
                        to_date, "%d.%m.%Y") + datetime.timedelta(1)
                else:
                    t1 = datetime.datetime.strptime(to_date, "%d.%m.%Y")
            else:
                t1 = now
            q &= Q(start__lte=t1) | Q(stop__lte=t1)
            d = datetime.timedelta(seconds=int((t1 - b).total_seconds()))
        outages = defaultdict(list)
        otime = defaultdict(int)
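        # Accumulate outages and total outage time per object over the reporting window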
        for o in Outage.objects.filter(q):
            start = max(o.start, b)
            stop = o.stop if o.stop else now
            outages[o.object] += [o]
            otime[o.object] += (stop - start).total_seconds()
        td = d.total_seconds()
        if not request.user.is_superuser:
            for mo in ManagedObject.objects.exclude(
                    administrative_domain__in=UserAccess.get_domains(
                        request.user)):
                if mo.id in otime:
                    otime.pop(mo.id)
        # Load managed objects
        mos = list(otime)
        chunk = 500
        mo = {}
        while mos:
            for o in ManagedObject.objects.filter(id__in=mos[:chunk]):
                mo[o.id] = o
            mos = mos[chunk:]
        r = []
        for o in sorted(otime, key=lambda x: -otime[x]):
            m = mo.get(o)
            if not m:
                continue  # Hanging Outage
            dt = min(td, otime[o])
            downtime = "%02d:%02d:%02d" % ((dt // 3600) % 24,
                                           (dt // 60) % 60, dt % 60)
            if dt >= 86400:
                downtime = "%dd %s" % (dt // 86400, downtime)
            if td:
                avail = float(td - dt) * 100 / td
            else:
                avail = 0
            r += [(
                m.name,
                m.address,
                m.profile.name,
                m.platform.name if m.platform else "",
                _("Yes") if m.is_managed else _("No"),
                _("Yes") if m.get_status() else _("No"),
                downtime,
                avail,
                len(outages[o]),
            )]

        return self.from_dataset(
            title=self.title,
            columns=[
                _("Managed Object"),
                _("Address"),
                _("Profile"),
                _("Platform"),
                TableColumn(_("Managed"), align="right"),
                TableColumn(_("Status"), align="right"),
                TableColumn(_("Downtime"), align="right"),
                TableColumn(_("Availability"), align="right",
                            format="percent"),
                TableColumn(_("Downs"), align="right", format="integer"),
            ],
            data=r,
            enumerate=True,
        )
Example no. 21
    def get_ajax_data(self, **kwargs):
        def update_dict(d, s):
            for k in s:
                if k in d:
                    d[k] += s[k]
                else:
                    d[k] = s[k]

        zoom = int(self.handler.get_argument("z"))
        west = float(self.handler.get_argument("w"))
        east = float(self.handler.get_argument("e"))
        north = float(self.handler.get_argument("n"))
        south = float(self.handler.get_argument("s"))
        ms = int(self.handler.get_argument("maintenance"))
        active_layers = [
            l_r for l_r in self.get_pop_layers()
            if l_r.min_zoom <= zoom <= l_r.max_zoom
        ]
        alarms = []
        res = {}
        services = {}
        subscribers = {}
        t_data = defaultdict(list)
        mos = ManagedObject.objects.filter(is_managed=True).values(
            "id", "name", "x", "y")
        if not self.current_user.is_superuser:
            mos = mos.filter(administrative_domain__in=UserAccess.get_domains(
                self.current_user))
        for mo in mos:
            res[mo.pop("id")] = mo
        mos_id = list(res.keys())
        if ms == 0:
            mos_id = list(
                set(list(res.keys())) - set(Maintenance.currently_affected()))
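        # Collect active alarms for the visible objects, with per-profile service/subscriber summaries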
        for a in ActiveAlarm._get_collection().find(
            {"managed_object": {
                "$in": mos_id,
                "$exists": True
            }},
            {
                "_id": 1,
                "managed_object": 1,
                "direct_subscribers": 1,
                "direct_services": 1
            },
        ):
            s_sub, s_service = {}, {}
            if a.get("direct_subscribers"):
                s_sub = {
                    dsub["profile"]: dsub["summary"]
                    for dsub in a["direct_subscribers"]
                }
            if a.get("direct_services"):
                s_service = {
                    dserv["profile"]: dserv["summary"]
                    for dserv in a["direct_services"]
                }
            mo = res.get(a["managed_object"])
            if not mo:
                continue
            if mo["x"] and mo["y"]:
                w = ServiceSummary.get_weight({
                    "subscriber": s_sub,
                    "service": s_service
                })
                # @todo: Should we add the object's weight to summary?
                # @todo: Check west/south hemisphere
                if active_layers and west <= mo["x"] <= east and south <= mo["y"] <= north:
                    t_data[mo["x"], mo["y"]] += [(mo, w)]
            else:
                w = 0
            alarms += [{
                "alarm_id": str(a.get("_id")),
                "managed_object": mo["name"],
                "x": mo["x"],
                "y": mo["y"],
                "w": max(w, 1),
            }]
            if s_service:
                update_dict(services, s_service)
            if s_sub:
                update_dict(subscribers, s_sub)
        links = None
        o_seen = set()
        points = None
        o_data = {}
        if t_data and active_layers:
            # Create lines
            bbox = get_bbox(west, east, north, south)
            lines = []
            for d in ObjectConnection._get_collection().find(
                {
                    "type": "pop_link",
                    "layer": {
                        "$in": [a_l.id for a_l in active_layers]
                    },
                    "line": {
                        "$geoIntersects": {
                            "$geometry": bbox
                        }
                    },
                },
                {
                    "_id": 0,
                    "connection": 1,
                    "line": 1
                },
            ):
                for c in d["line"]["coordinates"]:
                    if tuple(c) in t_data:
                        for c in d["line"]["coordinates"]:
                            tc = tuple(c)
                            o_data[tc] = t_data.get(tc, [])
                        o_seen.add(tuple(c))
                        lines += [d["line"]]
                        break
            if lines:
                links = geojson.FeatureCollection(features=lines)
            # Create points
            points = []
            for x, y in o_data:
                data = {}
                for mo, w in o_data[x, y]:
                    if mo not in data:
                        data[mo] = w
                data = sorted(data, key=lambda z: data[z],
                              reverse=True)[:self.TOOLTIP_LIMIT]
                points += [
                    geojson.Feature(
                        geometry=geojson.Point(coordinates=[x, y]),
                        properties={
                            "alarms":
                            len(t_data[x, y]),
                            "objects": [{
                                "id": mo.id,
                                "name": mo.name,
                                "address": mo.address
                            } for mo in mos],
                        },
                    )
                ]
            points = geojson.FeatureCollection(features=points)
        return {
            "alarms":
            alarms,
            "summary":
            self.f_glyph_summary({
                "service": services,
                "subscriber": subscribers
            }),
            "links":
            links,
            "pops":
            points,
        }
Example no. 22
    def get_data(self,
                 request,
                 interval=1,
                 from_date=None,
                 to_date=None,
                 skip_avail=False,
                 skip_zero_avail=False,
                 filter_zero_access=False,
                 **kwargs):
        """
        a1 = self.get_availability(1)
        a7 = self.get_availability(7)
        a30 = self.get_availability(30)
        """

        if not from_date:
            from_date = datetime.datetime.now() - datetime.timedelta(
                days=interval)
        else:
            from_date = datetime.datetime.strptime(from_date, "%d.%m.%Y")

        if not to_date or from_date == to_date:
            to_date = from_date + datetime.timedelta(days=1)
        else:
            to_date = datetime.datetime.strptime(
                to_date, "%d.%m.%Y") + datetime.timedelta(days=1)

        a = self.get_availability(start_date=from_date,
                                  stop_date=to_date,
                                  skip_zero_avail=skip_zero_avail)
        rb = self.get_reboots(start_date=from_date, stop_date=to_date)
        r = [SectionRow("Report from %s to %s" % (from_date, to_date))]
        mos = ManagedObject.objects.filter(is_managed=True)

        if not request.user.is_superuser:
            mos = mos.filter(
                administrative_domain__in=UserAccess.get_domains(request.user))
        if skip_avail:
            mos = mos.filter(id__in=list(a))
        mos_id = list(mos.order_by("id").values_list("id", flat=True))
        if filter_zero_access:
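            # "Клиентский порт" ("Customer port"): exclude objects with no operationally-up customer port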
            iface_p = InterfaceProfile.objects.get(name="Клиентский порт")
            match = {"profile": iface_p.id, "managed_object": {"$in": mos_id}}
            pipeline = [
                {
                    "$match": match
                },
                {
                    "$group": {
                        "_id": "$managed_object",
                        "count": {
                            "$sum": 1
                        },
                        "m": {
                            "$max": "$oper_status"
                        },
                    }
                },
                {
                    "$match": {
                        "m": False
                    }
                },
                {
                    "$project": {
                        "_id": True
                    }
                },
            ]
            # data = Interface.objects._get_collection().aggregate(pipeline,
            data = (get_db()["noc.interfaces"].with_options(
                read_preference=ReadPreference.SECONDARY_PREFERRED).aggregate(
                    pipeline))
            data = [d["_id"] for d in data]
            mos = mos.exclude(id__in=data)

        mo_hostname = ReportObjectsHostname1(sync_ids=mos_id)
        mo_hostname = mo_hostname.get_dictionary()
        for o in mos:
            s = [
                o.administrative_domain.name,
                o.name,
                mo_hostname.get(o.id, ""),
                o.address,
                o.profile.name,
                round(a.get(o.id, (100.0, 0, 0))[0], 2),
            ]
            s.extend(a.get(o.id, (100.0, 0, 0))[1:])
            s.append(rb[o.id] if o.id in rb else 0)
            r += [s]
            """
            a1.get(o.id, 100),
            a7.get(o.id, 100),
            a30.get(o.id, 100)
            """
        # print r
        return self.from_dataset(
            title=self.title,
            columns=[
                _("Adm. Domain"),
                _("Managed Object"),
                _("Hostname"),
                _("Address"),
                _("Profile"),
                # TableColumn(_("Avail"), align="right", format="percent"),
                # TableColumn(_("Total avail (sec)"), align="right", format="numeric"),
                _("Avail"),
                _("Total unavail (sec)"),
                _("Count outages"),
                _("Reboots"),
            ],
            data=r,
            enumerate=True,
        )
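The from_date/to_date handling above — default to an interval-day window, parse "%d.%m.%Y", and make to_date exclusive by adding one day — recurs almost verbatim in the later examples. A small stand-alone helper capturing that pattern could look like this (a sketch; the function name is made up and is not part of NOC):

import datetime

def parse_report_window(from_date=None, to_date=None, interval=1, fmt="%d.%m.%Y"):
    """Return (start, stop) datetimes; stop is exclusive (one day past to_date)."""
    if not from_date:
        start = datetime.datetime.now() - datetime.timedelta(days=interval)
    else:
        start = datetime.datetime.strptime(from_date, fmt)
    if not to_date or from_date == to_date:
        stop = start + datetime.timedelta(days=1)
    else:
        stop = datetime.datetime.strptime(to_date, fmt) + datetime.timedelta(days=1)
    return start, stop

# A one-day window ending now, or an explicit inclusive date range
print(parse_report_window())
print(parse_report_window("01.03.2020", "07.03.2020"))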
Example No. 23
0
 def post(self, *args, **kwargs):
     """
     Request is the list of
     {
         id: <managed object id>,
         script: <script name>,
         args: <arguments>
     }
     :param args:
     :param kwargs:
     :return:
     """
     metrics["mrt_requests"] += 1
     # Parse request
     req = ujson.loads(self.request.body)
     # Disable nginx proxy buffering
     self.set_header("X-Accel-Buffering", "no")
     # Object ids
     ids = set(int(d["id"]) for d in req if "id" in d and "script" in d)
     logger.info(
         "Run task on parralels: %d (Max concurrent %d), for User: %s",
         len(req),
         self.CONCURRENCY,
         self.current_user,
     )
     # Check access
     qs = ManagedObject.objects.filter(id__in=list(ids))
     if not self.current_user.is_superuser:
         adm_domains = UserAccess.get_domains(self.current_user)
         qs = qs.filter(administrative_domain__in=adm_domains)
     ids = dict(qs.values_list("id", "bi_id"))
     with Span(
             sample=int(config.mrt.enable_command_logging),
             server="MRT",
             service="post",
             client=self.current_user,
             in_label=req,
     ) as span:
         if self.service.use_telemetry:
             logger.info("[%s] Enable telemetry for task, user: %s",
                         span.span_id, self.current_user)
         futures = []
         for d in req:
             if "id" not in d or "script" not in d:
                 continue
             oid = int(d["id"])
             if oid not in ids:
                 yield self.write_chunk({
                     "id": str(d["id"]),
                     "error": "Access denied"
                 })
                 metrics["mrt_access_denied"] += 1
             if len(futures) >= config.mrt.max_concurrency:
                 wi = tornado.gen.WaitIterator(*futures)
                  r = yield wi.next()  # wait for whichever running script finishes first
                 del futures[wi.current_index]
                 yield self.write_chunk(r)
             futures += [
                 self.run_script(oid,
                                 d["script"],
                                 d.get("args"),
                                 span_id=span.span_id,
                                 bi_id=ids.get(oid))
             ]
         # Wait for rest
         wi = tornado.gen.WaitIterator(*futures)
         while not wi.done():
              r = yield wi.next()
             yield self.write_chunk(r)
     logger.info("Done")
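The handler above throttles script execution with tornado.gen.WaitIterator: once max_concurrency futures are in flight it waits for any one of them to finish, streams that result, and only then schedules the next one. A stripped-down sketch of the same pattern, assuming the legacy @gen.coroutine style the example uses (the work() coroutine and the numbers are made up):

import tornado.gen
import tornado.ioloop

MAX_CONCURRENCY = 3

@tornado.gen.coroutine
def work(n):
    # Stand-in for run_script(): pretend each task takes a little while
    yield tornado.gen.sleep(0.1)
    raise tornado.gen.Return("task %d done" % n)

@tornado.gen.coroutine
def run_all(count):
    futures = []
    for n in range(count):
        if len(futures) >= MAX_CONCURRENCY:
            wi = tornado.gen.WaitIterator(*futures)
            result = yield wi.next()       # wait for any in-flight future
            del futures[wi.current_index]  # drop the one that completed
            print(result)
        futures.append(work(n))
    # Drain whatever is still running
    wi = tornado.gen.WaitIterator(*futures)
    while not wi.done():
        print((yield wi.next()))

tornado.ioloop.IOLoop.current().run_sync(lambda: run_all(10))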
Example No. 24
0
    def get_data(self,
                 request,
                 pool=None,
                 obj_profile=None,
                 filter_ignore_iface=True,
                 **kwargs):

        rn = re.compile(
            r"'remote_chassis_id': u'(?P<rem_ch_id>\S+)'.+'remote_system_name': u'(?P<rem_s_name>\S+)'",
            re.IGNORECASE,
        )
        problem = {
            "Not found iface on remote": "->",
            "Not found local iface on remote": "<-",
            "Remote object is not found": "X",
        }
        data = []
        # MAC, hostname, count
        not_found = defaultdict(int)
        # Name, IP, count
        local_on_remote = defaultdict(int)
        # Get all managed objects
        mos = ManagedObject.objects.filter(is_managed=True, pool=pool)

        if not request.user.is_superuser:
            mos = mos.filter(
                administrative_domain__in=UserAccess.get_domains(request.user))
        if obj_profile:
            mos = mos.filter(object_profile=obj_profile)
        mos_id = dict((mo.id, mo) for mo in mos)
        report = ReportPendingLinks(
            list(six.iterkeys(mos_id)),
            ignore_profiles=list(
                InterfaceProfile.objects.filter(discovery_policy="I")),
        )
        problems = report.out
        for mo_id in problems:
            mo = mos_id.get(mo_id, ManagedObject.get_by_id(mo_id))
            for iface in problems[mo_id]:
                data += [(
                    mo.name,
                    mo.address,
                    mo.profile.name,
                    mo.administrative_domain.name,
                    iface,
                    problem[problems[mo_id][iface]["problem"]],
                    problems[mo_id][iface]["remote_id"],
                )]
                if problems[mo_id][iface]["problem"] == "Remote object is not found":
                    match = rn.findall(problems[mo_id][iface]["remote_id"])
                    if match:
                        not_found[match[0]] += 1
                elif problems[mo_id][iface]["problem"] == "Not found iface on remote":
                    local_on_remote[(mo.name, mo.address)] += 1
        data += [SectionRow(name="Summary information on u_object")]
        for c in not_found:
            if not_found[c] > 4:
                data += [c]
        data += [SectionRow(name="Summary information on agg")]
        for c in local_on_remote:
            if local_on_remote[c] > 4:
                data += [c]
        return self.from_dataset(
            title=self.title,
            columns=[
                _("Managed Object"),
                _("Address"),
                _("Profile"),
                _("Administrative domain"),
                _("Interface"),
                _("Direction"),
                _("Remote Object")
                # _("Discovery"), _("Error")
            ],
            data=data,
        )
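The rn regex above pulls the remote chassis ID and system name out of the repr()-style remote_id string with named groups; findall then returns (rem_ch_id, rem_s_name) tuples. A small illustration with a fabricated remote_id value:

import re

rn = re.compile(
    r"'remote_chassis_id': u'(?P<rem_ch_id>\S+)'.+'remote_system_name': u'(?P<rem_s_name>\S+)'",
    re.IGNORECASE,
)

# Fabricated remote_id payload in the format the report expects
remote_id = "{'remote_chassis_id': u'00:11:22:33:44:55', 'remote_port': u'Gi0/1', 'remote_system_name': u'sw-agg-01'}"
print(rn.findall(remote_id))  # [('00:11:22:33:44:55', 'sw-agg-01')]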
Example No. 25
0
    def get_data(self, request, pool, obj_profile=None, **kwargs):
        problems = {}  # id -> problem

        # Get all managed objects, restricted to the requested object profile
        # and to the administrative domains available to the current user
        q = {"is_managed": True, "pool": pool}
        if obj_profile:
            q["object_profile"] = obj_profile
        if not request.user.is_superuser:
            q["administrative_domain__in"] = UserAccess.get_domains(request.user)
        mos = dict((mo.id, mo) for mo in ManagedObject.objects.filter(**q))

        mos_set = set(mos)
        # Get all managed objects with generic profile
        for mo in mos:
            if mos[mo].profile.name == GENERIC_PROFILE:
                problems[mo] = _("Profile check failed")
        # Get all managed objects without interfaces
        if_mo = dict(
            (x["_id"], x.get("managed_object"))
            for x in Interface._get_collection().find(
                {},
                {"_id": 1, "managed_object": 1}
            )
        )
        for mo in (mos_set - set(problems) - set(if_mo.itervalues())):
            problems[mo] = _("No interfaces")
        # Get all managed objects without links
        linked_mos = set()
        for d in Link._get_collection().find({}):
            for i in d["interfaces"]:
                linked_mos.add(if_mo.get(i))
        for mo in (mos_set - set(problems) - linked_mos):
            problems[mo] = _("No links")
        # Get all managed objects without uplinks
        uplinks = {}
        for d in ObjectData._get_collection().find():
            nu = len(d.get("uplinks", []))
            if nu:
                uplinks[d["_id"]] = nu
        for mo in (mos_set - set(problems) - set(uplinks)):
            problems[mo] = _("No uplinks")
        #
        data = []
        for mo_id in problems:
            if mo_id not in mos:
                continue
            mo = mos[mo_id]
            data += [[
                mo.name,
                mo.address,
                mo.profile.name,
                mo.platform.name if mo.platform else "",
                mo.segment.name if mo.segment else "",
                problems[mo_id]
            ]]
        data = sorted(data)
        return self.from_dataset(
            title=self.title,
            columns=[
                "Name",
                "Address",
                "Profile",
                "Platform",
                "Segment",
                "Problem"
            ],
            data=data,
            enumerate=True
        )
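The report above classifies each object with a cascade of set differences: once an object has been assigned a problem it is removed from consideration by the later, weaker checks. The same idea in isolation (object IDs and check sets are made up):

# Sketch of the "first matching problem wins" cascade used above
all_objects = {1, 2, 3, 4, 5}
with_interfaces = {2, 3, 4, 5}
with_links = {3, 4}
with_uplinks = {4}

problems = {}
for mo in all_objects - with_interfaces:
    problems[mo] = "No interfaces"
for mo in all_objects - set(problems) - with_links:
    problems[mo] = "No links"
for mo in all_objects - set(problems) - with_uplinks:
    problems[mo] = "No uplinks"

# e.g. {1: 'No interfaces', 2: 'No links', 5: 'No links', 3: 'No uplinks'}
print(problems)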
Example No. 26
0
File: views.py Project: nbashev/noc
    def api_report(
        self,
        request,
        reporttype=None,
        from_date=None,
        to_date=None,
        object_profile=None,
        filter_default=None,
        exclude_zero=True,
        interface_profile=None,
        selector=None,
        administrative_domain=None,
        columns=None,
        description=None,
        o_format=None,
        enable_autowidth=False,
        **kwargs,
    ):
        def load(mo_ids):
            # match = {"links.mo": {"$in": mo_ids}}
            match = {"int.managed_object": {"$in": mo_ids}}
            group = {
                "_id": "$_id",
                "links": {
                    "$push": {
                        "iface_n": "$int.name",
                        # "iface_id": "$int._id",
                        # "iface_descr": "$int.description",
                        # "iface_speed": "$int.in_speed",
                        # "dis_method": "$discovery_method",
                        # "last_seen": "$last_seen",
                        "mo": "$int.managed_object",
                        "linked_obj": "$linked_objects",
                    }
                },
            }
            value = (get_db()["noc.links"].with_options(
                read_preference=ReadPreference.SECONDARY_PREFERRED).aggregate(
                    [
                        {
                            "$unwind": "$interfaces"
                        },
                        {
                            "$lookup": {
                                "from": "noc.interfaces",
                                "localField": "interfaces",
                                "foreignField": "_id",
                                "as": "int",
                            }
                        },
                        {
                            "$match": match
                        },
                        {
                            "$group": group
                        },
                    ],
                    allowDiskUse=True,
                ))

            res = defaultdict(dict)

            for v in value:
                if v["_id"]:
                    for vv in v["links"]:
                        if len(vv["linked_obj"]) == 2:
                            mo = vv["mo"][0]
                            iface = vv["iface_n"]
                            for i in vv["linked_obj"]:
                                if mo != i:
                                    res[mo][i] = iface[0]
            return res

        def translate_row(row, cmap):
            return [row[i] for i in cmap]

        def str_to_float(value):
            # Round to three decimal places, returning a float
            return float("{0:.3f}".format(float(value)))

        cols = [
            "object_id",
            "object_name",
            "object_address",
            "object_platform",
            "object_adm_domain",
            "object_segment",
            "object_container",
            # "object_hostname",
            # "object_status",
            # "profile_name",
            # "object_profile",
            # "object_vendor",
            "iface_name",
            "iface_description",
            "iface_speed",
            "max_load_in",
            "max_load_in_time",
            "max_load_out",
            "max_load_out_time",
            "avg_load_in",
            "avg_load_out",
            "total_in",
            "total_out",
            "uplink_iface_name",
            "uplink_iface_description",
            "uplink_iface_speed",
            "uplink_max_load_in",
            "uplink_max_load_in_time",
            "uplink_max_load_out",
            "uplink_max_load_out_time",
            "uplink_avg_load_in",
            "uplink_avg_load_out",
            "uplink_total_in",
            "uplink_total_out",
        ]

        header_row = [
            "ID",
            _("OBJECT_NAME"),
            _("OBJECT_ADDRESS"),
            _("OBJECT_PLATFORM"),
            _("OBJECT_ADMDOMAIN"),
            _("OBJECT_SEGMENT"),
            _("CONTAINER_ADDRESS"),
            _("IFACE_NAME"),
            _("IFACE_DESCRIPTION"),
            _("IFACE_SPEED"),
            _("MAX_LOAD_IN, Mbps"),
            _("MAX_LOAD_IN_TIME"),
            _("MAX_LOAD_OUT, Mbps"),
            _("MAX_LOAD_OUT_TIME"),
            _("AVG_LOAD_IN, Mbps"),
            _("AVG_LOAD_OUT, Mbps"),
            _("TOTAL_IN, Mbyte"),
            _("TOTAL_OUT, Mbyte"),
            _("UPLINK_IFACE_NAME"),
            _("UPLINK_IFACE_DESCRIPTION"),
            _("UPLINK_IFACE_SPEED"),
            _("UPLINK_MAX_LOAD_IN, Mbps"),
            _("UPLINK_MAX_TIME_IN"),
            _("UPLINK_MAX_LOAD_OUT, Mbps"),
            _("UPLINK_MAX_TIME_OUT"),
            _("UPLINK_AVG_LOAD_IN, Mbps"),
            _("UPLINK_AVG_LOAD_OUT, Mbps"),
            _("UPLINK_TOTAL_IN, Mbyte"),
            _("UPLINK_TOTAL_OUT, Mbyte"),
        ]

        if columns:
            cmap = []
            for c in columns.split(","):
                try:
                    cmap += [cols.index(c)]
                except ValueError:
                    continue
        else:
            cmap = list(range(len(cols)))
        # Fall back to the full column list when no explicit selection is given
        columns_order = columns.split(",") if columns else list(cols)
        columns_filter = set(columns_order)
        r = [translate_row(header_row, cmap)]

        # Date Time Block
        if not from_date:
            from_date = datetime.datetime.now() - datetime.timedelta(days=1)
        else:
            from_date = datetime.datetime.strptime(from_date, "%d.%m.%Y")
        if not to_date or from_date == to_date:
            to_date = from_date + datetime.timedelta(days=1)
        else:
            to_date = datetime.datetime.strptime(
                to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
        diff = to_date - from_date

        # Load managed objects
        mos = ManagedObject.objects.filter(is_managed=True)
        if not request.user.is_superuser:
            mos = mos.filter(
                administrative_domain__in=UserAccess.get_domains(request.user))
        if selector:
            mos = mos.filter(
                ManagedObjectSelector.objects.get(id=int(selector)).Q)
        if administrative_domain:
            mos = mos.filter(
                administrative_domain__in=AdministrativeDomain.get_nested_ids(
                    int(administrative_domain)))
        if object_profile:
            mos = mos.filter(object_profile=object_profile)
        if interface_profile:
            interface_profile = InterfaceProfile.objects.filter(
                id=interface_profile).first()

        mo_attrs = namedtuple("MOATTRs",
                              [c for c in cols if c.startswith("object")])

        containers_address = {}
        if "object_container" in columns_filter:
            containers_address = ReportContainerData(
                set(mos.values_list("id", flat=True)))
            containers_address = dict(list(containers_address.extract()))

        moss = {}
        for row in mos.values_list("bi_id", "name", "address", "platform",
                                   "administrative_domain__name", "segment",
                                   "id"):
            moss[row[0]] = mo_attrs(*[
                row[6],
                row[1],
                row[2],
                smart_text(Platform.get_by_id(row[3]) if row[3] else ""),
                row[4],
                smart_text(NetworkSegment.get_by_id(row[5])) if row[5] else "",
                containers_address.get(row[6], "") if containers_address and row[6] else "",
            ])

        report_metric = ReportInterfaceMetrics(tuple(sorted(moss)),
                                               from_date,
                                               to_date,
                                               columns=None)
        report_metric.SELECT_QUERY_MAP = {
            (0, "managed_object", "id"): "managed_object",
            (1, "path", "iface_name"): "arrayStringConcat(path)",
            (
                2,
                "",
                "iface_description",
            ):
            "dictGetString('interfaceattributes','description' , (managed_object, arrayStringConcat(path)))",
            (
                3,
                "",
                "profile",
            ):
            "dictGetString('interfaceattributes', 'profile', (managed_object, arrayStringConcat(path)))",
            (
                4,
                "speed",
                "iface_speed",
            ):
            "dictGetUInt64('interfaceattributes', 'in_speed', (managed_object, arrayStringConcat(path)))",
            (5, "load_in_max", "load_in_max"): "divide(max(load_in),1048576)",
            (6, "load_out_max", "load_out_max"):
            "divide(max(load_out),1048576)",
            (7, "max_load_in_time", "max_load_in_time"): "argMax(ts,load_in)",
            (8, "max_load_out_time", "max_load_out_time"):
            "argMax(ts,load_out)",
            (9, "avg_load_in", "avg_load_in"): "divide(avg(load_in),1048576)",
            (10, "avg_load_out", "avg_load_out"):
            "divide(avg(load_out),1048576)",
        }
        ifaces_metrics = defaultdict(dict)

        for row in report_metric.do_query():
            avg_in = str_to_float(row[9])
            avg_out = str_to_float(row[10])
            total_in = avg_in * diff.total_seconds() / 8
            total_out = avg_out * diff.total_seconds() / 8
            ifaces_metrics[row[0]][row[1]] = {
                "description": row[2],
                "profile": row[3],
                "bandwidth": row[4],
                "max_load_in": str_to_float(row[5]),
                "max_load_out": str_to_float(row[6]),
                "max_load_in_time": row[7],
                "max_load_out_time": row[8],
                "avg_load_in": avg_in,
                "avg_load_out": avg_out,
                "total_in": float("{0:.1f}".format(total_in)),
                "total_out": float("{0:.1f}".format(total_out)),
            }

        # find uplinks
        links = {}
        if cmap[-1] > 17:  # the selection includes uplink_* columns (cols indices 18+)
            mos_id = list(mos.values_list("id", flat=True))
            uplinks = {obj: [] for obj in mos_id}
            for d in ObjectData._get_collection().find(
                {"_id": {
                    "$in": mos_id
                }}, {
                    "_id": 1,
                    "uplinks": 1
                }):
                uplinks[d["_id"]] = d.get("uplinks", [])
            rld = load(mos_id)

            for mo in uplinks:
                for uplink in uplinks[mo]:
                    if rld[mo]:
                        if mo in links:
                            links[mo] += [rld[mo][uplink]]
                        else:
                            links[mo] = [rld[mo][uplink]]

        for mo_bi in ifaces_metrics:
            mo_id = moss[int(mo_bi)]
            mo_ids = getattr(mo_id, "object_id")

            for i in ifaces_metrics[mo_bi]:
                # Skip interfaces that carried no load at all when zero rows are excluded
                if exclude_zero:
                    if (ifaces_metrics[mo_bi][i]["max_load_in"] == 0
                            and ifaces_metrics[mo_bi][i]["max_load_out"] == 0):
                        continue
                if description and description not in ifaces_metrics[mo_bi][i]["description"]:
                    continue
                if interface_profile and interface_profile.name not in ifaces_metrics[mo_bi][i]["profile"]:
                    continue

                row2 = [
                    mo_ids,
                    getattr(mo_id, "object_name"),
                    getattr(mo_id, "object_address"),
                    getattr(mo_id, "object_platform"),
                    getattr(mo_id, "object_adm_domain"),
                    getattr(mo_id, "object_segment"),
                    getattr(mo_id, "object_container"),
                    i,
                    ifaces_metrics[mo_bi][i]["description"],
                    ifaces_metrics[mo_bi][i]["bandwidth"],
                    ifaces_metrics[mo_bi][i]["max_load_in"],
                    ifaces_metrics[mo_bi][i]["max_load_in_time"],
                    ifaces_metrics[mo_bi][i]["max_load_out"],
                    ifaces_metrics[mo_bi][i]["max_load_out_time"],
                    ifaces_metrics[mo_bi][i]["avg_load_in"],
                    ifaces_metrics[mo_bi][i]["avg_load_out"],
                    ifaces_metrics[mo_bi][i]["total_in"],
                    ifaces_metrics[mo_bi][i]["total_out"],
                    "",
                    "",
                    "",
                    "",
                    "",
                    "",
                    "",
                    "",
                    "",
                    "",
                    "",
                ]

                ss = True  # set to False once a row with uplink data has been written
                if mo_ids in links:
                    for ifname_uplink in links[mo_ids]:
                        if ifname_uplink in ifaces_metrics[mo_bi]:
                            row2[18] = ifname_uplink
                            row2[19] = ifaces_metrics[mo_bi][ifname_uplink][
                                "description"]
                            row2[20] = ifaces_metrics[mo_bi][ifname_uplink][
                                "bandwidth"]
                            row2[21] = ifaces_metrics[mo_bi][ifname_uplink][
                                "max_load_in"]
                            row2[22] = ifaces_metrics[mo_bi][ifname_uplink][
                                "max_load_in_time"]
                            row2[23] = ifaces_metrics[mo_bi][ifname_uplink][
                                "max_load_out"]
                            row2[24] = ifaces_metrics[mo_bi][ifname_uplink][
                                "max_load_out_time"]
                            row2[25] = ifaces_metrics[mo_bi][ifname_uplink][
                                "avg_load_in"]
                            row2[26] = ifaces_metrics[mo_bi][ifname_uplink][
                                "avg_load_out"]
                            row2[27] = ifaces_metrics[mo_bi][ifname_uplink][
                                "total_in"]
                            row2[28] = ifaces_metrics[mo_bi][ifname_uplink][
                                "total_out"]
                            r += [translate_row(row2, cmap)]
                            ss = False
                if ss:
                    r += [translate_row(row2, cmap)]

        filename = "metrics_detail_report_%s" % datetime.datetime.now().strftime("%Y%m%d")
        if o_format == "csv":
            response = HttpResponse(content_type="text/csv")
            response["Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
            writer = csv.writer(response,
                                dialect="excel",
                                delimiter=",",
                                quoting=csv.QUOTE_MINIMAL)
            writer.writerows(r)
            return response
        elif o_format == "csv_zip":
            response = BytesIO()
            f = TextIOWrapper(TemporaryFile(mode="w+b"), encoding="utf-8")
            writer = csv.writer(f,
                                dialect="excel",
                                delimiter=";",
                                quotechar='"')
            writer.writerows(r)
            f.seek(0)
            with ZipFile(response, "w", compression=ZIP_DEFLATED) as zf:
                zf.writestr("%s.csv" % filename, f.read())
                zf.filename = "%s.csv.zip" % filename
            # response = HttpResponse(content_type="text/csv")
            response.seek(0)
            response = HttpResponse(response.getvalue(),
                                    content_type="application/zip")
            response["Content-Disposition"] = 'attachment; filename="%s.csv.zip"' % filename
            return response
        elif o_format == "xlsx":
            response = BytesIO()
            wb = xlsxwriter.Workbook(response)
            cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
            ws = wb.add_worksheet("Metrics")
            max_column_data_length = {}
            for rn, x in enumerate(r):
                for cn, c in enumerate(x):
                    if rn and (r[0][cn] not in max_column_data_length or
                               len(str(c)) > max_column_data_length[r[0][cn]]):
                        max_column_data_length[r[0][cn]] = len(str(c))
                    ws.write(rn, cn, c, cf1)
            ws.autofilter(0, 0, rn, cn)
            ws.freeze_panes(1, 0)
            for cn, c in enumerate(r[0]):
                # Set column width
                width = get_column_width(c)
                if enable_autowidth and width < max_column_data_length[c]:
                    width = max_column_data_length[c]
                ws.set_column(cn, cn, width=width)
            wb.close()
            response.seek(0)
            response = HttpResponse(response.getvalue(),
                                    content_type="application/vnd.ms-excel")
            response["Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
            response.close()
            return response
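The csv_zip branch above writes the rows through a TextIOWrapper over a temporary file and then packs that file into an in-memory ZIP. The same stdlib-only pattern in isolation (the rows are made up):

import csv
from io import BytesIO, TextIOWrapper
from tempfile import TemporaryFile
from zipfile import ZipFile, ZIP_DEFLATED

rows = [["Object", "Address"], ["sw-01", "10.0.0.1"], ["sw-02", "10.0.0.2"]]

# Write CSV text through a wrapper over a temp file, then pack it into an in-memory ZIP
csv_file = TextIOWrapper(TemporaryFile(mode="w+b"), encoding="utf-8")
csv.writer(csv_file, dialect="excel", delimiter=";", quotechar='"').writerows(rows)
csv_file.seek(0)

archive = BytesIO()
with ZipFile(archive, "w", compression=ZIP_DEFLATED) as zf:
    zf.writestr("report.csv", csv_file.read())

# archive.getvalue() is what the view would return as the HTTP response body
print(len(archive.getvalue()), "bytes")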
Example No. 27
0
    def get_data(self,
                 request,
                 interval,
                 from_date=None,
                 to_date=None,
                 **kwargs):
        interval = int(interval)
        if not from_date:
            interval = 1
        if interval:
            ts = datetime.datetime.now() - datetime.timedelta(days=interval)
            match = {"ts": {"$gte": ts}}
        else:
            t0 = datetime.datetime.strptime(from_date, "%d.%m.%Y")
            if not to_date:
                t1 = datetime.datetime.now()
            else:
                t1 = datetime.datetime.strptime(
                    to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
            match = {"ts": {"$gte": t0, "$lte": t1}}
        pipeline = [
            {
                "$match": match
            },
            {
                "$group": {
                    "_id": "$object",
                    "count": {
                        "$sum": 1
                    }
                }
            },
            {
                "$sort": {
                    "count": -1
                }
            },
        ]
        data = list(Reboot._get_collection().aggregate(pipeline))
        # Get managed objects
        if not request.user.is_superuser:
            id_perm = [
                mo.id for mo in ManagedObject.objects.filter(
                    administrative_domain__in=UserAccess.get_domains(
                        request.user))
            ]
            ids = [x["_id"] for x in data if x["_id"] in id_perm]
        else:
            ids = [x["_id"] for x in data]
        mo_names = {}  # mo id -> mo name
        cursor = connection.cursor()
        while ids:
            chunk = [str(x) for x in ids[:500]]
            ids = ids[500:]
            cursor.execute("""
                SELECT id, address, name
                FROM sa_managedobject
                WHERE id IN (%s)""" % ", ".join(chunk))
            mo_names.update(dict((c[0], c[1:3]) for c in cursor))
        #
        if not request.user.is_superuser:
            data = [(mo_names.get(x["_id"], "---")[1],
                     mo_names.get(x["_id"], "---")[0], x["count"])
                    for x in data if x["_id"] in id_perm]
        else:
            data = [(mo_names.get(x["_id"], "---")[1],
                     mo_names.get(x["_id"], "---")[0], x["count"])
                    for x in data]

        return self.from_dataset(
            title=self.title,
            columns=[
                _("Managed Object"),
                _("Address"),
                TableColumn(_("Reboots"),
                            align="right",
                            format="numeric",
                            total="sum"),
            ],
            data=data,
            enumerate=True,
        )
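The raw SQL above resolves object names in chunks of 500 IDs so the IN (...) list stays bounded. A generic sketch of the same chunking, here using the parameterized placeholders Django's cursor accepts; the helper name is made up:

def fetch_mo_names(cursor, ids, chunk_size=500):
    """id -> (address, name), queried in bounded chunks."""
    result = {}
    ids = list(ids)
    while ids:
        chunk, ids = ids[:chunk_size], ids[chunk_size:]
        placeholders = ", ".join(["%s"] * len(chunk))
        cursor.execute(
            "SELECT id, address, name FROM sa_managedobject WHERE id IN (%s)" % placeholders,
            chunk,
        )
        result.update(dict((row[0], row[1:3]) for row in cursor))
    return result

# Usage inside the report: mo_names = fetch_mo_names(connection.cursor(), ids)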
Example No. 28
0
File: views.py Project: 0pt1on/noc
    def api_report(
        self,
        request,
        o_format,
        administrative_domain=None,
        selector=None,
        interface_profile=None,
        zero=None,
        def_profile=None,
        columns=None,
        enable_autowidth=False,
    ):
        def humanize_speed(speed):
            if not speed:
                return "-"
            for t, n in [(1000000, "G"), (1000, "M"), (1, "k")]:
                if speed >= t:
                    if speed // t * t == speed:
                        return "%d%s" % (speed // t, n)
                    else:
                        return "%.2f%s" % (float(speed) / t, n)
            return str(speed)

        def row(row):
            def qe(v):
                if v is None:
                    return ""
                if isinstance(v, unicode):
                    return v.encode("utf-8")
                elif isinstance(v, datetime.datetime):
                    return v.strftime("%Y-%m-%d %H:%M:%S")
                elif not isinstance(v, str):
                    return str(v)
                else:
                    return v

            return [qe(x) for x in row]

        def translate_row(row, cmap):
            return [row[i] for i in cmap]

        cols = [
            "object_name",
            "object_address",
            "object_model",
            "object_software",
            "object_port_name",
            "object_port_profile_name",
            "object_port_status",
            "object_link_status",
            "object_port_speed",
            "object_port_duplex",
            "object_port_untagged_vlan",
            "object_port_tagged_vlans",
        ]

        header_row = [
            "MANAGED_OBJECT",
            "OBJECT_ADDRESS",
            "OBJECT_MODEL",
            "OBJECT_SOFTWARE",
            "PORT_NAME",
            "PORT_PROFILE_NAME",
            "PORT_STATUS",
            "LINK_STATUS",
            "PORT_SPEED",
            "PORT_DUPLEX",
            "PORT_UNTAGGED_VLAN",
            "PORT_TAGGED_VLANS",
        ]

        if columns:
            cmap = []
            for c in columns.split(","):
                try:
                    cmap += [cols.index(c)]
                except ValueError:
                    continue
        else:
            cmap = list(range(len(cols)))

        r = [translate_row(header_row, cmap)]
        mo = {}
        if_p = {}
        DUPLEX = {True: "Full", False: "Half"}

        for ifp in InterfaceProfile.objects.filter():
            if_p[ifp.id] = {"name": ifp.name}
        mos = ManagedObject.objects.filter(is_managed=True)
        if (request.user.is_superuser and not administrative_domain
                and not selector and not interface_profile):
            mos = ManagedObject.objects.filter(is_managed=True)
        if not request.user.is_superuser:
            mos = mos.filter(
                administrative_domain__in=UserAccess.get_domains(request.user))
        if administrative_domain:
            ads = AdministrativeDomain.get_nested_ids(
                int(administrative_domain))
            mos = mos.filter(administrative_domain__in=ads)
        if selector:
            selector = ManagedObjectSelector.get_by_id(int(selector))
            mos = mos.filter(selector.Q)

        for o in mos:
            mo[o.id] = {
                "type": "managedobject",
                "id": str(o.id),
                "name": o.name,
                "status": o.is_managed,
                "address": o.address,
                "vendor": o.vendor,
                "version": o.version,
                "platform": o.platform,
            }

        mos_id = list(mos.values_list("id", flat=True))

        rld = ReportInterfaceStatus(mos_id, zero, def_profile,
                                    interface_profile)

        for i in rld.out:
            untag, tagged = "", ""
            if i["subs"]:
                untag = i["subs"][0].get("untagged_vlan", "")
                tagged = list_to_ranges(i["subs"][0].get("tagged_vlans", []))
            r += [
                translate_row(
                    row([
                        mo[i["managed_object"]]["name"],
                        mo[i["managed_object"]]["address"],
                        "%s %s" % (
                            str(mo[i["managed_object"]]["vendor"]),
                            str(mo[i["managed_object"]]["platform"]),
                        ),
                        str(mo[i["managed_object"]]["version"]),
                        i["name"],
                        if_p[i["profile"]]["name"],
                        "UP" if i["admin_status"] is True else "Down",
                        "UP" if "oper_status" in i and i["oper_status"] is True
                        else "Down",
                        humanize_speed(i["in_speed"])
                        if "in_speed" in i else "-",
                        DUPLEX.get(i["full_duplex"])
                        if "full_duplex" in i and "in_speed" in i else "-",
                        untag,
                        tagged,
                    ]),
                    cmap,
                )
            ]

        filename = "interface_status_report_%s" % datetime.datetime.now().strftime("%Y%m%d")
        if o_format == "csv":
            response = HttpResponse(content_type="text/csv")
            response["Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
            writer = csv.writer(response, dialect="excel", delimiter=";")
            writer.writerows(r)
            return response
        elif o_format == "xlsx":
            response = StringIO()
            wb = xlsxwriter.Workbook(response)
            cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
            ws = wb.add_worksheet("Objects")
            max_column_data_length = {}
            for rn, x in enumerate(r):
                for cn, c in enumerate(x):
                    if rn and (r[0][cn] not in max_column_data_length or
                               len(str(c)) > max_column_data_length[r[0][cn]]):
                        max_column_data_length[r[0][cn]] = len(str(c))
                    ws.write(rn, cn, c, cf1)
            ws.autofilter(0, 0, rn, cn)
            ws.freeze_panes(1, 0)
            for cn, c in enumerate(r[0]):
                # Set column width
                width = get_column_width(c)
                if enable_autowidth and width < max_column_data_length[c]:
                    width = max_column_data_length[c]
                ws.set_column(cn, cn, width=width)
            wb.close()
            response.seek(0)
            response = HttpResponse(response.getvalue(),
                                    content_type="application/vnd.ms-excel")
            # response = HttpResponse(
            #     content_type="application/x-ms-excel")
            response["Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
            response.close()
            return response
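humanize_speed above treats its input as kbit/s and picks the largest unit that divides cleanly, falling back to two decimals. For illustration, assuming the helper were lifted out of api_report to module level:

def humanize_speed(speed):  # copied from the example above; input in kbit/s
    if not speed:
        return "-"
    for t, n in [(1000000, "G"), (1000, "M"), (1, "k")]:
        if speed >= t:
            if speed // t * t == speed:
                return "%d%s" % (speed // t, n)
            return "%.2f%s" % (float(speed) / t, n)
    return str(speed)

for s in [None, 100, 1536, 10000, 2500000]:
    print(s, "->", humanize_speed(s))
# None -> -   100 -> 100k   1536 -> 1.54M   10000 -> 10M   2500000 -> 2.50G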
Example No. 29
0
 def queryset(self, request, query=None):
     qs = super(ManagedObjectApplication, self).queryset(request, query)
     if not request.user.is_superuser:
         qs = qs.filter(UserAccess.Q(request.user))
     qs = qs.exclude(name__startswith="wiping-")
     return qs
Example No. 30
0
    def api_report(
        self,
        request,
        reporttype=None,
        from_date=None,
        to_date=None,
        object_profile=None,
        filter_default=None,
        exclude_zero=None,
        interface_profile=None,
        selector=None,
        administrative_domain=None,
        columns=None,
        o_format=None,
        enable_autowidth=False,
        **kwargs
    ):
        def translate_row(row, cmap):
            return [row[i] for i in cmap]

        map_table = {
            "load_interfaces": "/Interface\s\|\sLoad\s\|\s[In|Out]/",
            "load_cpu": "/[CPU|Memory]\s\|\sUsage/",
            "errors": "/Interface\s\|\s[Errors|Discards]\s\|\s[In|Out]/",
            "ping": "/Ping\s\|\sRTT/",
        }
        cols = [
            "id",
            "object_name",
            "object_address",
            "object_platform",
            "object_adm_domain",
            "object_segment",
            # "object_hostname",
            # "object_status",
            # "profile_name",
            # "object_profile",
            # "object_vendor",
            "iface_name",
            "iface_description",
            "iface_speed",
            "load_in",
            "load_in_p",
            "load_out",
            "load_out_p",
            "errors_in",
            "errors_out",
            "slot",
            "cpu_usage",
            "memory_usage",
            "ping_rtt",
            "ping_attempts",
            "interface_flap",
            "interface_load_url",
        ]

        header_row = [
            "ID",
            "OBJECT_NAME",
            "OBJECT_ADDRESS",
            "OBJECT_PLATFORM",
            "OBJECT_ADM_DOMAIN",
            "OBJECT_SEGMENT",
            "IFACE_NAME",
            "IFACE_DESCRIPTION",
            "IFACE_SPEED",
            "LOAD_IN",
            "LOAD_IN_P",
            "LOAD_OUT",
            "LOAD_OUT_P",
            "ERRORS_IN",
            "ERRORS_OUT",
            "CPU_USAGE",
            "MEMORY_USAGE",
            "PING_RTT",
            "PING_ATTEMPTS",
            "INTERFACE_FLAP",
            "INTERFACE_LOAD_URL",
        ]

        if columns:
            cmap = []
            for c in columns.split(","):
                try:
                    cmap += [cols.index(c)]
                except ValueError:
                    continue
        else:
            cmap = list(range(len(cols)))
        columns_order = columns.split(",") if columns else list(cols)  # tolerate a missing column selection
        columns_filter = set(columns_order)
        r = [translate_row(header_row, cmap)]
        object_columns = [c for c in columns_order if c.startswith("object")]

        # Date Time Block
        if not from_date:
            from_date = datetime.datetime.now() - datetime.timedelta(days=1)
        else:
            from_date = datetime.datetime.strptime(from_date, "%d.%m.%Y")
        if not to_date or from_date == to_date:
            to_date = from_date + datetime.timedelta(days=1)
        else:
            to_date = datetime.datetime.strptime(to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
        # interval = (to_date - from_date).days
        ts_from_date = time.mktime(from_date.timetuple())
        ts_to_date = time.mktime(to_date.timetuple())

        # Load managed objects
        mos = ManagedObject.objects.filter(is_managed=True)
        if not request.user.is_superuser:
            mos = mos.filter(administrative_domain__in=UserAccess.get_domains(request.user))
        if selector:
            mos = mos.filter(ManagedObjectSelector.objects.get(id=int(selector)).Q)
        if administrative_domain:
            mos = mos.filter(
                administrative_domain__in=AdministrativeDomain.get_nested_ids(
                    int(administrative_domain)
                )
            )
        if object_profile:
            mos = mos.filter(object_profile=object_profile)
        # iface_dict = {}

        d_url = {
            "path": "/ui/grafana/dashboard/script/report.js",
            "rname": map_table[reporttype],
            "from": str(int(ts_from_date * 1000)),
            "to": str(int(ts_to_date * 1000)),
            # o.name.replace("#", "%23")
            "biid": "",
            "oname": "",
            "iname": "",
        }

        report_map = {
            "load_interfaces": {
                "url": "%(path)s?title=interface&biid=%(biid)s"
                "&obj=%(oname)s&iface=%(iname)s&from=%(from)s&to=%(to)s",
                "q_group": ["interface"],
                "q_select": {
                    (0, "managed_object", "id"): "managed_object",
                    (1, "path", "iface_name"): "arrayStringConcat(path)",
                },
            },
            "errors": {
                "url": """%(path)s?title=errors&biid=%(biid)s&obj=%(oname)s&iface=%(iname)s&from=%(from)s&to=%(to)s""",
                "q_group": ["interface"],
            },
            "load_cpu": {
                "url": """%(path)s?title=cpu&biid=%(biid)s&obj=%(oname)s&from=%(from)s&to=%(to)s""",
                "q_select": {
                    (0, "managed_object", "id"): "managed_object",
                    (1, "path", "slot"): "arrayStringConcat(path)",
                },
            },
            "ping": {
                "url": """%(path)s?title=ping&biid=%(biid)s&obj=%(oname)s&from=%(from)s&to=%(to)s""",
                "q_select": {(0, "managed_object", "id"): "managed_object"},
            },
        }

        query_map = {
            # "iface_description": ('', 'iface_description', "''"),
            "iface_description": (
                "",
                "iface_description",
                "dictGetString('interfaceattributes','description' , (managed_object, arrayStringConcat(path)))",
            ),
            "iface_speed": (
                "speed",
                "iface_speed",
                "if(max(speed) = 0, dictGetUInt64('interfaceattributes', 'in_speed', "
                "(managed_object, arrayStringConcat(path))), max(speed))",
            ),
            "load_in": ("load_in", "l_in", "round(quantile(0.90)(load_in), 0)"),
            "load_in_p": (
                "load_in",
                "l_in_p",
                "replaceOne(toString(round(quantile(0.90)(load_in) / "
                "if(max(speed) = 0, dictGetUInt64('interfaceattributes', 'in_speed', "
                "(managed_object, arrayStringConcat(path))), max(speed)), 4) * 100), '.', ',')",
            ),
            "load_out": ("load_out", "l_out", "round(quantile(0.90)(load_out), 0)"),
            "load_out_p": (
                "load_out",
                "l_out_p",
                "replaceOne(toString(round(quantile(0.90)(load_out) / "
                "if(max(speed) = 0, dictGetUInt64('interfaceattributes', 'in_speed', "
                "(managed_object, arrayStringConcat(path))), max(speed)), 4) * 100), '.', ',')",
            ),
            "errors_in": ("errors_in", "err_in", "quantile(0.90)(errors_in)"),
            "errors_out": ("errors_out", "err_out", "quantile(0.90)(errors_out)"),
            "cpu_usage": ("usage", "cpu_usage", "quantile(0.90)(usage)"),
            "ping_rtt": ("rtt", "ping_rtt", "round(quantile(0.90)(rtt) / 1000, 2)"),
            "ping_attempts": ("attempts", "ping_attempts", "avg(attempts)"),
        }
        query_fields = []
        for c in report_map[reporttype]["q_select"]:
            query_fields += [c[2]]
        field_shift = len(query_fields)  # deny replacing field
        for c in columns.split(","):
            if c not in query_map:
                continue
            field, alias, func = query_map[c]
            report_map[reporttype]["q_select"][
                (columns_order.index(c) + field_shift, field, alias)
            ] = func
            query_fields += [c]
        metrics_attrs = namedtuple("METRICSATTRs", query_fields)

        mo_attrs = namedtuple("MOATTRs", [c for c in cols if c.startswith("object")])
        moss = {}
        for row in mos.values_list(
            "bi_id", "name", "address", "platform", "administrative_domain__name", "segment"
        ):
            moss[row[0]] = mo_attrs(
                *[
                    row[1],
                    row[2],
                    str(Platform.get_by_id(row[3]) if row[3] else ""),
                    row[4],
                    str(NetworkSegment.get_by_id(row[5])) if row[5] else "",
                ]
            )
        url = report_map[reporttype].get("url", "")
        report_metric = self.metric_source[reporttype](
            tuple(sorted(moss)), from_date, to_date, columns=None
        )
        report_metric.SELECT_QUERY_MAP = report_map[reporttype]["q_select"]
        if exclude_zero and reporttype == "load_interfaces":
            report_metric.CUSTOM_FILTER["having"] += ["max(load_in) != 0 AND max(load_out) != 0"]
        if interface_profile:
            interface_profile = InterfaceProfile.objects.filter(id=interface_profile).first()
            report_metric.CUSTOM_FILTER["having"] += [
                "dictGetString('interfaceattributes', 'profile', "
                "(managed_object, arrayStringConcat(path))) = '%s'" % interface_profile.name
            ]
        # OBJECT_PLATFORM, ADMIN_DOMAIN, SEGMENT, OBJECT_HOSTNAME
        for row in report_metric.do_query():
            mm = metrics_attrs(*row)
            mo = moss[int(mm.id)]
            res = []
            for y in columns_order:
                if y in object_columns:
                    res += [getattr(mo, y)]
                else:
                    res += [getattr(mm, y)]
            if "interface_load_url" in columns_filter:
                d_url["biid"] = mm.id
                d_url["oname"] = mo[2].replace("#", "%23")
                # res += [url % d_url, interval]
                res.insert(columns_order.index("interface_load_url"), url % d_url)
            r += [res]
        filename = "metrics_detail_report_%s" % datetime.datetime.now().strftime("%Y%m%d")
        if o_format == "csv":
            response = HttpResponse(content_type="text/csv")
            response["Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
            writer = csv.writer(response, dialect="excel", delimiter=",", quoting=csv.QUOTE_MINIMAL)
            writer.writerows(r)
            return response
        elif o_format == "xlsx":
            response = StringIO()
            wb = xlsxwriter.Workbook(response)
            cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
            ws = wb.add_worksheet("Alarms")
            max_column_data_length = {}
            for rn, x in enumerate(r):
                for cn, c in enumerate(x):
                    if rn and (
                        r[0][cn] not in max_column_data_length
                        or len(str(c)) > max_column_data_length[r[0][cn]]
                    ):
                        max_column_data_length[r[0][cn]] = len(str(c))
                    ws.write(rn, cn, c, cf1)
            ws.autofilter(0, 0, rn, cn)
            ws.freeze_panes(1, 0)
            for cn, c in enumerate(r[0]):
                # Set column width
                width = get_column_width(c)
                if enable_autowidth and width < max_column_data_length[c]:
                    width = max_column_data_length[c]
                ws.set_column(cn, cn, width=width)
            wb.close()
            response.seek(0)
            response = HttpResponse(response.getvalue(), content_type="application/vnd.ms-excel")
            response["Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
            response.close()
            return response
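Across all of these examples the access restriction boils down to the same idiom: superusers see every ManagedObject, everyone else is limited to the administrative domains returned by UserAccess.get_domains() (Example No. 29 is the one variation, filtering the queryset with UserAccess.Q instead). A condensed sketch of that shared pattern; the helper name is made up, and the import paths are an assumption about the NOC layout:

# Import paths as they appear in the NOC codebase (assumption; adjust if needed)
from noc.sa.models.managedobject import ManagedObject
from noc.sa.models.useraccess import UserAccess

def user_scoped_objects(user, qs=None):
    """Restrict a ManagedObject queryset to what the given user may see."""
    qs = qs if qs is not None else ManagedObject.objects.filter(is_managed=True)
    if user.is_superuser:
        return qs
    return qs.filter(administrative_domain__in=UserAccess.get_domains(user))

# e.g. inside a report handler: mos = user_scoped_objects(request.user)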