Example #1
File: views.py Project: marcosvella/noc
 def api_test(self, request, id):
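     # Resolve the objects attached to maintenance `id` with a MongoDB
     # aggregation over AffectedObjects, then return the basic attributes
     # of the matching ManagedObjects (profile name via Profile.get_by_id).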
     r = []
     out = {"total": 0, "success": True, "data": None}
     data = [
         d
         for d in AffectedObjects._get_collection().aggregate(
             [
                 {"$match": {"maintenance": bson.ObjectId(id)}},
                 {
                     "$project": {"objects": "$affected_objects.object"},
                 },
             ]
         )
     ]
     if data:
         for mo in (
             ManagedObject.objects.filter(is_managed=True, id__in=data[0].get("objects"))
             .values("id", "name", "is_managed", "profile", "address", "description", "labels")
             .distinct()
         ):
             r += [
                 {
                     "id": mo["id"],
                     "name": mo["name"],
                     "is_managed": mo["is_managed"],
                     "profile": Profile.get_by_id(mo["profile"]).name,
                     "address": mo["address"],
                     "description": mo["description"],
                     "labels": mo["labels"],
                 }
             ]
             out = {"total": len(r), "success": True, "data": r}
     return self.response(out, status=self.OK)
Example #2
 def handler(self):
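     # Poll the controller for its CPE list and synchronize it with
     # ManagedObject: update records that already exist, create missing
     # ones with settings inherited from the controller object.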
     self.logger.info("Checking CPEs")
     now = datetime.datetime.now()
     result = self.object.scripts.get_cpe()
     for cpe in result:
         if cpe["status"] != "active":
             self.logger.debug(
                 "[%s|%s] CPE status is '%s'. Skipping",
                 cpe["id"],
                 cpe["global_id"],
                 cpe["status"],
             )
             continue
         mo = self.find_cpe(cpe["global_id"])
         if mo:
             changes = self.update_if_changed(
                 mo,
                 {
                     "controller": self.object,
                     "local_cpe_id": cpe["id"],
                     "global_cpe_id": cpe["global_id"],
                     "address": cpe["ip"],
                     "last_seen": now,
                 },
             )
             if changes:
                 self.logger.info(
                     "[%s|%s] Changed: %s",
                     cpe["id"],
                     cpe["global_id"],
                     ", ".join("%s='%s'" % c for c in changes),
                 )
         else:
             name = cpe.get("name") or "cpe-%s" % cpe["global_id"]
             if ManagedObject.objects.filter(name=name).exists():
                 name = "cpe-%s" % cpe["global_id"]
             self.logger.info("[%s|%s] Created CPE %s", cpe["id"],
                              cpe["global_id"], name)
             mo = ManagedObject(
                 name=name,
                 pool=self.object.pool,
                 profile=Profile.get_by_id(
                     Profile.get_generic_profile_id()),
                 object_profile=self.object.object_profile.cpe_profile
                 or self.object.object_profile,
                 administrative_domain=self.object.administrative_domain,
                 scheme=self.object.scheme,
                 segment=self.object.segment,
                 auth_profile=self.object.object_profile.cpe_auth_profile
                 or self.object.auth_profile,
                 address=cpe.get("ip") or "0.0.0.0",
                 controller=self.object,
                 last_seen=now,
                 local_cpe_id=cpe["id"],
                 global_cpe_id=cpe["global_id"],
             )
             mo.save()
Example #3
 def extract(self):
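     # Stream all ManagedObjects ordered by id and yield report rows,
     # resolving Profile/Platform/Firmware/RemoteSystem references to
     # readable names and container ids to container names.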
     containers = ReportContainerData(
         sorted(
             set(ManagedObject.objects.all().order_by(
                 "container").values_list("container", flat=True))))
     containers = containers.get_dictionary()
     for (
             mo_id,
             bi_id,
             name,
             address,
             profile,
             platform,
             version,
             remote_id,
             remote_system,
             adm_id,
             adm_name,
             container,
     ) in (ManagedObject.objects.all().order_by("id").values_list(
             "id",
             "bi_id",
             "name",
             "address",
             "profile",
             "platform",
             "version",
             "remote_id",
             "remote_system",
             "administrative_domain",
             "administrative_domain__name",
             "container",
     )):
         yield (
             bi_id,
             mo_id,
             name,
             address,
             Profile.get_by_id(profile).name if profile else "",
             Platform.get_by_id(platform).name if platform else "",
             Firmware.get_by_id(version).version if version else "",
             remote_id if remote_id else "",
             RemoteSystem.get_by_id(remote_system).name
             if remote_system else "",
             adm_id,
             adm_name,
             containers.get(container, ("", ))[0] if container else "",
         )
Example #4
 def bulk_field_managed_object(self, data):
     """
     Apply managed objects field
     :param data:
     :return:
     """
     mo_ids = [x["id"] for x in data]
     if not mo_ids:
         return data
     mos = {
         x[0]: {
             "name": x[1],
             "address": x[2],
             "profile_name": str(Profile.get_by_id(x[3]))
         }
         for x in ManagedObject.objects.filter(
             id__in=mo_ids).values_list("id", "name", "address", "profile")
     }
     for x in data:
         x.update(mos[x["id"]])
     return data
Example #5
File: sae.py Project: nbashev/noc
 def get_object_data(self, object_id):
     """
     Worker to resolve credentials
     """
     object_id = int(object_id)
     # Get Object's attributes
     with self.service.get_pg_connect() as connection:
         cursor = connection.cursor()
         cursor.execute(self.RUN_SQL, [object_id, object_id])
         data = cursor.fetchall()
     if not data:
         metrics["error", ("type", "object_not_found")] += 1
         raise APIError("Object is not found")
     # Build capabilities
     capabilities = ObjectCapabilities.get_capabilities(object_id)
     # Get object credentials
     (
         name,
         is_managed,
         profile,
         vendor,
         platform,
         version,
         scheme,
         address,
         port,
         user,
         password,
         super_password,
         remote_path,
         snmp_ro,
         pool_id,
         sw_image,
         auth_profile_id,
         ap_user,
         ap_password,
         ap_super_password,
         ap_snmp_ro,
         ap_snmp_rw,
         privilege_policy,
         snmp_rate_limit,
         p_privilege_policy,
         p_snmp_rate_limit,
         access_preference,
         p_access_preference,
         beef_storage_id,
         beef_path_template_id,
         attrs,
     ) = data[0]
     # Check object is managed
     if not is_managed:
         metrics["error", ("type", "object_not_managed")] += 1
         raise APIError("Object is not managed")
     if auth_profile_id:
         user = ap_user
         password = ap_password
         super_password = ap_super_password
         snmp_ro = ap_snmp_ro
         snmp_rw = ap_snmp_rw  # noqa just to be
     #
     if privilege_policy == "E":
         raise_privileges = True
     elif privilege_policy == "P":
         raise_privileges = p_privilege_policy == "E"
     else:
         raise_privileges = False
     if access_preference == "P":
         access_preference = p_access_preference
     if not snmp_rate_limit:
         snmp_rate_limit = p_snmp_rate_limit
     # Build credentials
     credentials = {
         "name": name,
         "address": address,
         "user": user,
         "password": password,
         "super_password": super_password,
         "path": remote_path,
         "raise_privileges": raise_privileges,
         "access_preference": access_preference,
         "snmp_rate_limit": snmp_rate_limit,
     }
     if snmp_ro:
         credentials["snmp_ro"] = snmp_ro
         if capabilities.get("SNMP | v2c"):
             credentials["snmp_version"] = "v2c"
         elif capabilities.get("SNMP | v1"):
             credentials["snmp_version"] = "v1"
     if scheme in CLI_PROTOCOLS:
         credentials["cli_protocol"] = PROTOCOLS[scheme]
         if port:
             credentials["cli_port"] = port
     elif scheme in HTTP_PROTOCOLS:
         credentials["http_protocol"] = PROTOCOLS[scheme]
         if port:
             credentials["http_port"] = port
     # Build version
     if vendor and platform and version:
         vendor = Vendor.get_by_id(vendor)
         version = {
             "vendor": vendor.code[0] if vendor.code else vendor.name,
             "platform": Platform.get_by_id(platform).name,
             "version": Firmware.get_by_id(version).version,
         }
         if sw_image:
             version["image"] = sw_image
         if attrs:
             version["attributes"] = attrs
     else:
         version = None
     # Beef processing
     if scheme == BEEF and beef_storage_id and beef_path_template_id:
         mo = ManagedObject.get_by_id(object_id)
         tpl = Template.get_by_id(beef_path_template_id)
         beef_path = tpl.render_subject(object=mo)
         if beef_path:
             storage = ExtStorage.get_by_id(beef_storage_id)
             credentials["beef_storage_url"] = storage.url
             credentials["beef_path"] = beef_path
     return dict(
         profile=Profile.get_by_id(profile).name,
         pool_id=pool_id,
         credentials=credentials,
         capabilities=capabilities,
         version=version,
     )
Example #6
 def handler(self):
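     # NBI objectmetrics handler: validate the JSON request, map managed
     # object ids to bi_id, build per-scope ClickHouse queries for the
     # requested metric types and return the collected time series.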
     # Decode request
     try:
         req = ujson.loads(self.request.body)
     except ValueError:
         return 400, "Cannot decode JSON"
     # Validate
     try:
         req = Request.clean(req)
     except ValueError as e:
         return 400, "Bad request: %s" % e
     # Check timestamps
     from_ts = dateutil.parser.parse(req["from"])
     to_ts = dateutil.parser.parse(req["to"])
     if to_ts < from_ts:
         return 400, "Invalid range"
     # Check time range
     delta = to_ts - from_ts
     if delta.total_seconds() > config.nbi.objectmetrics_max_interval:
         return 400, "Requested range too large"
     # Prepare data for queries
     objects = set()
     for mc in req["metrics"]:
         try:
             mo_id = int(mc["object"])
             objects.add(mo_id)
         except ValueError:
             return 400, "Invalid object id: %s" % mc["object"]
     #
     if not objects:
         return 200, []
     # Map managed object id to bi_id
     id_to_bi = {}
     profiles = {}  # object id -> profile
     for mo_id, bi_id, profile_id in ManagedObject.objects.filter(
             id__in=list(objects)).values_list("id", "bi_id", "profile"):
         id_to_bi[str(mo_id)] = bi_id
         profiles[str(mo_id)] = Profile.get_by_id(profile_id).get_profile()
     if not id_to_bi:
         return 404, "Object(s) id not found: %s" % ",".join(
             [str(o) for o in objects])
     # Prepare queries
     scopes = {}  # table_name -> ([fields, ..], [where, ..])
     for mc in req["metrics"]:
         profile = profiles[mc["object"]]
         ifaces = tuple(
             sorted(
                 profile.convert_interface_name(i)
                 for i in mc.get("interfaces", [])))
         for mn in mc["metric_types"]:
             mt = MetricType.get_by_name(mn)
             if not mt:
                 return 400, "Invalid metric_type: %s" % mn
             table = mt.scope.table_name
             q = scopes.get(table)
             if not q:
                 q = (set(), set())
                 scopes[table] = q
             q[0].add(mt.field_name)
             if table == S_INTERFACE:
                 q[1].add((id_to_bi[mc["object"]], ifaces))
             else:
                 q[1].add((id_to_bi[mc["object"]], ))
     # Execute queries and collect result
     from_date = from_ts.strftime("%Y-%m-%d")
     to_date = to_ts.strftime("%Y-%m-%d")
     if from_date == to_date:
         date_q = "date = '%s'" % from_date
     else:
         date_q = "date >= '%s' AND date <= '%s'" % (from_date, to_date)
     date_q = "%s AND ts >= '%s' AND ts <= '%s'" % (
         date_q,
         from_ts.replace(tzinfo=None).isoformat(),
         to_ts.replace(tzinfo=None).isoformat(),
     )
     connect = ClickhouseClient()
     scope_data = {}
     for table in scopes:
         sdata = {}  # managed_object.bi_id, interface, field -> ([(ts, value), ...], path)
         scope_data[table] = sdata
         # Build SQL request
         qx = []
         for wx in scopes[table][1]:
             if len(wx) == 1 or not wx[1]:
                 qx += ["(managed_object = %d)" % wx[0]]
             elif len(wx[1]) == 1:
                 qx += [
                     "(managed_object = %d AND path[4] = '%s')" %
                     (wx[0], wx[1][0])
                 ]
             else:
                 qx += [
                     "(managed_object = %d AND path[4] IN (%s))" %
                     (wx[0], ", ".join("'%s'" % x for x in wx[1]))
                 ]
         fields = ["ts", "managed_object", "path"] + sorted(
             scopes[table][0])
         query = "SELECT %s FROM %s WHERE %s AND (%s)" % (
             ", ".join(fields),
             table,
             date_q,
             " OR ".join(qx),
         )
         # Execute
         self.logger.info("%s", query)
         try:
             data = connect.execute(query)
         except ClickhouseError as e:
             self.logger.error("SQL Error: %s", e)
             return 500, "SQL Error: %s" % e
         # Process result
         for row in data:
             d = dict(zip(fields, row))
             ts = d.pop("ts")
             mo = int(d.pop("managed_object"))
             path = self.clear_path(d.pop("path"))
             if table == S_INTERFACE:
                 iface = path[3]
             else:
                 iface = None
             for field in d:
                 key = (mo, iface, field)
                 item = (ts, d[field])
                 bucket = sdata.get(key)
                 if bucket:
                     xdata = bucket[0]
                     xdata += [item]
                 else:
                     sdata[key] = ([item], path)
     # Format result
     result = []
     for mc in req["metrics"]:
         ifaces = tuple(sorted(mc.get("interfaces", [])))
         mo_bi_id = id_to_bi[mc["object"]]
         for mn in mc["metric_types"]:
             mt = MetricType.get_by_name(mn)
             table = mt.scope.table_name
             field = mt.field_name
             if table == S_INTERFACE:
                 if_list = ifaces
             else:
                 if_list = (None, )
             sdata = scope_data[table]
             for iface in if_list:
                 key = (mo_bi_id, iface, field)
                 mdata = sdata.get(key)
                 if not mdata:
                     continue
                 points, path = mdata
                 # Clean data type
                 points = sorted(
                     ((p[0].replace(" ", "T"), mt.clean_value(p[1]))
                      for p in points),
                     key=operator.itemgetter(0),
                 )
                 #
                 r = {
                     "object": mc["object"],
                     "metric_type": mn,
                     "path": path,
                     "values": points
                 }
                 if iface is not None:
                     r["interface"] = iface
                 result += [r]
     # Return response
     return 200, {"from": req["from"], "to": req["to"], "metrics": result}
Example #7
File: views.py Project: nbashev/noc
 def get_data(self,
              request,
              repo="config",
              days=1,
              adm_domain=None,
              managed_object=None,
              **kwargs):
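     # Report the last change timestamp per object in the GridVCS
     # repository `repo` (e.g. "config" or "dns") over the last `days`
     # days, restricted to the user's administrative domains.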
     baseline = datetime.datetime.now() - datetime.timedelta(days=days)
     coll = get_db()["noc.gridvcs.%s.files" % repo].with_options(
         read_preference=ReadPreference.SECONDARY_PREFERRED)
     pipeline = [
         {
             "$match": {
                 "ts": {
                     "$gte": baseline
                 }
             }
         },
         {
             "$group": {
                 "_id": "$object",
                 "last_ts": {
                     "$max": "$ts"
                 }
             }
         },
         {
             "$sort": {
                 "_id": 1
             }
         },
     ]
     if repo == "config":
         objects = ManagedObject.objects.filter()
         if not request.user.is_superuser:
             objects = objects.filter(administrative_domain__in=UserAccess.
                                      get_domains(request.user))
         if adm_domain:
             adm_domain = AdministrativeDomain.get_nested_ids(adm_domain)
             objects = objects.filter(administrative_domain__in=adm_domain)
         if managed_object:
             mo_q = ManagedObject.get_search_Q(managed_object)
             if not mo_q:
                 objects = objects.filter(name__contains=managed_object)
             else:
                 objects = objects.filter(mo_q)
         pipeline = [{
             "$match": {
                 "object": {
                     "$in": list(objects.values_list("id", flat=True))
                 }
             }
         }] + pipeline
     # Perform query
     data = list(coll.aggregate(pipeline))
     # Resolve names
     result = []
     if data:
         seen_ids = list(set(d["_id"] for d in data))
         n_map = {}
         if repo == "config":
             n_map = {
                 x[0]: x[1:]
                 for x in ManagedObject.objects.filter(
                     id__in=list(seen_ids)).values_list(
                         "id", "name", "address", "profile")
             }
         elif repo == "dns":
             n_map = {
                 x[0]: x[1:]
                 for x in DNSZone.objects.filter(
                     id__in=list(seen_ids)).values_list(
                         "id", "name", "address", "profile")
             }
         for d in data:
             name, address, profile = n_map.get(d["_id"], ("-", "-", None))
             result += [(d["_id"], name, address,
                         Profile.get_by_id(profile) if profile else "-",
                         d["last_ts"])]
     return self.from_dataset(
         title="%s: %s in %d days" % (self.title, repo, days),
         columns=[
             "ID",
             "Name",
             "Address",
             "Profile",
             TableColumn(_("Last Changed"), format="datetime"),
         ],
         data=result,
         enumerate=True,
     )
Example #8
    def api_report(
        self,
        request,
        o_format,
        is_managed=None,
        administrative_domain=None,
        selector=None,
        pool=None,
        segment=None,
        avail_status=False,
        columns=None,
        ids=None,
        detail_stat=None,
        enable_autowidth=False,
    ):
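        # Managed object detail report: collect the selected columns for
        # every matching ManagedObject (status, profile, platform, serial,
        # interface and link counts, ...) and export the rows as CSV or XLSX.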
        def row(row):
            def qe(v):
                if v is None:
                    return ""
                if isinstance(v, unicode):
                    return v.encode("utf-8")
                elif isinstance(v, datetime.datetime):
                    return v.strftime("%Y-%m-%d %H:%M:%S")
                elif not isinstance(v, str):
                    return str(v)
                else:
                    return v

            return [qe(x) for x in row]

        def translate_row(row, cmap):
            return [row[i] for i in cmap]

        type_columns = ["Up/10G", "Up/1G", "Up/100M", "Up/10M", "Down/-", "-"]
        cols = [
            "id", "object_name", "object_address", "object_hostname",
            "object_status", "profile_name", "object_profile", "object_vendor",
            "object_platform", "object_attr_hwversion", "object_version",
            "object_attr_bootprom", "object_serial", "object_attr_patch",
            "auth_profile", "avail", "admin_domain", "container", "segment",
            "phys_interface_count", "link_count", "last_config_ts"
            # "discovery_problem"
            # "object_tags"
            # "sorted_tags"
            # "object_caps"
            # "interface_type_count"
        ]

        header_row = [
            "ID",
            "OBJECT_NAME",
            "OBJECT_ADDRESS",
            "OBJECT_HOSTNAME",
            "OBJECT_STATUS",
            "PROFILE_NAME",
            "OBJECT_PROFILE",
            "OBJECT_VENDOR",
            "OBJECT_PLATFORM",
            "OBJECT_HWVERSION",
            "OBJECT_VERSION",
            "OBJECT_BOOTPROM",
            "OBJECT_SERIAL",
            "OBJECT_ATTR_PATCH",
            "AUTH_PROFILE",
            "AVAIL",
            "ADMIN_DOMAIN",
            "CONTAINER",
            "SEGMENT",
            "PHYS_INTERFACE_COUNT",
            "LINK_COUNT",
            "LAST_CONFIG_TS",
        ]
        # "DISCOVERY_PROBLEM"
        # "ADM_PATH
        # "DISCOVERY_PROBLEM"
        # "OBJECT_TAGS"
        # "SORTED_TAGS"
        # "OBJECT_CAPS"
        # "INTERFACE_TYPE_COUNT"

        if columns:
            cmap = []
            for c in columns.split(","):
                try:
                    cmap += [cols.index(c)]
                except ValueError:
                    continue
        else:
            cmap = list(range(len(cols)))
        r = [translate_row(header_row, cmap)]
        mos = self.get_report_object(request.user, is_managed,
                                     administrative_domain, selector, pool,
                                     segment, ids)
        columns_filter = set(columns.split(","))
        mos_id = tuple(mos.order_by("id").values_list("id", flat=True))
        mos_filter = None
        if detail_stat:
            ref = ReportModelFilter()
            ids = list(six.itervalues(ref.proccessed(detail_stat)))
            mos_filter = set(mos_id).intersection(ids[0])
            mos_id = sorted(mos_filter)
        avail = {}
        if "avail" in columns_filter:
            avail = ObjectStatus.get_statuses(mos_id)
        link_count = iter(ReportObjectLinkCount(mos_id))
        iface_count = iter(ReportObjectIfacesTypeStat(mos_id))
        if "container" in columns_filter:
            container_lookup = iter(ReportContainerData(mos_id))
        else:
            container_lookup = None
        if "object_serial" in columns_filter:
            container_serials = iter(ReportContainer(mos_id))
        else:
            container_serials = None
        if "interface_type_count" in columns_filter:
            iss = iter(ReportObjectIfacesStatusStat(mos_id))
        else:
            iss = None
        if "object_attr_patch" in columns_filter or "object_serial" in columns_filter:
            roa = iter(ReportObjectAttributes(mos_id))
        else:
            roa = None
        hn = iter(ReportObjectsHostname1(mos_id))
        rc = iter(ReportObjectConfig(mos_id))
        # ccc = iter(ReportObjectCaps(mos_id))
        if "adm_path" in columns_filter:
            ad_path = ReportAdPath()
            r[-1].extend([_("ADM_PATH1"), _("ADM_PATH1"), _("ADM_PATH1")])
        if "interface_type_count" in columns_filter:
            r[-1].extend(type_columns)
        if "object_caps" in columns_filter:
            object_caps = ReportObjectCaps(mos_id)
            caps_columns = list(six.itervalues(object_caps.ATTRS))
            ccc = iter(object_caps)
            r[-1].extend(caps_columns)
        if "object_tags" in columns_filter:
            r[-1].extend([_("OBJECT_TAGS")])
        if "sorted_tags" in columns_filter:
            tags = set()
            for s in (ManagedObject.objects.filter().exclude(
                    tags=None).values_list("tags", flat=True).distinct()):
                tags.update(set(s))
            tags_o = sorted([t for t in tags if "{" not in t])
            r[-1].extend(tags_o)
        if "discovery_problem" in columns.split(","):
            discovery_result = ReportDiscoveryResult(mos_id)
            discovery_result.safe_output = True
            discovery_result.unknown_value = ([""] *
                                              len(discovery_result.ATTRS), )
            dp_columns = discovery_result.ATTRS
            dp = iter(discovery_result)
            r[-1].extend(dp_columns)
        for (
                mo_id,
                name,
                address,
                is_managed,
                sa_profile,
                o_profile,
                auth_profile,
                ad,
                m_segment,
                vendor,
                platform,
                version,
                tags,
        ) in (mos.values_list(
                "id",
                "name",
                "address",
                "is_managed",
                "profile",
                "object_profile__name",
                "auth_profile__name",
                "administrative_domain__name",
                "segment",
                "vendor",
                "platform",
                "version",
                "tags",
        ).order_by("id").iterator()):
            if (mos_filter and mo_id not in mos_filter) or not mos_id:
                continue
            if container_serials:
                mo_serials = next(container_serials)
            else:
                mo_serials = [{}]
            if container_lookup:
                mo_continer = next(container_lookup)
            else:
                mo_continer = ("", )
            if roa:
                serial, hw_ver, boot_prom, patch = next(roa)[0]  # noqa
            else:
                serial, hw_ver, boot_prom, patch = "", "", "", ""  # noqa
            r.append(
                translate_row(
                    row([
                        mo_id,
                        name,
                        address,
                        next(hn)[0],
                        "managed" if is_managed else "unmanaged",
                        Profile.get_by_id(sa_profile),
                        o_profile,
                        Vendor.get_by_id(vendor) if vendor else "",
                        Platform.get_by_id(platform) if platform else "",
                        hw_ver,
                        Firmware.get_by_id(version) if version else "",
                        boot_prom,
                        # Serial
                        mo_serials[0].get("serial", "") or serial,
                        patch or "",
                        auth_profile,
                        _("Yes") if avail.get(mo_id, None) else _("No"),
                        ad,
                        mo_continer[0],
                        NetworkSegment.get_by_id(m_segment)
                        if m_segment else "",
                        next(iface_count)[0],
                        next(link_count)[0],
                        next(rc)[0],
                    ]),
                    cmap,
                ))
            if "adm_path" in columns_filter:
                r[-1].extend([ad] + list(ad_path[ad]))
            if "interface_type_count" in columns_filter:
                r[-1].extend(next(iss)[0])
            if "object_caps" in columns_filter:
                r[-1].extend(next(ccc)[0])
            if "object_tags" in columns_filter:
                r[-1].append(",".join(tags if tags else []))
            if "sorted_tags" in columns_filter:
                out_tags = [""] * len(tags_o)
                try:
                    if tags:
                        for m in tags:
                            out_tags[tags_o.index(m)] = m
                except ValueError:
                    logger.warning("Bad value for tag: %s", m)
                r[-1].extend(out_tags)
            if "discovery_problem" in columns_filter:
                r[-1].extend(next(dp)[0])
        filename = "mo_detail_report_%s" % datetime.datetime.now().strftime(
            "%Y%m%d")
        if o_format == "csv":
            response = HttpResponse(content_type="text/csv")
            response[
                "Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
            writer = csv.writer(response,
                                dialect="excel",
                                delimiter=";",
                                quotechar='"')
            writer.writerows(r)
            return response
        elif o_format == "xlsx":
            response = StringIO()
            wb = xlsxwriter.Workbook(response)
            cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
            ws = wb.add_worksheet("Objects")
            max_column_data_length = {}
            for rn, x in enumerate(r):
                for cn, c in enumerate(x):
                    if rn and (r[0][cn] not in max_column_data_length or
                               len(str(c)) > max_column_data_length[r[0][cn]]):
                        max_column_data_length[r[0][cn]] = len(str(c))
                    ws.write(rn, cn, c, cf1)
            # for
            ws.autofilter(0, 0, rn, cn)
            ws.freeze_panes(1, 0)
            for cn, c in enumerate(r[0]):
                # Set column width
                width = get_column_width(c)
                if enable_autowidth and width < max_column_data_length[c]:
                    width = max_column_data_length[c]
                ws.set_column(cn, cn, width=width)
            wb.close()
            response.seek(0)
            response = HttpResponse(response.getvalue(),
                                    content_type="application/vnd.ms-excel")
            # response = HttpResponse(
            #     content_type="application/x-ms-excel")
            response[
                "Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
            response.close()
            return response
Example #9
    def api_report(
        self,
        request,
        from_date,
        to_date,
        o_format,
        min_duration=0,
        max_duration=0,
        min_objects=0,
        min_subscribers=0,
        segment=None,
        administrative_domain=None,
        selector=None,
        ex_selector=None,
        columns=None,
        source="both",
        alarm_class=None,
        subscribers=None,
        enable_autowidth=False,
    ):
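        # Alarm detail report: query archived and/or active alarms from
        # MongoDB for the given period and filters, resolve object, path
        # and class names, and export the rows as CSV or XLSX.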
        def row(row, container_path, segment_path):
            def qe(v):
                if v is None:
                    return ""
                if isinstance(v, unicode):
                    return v.encode("utf-8")
                elif isinstance(v, datetime.datetime):
                    return v.strftime("%Y-%m-%d %H:%M:%S")
                elif not isinstance(v, str):
                    return str(v)
                else:
                    return v

            r = [qe(x) for x in row]
            if len(container_path) < self.CONTAINER_PATH_DEPTH:
                container_path += [""] * (self.CONTAINER_PATH_DEPTH -
                                          len(container_path))
            else:
                container_path = container_path[:self.CONTAINER_PATH_DEPTH]
            if len(segment_path) < self.SEGMENT_PATH_DEPTH:
                segment_path += [""] * (self.SEGMENT_PATH_DEPTH -
                                        len(segment_path))
            else:
                segment_path = segment_path[:self.SEGMENT_PATH_DEPTH]
            return r + container_path + segment_path

        def translate_row(row, cmap):
            return [row[i] for i in cmap]

        cols = ([
            "id",
            "root_id",
            "from_ts",
            "to_ts",
            "duration_sec",
            "object_name",
            "object_address",
            "object_hostname",
            "object_profile",
            "object_admdomain",
            "object_platform",
            "object_version",
            "alarm_class",
            "alarm_subject",
            "maintenance",
            "objects",
            "subscribers",
            "tt",
            "escalation_ts",
            "location",
            "container_address",
        ] + ["container_%d" % i for i in range(self.CONTAINER_PATH_DEPTH)] +
                ["segment_%d" % i for i in range(self.SEGMENT_PATH_DEPTH)])

        header_row = (
            [
                "ID",
                _("ROOT_ID"),
                _("FROM_TS"),
                _("TO_TS"),
                _("DURATION_SEC"),
                _("OBJECT_NAME"),
                _("OBJECT_ADDRESS"),
                _("OBJECT_HOSTNAME"),
                _("OBJECT_PROFILE"),
                _("OBJECT_ADMDOMAIN"),
                _("OBJECT_PLATFORM"),
                _("OBJECT_VERSION"),
                _("ALARM_CLASS"),
                _("ALARM_SUBJECT"),
                _("MAINTENANCE"),
                _("OBJECTS"),
                _("SUBSCRIBERS"),
                _("TT"),
                _("ESCALATION_TS"),
                _("LOCATION"),
                _("CONTAINER_ADDRESS"),
            ] + ["CONTAINER_%d" % i
                 for i in range(self.CONTAINER_PATH_DEPTH)] +
            ["SEGMENT_%d" % i for i in range(self.SEGMENT_PATH_DEPTH)])

        if columns:
            cmap = []
            for c in columns.split(","):
                try:
                    cmap += [cols.index(c)]
                except ValueError:
                    continue
        else:
            cmap = list(range(len(cols)))
        subscribers_profile = self.default_subscribers_profile
        if subscribers:
            subscribers_profile = set(
                SubscriberProfile.objects.filter(
                    id__in=subscribers.split(",")).scalar("id"))
        r = [translate_row(header_row, cmap)]
        fd = datetime.datetime.strptime(
            to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
        match = {
            "timestamp": {
                "$gte": datetime.datetime.strptime(from_date, "%d.%m.%Y"),
                "$lte": fd
            }
        }

        match_duration = {"duration": {"$gte": min_duration}}
        if max_duration:
            match_duration = {
                "duration": {
                    "$gte": min_duration,
                    "$lte": max_duration
                }
            }
        mos = ManagedObject.objects.filter(is_managed=True)

        if segment:
            try:
                match["segment_path"] = bson.ObjectId(segment)
            except bson.errors.InvalidId:
                pass

        ads = []
        if administrative_domain:
            if administrative_domain.isdigit():
                administrative_domain = [int(administrative_domain)]
                ads = AdministrativeDomain.get_nested_ids(
                    administrative_domain[0])

        if not request.user.is_superuser:
            user_ads = UserAccess.get_domains(request.user)
            if administrative_domain and ads:
                if administrative_domain[0] not in user_ads:
                    ads = list(set(ads) & set(user_ads))
                else:
                    ads = administrative_domain
            else:
                ads = user_ads
        if ads:
            mos = mos.filter(administrative_domain__in=ads)
        if selector:
            selector = ManagedObjectSelector.get_by_id(int(selector))
            mos = mos.filter(selector.Q)
        if ex_selector:
            ex_selector = ManagedObjectSelector.get_by_id(int(ex_selector))
            mos = mos.exclude(ex_selector.Q)

        # Working if Administrative domain set
        if ads:
            try:
                match["adm_path"] = {"$in": ads}
                # @todo More than 2 level hierarchy
            except bson.errors.InvalidId:
                pass

        mos_id = list(mos.order_by("id").values_list("id", flat=True))
        mo_hostname = {}
        maintenance = []
        if mos_id and (selector or ex_selector):
            match["managed_object"] = {"$in": mos_id}
        if "maintenance" in columns.split(","):
            maintenance = Maintenance.currently_affected()
        if "object_hostname" in columns.split(","):
            mo_hostname = ReportObjectsHostname1(sync_ids=mos_id)
            mo_hostname = mo_hostname.get_dictionary()
        moss = ReportAlarmObjects(mos_id).get_all()
        # container_lookup = ReportContainer(mos_id)
        container_lookup = None
        subject = "alarm_subject" in columns
        loc = AlarmApplication([])
        if source in ["archive", "both"]:
            # Archived Alarms
            for a in (ArchivedAlarm._get_collection().with_options(
                    read_preference=ReadPreference.SECONDARY_PREFERRED
            ).aggregate([
                {
                    "$match": match
                },
                {
                    "$addFields": {
                        "duration": {
                            "$divide": [
                                {
                                    "$subtract":
                                    ["$clear_timestamp", "$timestamp"]
                                },
                                1000,
                            ]
                        }
                    }
                },
                {
                    "$match": match_duration
                },
                    # {"$sort": {"timestamp": 1}}
            ])):
                if int(a["managed_object"]) not in moss:
                    continue
                dt = a["clear_timestamp"] - a["timestamp"]
                duration = int(dt.total_seconds())
                total_objects = sum(ss["summary"] for ss in a["total_objects"])
                if min_objects and total_objects < min_objects:
                    continue
                total_subscribers = sum(
                    ss["summary"] for ss in a["total_subscribers"]
                    if subscribers_profile
                    and ss["profile"] in subscribers_profile)
                if min_subscribers and total_subscribers < min_subscribers:
                    continue
                if "segment_" in columns.split(
                        ",") or "container_" in columns.split(","):
                    path = ObjectPath.get_path(a["managed_object"])
                    if path:
                        segment_path = [
                            NetworkSegment.get_by_id(s).name
                            for s in path.segment_path
                            if NetworkSegment.get_by_id(s)
                        ]
                        container_path = [
                            Object.get_by_id(s).name
                            for s in path.container_path if Object.get_by_id(s)
                        ]
                    else:
                        segment_path = []
                        container_path = []
                else:
                    segment_path = []
                    container_path = []
                r += [
                    translate_row(
                        row(
                            [
                                str(a["_id"]),
                                str(a["root"]) if a.get("root") else "",
                                a["timestamp"],
                                a["clear_timestamp"],
                                str(duration),
                                moss[a["managed_object"]][0],
                                moss[a["managed_object"]][1],
                                mo_hostname.get(a["managed_object"], ""),
                                Profile.get_by_id(
                                    moss[a["managed_object"]][3]).name
                                if moss[a["managed_object"]][5] else "",
                                moss[a["managed_object"]][6],
                                Platform.get_by_id(
                                    moss[a["managed_object"]][9])
                                if moss[a["managed_object"]][9] else "",
                                Firmware.get_by_id(
                                    moss[a["managed_object"]][10])
                                if moss[a["managed_object"]][10] else "",
                                AlarmClass.get_by_id(a["alarm_class"]).name,
                                ArchivedAlarm.objects.get(
                                    id=a["_id"]).subject if subject else "",
                                "",
                                total_objects,
                                total_subscribers,
                                a.get("escalation_tt"),
                                a.get("escalation_ts"),
                                ", ".join(l for l in (
                                    loc.location(moss[a["managed_object"]][5]
                                                 ) if moss[a["managed_object"]]
                                    [5] is not None else "") if l),
                                container_lookup[a["managed_object"]].get(
                                    "text", "") if container_lookup else "",
                            ],
                            container_path,
                            segment_path,
                        ),
                        cmap,
                    )
                ]
        # Active Alarms
        if source in ["active", "both"]:
            for a in (ActiveAlarm._get_collection().with_options(
                    read_preference=ReadPreference.SECONDARY_PREFERRED).
                      aggregate([
                          {
                              "$match": match
                          },
                          {
                              "$addFields": {
                                  "duration": {
                                      "$divide": [{
                                          "$subtract": [fd, "$timestamp"]
                                      }, 1000]
                                  }
                              }
                          },
                          {
                              "$match": match_duration
                          },
                          # {"$sort": {"timestamp": 1}}
                      ])):
                dt = fd - a["timestamp"]
                duration = int(dt.total_seconds())
                total_objects = sum(ss["summary"] for ss in a["total_objects"])
                if min_objects and total_objects < min_objects:
                    continue
                total_subscribers = sum(
                    ss["summary"] for ss in a["total_subscribers"]
                    if subscribers_profile
                    and ss["profile"] in subscribers_profile)
                if min_subscribers and total_subscribers < min_subscribers:
                    continue
                if "segment_" in columns.split(
                        ",") or "container_" in columns.split(","):
                    path = ObjectPath.get_path(a["managed_object"])
                    if path:
                        segment_path = [
                            NetworkSegment.get_by_id(s).name
                            for s in path.segment_path
                            if NetworkSegment.get_by_id(s)
                        ]
                        container_path = [
                            Object.get_by_id(s).name
                            for s in path.container_path if Object.get_by_id(s)
                        ]
                    else:
                        segment_path = []
                        container_path = []
                else:
                    segment_path = []
                    container_path = []
                r += [
                    translate_row(
                        row(
                            [
                                str(a["_id"]),
                                str(a["root"]) if a.get("root") else "",
                                a["timestamp"],
                                # a["clear_timestamp"],
                                "",
                                str(duration),
                                moss[a["managed_object"]][0],
                                moss[a["managed_object"]][1],
                                mo_hostname.get(a["managed_object"], ""),
                                Profile.get_by_id(moss[a["managed_object"]][3])
                                if moss[a["managed_object"]][5] else "",
                                moss[a["managed_object"]][6],
                                Platform.get_by_id(
                                    moss[a["managed_object"]][9])
                                if moss[a["managed_object"]][9] else "",
                                Firmware.get_by_id(
                                    moss[a["managed_object"]][10])
                                if moss[a["managed_object"]][10] else "",
                                AlarmClass.get_by_id(a["alarm_class"]).name,
                                ActiveAlarm.objects.get(
                                    id=a["_id"]).subject if subject else None,
                                "Yes" if a["managed_object"] in maintenance
                                else "No",
                                total_objects,
                                total_subscribers,
                                a.get("escalation_tt"),
                                a.get("escalation_ts"),
                                ", ".join(l for l in (
                                    loc.location(moss[a["managed_object"]][5]
                                                 ) if moss[a["managed_object"]]
                                    [5] is not None else "") if l),
                                container_lookup[a["managed_object"]].get(
                                    "text", "") if container_lookup else "",
                            ],
                            container_path,
                            segment_path,
                        ),
                        cmap,
                    )
                ]

        if o_format == "csv":
            response = HttpResponse(content_type="text/csv")
            response[
                "Content-Disposition"] = 'attachment; filename="alarms.csv"'
            writer = csv.writer(response)
            writer.writerows(r)
            return response
        elif o_format == "xlsx":
            response = StringIO()
            wb = xlsxwriter.Workbook(response)
            cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
            ws = wb.add_worksheet("Alarms")
            max_column_data_length = {}
            for rn, x in enumerate(r):
                for cn, c in enumerate(x):
                    if rn and (r[0][cn] not in max_column_data_length or
                               len(str(c)) > max_column_data_length[r[0][cn]]):
                        max_column_data_length[r[0][cn]] = len(str(c))
                    ws.write(rn, cn, c, cf1)
            ws.autofilter(0, 0, rn, cn)
            ws.freeze_panes(1, 0)
            for cn, c in enumerate(r[0]):
                # Set column width
                width = get_column_width(c)
                if enable_autowidth and width < max_column_data_length[c]:
                    width = max_column_data_length[c]
                ws.set_column(cn, cn, width=width)
            wb.close()
            response.seek(0)
            response = HttpResponse(response.getvalue(),
                                    content_type="application/vnd.ms-excel")
            response[
                "Content-Disposition"] = 'attachment; filename="alarms.xlsx"'
            response.close()
            return response
Example #10
File: views.py Project: gabrielat/noc
    def get_data(self,
                 request,
                 interval=1,
                 from_date=None,
                 to_date=None,
                 skip_avail=False,
                 skip_zero_avail=False,
                 filter_zero_access=False,
                 **kwargs):
        """
        a1 = self.get_availability(1)
        a7 = self.get_availability(7)
        a30 = self.get_availability(30)
        """

        if not from_date:
            from_date = datetime.datetime.now() - datetime.timedelta(
                days=interval)
        else:
            from_date = datetime.datetime.strptime(from_date, "%d.%m.%Y")

        if not to_date or from_date == to_date:
            to_date = from_date + datetime.timedelta(days=1)
        else:
            to_date = datetime.datetime.strptime(
                to_date, "%d.%m.%Y") + datetime.timedelta(days=1)

        a = self.get_availability(start_date=from_date,
                                  stop_date=to_date,
                                  skip_zero_avail=skip_zero_avail)
        rb = self.get_reboots(start_date=from_date, stop_date=to_date)
        r = [SectionRow("Report from %s to %s" % (from_date, to_date))]
        mos = ManagedObject.objects.filter(is_managed=True)

        if not request.user.is_superuser:
            mos = mos.filter(
                administrative_domain__in=UserAccess.get_domains(request.user))
        if skip_avail:
            mos = mos.filter(id__in=list(a))
        mos_id = list(mos.order_by("id").values_list("id", flat=True))
        if filter_zero_access:
            iface_p = InterfaceProfile.objects.get(name="Клиентский порт")
            match = {"profile": iface_p.id, "managed_object": {"$in": mos_id}}
            pipeline = [
                {
                    "$match": match
                },
                {
                    "$group": {
                        "_id": "$managed_object",
                        "count": {
                            "$sum": 1
                        },
                        "m": {
                            "$max": "$oper_status"
                        },
                    }
                },
                {
                    "$match": {
                        "m": False
                    }
                },
                {
                    "$project": {
                        "_id": True
                    }
                },
            ]
            # data = Interface.objects._get_collection().aggregate(pipeline,
            data = (get_db()["noc.interfaces"].with_options(
                read_preference=ReadPreference.SECONDARY_PREFERRED).aggregate(
                    pipeline))
            data = [d["_id"] for d in data]
            mos = mos.exclude(id__in=data)

        mo_hostname = ReportObjectsHostname1(sync_ids=mos_id)
        mo_hostname = mo_hostname.get_dictionary()
        for mo_id, mo_name, address, profile, ad_name in mos.values_list(
                "id", "name", "address", "profile",
                "administrative_domain__name"):
            s = [
                ad_name,
                mo_name,
                mo_hostname.get(mo_id, ""),
                address,
                Profile.get_by_id(profile).name,
                round(a.get(mo_id, (100.0, 0, 0))[0], 2),
            ]
            s.extend(a.get(mo_id, (100.0, 0, 0))[1:])
            s.append(rb[mo_id] if mo_id in rb else 0)
            r += [s]
            """
            a1.get(o.id, 100),
            a7.get(o.id, 100),
            a30.get(o.id, 100)
            """
        # print r
        return self.from_dataset(
            title=self.title,
            columns=[
                _("Adm. Domain"),
                _("Managed Object"),
                _("Hostname"),
                _("Address"),
                _("Profile"),
                # TableColumn(_("Avail"), align="right", format="percent"),
                # TableColumn(_("Total avail (sec)"), align="right", format="numeric"),
                _("Avail"),
                _("Total unavail (sec)"),
                _("Count outages"),
                _("Reboots"),
            ],
            data=r,
            enumerate=True,
        )
Example #11
File: views.py Project: gabrielat/noc
    def get_data(self, request, pool=None, obj_profile=None, **kwargs):
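        # Object problem report: flag managed objects that still use the
        # generic profile or that have no interfaces, no links or no uplinks.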
        problems = {}  # id -> problem

        mos = ManagedObject.objects.filter(is_managed=True, pool=pool)
        if not request.user.is_superuser:
            mos = mos.filter(administrative_domain__in=UserAccess.get_domains(request.user))
        if obj_profile:
            # Get all managed objects
            mos = mos.filter(object_profile=obj_profile)
        mos = {
            mo[0]: (mo[1], mo[2], Profile.get_by_id(mo[3]), mo[4], mo[5])
            for mo in mos.values_list("id", "name", "address", "profile", "platform", "segment")
        }
        mos_set = set(mos)
        # Get all managed objects with generic profile
        for mo in mos:
            if mos[mo][2] == GENERIC_PROFILE:
                problems[mo] = _("Profile check failed")
        # Get all managed objects without interfaces
        if_mo = dict(
            (x["_id"], x.get("managed_object"))
            for x in Interface._get_collection().find({}, {"_id": 1, "managed_object": 1})
        )
        for mo in mos_set - set(problems) - set(six.itervalues(if_mo)):
            problems[mo] = _("No interfaces")
        # Get all managed objects without links
        linked_mos = set()
        for d in Link._get_collection().find({}):
            for i in d["interfaces"]:
                linked_mos.add(if_mo.get(i))
        for mo in mos_set - set(problems) - linked_mos:
            problems[mo] = _("No links")
        # Get all managed objects without uplinks
        uplinks = {}
        for d in ObjectData._get_collection().find():
            nu = len(d.get("uplinks", []))
            if nu:
                uplinks[d["_id"]] = nu
        for mo in mos_set - set(problems) - set(uplinks):
            problems[mo] = _("No uplinks")
        #
        data = []
        for mo_id in problems:
            if mo_id not in mos:
                continue
            name, address, profile, platform, segment = mos[mo_id]
            data += [
                [
                    name,
                    address,
                    profile.name,
                    Platform.get_by_id(platform).name if platform else "",
                    NetworkSegment.get_by_id(segment).name if segment else "",
                    problems[mo_id],
                ]
            ]
        data = sorted(data)
        return self.from_dataset(
            title=self.title,
            columns=["Name", "Address", "Profile", "Platform", "Segment", "Problem"],
            data=data,
            enumerate=True,
        )
Example #12
File: views.py Project: nbashev/noc
    def api_report(
        self,
        request,
        from_date,
        to_date,
        o_format,
        min_duration=0,
        max_duration=0,
        min_objects=0,
        min_subscribers=0,
        segment=None,
        administrative_domain=None,
        selector=None,
        ex_selector=None,
        columns=None,
        source="both",
        alarm_class=None,
        subscribers=None,
        enable_autowidth=False,
    ):
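        # Alarm detail report (a close variant of Example #9): filter alarms
        # by period, duration and scope, resolve object names and export.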
        def row(row, container_path, segment_path):
            def qe(v):
                if v is None:
                    return ""
                if isinstance(v, str):
                    return smart_text(v)
                elif isinstance(v, datetime.datetime):
                    return v.strftime("%Y-%m-%d %H:%M:%S")
                elif not isinstance(v, str):
                    return smart_text(v)
                else:
                    return v

            r = [qe(x) for x in row]
            if len(container_path) < self.CONTAINER_PATH_DEPTH:
                container_path += [""] * (self.CONTAINER_PATH_DEPTH -
                                          len(container_path))
            else:
                container_path = container_path[:self.CONTAINER_PATH_DEPTH]
            if len(segment_path) < self.SEGMENT_PATH_DEPTH:
                segment_path += [""] * (self.SEGMENT_PATH_DEPTH -
                                        len(segment_path))
            else:
                segment_path = segment_path[:self.SEGMENT_PATH_DEPTH]
            return r + container_path + segment_path

        def translate_row(row, cmap):
            return [row[i] for i in cmap]

        cols = ([
            "id",
            "root_id",
            "from_ts",
            "to_ts",
            "duration_sec",
            "object_name",
            "object_address",
            "object_hostname",
            "object_profile",
            "object_admdomain",
            "object_platform",
            "object_version",
            "alarm_class",
            "alarm_subject",
            "maintenance",
            "objects",
            "subscribers",
            "tt",
            "escalation_ts",
            "location",
            "container_address",
        ] + ["container_%d" % i for i in range(self.CONTAINER_PATH_DEPTH)] +
                ["segment_%d" % i for i in range(self.SEGMENT_PATH_DEPTH)])

        header_row = (
            [
                "ID",
                _("ROOT_ID"),
                _("FROM_TS"),
                _("TO_TS"),
                _("DURATION_SEC"),
                _("OBJECT_NAME"),
                _("OBJECT_ADDRESS"),
                _("OBJECT_HOSTNAME"),
                _("OBJECT_PROFILE"),
                _("OBJECT_ADMDOMAIN"),
                _("OBJECT_PLATFORM"),
                _("OBJECT_VERSION"),
                _("ALARM_CLASS"),
                _("ALARM_SUBJECT"),
                _("MAINTENANCE"),
                _("OBJECTS"),
                _("SUBSCRIBERS"),
                _("TT"),
                _("ESCALATION_TS"),
                _("LOCATION"),
                _("CONTAINER_ADDRESS"),
            ] + ["CONTAINER_%d" % i
                 for i in range(self.CONTAINER_PATH_DEPTH)] +
            ["SEGMENT_%d" % i for i in range(self.SEGMENT_PATH_DEPTH)])

        if columns:
            cmap = []
            for c in columns.split(","):
                try:
                    cmap += [cols.index(c)]
                except ValueError:
                    continue
        else:
            cmap = list(range(len(cols)))
        subscribers_profile = self.default_subscribers_profile
        if subscribers:
            subscribers_profile = set(
                SubscriberProfile.objects.filter(
                    id__in=subscribers.split(",")).scalar("id"))
        r = [translate_row(header_row, cmap)]
        fd = datetime.datetime.strptime(
            to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
        match = {
            "timestamp": {
                "$gte": datetime.datetime.strptime(from_date, "%d.%m.%Y"),
                "$lte": fd
            }
        }
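        # Alarms are matched on "timestamp" from from_date up to the day after
        # to_date (fd), so the last day is included in full.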

        match_duration = {"duration": {"$gte": min_duration}}
        if max_duration:
            match_duration = {
                "duration": {
                    "$gte": min_duration,
                    "$lte": max_duration
                }
            }
        mos = ManagedObject.objects.filter(is_managed=True)

        if segment:
            try:
                match["segment_path"] = bson.ObjectId(segment)
            except bson.errors.InvalidId:
                pass

        ads = []
        if administrative_domain:
            if administrative_domain.isdigit():
                administrative_domain = [int(administrative_domain)]
                ads = AdministrativeDomain.get_nested_ids(
                    administrative_domain[0])

        if not request.user.is_superuser:
            user_ads = UserAccess.get_domains(request.user)
            if administrative_domain and ads:
                if administrative_domain[0] not in user_ads:
                    ads = list(set(ads) & set(user_ads))
                    if not ads:
                        return HttpResponse(
                            "<html><body>Permission denied: Invalid Administrative Domain</html></body>"
                        )
            else:
                ads = user_ads
        if ads:
            mos = mos.filter(administrative_domain__in=ads)
        if selector:
            selector = ManagedObjectSelector.get_by_id(int(selector))
            mos = mos.filter(selector.Q)
        if ex_selector:
            ex_selector = ManagedObjectSelector.get_by_id(int(ex_selector))
            mos = mos.exclude(ex_selector.Q)
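        # The queryset narrowed above is converted to an id list below; when a
        # selector is used, that list also constrains the Mongo "$match" stage.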

        # Applied only if an administrative domain filter is set
        if ads:
            try:
                match["adm_path"] = {"$in": ads}
                # @todo: Support more than a 2-level hierarchy
            except bson.errors.InvalidId:
                pass

        mos_id = list(mos.order_by("id").values_list("id", flat=True))
        mo_hostname = {}
        maintenance = []
        if mos_id and (selector or ex_selector):
            match["managed_object"] = {"$in": mos_id}
        if "maintenance" in columns.split(","):
            maintenance = Maintenance.currently_affected()
        if "object_hostname" in columns.split(","):
            mo_hostname = ReportObjectsHostname1(sync_ids=mos_id)
            mo_hostname = mo_hostname.get_dictionary()
        moss = ReportAlarmObjects(mos_id).get_all()
        # container_lookup = ReportContainer(mos_id)
        container_lookup = None
        subject = "alarm_subject" in columns
        loc = AlarmApplication([])
        if source in ["archive", "both"]:
            # Archived Alarms
            for a in (ArchivedAlarm._get_collection().with_options(
                    read_preference=ReadPreference.SECONDARY_PREFERRED
            ).aggregate([
                {
                    "$match": match
                },
                {
                    "$addFields": {
                        "duration": {
                            "$divide": [
                                {
                                    "$subtract":
                                    ["$clear_timestamp", "$timestamp"]
                                },
                                1000,
                            ]
                        }
                    }
                },
                {
                    "$match": match_duration
                },
                    # {"$sort": {"timestamp": 1}}
            ])):
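                # The pipeline above computes "duration" in seconds from
                # clear_timestamp - timestamp; rows are skipped if the object is
                # out of scope or below the object/subscriber thresholds.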
                if int(a["managed_object"]) not in moss:
                    continue
                dt = a["clear_timestamp"] - a["timestamp"]
                duration = int(dt.total_seconds())
                total_objects = sum(ss["summary"] for ss in a["total_objects"])
                if min_objects and total_objects < min_objects:
                    continue
                total_subscribers = sum(
                    ss["summary"] for ss in a["total_subscribers"]
                    if subscribers_profile
                    and ss["profile"] in subscribers_profile)
                if min_subscribers and total_subscribers < min_subscribers:
                    continue
                if "segment_" in columns.split(
                        ",") or "container_" in columns.split(","):
                    path = ObjectPath.get_path(a["managed_object"])
                    if path:
                        segment_path = [
                            NetworkSegment.get_by_id(s).name
                            for s in path.segment_path
                            if NetworkSegment.get_by_id(s)
                        ]
                        container_path = [
                            Object.get_by_id(s).name
                            for s in path.container_path if Object.get_by_id(s)
                        ]
                    else:
                        segment_path = []
                        container_path = []
                else:
                    segment_path = []
                    container_path = []
                r += [
                    translate_row(
                        row(
                            [
                                smart_text(a["_id"]),
                                smart_text(a["root"]) if a.get("root") else "",
                                a["timestamp"],
                                a["clear_timestamp"],
                                smart_text(duration),
                                moss[a["managed_object"]][0],
                                moss[a["managed_object"]][1],
                                smart_text(
                                    mo_hostname.get(a["managed_object"], "")),
                                Profile.get_by_id(
                                    moss[a["managed_object"]][3]).name
                                if moss[a["managed_object"]][5] else "",
                                moss[a["managed_object"]][6],
                                Platform.get_by_id(
                                    moss[a["managed_object"]][9])
                                if moss[a["managed_object"]][9] else "",
                                smart_text(
                                    Firmware.get_by_id(
                                        moss[a["managed_object"]][10]).version)
                                if moss[a["managed_object"]][10] else "",
                                AlarmClass.get_by_id(a["alarm_class"]).name,
                                ArchivedAlarm.objects.get(
                                    id=a["_id"]).subject if subject else "",
                                "",
                                total_objects,
                                total_subscribers,
                                a.get("escalation_tt"),
                                a.get("escalation_ts"),
                                ", ".join(ll for ll in (
                                    loc.location(moss[a["managed_object"]][5]
                                                 ) if moss[a["managed_object"]]
                                    [5] is not None else "") if ll),
                                container_lookup[a["managed_object"]].get(
                                    "text", "") if container_lookup else "",
                            ],
                            container_path,
                            segment_path,
                        ),
                        cmap,
                    )
                ]
        # Active Alarms
        if source in ["active", "both"]:
            datenow = datetime.datetime.now()
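            # Active alarms have no clear_timestamp: the pipeline measures
            # duration against the report end (fd), while the emitted row uses
            # the time elapsed up to now.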
            for a in (ActiveAlarm._get_collection().with_options(
                    read_preference=ReadPreference.SECONDARY_PREFERRED).
                      aggregate([
                          {
                              "$match": match
                          },
                          {
                              "$addFields": {
                                  "duration": {
                                      "$divide": [{
                                          "$subtract": [fd, "$timestamp"]
                                      }, 1000]
                                  }
                              }
                          },
                          {
                              "$match": match_duration
                          },
                          # {"$sort": {"timestamp": 1}}
                      ])):
                dt = datenow - a["timestamp"]
                duration = int(dt.total_seconds())
                total_objects = sum(ss["summary"] for ss in a["total_objects"])
                if min_objects and total_objects < min_objects:
                    continue
                total_subscribers = sum(
                    ss["summary"] for ss in a["total_subscribers"]
                    if subscribers_profile
                    and ss["profile"] in subscribers_profile)
                if min_subscribers and total_subscribers < min_subscribers:
                    continue
                if "segment_" in columns.split(
                        ",") or "container_" in columns.split(","):
                    path = ObjectPath.get_path(a["managed_object"])
                    if path:
                        segment_path = [
                            NetworkSegment.get_by_id(s).name
                            for s in path.segment_path
                            if NetworkSegment.get_by_id(s)
                        ]
                        container_path = [
                            Object.get_by_id(s).name
                            for s in path.container_path if Object.get_by_id(s)
                        ]
                    else:
                        segment_path = []
                        container_path = []
                else:
                    segment_path = []
                    container_path = []
                r += [
                    translate_row(
                        row(
                            [
                                smart_text(a["_id"]),
                                smart_text(a["root"]) if a.get("root") else "",
                                a["timestamp"],
                                # a["clear_timestamp"],
                                "",
                                smart_text(duration),
                                moss[a["managed_object"]][0],
                                moss[a["managed_object"]][1],
                                smart_text(
                                    mo_hostname.get(a["managed_object"], "")),
                                Profile.get_by_id(moss[a["managed_object"]][3])
                                if moss[a["managed_object"]][5] else "",
                                moss[a["managed_object"]][6],
                                Platform.get_by_id(
                                    moss[a["managed_object"]][9])
                                if moss[a["managed_object"]][9] else "",
                                smart_text(
                                    Firmware.get_by_id(
                                        moss[a["managed_object"]][10]).version)
                                if moss[a["managed_object"]][10] else "",
                                AlarmClass.get_by_id(a["alarm_class"]).name,
                                ActiveAlarm.objects.get(
                                    id=a["_id"]).subject if subject else None,
                                "Yes" if a["managed_object"] in maintenance
                                else "No",
                                total_objects,
                                total_subscribers,
                                a.get("escalation_tt"),
                                a.get("escalation_ts"),
                                ", ".join(ll for ll in (
                                    loc.location(moss[a["managed_object"]][5]
                                                 ) if moss[a["managed_object"]]
                                    [5] is not None else "") if ll),
                                container_lookup[a["managed_object"]].get(
                                    "text", "") if container_lookup else "",
                            ],
                            container_path,
                            segment_path,
                        ),
                        cmap,
                    )
                ]
        if source in ["long_archive"]:
            o_format = "csv_zip"
            columns = [
                "ALARM_ID",
                "MO_ID",
                "OBJECT_PROFILE",
                "VENDOR",
                "PLATFORM",
                "VERSION",
                "OPEN_TIMESTAMP",
                "CLOSE_TIMESTAMP",
                "LOCATION",
                "",
                "POOL",
                "ADM_DOMAIN",
                "MO_NAME",
                "IP",
                "ESCALATION_TT",
                "DURATION",
                "SEVERITY",
                "REBOOTS",
            ]
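            # "long_archive" bypasses the Mongo collections: it forces zipped CSV
            # output and reads pre-aggregated "Ping Failed" alarms from ClickHouse
            # via LONG_ARCHIVE_QUERY, with the report window limited to 390 days.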
            from noc.core.clickhouse.connect import connection

            ch = connection()
            fd = datetime.datetime.strptime(from_date, "%d.%m.%Y")
            td = datetime.datetime.strptime(
                to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
            if td - fd > datetime.timedelta(days=390):
                return HttpResponseBadRequest(
                    _("Report more than 1 year not allowed. If nedeed - request it from Administrator"
                      ))
            ac = AlarmClass.objects.get(
                name="NOC | Managed Object | Ping Failed")
            subs = ", ".join(
                "subscribers.summary[indexOf(subscribers.profile, '%s')] as `%s`"
                % (sp.bi_id, sp.name)
                for sp in SubscriberProfile.objects.filter().order_by("name"))
            if subs:
                columns += [
                    sp.name for sp in
                    SubscriberProfile.objects.filter().order_by("name")
                ]
            r = ch.execute(LONG_ARCHIVE_QUERY % (
                ", %s" % subs if subs else "",
                fd.date().isoformat(),
                td.date().isoformat(),
                ac.bi_id,
            ))

        filename = "alarms.csv"
        if o_format == "csv":
            response = HttpResponse(content_type="text/csv")
            response[
                "Content-Disposition"] = 'attachment; filename="%s"' % filename
            writer = csv.writer(response)
            writer.writerows(r)
            return response
        elif o_format == "csv_zip":
            response = BytesIO()
            f = TextIOWrapper(TemporaryFile(mode="w+b"), encoding="utf-8")
            writer = csv.writer(f,
                                dialect="excel",
                                delimiter=";",
                                quotechar='"')
            writer.writerow(columns)
            writer.writerows(r)
            f.seek(0)
            with ZipFile(response, "w", compression=ZIP_DEFLATED) as zf:
                zf.writestr(filename, f.read())
                zf.filename = "%s.zip" % filename
            # response = HttpResponse(content_type="text/csv")
            response.seek(0)
            response = HttpResponse(response.getvalue(),
                                    content_type="application/zip")
            response[
                "Content-Disposition"] = 'attachment; filename="%s.zip"' % filename
            return response
        elif o_format == "xlsx":
            response = BytesIO()
            wb = xlsxwriter.Workbook(response)
            cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
            ws = wb.add_worksheet("Alarms")
            max_column_data_length = {}
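            # Track the widest cell seen per header so columns can be auto-sized
            # when enable_autowidth is set.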
            for rn, x in enumerate(r):
                for cn, c in enumerate(x):
                    if rn and (r[0][cn] not in max_column_data_length or
                               len(str(c)) > max_column_data_length[r[0][cn]]):
                        max_column_data_length[r[0][cn]] = len(str(c))
                    ws.write(rn, cn, c, cf1)
            ws.autofilter(0, 0, rn, cn)
            ws.freeze_panes(1, 0)
            for cn, c in enumerate(r[0]):
                # Set column width
                width = get_column_width(c)
                if enable_autowidth and width < max_column_data_length[c]:
                    width = max_column_data_length[c]
                ws.set_column(cn, cn, width=width)
            wb.close()
            response.seek(0)
            response = HttpResponse(response.getvalue(),
                                    content_type="application/vnd.ms-excel")
            response[
                "Content-Disposition"] = 'attachment; filename="alarms.xlsx"'
            response.close()
            return response
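
For reference, a minimal, self-contained sketch of the column-selection scheme used by api_report above: cols names every exportable field, the caller passes a comma-separated subset, and cmap/translate_row reorder each assembled row. The sample values are illustrative only:

cols = ["id", "object_name", "duration_sec", "alarm_class"]
requested = "object_name,alarm_class"

cmap = []
for c in requested.split(","):
    try:
        cmap.append(cols.index(c))
    except ValueError:
        continue

def translate_row(row, cmap):
    return [row[i] for i in cmap]

full_row = ["60ddc0ffee", "leaf-sw-01", "420", "NOC | Managed Object | Ping Failed"]
print(translate_row(full_row, cmap))  # ['leaf-sw-01', 'NOC | Managed Object | Ping Failed']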