def extract(self):
    containers = ReportContainerData(
        sorted(
            set(
                ManagedObject.objects.all()
                .order_by("container")
                .values_list("container", flat=True)
            )
        )
    )
    containers = containers.get_dictionary()
    for (
        mo_id,
        bi_id,
        name,
        address,
        profile,
        platform,
        version,
        remote_id,
        remote_system,
        adm_id,
        adm_name,
        container,
    ) in (
        ManagedObject.objects.all()
        .order_by("id")
        .values_list(
            "id",
            "bi_id",
            "name",
            "address",
            "profile",
            "platform",
            "version",
            "remote_id",
            "remote_system",
            "administrative_domain",
            "administrative_domain__name",
            "container",
        )
    ):
        yield (
            bi_id,
            mo_id,
            name,
            address,
            Profile.get_by_id(profile).name if profile else "",
            Platform.get_by_id(platform).name if platform else "",
            Firmware.get_by_id(version).version if version else "",
            remote_id if remote_id else "",
            RemoteSystem.get_by_id(remote_system).name if remote_system else "",
            adm_id,
            adm_name,
            containers.get(container, ("",))[0] if container else "",
        )
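

# Illustrative sketch (not part of the extractor): the generator above yields one
# flat tuple per ManagedObject with dictionary references already resolved to
# display names. All values below are hypothetical.
def _example_extract_row():
    return (
        1234567890123456789,  # bi_id
        42,                   # mo_id
        "sw-core-01",         # name
        "10.0.0.1",           # address
        "Cisco.IOS",          # profile name, "" when unset
        "C2960",              # platform name, "" when unset
        "12.2(55)SE",         # firmware version, "" when unset
        "",                   # remote_id, "" when unset
        "",                   # remote system name, "" when unset
        1,                    # administrative domain id
        "default",            # administrative domain name
        "",                   # container address, "" when unset
    )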
def get_object_data(self, object_id):
    """
    Worker to resolve credentials
    """
    object_id = int(object_id)
    # Get Object's attributes
    with self.service.get_pg_connect() as connection:
        cursor = connection.cursor()
        cursor.execute(self.RUN_SQL, [object_id, object_id])
        data = cursor.fetchall()
    if not data:
        metrics["error", ("type", "object_not_found")] += 1
        raise APIError("Object is not found")
    # Build capabilities
    capabilities = ObjectCapabilities.get_capabilities(object_id)
    # Get object credentials
    (
        name,
        is_managed,
        profile,
        vendor,
        platform,
        version,
        scheme,
        address,
        port,
        user,
        password,
        super_password,
        remote_path,
        snmp_ro,
        pool_id,
        sw_image,
        auth_profile_id,
        ap_user,
        ap_password,
        ap_super_password,
        ap_snmp_ro,
        ap_snmp_rw,
        privilege_policy,
        snmp_rate_limit,
        p_privilege_policy,
        p_snmp_rate_limit,
        access_preference,
        p_access_preference,
        beef_storage_id,
        beef_path_template_id,
        attrs,
    ) = data[0]
    # Check object is managed
    if not is_managed:
        metrics["error", ("type", "object_not_managed")] += 1
        raise APIError("Object is not managed")
    if auth_profile_id:
        user = ap_user
        password = ap_password
        super_password = ap_super_password
        snmp_ro = ap_snmp_ro
        snmp_rw = ap_snmp_rw  # noqa
    #
    if privilege_policy == "E":
        raise_privileges = True
    elif privilege_policy == "P":
        raise_privileges = p_privilege_policy == "E"
    else:
        raise_privileges = False
    if access_preference == "P":
        access_preference = p_access_preference
    if not snmp_rate_limit:
        snmp_rate_limit = p_snmp_rate_limit
    # Build credentials
    credentials = {
        "name": name,
        "address": address,
        "user": user,
        "password": password,
        "super_password": super_password,
        "path": remote_path,
        "raise_privileges": raise_privileges,
        "access_preference": access_preference,
        "snmp_rate_limit": snmp_rate_limit,
    }
    if snmp_ro:
        credentials["snmp_ro"] = snmp_ro
        if capabilities.get("SNMP | v2c"):
            credentials["snmp_version"] = "v2c"
        elif capabilities.get("SNMP | v1"):
            credentials["snmp_version"] = "v1"
    if scheme in CLI_PROTOCOLS:
        credentials["cli_protocol"] = PROTOCOLS[scheme]
        if port:
            credentials["cli_port"] = port
    elif scheme in HTTP_PROTOCOLS:
        credentials["http_protocol"] = PROTOCOLS[scheme]
        if port:
            credentials["http_port"] = port
    # Build version
    if vendor and platform and version:
        vendor = Vendor.get_by_id(vendor)
        version = {
            "vendor": vendor.code[0] if vendor.code else vendor.name,
            "platform": Platform.get_by_id(platform).name,
            "version": Firmware.get_by_id(version).version,
        }
        if sw_image:
            version["image"] = sw_image
        if attrs:
            version["attributes"] = attrs
    else:
        version = None
    # Beef processing
    if scheme == BEEF and beef_storage_id and beef_path_template_id:
        mo = ManagedObject.get_by_id(object_id)
        tpl = Template.get_by_id(beef_path_template_id)
        beef_path = tpl.render_subject(object=mo)
        if beef_path:
            storage = ExtStorage.get_by_id(beef_storage_id)
            credentials["beef_storage_url"] = storage.url
            credentials["beef_path"] = beef_path
    return dict(
        profile=Profile.get_by_id(profile).name,
        pool_id=pool_id,
        credentials=credentials,
        capabilities=capabilities,
        version=version,
    )
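

# Illustrative sketch: the shape of the structure returned by get_object_data()
# for a hypothetical device reachable over SSH with SNMP v2c. Field values are
# invented; the keys mirror the code above.
EXAMPLE_OBJECT_DATA = {
    "profile": "Cisco.IOS",
    "pool_id": 1,
    "credentials": {
        "name": "sw-access-17",
        "address": "10.0.1.17",
        "user": "admin",
        "password": "secret",
        "super_password": "enable-secret",
        "path": None,
        "raise_privileges": True,   # privilege_policy == "E"
        "access_preference": "SC",  # resolved from profile when set to "P"
        "snmp_rate_limit": 10,
        "snmp_ro": "public",
        "snmp_version": "v2c",      # only set when the capability is present
        "cli_protocol": "ssh",      # only set when scheme is a CLI protocol
        "cli_port": 22,             # only set when a port is configured
    },
    "capabilities": {"SNMP | v2c": True},
    "version": {
        "vendor": "CISCO",
        "platform": "C2960",
        "version": "12.2(55)SE",
    },
}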
def api_report(
    self,
    request,
    o_format,
    is_managed=None,
    administrative_domain=None,
    selector=None,
    pool=None,
    segment=None,
    avail_status=False,
    columns=None,
    ids=None,
    enable_autowidth=False,
):
    def row(row):
        def qe(v):
            if v is None:
                return ""
            if isinstance(v, str):
                return smart_text(v)
            elif isinstance(v, datetime.datetime):
                return v.strftime("%Y-%m-%d %H:%M:%S")
            elif not isinstance(v, str):
                return str(v)
            else:
                return v

        return [qe(x) for x in row]

    def translate_row(row, cmap):
        return [row[i] for i in cmap]

    type_columns = ["Up/10G", "Up/1G", "Up/100M", "Down/-", "-"]
    cols = [
        "object1_admin_domain",
        # "id",
        "object1_name",
        "object1_address",
        "object1_platform",
        "object1_segment",
        "object1_tags",
        "object1_iface",
        "object1_descr",
        "object1_speed",
        "object2_admin_domain",
        "object2_name",
        "object2_address",
        "object2_platform",
        "object2_segment",
        "object2_tags",
        "object2_iface",
        "object2_descr",
        "object2_speed",
        "link_proto",
        "last_seen",
    ]
    header_row = [
        "OBJECT1_ADMIN_DOMAIN",
        "OBJECT1_NAME",
        "OBJECT1_ADDRESS",
        "OBJECT1_PLATFORM",
        "OBJECT1_SEGMENT",
        "OBJECT1_TAGS",
        "OBJECT1_IFACE",
        "OBJECT1_DESCR",
        "OBJECT1_SPEED",
        "OBJECT2_ADMIN_DOMAIN",
        "OBJECT2_NAME",
        "OBJECT2_ADDRESS",
        "OBJECT2_PLATFORM",
        "OBJECT2_SEGMENT",
        "OBJECT2_TAGS",
        "OBJECT2_IFACE",
        "OBJECT2_DESCR",
        "OBJECT2_SPEED",
        "LINK_PROTO",
        "LAST_SEEN",
    ]
    if columns:
        cmap = []
        for c in columns.split(","):
            try:
                cmap += [cols.index(c)]
            except ValueError:
                continue
    else:
        cmap = list(range(len(cols)))
    r = [translate_row(header_row, cmap)]
    if "interface_type_count" in columns.split(","):
        r[-1].extend(type_columns)
    # self.logger.info(r)
    # self.logger.info("---------------------------------")
    # print("-----------%s------------%s" % (administrative_domain, columns))
    p = Pool.get_by_name(pool or "default")
    mos = ManagedObject.objects.filter()
    if request.user.is_superuser and not administrative_domain and not selector and not segment:
        mos = ManagedObject.objects.filter(pool=p)
    if ids:
        mos = ManagedObject.objects.filter(id__in=[ids])
    if is_managed is not None:
        mos = ManagedObject.objects.filter(is_managed=is_managed)
    if pool:
        mos = mos.filter(pool=p)
    if not request.user.is_superuser:
        mos = mos.filter(administrative_domain__in=UserAccess.get_domains(request.user))
    if administrative_domain:
        ads = AdministrativeDomain.get_nested_ids(int(administrative_domain))
        mos = mos.filter(administrative_domain__in=ads)
    if selector:
        selector = ManagedObjectSelector.get_by_id(int(selector))
        mos = mos.filter(selector.Q)
    if segment:
        segment = NetworkSegment.objects.filter(id=segment).first()
        if segment:
            mos = mos.filter(segment__in=segment.get_nested_ids())
    mos_id = list(mos.values_list("id", flat=True))
    rld = ReportLinksDetail(mos_id)
    mo_resolv = {
        mo[0]: mo[1:]
        for mo in ManagedObject.objects.filter().values_list(
            "id",
            "administrative_domain__name",
            "name",
            "address",
            "segment",
            "platform",
            "labels",
        )
    }
    for link in rld.out:
        if len(rld.out[link]) != 2:
            # Multilink or bad link
            continue
        s1, s2 = rld.out[link]
        seg1, seg2 = None, None
        if "object1_segment" in columns.split(",") or "object2_segment" in columns.split(","):
            seg1, seg2 = mo_resolv[s1["mo"][0]][3], mo_resolv[s2["mo"][0]][3]
        plat1, plat2 = None, None
        if "object1_platform" in columns.split(",") or "object2_platform" in columns.split(","):
            plat1, plat2 = mo_resolv[s1["mo"][0]][4], mo_resolv[s2["mo"][0]][4]
        r += [
            translate_row(
                row(
                    [
                        mo_resolv[s1["mo"][0]][0],
                        mo_resolv[s1["mo"][0]][1],
                        mo_resolv[s1["mo"][0]][2],
                        "" if not plat1 else Platform.get_by_id(plat1),
                        "" if not seg1 else NetworkSegment.get_by_id(seg1),
                        ";".join(mo_resolv[s1["mo"][0]][5] or []),
                        s1["iface_n"][0],
                        s1.get("iface_descr")[0] if s1.get("iface_descr") else "",
                        s1.get("iface_speed")[0] if s1.get("iface_speed") else 0,
                        mo_resolv[s2["mo"][0]][0],
                        mo_resolv[s2["mo"][0]][1],
                        mo_resolv[s2["mo"][0]][2],
                        "" if not plat2 else Platform.get_by_id(plat2),
                        "" if not seg2 else NetworkSegment.get_by_id(seg2),
                        ";".join(mo_resolv[s2["mo"][0]][5] or []),
                        s2["iface_n"][0],
                        s2.get("iface_descr")[0] if s2.get("iface_descr") else "",
                        s2.get("iface_speed")[0] if s2.get("iface_speed") else 0,
                        s2.get("dis_method", ""),
                        s2.get("last_seen", ""),
                    ]
                ),
                cmap,
            )
        ]
    filename = "links_detail_report_%s" % datetime.datetime.now().strftime("%Y%m%d")
    if o_format == "csv":
        response = HttpResponse(content_type="text/csv")
        response["Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
        writer = csv.writer(response, dialect="excel", delimiter=",", quoting=csv.QUOTE_MINIMAL)
        writer.writerows(r)
        return response
    elif o_format == "csv_zip":
        response = BytesIO()
        f = TextIOWrapper(TemporaryFile(mode="w+b"), encoding="utf-8")
        writer = csv.writer(f, dialect="excel", delimiter=";", quotechar='"')
        writer.writerows(r)
        f.seek(0)
        with ZipFile(response, "w", compression=ZIP_DEFLATED) as zf:
            zf.writestr("%s.csv" % filename, f.read())
            zf.filename = "%s.csv.zip" % filename
        # response = HttpResponse(content_type="text/csv")
        response.seek(0)
        response = HttpResponse(response.getvalue(), content_type="application/zip")
        response["Content-Disposition"] = 'attachment; filename="%s.csv.zip"' % filename
        return response
    elif o_format == "xlsx":
        response = BytesIO()
        wb = xlsxwriter.Workbook(response)
        cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
        ws = wb.add_worksheet("Objects")
        max_column_data_length = {}
        for rn, x in enumerate(r):
            for cn, c in enumerate(x):
                if rn and (
                    r[0][cn] not in max_column_data_length
                    or len(str(c)) > max_column_data_length[r[0][cn]]
                ):
                    max_column_data_length[r[0][cn]] = len(str(c))
                ws.write(rn, cn, c, cf1)
        ws.autofilter(0, 0, rn, cn)
        ws.freeze_panes(1, 0)
        for cn, c in enumerate(r[0]):
            # Set column width
            width = get_column_width(c)
            if enable_autowidth and width < max_column_data_length[c]:
                width = max_column_data_length[c]
            ws.set_column(cn, cn, width=width)
        wb.close()
        response.seek(0)
        response = HttpResponse(response.getvalue(), content_type="application/vnd.ms-excel")
        # response = HttpResponse(content_type="application/x-ms-excel")
        response["Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
        response.close()
        return response
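

# Minimal sketch of the column-selection mechanism shared by these reports:
# requested column names are mapped to indices into the canonical row (cmap),
# and translate_row() projects each row through it, preserving the requested
# order. Names below are invented.
def _demo_translate_row():
    cols = ["object1_name", "object1_address", "object2_name"]
    columns = "object2_name,object1_name"  # user-requested order
    cmap = [cols.index(c) for c in columns.split(",") if c in cols]
    row = ["sw-a", "10.0.0.1", "sw-b"]
    return [row[i] for i in cmap]  # -> ["sw-b", "sw-a"]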
def handler(self):
    self.logger.info("Checking version")
    result = self.object.scripts.get_version()
    changed = False
    # Sync vendor
    vendor = Vendor.ensure_vendor(result["vendor"])
    if not self.object.vendor or vendor.id != self.object.vendor.id:
        if self.object.vendor:
            self.logger.info("Vendor changed: %s -> %s", self.object.vendor.name, vendor.name)
        else:
            self.logger.info("Set vendor: %s", vendor.name)
        self.object.vendor = vendor
        changed = True
    # Sync platform
    strict_platform = self.object.object_profile.new_platform_creation_policy != "C"
    platform = Platform.ensure_platform(vendor, result["platform"], strict=strict_platform)
    if strict_platform and platform is None:
        # Denied to create platform, stop
        if self.object.object_profile.new_platform_creation_policy == "A":
            self.set_problem(
                alarm_class="NOC | Managed Object | New Platform",
                message="New platform (%s: %s) creation is denied by policy"
                % (vendor, result["platform"]),
                fatal=True,
            )
        else:
            self.job.set_fatal_error()
        return
    if not self.object.platform or platform.id != self.object.platform.id:
        if self.object.platform:
            self.logger.info(
                "Platform changed: %s -> %s", self.object.platform.name, platform.name
            )
        else:
            self.logger.info("Set platform: %s", platform.name)
        self.object.platform = platform
        changed = True
        # Platform changed, clear links
        if self.object.object_profile.clear_links_on_platform_change:
            self.clear_links()
        # Invalidate neighbor cache
        self.invalidate_neighbor_cache()
    # Sync version
    version = Firmware.ensure_firmware(self.object.profile, vendor, result["version"])
    if not self.object.version or version.id != self.object.version.id:
        if self.object.version:
            self.logger.info(
                "Version changed: %s -> %s", self.object.version.version, version.version
            )
        else:
            self.logger.info("Set version: %s", version.version)
        self.object.version = version
        changed = True
        # @todo: Check next_version and report upgrade
    # Sync image
    image = result.get("image", "") or None
    if image != self.object.software_image:
        if image:
            image = image.strip()[:255]  # Cut to field length
        if not image:
            image = None
        if self.object.version:
            self.logger.info("Image changed: %s -> %s", self.object.software_image, image)
        else:
            self.logger.info("Set image: %s", image)
        self.object.software_image = image
        changed = True
    # Sync attributes
    if "attributes" in result:
        self.object.update_attributes(result["attributes"])
    #
    if changed:
        self.object.save()
    #
    dfp = self.object.get_denied_firmware_policy()
    if dfp != "I":
        firmware_status = FirmwarePolicy.get_status(platform, version)
        if firmware_status == FS_DENIED:
            self.logger.info("Firmware version is denied by policy")
            if dfp in ("A", "S"):
                self.set_problem(
                    alarm_class="NOC | Managed Object | Denied Firmware",
                    message="Firmware version is denied to use by policy",
                    fatal=dfp == "S",
                )
            elif dfp == "s":
                self.job.set_fatal_error()
            if dfp in ("s", "S"):
                self.logger.error("Further box discovery is stopped by policy")
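

# Sketch of the denied-firmware policy decision implied by the branches above;
# the single-letter codes and their effects are read off the handler ("I"
# ignore, "A" raise alarm, "S" raise fatal alarm and stop, "s" stop silently).
# The helper itself is illustrative, not part of the discovery job.
def _denied_firmware_action(dfp, firmware_denied):
    """Return the action implied by a denied-firmware policy code, or None."""
    if not firmware_denied or dfp == "I":
        return None  # nothing to do
    return {
        "A": "raise alarm",
        "S": "raise fatal alarm and stop box discovery",
        "s": "set fatal error and stop box discovery silently",
    }.get(dfp)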
def api_report(
    self,
    request,
    reporttype=None,
    from_date=None,
    to_date=None,
    object_profile=None,
    filter_default=None,
    exclude_zero=None,
    interface_profile=None,
    selector=None,
    administrative_domain=None,
    columns=None,
    o_format=None,
    enable_autowidth=False,
    **kwargs
):
    def translate_row(row, cmap):
        return [row[i] for i in cmap]

    # Raw strings: these are dashboard name patterns, not Python escapes
    map_table = {
        "load_interfaces": r"/Interface\s\|\sLoad\s\|\s[In|Out]/",
        "load_cpu": r"/[CPU|Memory]\s\|\sUsage/",
        "errors": r"/Interface\s\|\s[Errors|Discards]\s\|\s[In|Out]/",
        "ping": r"/Ping\s\|\sRTT/",
    }
    cols = [
        "id",
        "object_name",
        "object_address",
        "object_platform",
        "object_adm_domain",
        "object_segment",
        # "object_hostname",
        # "object_status",
        # "profile_name",
        # "object_profile",
        # "object_vendor",
        "iface_name",
        "iface_description",
        "iface_speed",
        "load_in",
        "load_in_p",
        "load_out",
        "load_out_p",
        "errors_in",
        "errors_out",
        "slot",
        "cpu_usage",
        "memory_usage",
        "ping_rtt",
        "ping_attempts",
        "interface_flap",
        "interface_load_url",
    ]
    header_row = [
        "ID",
        "OBJECT_NAME",
        "OBJECT_ADDRESS",
        "OBJECT_PLATFORM",
        "OBJECT_ADM_DOMAIN",
        "OBJECT_SEGMENT",
        "IFACE_NAME",
        "IFACE_DESCRIPTION",
        "IFACE_SPEED",
        "LOAD_IN",
        "LOAD_IN_P",
        "LOAD_OUT",
        "LOAD_OUT_P",
        "ERRORS_IN",
        "ERRORS_OUT",
        "CPU_USAGE",
        "MEMORY_USAGE",
        "PING_RTT",
        "PING_ATTEMPTS",
        "INTERFACE_FLAP",
        "INTERFACE_LOAD_URL",
    ]
    if columns:
        cmap = []
        for c in columns.split(","):
            try:
                cmap += [cols.index(c)]
            except ValueError:
                continue
    else:
        cmap = list(range(len(cols)))
    columns_order = columns.split(",")
    columns_filter = set(columns_order)
    r = [translate_row(header_row, cmap)]
    object_columns = [c for c in columns_order if c.startswith("object")]
    # Date Time Block
    if not from_date:
        from_date = datetime.datetime.now() - datetime.timedelta(days=1)
    else:
        from_date = datetime.datetime.strptime(from_date, "%d.%m.%Y")
    if not to_date or from_date == to_date:
        to_date = from_date + datetime.timedelta(days=1)
    else:
        to_date = datetime.datetime.strptime(to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
    # interval = (to_date - from_date).days
    ts_from_date = time.mktime(from_date.timetuple())
    ts_to_date = time.mktime(to_date.timetuple())
    # Load managed objects
    mos = ManagedObject.objects.filter(is_managed=True)
    if not request.user.is_superuser:
        mos = mos.filter(administrative_domain__in=UserAccess.get_domains(request.user))
    if selector:
        mos = mos.filter(ManagedObjectSelector.objects.get(id=int(selector)).Q)
    if administrative_domain:
        mos = mos.filter(
            administrative_domain__in=AdministrativeDomain.get_nested_ids(
                int(administrative_domain)
            )
        )
    if object_profile:
        mos = mos.filter(object_profile=object_profile)
    # iface_dict = {}
    d_url = {
        "path": "/ui/grafana/dashboard/script/report.js",
        "rname": map_table[reporttype],
        "from": str(int(ts_from_date * 1000)),
        "to": str(int(ts_to_date * 1000)),
        # o.name.replace("#", "%23")
        "biid": "",
        "oname": "",
        "iname": "",
    }
    report_map = {
        "load_interfaces": {
            "url": "%(path)s?title=interface&biid=%(biid)s"
            "&obj=%(oname)s&iface=%(iname)s&from=%(from)s&to=%(to)s",
            "q_group": ["interface"],
            "q_select": {
                (0, "managed_object", "id"): "managed_object",
                (1, "path", "iface_name"): "arrayStringConcat(path)",
            },
        },
        "errors": {
            "url": "%(path)s?title=errors&biid=%(biid)s&obj=%(oname)s"
            "&iface=%(iname)s&from=%(from)s&to=%(to)s",
            "q_group": ["interface"],
        },
        "load_cpu": {
            "url": "%(path)s?title=cpu&biid=%(biid)s&obj=%(oname)s&from=%(from)s&to=%(to)s",
            "q_select": {
                (0, "managed_object", "id"): "managed_object",
                (1, "path", "slot"): "arrayStringConcat(path)",
            },
        },
        "ping": {
            "url": "%(path)s?title=ping&biid=%(biid)s&obj=%(oname)s&from=%(from)s&to=%(to)s",
            "q_select": {(0, "managed_object", "id"): "managed_object"},
        },
    }
    query_map = {
        # "iface_description": ('', 'iface_description', "''"),
        "iface_description": (
            "",
            "iface_description",
            "dictGetString('interfaceattributes','description' , "
            "(managed_object, arrayStringConcat(path)))",
        ),
        "iface_speed": (
            "speed",
            "iface_speed",
            "if(max(speed) = 0, dictGetUInt64('interfaceattributes', 'in_speed', "
            "(managed_object, arrayStringConcat(path))), max(speed))",
        ),
        "load_in": ("load_in", "l_in", "round(quantile(0.90)(load_in), 0)"),
        "load_in_p": (
            "load_in",
            "l_in_p",
            "replaceOne(toString(round(quantile(0.90)(load_in) / "
            "if(max(speed) = 0, dictGetUInt64('interfaceattributes', 'in_speed', "
            "(managed_object, arrayStringConcat(path))), max(speed)), 4) * 100), '.', ',')",
        ),
        "load_out": ("load_out", "l_out", "round(quantile(0.90)(load_out), 0)"),
        "load_out_p": (
            "load_out",
            "l_out_p",
            "replaceOne(toString(round(quantile(0.90)(load_out) / "
            "if(max(speed) = 0, dictGetUInt64('interfaceattributes', 'in_speed', "
            "(managed_object, arrayStringConcat(path))), max(speed)), 4) * 100), '.', ',')",
        ),
        "errors_in": ("errors_in", "err_in", "quantile(0.90)(errors_in)"),
        "errors_out": ("errors_out", "err_out", "quantile(0.90)(errors_out)"),
        "cpu_usage": ("usage", "cpu_usage", "quantile(0.90)(usage)"),
        "ping_rtt": ("rtt", "ping_rtt", "round(quantile(0.90)(rtt) / 1000, 2)"),
        "ping_attempts": ("attempts", "ping_attempts", "avg(attempts)"),
    }
    query_fields = []
    for c in report_map[reporttype]["q_select"]:
        query_fields += [c[2]]
    field_shift = len(query_fields)  # deny replacing field
    for c in columns.split(","):
        if c not in query_map:
            continue
        field, alias, func = query_map[c]
        report_map[reporttype]["q_select"][
            (columns_order.index(c) + field_shift, field, alias)
        ] = func
        query_fields += [c]
    metrics_attrs = namedtuple("METRICSATTRs", query_fields)
    mo_attrs = namedtuple("MOATTRs", [c for c in cols if c.startswith("object")])
    moss = {}
    for row in mos.values_list(
        "bi_id", "name", "address", "platform", "administrative_domain__name", "segment"
    ):
        moss[row[0]] = mo_attrs(
            *[
                row[1],
                row[2],
                str(Platform.get_by_id(row[3]) if row[3] else ""),
                row[4],
                str(NetworkSegment.get_by_id(row[5])) if row[5] else "",
            ]
        )
    url = report_map[reporttype].get("url", "")
    report_metric = self.metric_source[reporttype](
        tuple(sorted(moss)), from_date, to_date, columns=None
    )
    report_metric.SELECT_QUERY_MAP = report_map[reporttype]["q_select"]
    if exclude_zero and reporttype == "load_interfaces":
        report_metric.CUSTOM_FILTER["having"] += ["max(load_in) != 0 AND max(load_out) != 0"]
    if interface_profile:
        interface_profile = InterfaceProfile.objects.filter(id=interface_profile).first()
        report_metric.CUSTOM_FILTER["having"] += [
            "dictGetString('interfaceattributes', 'profile', "
            "(managed_object, arrayStringConcat(path))) = '%s'" % interface_profile.name
        ]
    # OBJECT_PLATFORM, ADMIN_DOMAIN, SEGMENT, OBJECT_HOSTNAME
    for row in report_metric.do_query():
        mm = metrics_attrs(*row)
        mo = moss[int(mm.id)]
        res = []
        for y in columns_order:
            if y in object_columns:
                res += [getattr(mo, y)]
            else:
                res += [getattr(mm, y)]
        if "interface_load_url" in columns_filter:
            d_url["biid"] = mm.id
            d_url["oname"] = mo[2].replace("#", "%23")
            # res += [url % d_url, interval]
            res.insert(columns_order.index("interface_load_url"), url % d_url)
        r += [res]
    filename = "metrics_detail_report_%s" % datetime.datetime.now().strftime("%Y%m%d")
    if o_format == "csv":
        response = HttpResponse(content_type="text/csv")
        response["Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
        writer = csv.writer(response, dialect="excel", delimiter=",", quoting=csv.QUOTE_MINIMAL)
        writer.writerows(r)
        return response
    elif o_format == "xlsx":
        response = BytesIO()  # xlsxwriter emits binary data, so a bytes buffer is needed
        wb = xlsxwriter.Workbook(response)
        cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
        ws = wb.add_worksheet("Alarms")
        max_column_data_length = {}
        for rn, x in enumerate(r):
            for cn, c in enumerate(x):
                if rn and (
                    r[0][cn] not in max_column_data_length
                    or len(str(c)) > max_column_data_length[r[0][cn]]
                ):
                    max_column_data_length[r[0][cn]] = len(str(c))
                ws.write(rn, cn, c, cf1)
        ws.autofilter(0, 0, rn, cn)
        ws.freeze_panes(1, 0)
        for cn, c in enumerate(r[0]):
            # Set column width
            width = get_column_width(c)
            if enable_autowidth and width < max_column_data_length[c]:
                width = max_column_data_length[c]
            ws.set_column(cn, cn, width=width)
        wb.close()
        response.seek(0)
        response = HttpResponse(response.getvalue(), content_type="application/vnd.ms-excel")
        response["Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
        response.close()
        return response
def api_report(
    self,
    request,
    o_format,
    is_managed=None,
    administrative_domain=None,
    selector=None,
    pool=None,
    segment=None,
    avail_status=False,
    columns=None,
    ids=None,
    detail_stat=None,
    enable_autowidth=False,
):
    def row(row):
        def qe(v):
            if v is None:
                return ""
            if isinstance(v, str):
                return smart_text(v)  # was: isinstance(v, unicode) / v.encode("utf-8") (Python 2)
            elif isinstance(v, datetime.datetime):
                return v.strftime("%Y-%m-%d %H:%M:%S")
            else:
                return str(v)

        return [qe(x) for x in row]

    def translate_row(row, cmap):
        return [row[i] for i in cmap]

    type_columns = ["Up/10G", "Up/1G", "Up/100M", "Up/10M", "Down/-", "-"]
    cols = [
        "id",
        "object_name",
        "object_address",
        "object_hostname",
        "object_status",
        "profile_name",
        "object_profile",
        "object_vendor",
        "object_platform",
        "object_attr_hwversion",
        "object_version",
        "object_attr_bootprom",
        "object_serial",
        "object_attr_patch",
        "auth_profile",
        "avail",
        "admin_domain",
        "container",
        "segment",
        "phys_interface_count",
        "link_count",
        "last_config_ts",
        # "discovery_problem"
        # "object_tags"
        # "sorted_tags"
        # "object_caps"
        # "interface_type_count"
    ]
    header_row = [
        "ID",
        "OBJECT_NAME",
        "OBJECT_ADDRESS",
        "OBJECT_HOSTNAME",
        "OBJECT_STATUS",
        "PROFILE_NAME",
        "OBJECT_PROFILE",
        "OBJECT_VENDOR",
        "OBJECT_PLATFORM",
        "OBJECT_HWVERSION",
        "OBJECT_VERSION",
        "OBJECT_BOOTPROM",
        "OBJECT_SERIAL",
        "OBJECT_ATTR_PATCH",
        "AUTH_PROFILE",
        "AVAIL",
        "ADMIN_DOMAIN",
        "CONTAINER",
        "SEGMENT",
        "PHYS_INTERFACE_COUNT",
        "LINK_COUNT",
        "LAST_CONFIG_TS",
    ]
    # "DISCOVERY_PROBLEM"
    # "ADM_PATH"
    # "OBJECT_TAGS"
    # "SORTED_TAGS"
    # "OBJECT_CAPS"
    # "INTERFACE_TYPE_COUNT"
    if columns:
        cmap = []
        for c in columns.split(","):
            try:
                cmap += [cols.index(c)]
            except ValueError:
                continue
    else:
        cmap = list(range(len(cols)))
    r = [translate_row(header_row, cmap)]
    mos = self.get_report_object(
        request.user, is_managed, administrative_domain, selector, pool, segment, ids
    )
    columns_filter = set(columns.split(","))
    mos_id = tuple(mos.order_by("id").values_list("id", flat=True))
    mos_filter = None
    if detail_stat:
        ref = ReportModelFilter()
        ids = list(six.itervalues(ref.proccessed(detail_stat)))
        mos_filter = set(mos_id).intersection(ids[0])
        mos_id = sorted(mos_filter)
    avail = {}
    if "avail" in columns_filter:
        avail = ObjectStatus.get_statuses(mos_id)
    link_count = iter(ReportObjectLinkCount(mos_id))
    iface_count = iter(ReportObjectIfacesTypeStat(mos_id))
    if "container" in columns_filter:
        container_lookup = iter(ReportContainerData(mos_id))
    else:
        container_lookup = None
    if "object_serial" in columns_filter:
        container_serials = iter(ReportContainer(mos_id))
    else:
        container_serials = None
    if "interface_type_count" in columns_filter:
        iss = iter(ReportObjectIfacesStatusStat(mos_id))
    else:
        iss = None
    if "object_attr_patch" in columns_filter or "object_serial" in columns_filter:
        roa = iter(ReportObjectAttributes(mos_id))
    else:
        roa = None
    hn = iter(ReportObjectsHostname1(mos_id))
    rc = iter(ReportObjectConfig(mos_id))
    # ccc = iter(ReportObjectCaps(mos_id))
    if "adm_path" in columns_filter:
        ad_path = ReportAdPath()
        r[-1].extend([_("ADM_PATH1"), _("ADM_PATH1"), _("ADM_PATH1")])
    if "interface_type_count" in columns_filter:
        r[-1].extend(type_columns)
    if "object_caps" in columns_filter:
        object_caps = ReportObjectCaps(mos_id)
        caps_columns = list(six.itervalues(object_caps.ATTRS))
        ccc = iter(object_caps)
        r[-1].extend(caps_columns)
    if "object_tags" in columns_filter:
        r[-1].extend([_("OBJECT_TAGS")])
    if "sorted_tags" in columns_filter:
        tags = set()
        for s in (
            ManagedObject.objects.filter()
            .exclude(tags=None)
            .values_list("tags", flat=True)
            .distinct()
        ):
            tags.update(set(s))
        tags_o = sorted([t for t in tags if "{" not in t])
        r[-1].extend(tags_o)
    if "discovery_problem" in columns.split(","):
        discovery_result = ReportDiscoveryResult(mos_id)
        discovery_result.safe_output = True
        discovery_result.unknown_value = ([""] * len(discovery_result.ATTRS),)
        dp_columns = discovery_result.ATTRS
        dp = iter(discovery_result)
        r[-1].extend(dp_columns)
    for (
        mo_id,
        name,
        address,
        is_managed,
        sa_profile,
        o_profile,
        auth_profile,
        ad,
        m_segment,
        vendor,
        platform,
        version,
        tags,
    ) in (
        mos.values_list(
            "id",
            "name",
            "address",
            "is_managed",
            "profile",
            "object_profile__name",
            "auth_profile__name",
            "administrative_domain__name",
            "segment",
            "vendor",
            "platform",
            "version",
            "tags",
        )
        .order_by("id")
        .iterator()
    ):
        if (mos_filter and mo_id not in mos_filter) or not mos_id:
            continue
        if container_serials:
            mo_serials = next(container_serials)
        else:
            mo_serials = [{}]
        if container_lookup:
            mo_continer = next(container_lookup)
        else:
            mo_continer = ("",)
        if roa:
            serial, hw_ver, boot_prom, patch = next(roa)[0]  # noqa
        else:
            serial, hw_ver, boot_prom, patch = "", "", "", ""  # noqa
        r.append(
            translate_row(
                row(
                    [
                        mo_id,
                        name,
                        address,
                        next(hn)[0],
                        "managed" if is_managed else "unmanaged",
                        Profile.get_by_id(sa_profile),
                        o_profile,
                        Vendor.get_by_id(vendor) if vendor else "",
                        Platform.get_by_id(platform) if platform else "",
                        hw_ver,
                        Firmware.get_by_id(version) if version else "",
                        boot_prom,
                        # Serial
                        mo_serials[0].get("serial", "") or serial,
                        patch or "",
                        auth_profile,
                        _("Yes") if avail.get(mo_id, None) else _("No"),
                        ad,
                        mo_continer[0],
                        NetworkSegment.get_by_id(m_segment) if m_segment else "",
                        next(iface_count)[0],
                        next(link_count)[0],
                        next(rc)[0],
                    ]
                ),
                cmap,
            )
        )
        if "adm_path" in columns_filter:
            r[-1].extend([ad] + list(ad_path[ad]))
        if "interface_type_count" in columns_filter:
            r[-1].extend(next(iss)[0])
        if "object_caps" in columns_filter:
            r[-1].extend(next(ccc)[0])
        if "object_tags" in columns_filter:
            r[-1].append(",".join(tags if tags else []))
        if "sorted_tags" in columns_filter:
            out_tags = [""] * len(tags_o)
            try:
                if tags:
                    for m in tags:
                        out_tags[tags_o.index(m)] = m
            except ValueError:
                logger.warning("Bad value for tag: %s", m)
            r[-1].extend(out_tags)
        if "discovery_problem" in columns_filter:
            r[-1].extend(next(dp)[0])
    filename = "mo_detail_report_%s" % datetime.datetime.now().strftime("%Y%m%d")
    if o_format == "csv":
        response = HttpResponse(content_type="text/csv")
        response["Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
        writer = csv.writer(response, dialect="excel", delimiter=";", quotechar='"')
        writer.writerows(r)
        return response
    elif o_format == "xlsx":
        response = BytesIO()  # xlsxwriter emits binary data (was StringIO)
        wb = xlsxwriter.Workbook(response)
        cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
        ws = wb.add_worksheet("Objects")
        max_column_data_length = {}
        for rn, x in enumerate(r):
            for cn, c in enumerate(x):
                if rn and (
                    r[0][cn] not in max_column_data_length
                    or len(str(c)) > max_column_data_length[r[0][cn]]
                ):
                    max_column_data_length[r[0][cn]] = len(str(c))
                ws.write(rn, cn, c, cf1)
        ws.autofilter(0, 0, rn, cn)
        ws.freeze_panes(1, 0)
        for cn, c in enumerate(r[0]):
            # Set column width
            width = get_column_width(c)
            if enable_autowidth and width < max_column_data_length[c]:
                width = max_column_data_length[c]
            ws.set_column(cn, cn, width=width)
        wb.close()
        response.seek(0)
        response = HttpResponse(response.getvalue(), content_type="application/vnd.ms-excel")
        # response = HttpResponse(content_type="application/x-ms-excel")
        response["Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
        response.close()
        return response
def get_data(self, request, sel=None):
    qs = ManagedObject.objects
    if not request.user.is_superuser:
        qs = ManagedObject.objects.filter(
            administrative_domain__in=UserAccess.get_domains(request.user)
        )
    # Get all managed objects by selector
    mos_list = qs.filter(sel.Q)
    columns = [
        _("Managed Objects"),
        _("Address"),
        _("Vendor"),
        _("Platform"),
        _("HW Revision"),
        _("SW Version"),
        _("Serial"),
    ]
    data = []
    mos = {
        x["id"]: x
        for x in mos_list.values("id", "name", "address", "vendor", "platform", "version")
    }
    ra = ReportObjectAttributes(sorted(mos))
    ra = ra.get_dictionary()
    objects_serials = {}
    for o in Object._get_collection().aggregate(
        [
            {
                "$match": {
                    "data": {
                        "$elemMatch": {"attr": "managed_object", "value": {"$in": list(mos)}}
                    }
                }
            },
            {
                "$project": {
                    "data": {
                        "$filter": {
                            "input": "$data",
                            "cond": {"$in": ["$$this.attr", ["serial", "managed_object"]]},
                        }
                    }
                }
            },
        ]
    ):
        if len(o["data"]) < 2:
            continue
        if o["data"][0]["attr"] == "serial":
            objects_serials[int(o["data"][1]["value"])] = o["data"][0]["value"]
        else:
            objects_serials[int(o["data"][0]["value"])] = o["data"][1]["value"]
    for mo in mos.values():
        platform = Platform.get_by_id(mo["platform"]) if mo.get("platform") else None
        vendor = Vendor.get_by_id(mo["vendor"]) if mo.get("vendor") else None
        version = Firmware.get_by_id(mo["version"]) if mo.get("version") else None
        sn, hw = ra[mo["id"]][:2] if mo["id"] in ra else (None, None)
        if mo["id"] in objects_serials:
            sn = objects_serials[mo["id"]]
        data += [
            [
                mo["name"],
                mo["address"],
                vendor,
                platform,
                hw,
                version,
                sn,
                None,
            ]
        ]
    return self.from_dataset(title=self.title, columns=columns, data=data, enumerate=True)
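

# Illustrative sketch of the pairing done above: the $project stage keeps only
# the "serial" and "managed_object" data items, but their relative order inside
# o["data"] is not fixed, hence the attr check. Sample documents are invented.
def _demo_serial_pairing():
    docs = [
        {"data": [{"attr": "serial", "value": "SN123"},
                  {"attr": "managed_object", "value": "42"}]},
        {"data": [{"attr": "managed_object", "value": "43"},
                  {"attr": "serial", "value": "SN124"}]},
    ]
    objects_serials = {}
    for o in docs:
        if len(o["data"]) < 2:
            continue
        if o["data"][0]["attr"] == "serial":
            objects_serials[int(o["data"][1]["value"])] = o["data"][0]["value"]
        else:
            objects_serials[int(o["data"][0]["value"])] = o["data"][1]["value"]
    return objects_serials  # -> {42: "SN123", 43: "SN124"}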
def api_report(
    self,
    request,
    reporttype=None,
    from_date=None,
    to_date=None,
    object_profile=None,
    filter_default=None,
    exclude_zero=True,
    interface_profile=None,
    selector=None,
    administrative_domain=None,
    columns=None,
    description=None,
    o_format=None,
    enable_autowidth=False,
    **kwargs
):
    def load(mo_ids):
        # match = {"links.mo": {"$in": mo_ids}}
        match = {"int.managed_object": {"$in": mo_ids}}
        group = {
            "_id": "$_id",
            "links": {
                "$push": {
                    "iface_n": "$int.name",
                    # "iface_id": "$int._id",
                    # "iface_descr": "$int.description",
                    # "iface_speed": "$int.in_speed",
                    # "dis_method": "$discovery_method",
                    # "last_seen": "$last_seen",
                    "mo": "$int.managed_object",
                    "linked_obj": "$linked_objects",
                }
            },
        }
        value = (
            get_db()["noc.links"]
            .with_options(read_preference=ReadPreference.SECONDARY_PREFERRED)
            .aggregate(
                [
                    {"$unwind": "$interfaces"},
                    {
                        "$lookup": {
                            "from": "noc.interfaces",
                            "localField": "interfaces",
                            "foreignField": "_id",
                            "as": "int",
                        }
                    },
                    {"$match": match},
                    {"$group": group},
                ],
                allowDiskUse=True,
            )
        )
        res = defaultdict(dict)
        for v in value:
            if v["_id"]:
                for vv in v["links"]:
                    if len(vv["linked_obj"]) == 2:
                        mo = vv["mo"][0]
                        iface = vv["iface_n"]
                        for i in vv["linked_obj"]:
                            if mo != i:
                                res[mo][i] = iface[0]
        return res

    def translate_row(row, cmap):
        return [row[i] for i in cmap]

    def str_to_float(s):  # renamed parameter: was "str", shadowing the builtin
        return float("{0:.3f}".format(float(s)))

    cols = [
        "object_id",
        "object_name",
        "object_address",
        "object_platform",
        "object_adm_domain",
        "object_segment",
        "object_container",
        # "object_hostname",
        # "object_status",
        # "profile_name",
        # "object_profile",
        # "object_vendor",
        "iface_name",
        "iface_description",
        "iface_speed",
        "max_load_in",
        "max_load_in_time",
        "max_load_out",
        "max_load_out_time",
        "avg_load_in",
        "avg_load_out",
        "total_in",
        "total_out",
        "uplink_iface_name",
        "uplink_iface_description",
        "uplink_iface_speed",
        "uplink_max_load_in",
        "uplink_max_load_in_time",
        "uplink_max_load_out",
        "uplink_max_load_out_time",
        "uplink_avg_load_in",
        "uplink_avg_load_out",
        "uplink_total_in",
        "uplink_total_out",
    ]
    header_row = [
        "ID",
        _("OBJECT_NAME"),
        _("OBJECT_ADDRESS"),
        _("OBJECT_PLATFORM"),
        _("OBJECT_ADMDOMAIN"),
        _("OBJECT_SEGMENT"),
        _("CONTAINER_ADDRESS"),
        _("IFACE_NAME"),
        _("IFACE_DESCRIPTION"),
        _("IFACE_SPEED"),
        _("MAX_LOAD_IN, Mbps"),
        _("MAX_LOAD_IN_TIME"),
        _("MAX_LOAD_OUT, Mbps"),
        _("MAX_LOAD_OUT_TIME"),
        _("AVG_LOAD_IN, Mbps"),
        _("AVG_LOAD_OUT, Mbps"),
        _("TOTAL_IN, Mbyte"),
        _("TOTAL_OUT, Mbyte"),
        _("UPLINK_IFACE_NAME"),
        _("UPLINK_IFACE_DESCRIPTION"),
        _("UPLINK_IFACE_SPEED"),
        _("UPLINK_MAX_LOAD_IN, Mbps"),
        _("UPLINK_MAX_TIME_IN"),
        _("UPLINK_MAX_LOAD_OUT, Mbps"),
        _("UPLINK_MAX_TIME_OUT"),
        _("UPLINK_AVG_LOAD_IN, Mbps"),
        _("UPLINK_AVG_LOAD_OUT, Mbps"),
        _("UPLINK_TOTAL_IN, Mbyte"),
        _("UPLINK_TOTAL_OUT, Mbyte"),
    ]
    if columns:
        cmap = []
        for c in columns.split(","):
            try:
                cmap += [cols.index(c)]
            except ValueError:
                continue
    else:
        cmap = list(range(len(cols)))
    columns_order = columns.split(",")
    columns_filter = set(columns_order)
    r = [translate_row(header_row, cmap)]
    # Date Time Block
    if not from_date:
        from_date = datetime.datetime.now() - datetime.timedelta(days=1)
    else:
        from_date = datetime.datetime.strptime(from_date, "%d.%m.%Y")
    if not to_date or from_date == to_date:
        to_date = from_date + datetime.timedelta(days=1)
    else:
        to_date = datetime.datetime.strptime(to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
    diff = to_date - from_date
    # Load managed objects
    mos = ManagedObject.objects.filter(is_managed=True)
    if not request.user.is_superuser:
        mos = mos.filter(administrative_domain__in=UserAccess.get_domains(request.user))
    if selector:
        mos = mos.filter(ManagedObjectSelector.objects.get(id=int(selector)).Q)
    if administrative_domain:
        mos = mos.filter(
            administrative_domain__in=AdministrativeDomain.get_nested_ids(
                int(administrative_domain)
            )
        )
    if object_profile:
        mos = mos.filter(object_profile=object_profile)
    if interface_profile:
        interface_profile = InterfaceProfile.objects.filter(id=interface_profile).first()
    mo_attrs = namedtuple("MOATTRs", [c for c in cols if c.startswith("object")])
    containers_address = {}
    if "object_container" in columns_filter:
        containers_address = ReportContainerData(set(mos.values_list("id", flat=True)))
        containers_address = dict(list(containers_address.extract()))
    moss = {}
    for row in mos.values_list(
        "bi_id", "name", "address", "platform", "administrative_domain__name", "segment", "id"
    ):
        moss[row[0]] = mo_attrs(
            *[
                row[6],
                row[1],
                row[2],
                smart_text(Platform.get_by_id(row[3]) if row[3] else ""),
                row[4],
                smart_text(NetworkSegment.get_by_id(row[5])) if row[5] else "",
                containers_address.get(row[6], "") if containers_address and row[6] else "",
            ]
        )
    report_metric = ReportInterfaceMetrics(
        tuple(sorted(moss)), from_date, to_date, columns=None
    )
    report_metric.SELECT_QUERY_MAP = {
        (0, "managed_object", "id"): "managed_object",
        (1, "path", "iface_name"): "arrayStringConcat(path)",
        (2, "", "iface_description"): (
            "dictGetString('interfaceattributes','description' , "
            "(managed_object, arrayStringConcat(path)))"
        ),
        (3, "", "profile"): (
            "dictGetString('interfaceattributes', 'profile', "
            "(managed_object, arrayStringConcat(path)))"
        ),
        (4, "speed", "iface_speed"): (
            "dictGetUInt64('interfaceattributes', 'in_speed', "
            "(managed_object, arrayStringConcat(path)))"
        ),
        (5, "load_in_max", "load_in_max"): "divide(max(load_in),1048576)",
        (6, "load_out_max", "load_out_max"): "divide(max(load_out),1048576)",
        (7, "max_load_in_time", "max_load_in_time"): "argMax(ts,load_in)",
        (8, "max_load_out_time", "max_load_out_time"): "argMax(ts,load_out)",
        (9, "avg_load_in", "avg_load_in"): "divide(avg(load_in),1048576)",
        (10, "avg_load_out", "avg_load_out"): "divide(avg(load_out),1048576)",
    }
    ifaces_metrics = defaultdict(dict)
    for row in report_metric.do_query():
        avg_in = str_to_float(row[9])
        avg_out = str_to_float(row[10])
        total_in = avg_in * diff.total_seconds() / 8
        total_out = avg_out * diff.total_seconds() / 8
        ifaces_metrics[row[0]][row[1]] = {
            "description": row[2],
            "profile": row[3],
            "bandwidth": row[4],
            "max_load_in": str_to_float(row[5]),
            "max_load_out": str_to_float(row[6]),
            "max_load_in_time": row[7],
            "max_load_out_time": row[8],
            "avg_load_in": avg_in,
            "avg_load_out": avg_out,
            "total_in": float("{0:.1f}".format(total_in)),
            "total_out": float("{0:.1f}".format(total_out)),
        }
    # find uplinks
    links = {}
    if cmap[-1] > 17:
        mos_id = list(mos.values_list("id", flat=True))
        uplinks = {obj: [] for obj in mos_id}
        for d in ObjectData._get_collection().find(
            {"_id": {"$in": mos_id}}, {"_id": 1, "uplinks": 1}
        ):
            uplinks[d["_id"]] = d.get("uplinks", [])
        rld = load(mos_id)
        for mo in uplinks:
            for uplink in uplinks[mo]:
                if rld[mo]:
                    if mo in links:
                        links[mo] += [rld[mo][uplink]]
                    else:
                        links[mo] = [rld[mo][uplink]]
    for mo_bi in ifaces_metrics:
        mo_id = moss[int(mo_bi)]
        mo_ids = getattr(mo_id, "object_id")
        for i in ifaces_metrics[mo_bi]:
            if exclude_zero:  # was inverted ("if not exclude_zero"), hiding the filter
                if (
                    ifaces_metrics[mo_bi][i]["max_load_in"] == 0
                    and ifaces_metrics[mo_bi][i]["max_load_out"] == 0
                ):
                    continue
            if description:
                if description not in ifaces_metrics[mo_bi][i]["description"]:
                    continue
            if interface_profile:
                if interface_profile.name not in ifaces_metrics[mo_bi][i]["profile"]:
                    continue
            row2 = [
                mo_ids,
                getattr(mo_id, "object_name"),
                getattr(mo_id, "object_address"),
                getattr(mo_id, "object_platform"),
                getattr(mo_id, "object_adm_domain"),
                getattr(mo_id, "object_segment"),
                getattr(mo_id, "object_container"),
                i,
                ifaces_metrics[mo_bi][i]["description"],
                ifaces_metrics[mo_bi][i]["bandwidth"],
                ifaces_metrics[mo_bi][i]["max_load_in"],
                ifaces_metrics[mo_bi][i]["max_load_in_time"],
                ifaces_metrics[mo_bi][i]["max_load_out"],
                ifaces_metrics[mo_bi][i]["max_load_out_time"],
                ifaces_metrics[mo_bi][i]["avg_load_in"],
                ifaces_metrics[mo_bi][i]["avg_load_out"],
                ifaces_metrics[mo_bi][i]["total_in"],
                ifaces_metrics[mo_bi][i]["total_out"],
                "",
                "",
                "",
                "",
                "",
                "",
                "",
                "",
                "",
                "",
                "",
            ]
            ss = True
            if mo_ids in links:
                for ifname_uplink in links[mo_ids]:
                    if ifname_uplink in ifaces_metrics[mo_bi]:
                        row2[18] = ifname_uplink
                        row2[19] = ifaces_metrics[mo_bi][ifname_uplink]["description"]
                        row2[20] = ifaces_metrics[mo_bi][ifname_uplink]["bandwidth"]
                        row2[21] = ifaces_metrics[mo_bi][ifname_uplink]["max_load_in"]
                        row2[22] = ifaces_metrics[mo_bi][ifname_uplink]["max_load_in_time"]
                        row2[23] = ifaces_metrics[mo_bi][ifname_uplink]["max_load_out"]
                        row2[24] = ifaces_metrics[mo_bi][ifname_uplink]["max_load_out_time"]
                        row2[25] = ifaces_metrics[mo_bi][ifname_uplink]["avg_load_in"]
                        row2[26] = ifaces_metrics[mo_bi][ifname_uplink]["avg_load_out"]
                        row2[27] = ifaces_metrics[mo_bi][ifname_uplink]["total_in"]
                        row2[28] = ifaces_metrics[mo_bi][ifname_uplink]["total_out"]
                        r += [translate_row(row2, cmap)]
                        ss = False
            if ss:
                r += [translate_row(row2, cmap)]
    filename = "metrics_detail_report_%s" % datetime.datetime.now().strftime("%Y%m%d")
    if o_format == "csv":
        response = HttpResponse(content_type="text/csv")
        response["Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
        writer = csv.writer(response, dialect="excel", delimiter=",", quoting=csv.QUOTE_MINIMAL)
        writer.writerows(r)
        return response
    elif o_format == "csv_zip":
        response = BytesIO()
        f = TextIOWrapper(TemporaryFile(mode="w+b"), encoding="utf-8")
        writer = csv.writer(f, dialect="excel", delimiter=";", quotechar='"')
        writer.writerows(r)
        f.seek(0)
        with ZipFile(response, "w", compression=ZIP_DEFLATED) as zf:
            zf.writestr("%s.csv" % filename, f.read())
            zf.filename = "%s.csv.zip" % filename
        # response = HttpResponse(content_type="text/csv")
        response.seek(0)
        response = HttpResponse(response.getvalue(), content_type="application/zip")
        response["Content-Disposition"] = 'attachment; filename="%s.csv.zip"' % filename
        return response
    elif o_format == "xlsx":
        response = BytesIO()
        wb = xlsxwriter.Workbook(response)
        cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
        ws = wb.add_worksheet("Metrics")
        max_column_data_length = {}
        for rn, x in enumerate(r):
            for cn, c in enumerate(x):
                if rn and (
                    r[0][cn] not in max_column_data_length
                    or len(str(c)) > max_column_data_length[r[0][cn]]
                ):
                    max_column_data_length[r[0][cn]] = len(str(c))
                ws.write(rn, cn, c, cf1)
        ws.autofilter(0, 0, rn, cn)
        ws.freeze_panes(1, 0)
        for cn, c in enumerate(r[0]):
            # Set column width
            width = get_column_width(c)
            if enable_autowidth and width < max_column_data_length[c]:
                width = max_column_data_length[c]
            ws.set_column(cn, cn, width=width)
        wb.close()
        response.seek(0)
        response = HttpResponse(response.getvalue(), content_type="application/vnd.ms-excel")
        response["Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
        response.close()
        return response
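

# Worked example of the totals computed above: avg_load_* comes back from the
# query in Mbit/s, so the transferred volume over the report interval is
# avg * seconds / 8, in megabytes. Numbers are hypothetical.
def _demo_total_traffic():
    avg_load_in = 100.0                    # Mbit/s averaged over the interval
    interval = 24 * 3600                   # one day, in seconds
    total_in = avg_load_in * interval / 8  # -> 1080000.0 Mbyte
    return float("{0:.1f}".format(total_in))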
def api_report(
    self,
    request,
    from_date,
    to_date,
    o_format,
    min_duration=0,
    max_duration=0,
    min_objects=0,
    min_subscribers=0,
    segment=None,
    administrative_domain=None,
    selector=None,
    ex_selector=None,
    columns=None,
    source="both",
    alarm_class=None,
    subscribers=None,
    enable_autowidth=False,
):
    def row(row, container_path, segment_path):
        def qe(v):
            if v is None:
                return ""
            if isinstance(v, str):
                return smart_text(v)  # was: isinstance(v, unicode) / v.encode("utf-8") (Python 2)
            elif isinstance(v, datetime.datetime):
                return v.strftime("%Y-%m-%d %H:%M:%S")
            else:
                return str(v)

        r = [qe(x) for x in row]
        if len(container_path) < self.CONTAINER_PATH_DEPTH:
            container_path += [""] * (self.CONTAINER_PATH_DEPTH - len(container_path))
        else:
            container_path = container_path[: self.CONTAINER_PATH_DEPTH]
        if len(segment_path) < self.SEGMENT_PATH_DEPTH:
            segment_path += [""] * (self.SEGMENT_PATH_DEPTH - len(segment_path))
        else:
            segment_path = segment_path[: self.SEGMENT_PATH_DEPTH]
        return r + container_path + segment_path

    def translate_row(row, cmap):
        return [row[i] for i in cmap]

    cols = (
        [
            "id",
            "root_id",
            "from_ts",
            "to_ts",
            "duration_sec",
            "object_name",
            "object_address",
            "object_hostname",
            "object_profile",
            "object_admdomain",
            "object_platform",
            "object_version",
            "alarm_class",
            "alarm_subject",
            "maintenance",
            "objects",
            "subscribers",
            "tt",
            "escalation_ts",
            "location",
            "container_address",
        ]
        + ["container_%d" % i for i in range(self.CONTAINER_PATH_DEPTH)]
        + ["segment_%d" % i for i in range(self.SEGMENT_PATH_DEPTH)]
    )
    header_row = (
        [
            "ID",
            _("ROOT_ID"),
            _("FROM_TS"),
            _("TO_TS"),
            _("DURATION_SEC"),
            _("OBJECT_NAME"),
            _("OBJECT_ADDRESS"),
            _("OBJECT_HOSTNAME"),
            _("OBJECT_PROFILE"),
            _("OBJECT_ADMDOMAIN"),
            _("OBJECT_PLATFORM"),
            _("OBJECT_VERSION"),
            _("ALARM_CLASS"),
            _("ALARM_SUBJECT"),
            _("MAINTENANCE"),
            _("OBJECTS"),
            _("SUBSCRIBERS"),
            _("TT"),
            _("ESCALATION_TS"),
            _("LOCATION"),
            _("CONTAINER_ADDRESS"),
        ]
        + ["CONTAINER_%d" % i for i in range(self.CONTAINER_PATH_DEPTH)]
        + ["SEGMENT_%d" % i for i in range(self.SEGMENT_PATH_DEPTH)]
    )
    if columns:
        cmap = []
        for c in columns.split(","):
            try:
                cmap += [cols.index(c)]
            except ValueError:
                continue
    else:
        cmap = list(range(len(cols)))
    subscribers_profile = self.default_subscribers_profile
    if subscribers:
        subscribers_profile = set(
            SubscriberProfile.objects.filter(id__in=subscribers.split(",")).scalar("id")
        )
    r = [translate_row(header_row, cmap)]
    fd = datetime.datetime.strptime(to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
    match = {
        "timestamp": {"$gte": datetime.datetime.strptime(from_date, "%d.%m.%Y"), "$lte": fd}
    }
    match_duration = {"duration": {"$gte": min_duration}}
    if max_duration:
        match_duration = {"duration": {"$gte": min_duration, "$lte": max_duration}}
    mos = ManagedObject.objects.filter(is_managed=True)
    if segment:
        try:
            match["segment_path"] = bson.ObjectId(segment)
        except bson.errors.InvalidId:
            pass
    ads = []
    if administrative_domain:
        if administrative_domain.isdigit():
            administrative_domain = [int(administrative_domain)]
            ads = AdministrativeDomain.get_nested_ids(administrative_domain[0])
    if not request.user.is_superuser:
        user_ads = UserAccess.get_domains(request.user)
        if administrative_domain and ads:
            if administrative_domain[0] not in user_ads:
                ads = list(set(ads) & set(user_ads))
            else:
                ads = administrative_domain
        else:
            ads = user_ads
    if ads:
        mos = mos.filter(administrative_domain__in=ads)
    if selector:
        selector = ManagedObjectSelector.get_by_id(int(selector))
        mos = mos.filter(selector.Q)
    if ex_selector:
        ex_selector = ManagedObjectSelector.get_by_id(int(ex_selector))
        mos = mos.exclude(ex_selector.Q)
    # Working if Administrative domain set
    if ads:
        try:
            match["adm_path"] = {"$in": ads}
            # @todo More than 2-level hierarchy
        except bson.errors.InvalidId:
            pass
    mos_id = list(mos.order_by("id").values_list("id", flat=True))
    mo_hostname = {}
    maintenance = []
    if mos_id and (selector or ex_selector):
        match["managed_object"] = {"$in": mos_id}
    if "maintenance" in columns.split(","):
        maintenance = Maintenance.currently_affected()
    if "object_hostname" in columns.split(","):
        mo_hostname = ReportObjectsHostname1(sync_ids=mos_id)
        mo_hostname = mo_hostname.get_dictionary()
    moss = ReportAlarmObjects(mos_id).get_all()
    # container_lookup = ReportContainer(mos_id)
    container_lookup = None
    subject = "alarm_subject" in columns
    loc = AlarmApplication([])
    if source in ["archive", "both"]:
        # Archived Alarms
        for a in (
            ArchivedAlarm._get_collection()
            .with_options(read_preference=ReadPreference.SECONDARY_PREFERRED)
            .aggregate(
                [
                    {"$match": match},
                    {
                        "$addFields": {
                            "duration": {
                                "$divide": [
                                    {"$subtract": ["$clear_timestamp", "$timestamp"]},
                                    1000,
                                ]
                            }
                        }
                    },
                    {"$match": match_duration},
                    # {"$sort": {"timestamp": 1}}
                ]
            )
        ):
            if int(a["managed_object"]) not in moss:
                continue
            dt = a["clear_timestamp"] - a["timestamp"]
            duration = int(dt.total_seconds())
            total_objects = sum(ss["summary"] for ss in a["total_objects"])
            if min_objects and total_objects < min_objects:
                continue
            total_subscribers = sum(
                ss["summary"]
                for ss in a["total_subscribers"]
                if subscribers_profile and ss["profile"] in subscribers_profile
            )
            if min_subscribers and total_subscribers < min_subscribers:
                continue
            if "segment_" in columns.split(",") or "container_" in columns.split(","):
                path = ObjectPath.get_path(a["managed_object"])
                if path:
                    segment_path = [
                        NetworkSegment.get_by_id(s).name
                        for s in path.segment_path
                        if NetworkSegment.get_by_id(s)
                    ]
                    container_path = [
                        Object.get_by_id(s).name
                        for s in path.container_path
                        if Object.get_by_id(s)
                    ]
                else:
                    segment_path = []
                    container_path = []
            else:
                segment_path = []
                container_path = []
            r += [
                translate_row(
                    row(
                        [
                            str(a["_id"]),
                            str(a["root"]) if a.get("root") else "",
                            a["timestamp"],
                            a["clear_timestamp"],
                            str(duration),
                            moss[a["managed_object"]][0],
                            moss[a["managed_object"]][1],
                            mo_hostname.get(a["managed_object"], ""),
                            Profile.get_by_id(moss[a["managed_object"]][3]).name
                            if moss[a["managed_object"]][5]
                            else "",
                            moss[a["managed_object"]][6],
                            Platform.get_by_id(moss[a["managed_object"]][9])
                            if moss[a["managed_object"]][9]
                            else "",
                            Firmware.get_by_id(moss[a["managed_object"]][10])
                            if moss[a["managed_object"]][10]
                            else "",
                            AlarmClass.get_by_id(a["alarm_class"]).name,
                            ArchivedAlarm.objects.get(id=a["_id"]).subject if subject else "",
                            "",
                            total_objects,
                            total_subscribers,
                            a.get("escalation_tt"),
                            a.get("escalation_ts"),
                            ", ".join(
                                l
                                for l in (
                                    loc.location(moss[a["managed_object"]][5])
                                    if moss[a["managed_object"]][5] is not None
                                    else ""
                                )
                                if l
                            ),
                            container_lookup[a["managed_object"]].get("text", "")
                            if container_lookup
                            else "",
                        ],
                        container_path,
                        segment_path,
                    ),
                    cmap,
                )
            ]
    # Active Alarms
    if source in ["active", "both"]:
        for a in (
            ActiveAlarm._get_collection()
            .with_options(read_preference=ReadPreference.SECONDARY_PREFERRED)
            .aggregate(
                [
                    {"$match": match},
                    {
                        "$addFields": {
                            "duration": {"$divide": [{"$subtract": [fd, "$timestamp"]}, 1000]}
                        }
                    },
                    {"$match": match_duration},
                    # {"$sort": {"timestamp": 1}}
                ]
            )
        ):
            dt = fd - a["timestamp"]
            duration = int(dt.total_seconds())
            total_objects = sum(ss["summary"] for ss in a["total_objects"])
            if min_objects and total_objects < min_objects:
                continue
            total_subscribers = sum(
                ss["summary"]
                for ss in a["total_subscribers"]
                if subscribers_profile and ss["profile"] in subscribers_profile
            )
            if min_subscribers and total_subscribers < min_subscribers:
                continue
            if "segment_" in columns.split(",") or "container_" in columns.split(","):
                path = ObjectPath.get_path(a["managed_object"])
                if path:
                    segment_path = [
                        NetworkSegment.get_by_id(s).name
                        for s in path.segment_path
                        if NetworkSegment.get_by_id(s)
                    ]
                    container_path = [
                        Object.get_by_id(s).name
                        for s in path.container_path
                        if Object.get_by_id(s)
                    ]
                else:
                    segment_path = []
                    container_path = []
            else:
                segment_path = []
                container_path = []
            r += [
                translate_row(
                    row(
                        [
                            str(a["_id"]),
                            str(a["root"]) if a.get("root") else "",
                            a["timestamp"],
                            # a["clear_timestamp"],
                            "",
                            str(duration),
                            moss[a["managed_object"]][0],
                            moss[a["managed_object"]][1],
                            mo_hostname.get(a["managed_object"], ""),
                            Profile.get_by_id(moss[a["managed_object"]][3])
                            if moss[a["managed_object"]][5]
                            else "",
                            moss[a["managed_object"]][6],
                            Platform.get_by_id(moss[a["managed_object"]][9])
                            if moss[a["managed_object"]][9]
                            else "",
                            Firmware.get_by_id(moss[a["managed_object"]][10])
                            if moss[a["managed_object"]][10]
                            else "",
                            AlarmClass.get_by_id(a["alarm_class"]).name,
                            ActiveAlarm.objects.get(id=a["_id"]).subject if subject else None,
                            "Yes" if a["managed_object"] in maintenance else "No",
                            total_objects,
                            total_subscribers,
                            a.get("escalation_tt"),
                            a.get("escalation_ts"),
                            ", ".join(
                                l
                                for l in (
                                    loc.location(moss[a["managed_object"]][5])
                                    if moss[a["managed_object"]][5] is not None
                                    else ""
                                )
                                if l
                            ),
                            container_lookup[a["managed_object"]].get("text", "")
                            if container_lookup
                            else "",
                        ],
                        container_path,
                        segment_path,
                    ),
                    cmap,
                )
            ]
    if o_format == "csv":
        response = HttpResponse(content_type="text/csv")
        response["Content-Disposition"] = 'attachment; filename="alarms.csv"'
        writer = csv.writer(response)
        writer.writerows(r)
        return response
    elif o_format == "xlsx":
        response = BytesIO()  # xlsxwriter emits binary data (was StringIO)
        wb = xlsxwriter.Workbook(response)
        cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
        ws = wb.add_worksheet("Alarms")
        max_column_data_length = {}
        for rn, x in enumerate(r):
            for cn, c in enumerate(x):
                if rn and (
                    r[0][cn] not in max_column_data_length
                    or len(str(c)) > max_column_data_length[r[0][cn]]
                ):
                    max_column_data_length[r[0][cn]] = len(str(c))
                ws.write(rn, cn, c, cf1)
        ws.autofilter(0, 0, rn, cn)
        ws.freeze_panes(1, 0)
        for cn, c in enumerate(r[0]):
            # Set column width
            width = get_column_width(c)
            if enable_autowidth and width < max_column_data_length[c]:
                width = max_column_data_length[c]
            ws.set_column(cn, cn, width=width)
        wb.close()
        response.seek(0)
        response = HttpResponse(response.getvalue(), content_type="application/vnd.ms-excel")
        response["Content-Disposition"] = 'attachment; filename="alarms.xlsx"'
        response.close()
        return response
def get_data(self):
    def update_dict(s, d):
        for k in d:
            if k in s:
                s[k] += d[k]
            else:
                s[k] = d[k]

    stpl = self.object.type.card_template or self.default_title_template
    now = datetime.datetime.now()
    if self.object.start > now:
        status = "before"
    elif self.object.is_completed:
        status = "complete"
    else:
        status = "progress"
    # Calculate affected objects
    affected = []
    summary = {"service": {}, "subscriber": {}}
    # Maintenance
    data = [
        d
        for d in AffectedObjects._get_collection().aggregate(
            [
                {"$match": {"maintenance": self.object.id}},
                {"$project": {"objects": "$affected_objects.object"}},
            ]
        )
    ]
    hide = False
    if data:
        if len(data[0].get("objects")) > 100:
            hide = True
        for mo in (
            ManagedObject.objects.filter(is_managed=True, id__in=data[0].get("objects"))
            .values("id", "name", "platform", "address", "container")
            .distinct()
        ):
            ss, object = {}, None
            if not hide:
                ss = ServiceSummary.get_object_summary(mo["id"])
                object = ManagedObject.get_by_id(mo["id"])
            update_dict(summary["service"], ss.get("service", {}))
            update_dict(summary["subscriber"], ss.get("subscriber", {}))
            ao = {
                "id": mo["id"],
                "name": mo["name"],
                "address": mo["address"],
                "platform": Platform.get_by_id(mo["platform"]).name if mo["platform"] else "",
                "summary": ss,
            }
            if object:
                ao["object"] = object
            affected += [ao]
    #
    return {
        "title": jinja2.Template(stpl).render({"object": self.object}),
        "object": self.object,
        "subject": self.object.subject,
        "contacts": self.object.contacts,
        "start": self.object.start,
        "stop": self.object.stop,
        "description": self.object.description,
        "status": status,
        "affected": affected,
        "summary": summary,
    }
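

# Minimal sketch: update_dict() above merges per-object service/subscriber
# summaries by summing the counters per profile key. Values are invented.
def _demo_update_dict():
    summary = {"internet": 10}      # accumulated so far
    per_object = {"internet": 5, "voip": 2}
    for k in per_object:
        if k in summary:
            summary[k] += per_object[k]
        else:
            summary[k] = per_object[k]
    return summary  # -> {"internet": 15, "voip": 2}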
def get_data(self, request, pool=None, obj_profile=None, **kwargs):
    problems = {}  # id -> problem
    mos = ManagedObject.objects.filter(is_managed=True, pool=pool)
    if not request.user.is_superuser:
        mos = mos.filter(administrative_domain__in=UserAccess.get_domains(request.user))
    if obj_profile:
        # Get all managed objects
        mos = mos.filter(object_profile=obj_profile)
    mos = {
        mo[0]: (mo[1], mo[2], Profile.get_by_id(mo[3]), mo[4], mo[5])
        for mo in mos.values_list("id", "name", "address", "profile", "platform", "segment")
    }
    mos_set = set(mos)
    # Get all managed objects with generic profile
    for mo in mos:
        if mos[mo][2] == GENERIC_PROFILE:
            problems[mo] = _("Profile check failed")
    # Get all managed objects without interfaces
    if_mo = dict(
        (x["_id"], x.get("managed_object"))
        for x in Interface._get_collection().find({}, {"_id": 1, "managed_object": 1})
    )
    for mo in mos_set - set(problems) - set(six.itervalues(if_mo)):
        problems[mo] = _("No interfaces")
    # Get all managed objects without links
    linked_mos = set()
    for d in Link._get_collection().find({}):
        for i in d["interfaces"]:
            linked_mos.add(if_mo.get(i))
    for mo in mos_set - set(problems) - linked_mos:
        problems[mo] = _("No links")
    # Get all managed objects without uplinks
    uplinks = {}
    for d in ObjectData._get_collection().find():
        nu = len(d.get("uplinks", []))
        if nu:
            uplinks[d["_id"]] = nu
    for mo in mos_set - set(problems) - set(uplinks):
        problems[mo] = _("No uplinks")
    #
    data = []
    for mo_id in problems:
        if mo_id not in mos:
            continue
        name, address, profile, platform, segment = mos[mo_id]
        data += [
            [
                name,
                address,
                profile.name,
                Platform.get_by_id(platform).name if platform else "",
                NetworkSegment.get_by_id(segment).name if segment else "",
                problems[mo_id],
            ]
        ]
    data = sorted(data)
    return self.from_dataset(
        title=self.title,
        columns=["Name", "Address", "Profile", "Platform", "Segment", "Problem"],
        data=data,
        enumerate=True,
    )
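

# Sketch of the classification strategy above: each successive check subtracts
# the already diagnosed objects, so every object is assigned at most one (the
# first matching) problem. Sample ids are invented.
def _demo_problem_classification():
    mos_set = {1, 2, 3, 4}
    problems = {1: "Profile check failed"}
    with_interfaces = {2, 3}
    for mo in mos_set - set(problems) - with_interfaces:
        problems[mo] = "No interfaces"  # -> object 4
    linked = {2}
    for mo in mos_set - set(problems) - linked:
        problems[mo] = "No links"       # -> object 3
    return problems  # -> {1: ..., 4: "No interfaces", 3: "No links"}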
def api_report(
    self,
    request,
    from_date,
    to_date,
    o_format,
    min_duration=0,
    max_duration=0,
    min_objects=0,
    min_subscribers=0,
    segment=None,
    administrative_domain=None,
    selector=None,
    ex_selector=None,
    columns=None,
    source="both",
    alarm_class=None,
    subscribers=None,
    enable_autowidth=False,
):
    def row(row, container_path, segment_path):
        def qe(v):
            # Normalize a cell value to text
            if v is None:
                return ""
            if isinstance(v, datetime.datetime):
                return v.strftime("%Y-%m-%d %H:%M:%S")
            return smart_text(v)

        r = [qe(x) for x in row]
        # Pad or cut the path columns to their fixed depth
        if len(container_path) < self.CONTAINER_PATH_DEPTH:
            container_path += [""] * (self.CONTAINER_PATH_DEPTH - len(container_path))
        else:
            container_path = container_path[: self.CONTAINER_PATH_DEPTH]
        if len(segment_path) < self.SEGMENT_PATH_DEPTH:
            segment_path += [""] * (self.SEGMENT_PATH_DEPTH - len(segment_path))
        else:
            segment_path = segment_path[: self.SEGMENT_PATH_DEPTH]
        return r + container_path + segment_path

    def translate_row(row, cmap):
        return [row[i] for i in cmap]

    cols = (
        [
            "id",
            "root_id",
            "from_ts",
            "to_ts",
            "duration_sec",
            "object_name",
            "object_address",
            "object_hostname",
            "object_profile",
            "object_admdomain",
            "object_platform",
            "object_version",
            "alarm_class",
            "alarm_subject",
            "maintenance",
            "objects",
            "subscribers",
            "tt",
            "escalation_ts",
            "location",
            "container_address",
        ]
        + ["container_%d" % i for i in range(self.CONTAINER_PATH_DEPTH)]
        + ["segment_%d" % i for i in range(self.SEGMENT_PATH_DEPTH)]
    )
    header_row = (
        [
            _("ID"),
            _("ROOT_ID"),
            _("FROM_TS"),
            _("TO_TS"),
            _("DURATION_SEC"),
            _("OBJECT_NAME"),
            _("OBJECT_ADDRESS"),
            _("OBJECT_HOSTNAME"),
            _("OBJECT_PROFILE"),
            _("OBJECT_ADMDOMAIN"),
            _("OBJECT_PLATFORM"),
            _("OBJECT_VERSION"),
            _("ALARM_CLASS"),
            _("ALARM_SUBJECT"),
            _("MAINTENANCE"),
            _("OBJECTS"),
            _("SUBSCRIBERS"),
            _("TT"),
            _("ESCALATION_TS"),
            _("LOCATION"),
            _("CONTAINER_ADDRESS"),
        ]
        + ["CONTAINER_%d" % i for i in range(self.CONTAINER_PATH_DEPTH)]
        + ["SEGMENT_%d" % i for i in range(self.SEGMENT_PATH_DEPTH)]
    )
    if columns:
        cmap = []
        for c in columns.split(","):
            try:
                cmap += [cols.index(c)]
            except ValueError:
                continue
    else:
        cmap = list(range(len(cols)))
    subscribers_profile = self.default_subscribers_profile
    if subscribers:
        subscribers_profile = set(
            SubscriberProfile.objects.filter(id__in=subscribers.split(",")).scalar("id")
        )
    r = [translate_row(header_row, cmap)]
    fd = datetime.datetime.strptime(to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
    match = {
        "timestamp": {
            "$gte": datetime.datetime.strptime(from_date, "%d.%m.%Y"),
            "$lte": fd,
        }
    }
    match_duration = {"duration": {"$gte": min_duration}}
    if max_duration:
        match_duration = {"duration": {"$gte": min_duration, "$lte": max_duration}}
    mos = ManagedObject.objects.filter(is_managed=True)
    if segment:
        try:
            match["segment_path"] = bson.ObjectId(segment)
        except bson.errors.InvalidId:
            pass
    ads = []
    if administrative_domain:
        if administrative_domain.isdigit():
            administrative_domain = [int(administrative_domain)]
            ads = AdministrativeDomain.get_nested_ids(administrative_domain[0])
    if not request.user.is_superuser:
        user_ads = UserAccess.get_domains(request.user)
        if administrative_domain and ads:
            if administrative_domain[0] not in user_ads:
                ads = list(set(ads) & set(user_ads))
                if not ads:
                    return HttpResponse(
                        "<html><body>Permission denied: Invalid Administrative Domain</body></html>"
                    )
        else:
            ads = user_ads
    if ads:
        mos = mos.filter(administrative_domain__in=ads)
    if selector:
        selector = ManagedObjectSelector.get_by_id(int(selector))
        mos = mos.filter(selector.Q)
    if ex_selector:
        ex_selector = ManagedObjectSelector.get_by_id(int(ex_selector))
        mos = mos.exclude(ex_selector.Q)
    # Applied only when an administrative domain is set
    if ads:
        match["adm_path"] = {"$in": ads}  # @todo: support more than a 2-level hierarchy
    mos_id = list(mos.order_by("id").values_list("id", flat=True))
    mo_hostname = {}
    maintenance = []
    if mos_id and (selector or ex_selector):
        match["managed_object"] = {"$in": mos_id}
    if columns and "maintenance" in columns.split(","):
        maintenance = Maintenance.currently_affected()
    if columns and "object_hostname" in columns.split(","):
        mo_hostname = ReportObjectsHostname1(sync_ids=mos_id).get_dictionary()
    moss = ReportAlarmObjects(mos_id).get_all()
    # container_lookup = ReportContainer(mos_id)
    container_lookup = None
    subject = bool(columns) and "alarm_subject" in columns
    # Path columns are requested as "container_<n>"/"segment_<n>",
    # so match them by prefix rather than by exact name
    want_paths = bool(columns) and any(
        c.startswith("segment_") or c.startswith("container_")
        for c in columns.split(",")
    )
    loc = AlarmApplication([])
    if source in ["archive", "both"]:
        # Archived alarms
        for a in (
            ArchivedAlarm._get_collection()
            .with_options(read_preference=ReadPreference.SECONDARY_PREFERRED)
            .aggregate(
                [
                    {"$match": match},
                    {
                        "$addFields": {
                            "duration": {
                                "$divide": [
                                    {"$subtract": ["$clear_timestamp", "$timestamp"]},
                                    1000,
                                ]
                            }
                        }
                    },
                    {"$match": match_duration},
                    # {"$sort": {"timestamp": 1}}
                ]
            )
        ):
            if int(a["managed_object"]) not in moss:
                continue
            dt = a["clear_timestamp"] - a["timestamp"]
            duration = int(dt.total_seconds())
            total_objects = sum(ss["summary"] for ss in a["total_objects"])
            if min_objects and total_objects < min_objects:
                continue
            total_subscribers = sum(
                ss["summary"]
                for ss in a["total_subscribers"]
                if subscribers_profile and ss["profile"] in subscribers_profile
            )
            if min_subscribers and total_subscribers < min_subscribers:
                continue
            if want_paths:
                path = ObjectPath.get_path(a["managed_object"])
                if path:
                    segment_path = [
                        NetworkSegment.get_by_id(s).name
                        for s in path.segment_path
                        if NetworkSegment.get_by_id(s)
                    ]
                    container_path = [
                        Object.get_by_id(s).name
                        for s in path.container_path
                        if Object.get_by_id(s)
                    ]
                else:
                    segment_path, container_path = [], []
            else:
                segment_path, container_path = [], []
            r += [
                translate_row(
                    row(
                        [
                            smart_text(a["_id"]),
                            smart_text(a["root"]) if a.get("root") else "",
                            a["timestamp"],
                            a["clear_timestamp"],
                            smart_text(duration),
                            moss[a["managed_object"]][0],
                            moss[a["managed_object"]][1],
                            smart_text(mo_hostname.get(a["managed_object"], "")),
                            Profile.get_by_id(moss[a["managed_object"]][3]).name
                            if moss[a["managed_object"]][5]
                            else "",
                            moss[a["managed_object"]][6],
                            Platform.get_by_id(moss[a["managed_object"]][9]).name
                            if moss[a["managed_object"]][9]
                            else "",
                            smart_text(Firmware.get_by_id(moss[a["managed_object"]][10]).version)
                            if moss[a["managed_object"]][10]
                            else "",
                            AlarmClass.get_by_id(a["alarm_class"]).name,
                            ArchivedAlarm.objects.get(id=a["_id"]).subject if subject else "",
                            "",  # archived alarms are never under current maintenance
                            total_objects,
                            total_subscribers,
                            a.get("escalation_tt"),
                            a.get("escalation_ts"),
                            ", ".join(
                                ll
                                for ll in (
                                    loc.location(moss[a["managed_object"]][5])
                                    if moss[a["managed_object"]][5] is not None
                                    else ""
                                )
                                if ll
                            ),
                            container_lookup[a["managed_object"]].get("text", "")
                            if container_lookup
                            else "",
                        ],
                        container_path,
                        segment_path,
                    ),
                    cmap,
                )
            ]
    # Active alarms
    if source in ["active", "both"]:
        datenow = datetime.datetime.now()
        for a in (
            ActiveAlarm._get_collection()
            .with_options(read_preference=ReadPreference.SECONDARY_PREFERRED)
            .aggregate(
                [
                    {"$match": match},
                    {
                        "$addFields": {
                            "duration": {
                                "$divide": [{"$subtract": [fd, "$timestamp"]}, 1000]
                            }
                        }
                    },
                    {"$match": match_duration},
                    # {"$sort": {"timestamp": 1}}
                ]
            )
        ):
            if int(a["managed_object"]) not in moss:
                continue  # mirror the archive branch to avoid a KeyError below
            dt = datenow - a["timestamp"]
            duration = int(dt.total_seconds())
            total_objects = sum(ss["summary"] for ss in a["total_objects"])
            if min_objects and total_objects < min_objects:
                continue
            total_subscribers = sum(
                ss["summary"]
                for ss in a["total_subscribers"]
                if subscribers_profile and ss["profile"] in subscribers_profile
            )
            if min_subscribers and total_subscribers < min_subscribers:
                continue
            if want_paths:
                path = ObjectPath.get_path(a["managed_object"])
                if path:
                    segment_path = [
                        NetworkSegment.get_by_id(s).name
                        for s in path.segment_path
                        if NetworkSegment.get_by_id(s)
                    ]
                    container_path = [
                        Object.get_by_id(s).name
                        for s in path.container_path
                        if Object.get_by_id(s)
                    ]
                else:
                    segment_path, container_path = [], []
            else:
                segment_path, container_path = [], []
            r += [
                translate_row(
                    row(
                        [
                            smart_text(a["_id"]),
                            smart_text(a["root"]) if a.get("root") else "",
                            a["timestamp"],
                            "",  # active alarms have no clear_timestamp
                            smart_text(duration),
                            moss[a["managed_object"]][0],
                            moss[a["managed_object"]][1],
                            smart_text(mo_hostname.get(a["managed_object"], "")),
                            Profile.get_by_id(moss[a["managed_object"]][3]).name
                            if moss[a["managed_object"]][5]
                            else "",
                            moss[a["managed_object"]][6],
                            Platform.get_by_id(moss[a["managed_object"]][9]).name
                            if moss[a["managed_object"]][9]
                            else "",
                            smart_text(Firmware.get_by_id(moss[a["managed_object"]][10]).version)
                            if moss[a["managed_object"]][10]
                            else "",
                            AlarmClass.get_by_id(a["alarm_class"]).name,
                            ActiveAlarm.objects.get(id=a["_id"]).subject if subject else "",
                            "Yes" if a["managed_object"] in maintenance else "No",
                            total_objects,
                            total_subscribers,
                            a.get("escalation_tt"),
                            a.get("escalation_ts"),
                            ", ".join(
                                ll
                                for ll in (
                                    loc.location(moss[a["managed_object"]][5])
                                    if moss[a["managed_object"]][5] is not None
                                    else ""
                                )
                                if ll
                            ),
                            container_lookup[a["managed_object"]].get("text", "")
                            if container_lookup
                            else "",
                        ],
                        container_path,
                        segment_path,
                    ),
                    cmap,
                )
            ]
    if source in ["long_archive"]:
        o_format = "csv_zip"
        columns = [
            "ALARM_ID",
            "MO_ID",
            "OBJECT_PROFILE",
            "VENDOR",
            "PLATFORM",
            "VERSION",
            "OPEN_TIMESTAMP",
            "CLOSE_TIMESTAMP",
            "LOCATION",
            "",
            "POOL",
            "ADM_DOMAIN",
            "MO_NAME",
            "IP",
            "ESCALATION_TT",
            "DURATION",
            "SEVERITY",
            "REBOOTS",
        ]
        from noc.core.clickhouse.connect import connection

        ch = connection()
        fd = datetime.datetime.strptime(from_date, "%d.%m.%Y")
        td = datetime.datetime.strptime(to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
        if td - fd > datetime.timedelta(days=390):
            return HttpResponseBadRequest(
                _("Reports spanning more than one year are not allowed. If needed, request one from the administrator")
            )
        ac = AlarmClass.objects.get(name="NOC | Managed Object | Ping Failed")
        subs = ", ".join(
            "subscribers.summary[indexOf(subscribers.profile, '%s')] as `%s`" % (sp.bi_id, sp.name)
            for sp in SubscriberProfile.objects.filter().order_by("name")
        )
        if subs:
            columns += [sp.name for sp in SubscriberProfile.objects.filter().order_by("name")]
        r = ch.execute(
            LONG_ARCHIVE_QUERY
            % (
                ", %s" % subs if subs else "",
                fd.date().isoformat(),
                td.date().isoformat(),
                ac.bi_id,
            )
        )
    filename = "alarms.csv"
    if o_format == "csv":
        response = HttpResponse(content_type="text/csv")
        response["Content-Disposition"] = 'attachment; filename="%s"' % filename
        writer = csv.writer(response)
        writer.writerows(r)
        return response
    elif o_format == "csv_zip":
        response = BytesIO()
        f = TextIOWrapper(TemporaryFile(mode="w+b"), encoding="utf-8")
        writer = csv.writer(f, dialect="excel", delimiter=";", quotechar='"')
        if isinstance(columns, list):
            # The extra header row exists only for the long_archive export;
            # regular reports already carry their header in r[0]
            writer.writerow(columns)
        writer.writerows(r)
        f.seek(0)
        with ZipFile(response, "w", compression=ZIP_DEFLATED) as zf:
            zf.writestr(filename, f.read())
            zf.filename = "%s.zip" % filename
        response.seek(0)
        response = HttpResponse(response.getvalue(), content_type="application/zip")
        response["Content-Disposition"] = 'attachment; filename="%s.zip"' % filename
        return response
    elif o_format == "xlsx":
        response = BytesIO()
        wb = xlsxwriter.Workbook(response)
        cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
        ws = wb.add_worksheet("Alarms")
        max_column_data_length = {}
        for rn, x in enumerate(r):
            for cn, c in enumerate(x):
                if rn and (
                    r[0][cn] not in max_column_data_length
                    or len(str(c)) > max_column_data_length[r[0][cn]]
                ):
                    max_column_data_length[r[0][cn]] = len(str(c))
                ws.write(rn, cn, c, cf1)
        ws.autofilter(0, 0, rn, cn)
        ws.freeze_panes(1, 0)
        for cn, c in enumerate(r[0]):
            # Set the column width, optionally widening to the longest cell
            width = get_column_width(c)
            if enable_autowidth and width < max_column_data_length[c]:
                width = max_column_data_length[c]
            ws.set_column(cn, cn, width=width)
        wb.close()
        response.seek(0)
        response = HttpResponse(response.getvalue(), content_type="application/vnd.ms-excel")
        response["Content-Disposition"] = 'attachment; filename="alarms.xlsx"'
        response.close()
        return response
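
# Standalone sketch of the "csv_zip" packaging branch above, using only the
# standard library: stream CSV rows into a temporary file, then wrap the
# result into an in-memory ZIP suitable for an HTTP attachment. The helper
# name "rows_to_csv_zip" is illustrative, not a NOC API.
import csv
from io import BytesIO, TextIOWrapper
from tempfile import TemporaryFile
from zipfile import ZipFile, ZIP_DEFLATED

def rows_to_csv_zip(rows, filename="alarms.csv"):
    buf = BytesIO()
    f = TextIOWrapper(TemporaryFile(mode="w+b"), encoding="utf-8")
    writer = csv.writer(f, dialect="excel", delimiter=";", quotechar='"')
    writer.writerows(rows)
    f.seek(0)
    with ZipFile(buf, "w", compression=ZIP_DEFLATED) as zf:
        zf.writestr(filename, f.read())
    return buf.getvalue()  # bytes of the finished .zip

# Example usage:
# payload = rows_to_csv_zip([["ID", "NAME"], ["1", "mo-1"]])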
def handler(self):
    self.logger.info("Checking version")
    result = self.object.scripts.get_version()
    changed = False
    # Sync vendor
    vendor = Vendor.ensure_vendor(result["vendor"])
    if not self.object.vendor or vendor.id != self.object.vendor.id:
        if self.object.vendor:
            self.logger.info("Vendor changed: %s -> %s", self.object.vendor.name, vendor.name)
        else:
            self.logger.info("Set vendor: %s", vendor.name)
        self.object.vendor = vendor
        changed = True
    # Sync platform
    platform = Platform.ensure_platform(vendor, result["platform"])
    if not self.object.platform or platform.id != self.object.platform.id:
        if self.object.platform:
            self.logger.info("Platform changed: %s -> %s", self.object.platform.name, platform.name)
        else:
            self.logger.info("Set platform: %s", platform.name)
        self.object.platform = platform
        changed = True
        # Platform changed, clear links
        if self.object.object_profile.clear_links_on_platform_change:
            self.clear_links()
        # Invalidate neighbor cache
        self.invalidate_neighbor_cache()
    # Sync version
    version = Firmware.ensure_firmware(self.object.profile, vendor, result["version"])
    if not self.object.version or version.id != self.object.version.id:
        if self.object.version:
            self.logger.info("Version changed: %s -> %s", self.object.version.version, version.version)
        else:
            self.logger.info("Set version: %s", version.version)
        self.object.version = version
        changed = True
        # @todo: Check next_version and report upgrade
    # Sync image
    image = result.get("image", "") or None
    if image != self.object.software_image:
        if image:
            image = image.strip()[:255]  # Cut to field length
        if not image:
            image = None
        if self.object.software_image:
            self.logger.info("Image changed: %s -> %s", self.object.software_image, image)
        else:
            self.logger.info("Set image: %s", image)
        self.object.software_image = image
        changed = True
    # Sync attributes
    if "attributes" in result:
        self.object.update_attributes(result["attributes"])
    # Persist only when something actually changed
    if changed:
        self.object.save()
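
# A minimal sketch (not the NOC API) of the compare-log-assign pattern
# repeated in handler() above: resolve the desired value, compare it with
# the current one, log "Set ..." on first assignment or "... changed" on
# update, and report whether a save is needed. "sync_field" is a
# hypothetical helper introduced only for illustration.
def sync_field(obj, field, new_value, logger):
    old = getattr(obj, field)
    if old == new_value:
        return False  # nothing changed, no save required
    if old is None:
        logger.info("Set %s: %s", field, new_value)
    else:
        logger.info("%s changed: %s -> %s", field, old, new_value)
    setattr(obj, field, new_value)
    return True

# Hypothetical usage against the handler above:
# changed |= sync_field(self.object, "software_image", image, self.logger)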