class KBEntryApplication(ExtModelApplication):
    """
    Knowledge Base entry application: standard CRUD plus API views for
    entry history, rendered HTML and attachment management.
    """
    title = _("Entries")
    menu = [_("Setup"), _("Entries")]
    model = KBEntry
    # Matches per-attachment form fields like "description1"/"is_hidden2";
    # findex ties the attribute to the uploaded file with the same index
    file_fields_mask = re.compile(
        r"^(?P<fname>description|is_hidden)(?P<findex>\d+)")

    def instance_to_dict(self, o, fields=None):
        """Serialize entry and append its visible (non-hidden) attachments."""
        r = super(KBEntryApplication, self).instance_to_dict(o, fields=fields)
        r["attachments"] = [{
            "name": x.name,
            "size": x.size,
            "mtime": self.to_json(x.mtime),
            "description": x.description,
        } for x in KBEntryAttachment.objects.filter(
            kb_entry=o, is_hidden=False).order_by("name")]
        return r

    @view(r"^(?P<id>\d+)/history/$", access="read", api=True)
    def api_get_entry_history(self, request, id):
        """Return entry change history, newest first."""
        o = self.get_object_or_404(KBEntry, id=id)
        return {
            "data": [{
                "timestamp": self.to_json(h.timestamp),
                "user": str(h.user),
                "diff": h.diff
            } for h in KBEntryHistory.objects.filter(
                kb_entry=o).order_by("-timestamp")]
        }

    @view(r"^(?P<id>\d+)/html/$", access="read", api=True)
    def api_get_entry_html(self, request, id):
        """Return the rendered HTML body of an entry."""
        o = self.get_object_or_404(KBEntry, id=id)
        return self.render_plain_text(o.html)

    @view(r"^most_popular/$", access="read", api=True)
    def api_get_most_popular(self, request):
        """Return most popular entries."""
        return KBEntry.most_popular()

    @view(r"^(?P<id>\d+)/attachments/$", access="read", api=True,
          method=["GET"])
    def api_list_attachments(self, request, id):
        """List visible attachments of an entry."""
        o = self.get_object_or_404(KBEntry, id=id)
        return [{
            "name": x.name,
            "size": x.size,
            "mtime": self.to_json(x.mtime),
            "description": x.description,
        } for x in KBEntryAttachment.objects.filter(
            kb_entry=o, is_hidden=False).order_by("name")]

    @view(r"^(?P<id>\d+)/attachment/(?P<name>.+)/$", access="read",
          api=True, method=["GET"])
    def api_get_attachment(self, request, id, name):
        """Stream an attachment's file content as a download."""
        o = self.get_object_or_404(KBEntry, id=id)
        attach = self.get_object_or_404(KBEntryAttachment, kb_entry=o,
                                        name=name)
        # BUGFIX: mimetypes.guess_type() returns a (type, encoding) tuple,
        # which is always truthy, so `tuple or default` never fell back and
        # the tuple itself was passed as content_type. Use the type element.
        file_mime = mimetypes.guess_type(attach.file.name)[0]
        response = HttpResponse(
            attach.file,
            content_type=file_mime or "application/octet-stream")
        response["Content-Disposition"] = (
            'attachment; filename="%s"' % attach.file.name)
        return response

    @view(r"^(?P<id>\d+)/attachment/(?P<name>.+)/$", access="delete",
          api=True, method=["DELETE"])
    def api_delete_attachment(self, request, id, name):
        """Delete a named attachment of an entry."""
        o = self.get_object_or_404(KBEntry, id=id)
        attach = self.get_object_or_404(KBEntryAttachment, kb_entry=o,
                                        name=name)
        attach.delete()
        # BUGFIX: fixed "succesful" typo in the result message
        return self.response({"result": "Delete successful"}, status=self.OK)

    @view(r"^(?P<id>\d+)/attach/$", access="write", api=True,
          method=["POST"])
    def api_post_set_attachment(self, request, id):
        """Upload a new attachment for an entry."""
        o = self.get_object_or_404(KBEntry, id=id)
        # NOTE(review): attachment name is hard-coded to "uploaded_file1";
        # presumably it should use the uploaded file's own name — confirm
        attach = KBEntryAttachment(kb_entry=o, name="uploaded_file1",
                                   description="",
                                   file=request.FILES["file"])
        attach.save()
        # BUGFIX: fixed "succesful" typo in the result message
        return self.response({"result": "Upload successful"}, status=self.OK)

    def update_file(self, files, o, attrs=None):
        """
        Create attachments on entry `o` for each uploaded file that is not
        already attached. Existing attachments are left untouched.

        :param files: mapping of form field name -> uploaded file
        :param o: KBEntry instance
        :param attrs: optional mapping of attachment attributes keyed by
            the form field suffix (name[4:], i.e. the index after "file")
        :returns: True
        """
        # BUGFIX: attrs defaults to None but was dereferenced
        # unconditionally; normalize to an empty dict
        attrs = attrs or {}
        left = {}  # name -> data
        for f in files:
            left[f] = files[f]
        for name in left:
            f = left[name]
            # name[4:] strips the field prefix to recover the file index;
            # see file_fields_mask above
            attr = attrs.get(name[4:], {})
            # NOTE(review): existence check filters by the form field name,
            # while new attachments are created with f.name — verify intent
            attach = KBEntryAttachment.objects.filter(kb_entry=o, name=name)
            # @todo update attributes
            if not attach:
                attach = KBEntryAttachment(
                    kb_entry=o,
                    name=f.name,
                    description=attr.get("description", ""),
                    is_hidden=attr.get("is_hidden") == "true",
                    file=f,
                )
                attach.save()
        return True

    @view(r"^(?P<id>\d+)/?$", access="update", api=True, method=["POST"])
    def api_post_update(self, request, id):
        """POST alias for the standard update handler."""
        return self.api_update(request, id)
def get_menu(self):
    """Return the menu path for this report."""
    menu_title = unicode(self.title)
    return [_("Reports"), menu_title]
def api_action_run_discovery(self, request, ids):
    """
    Remove the selected UnknownModel records.

    :param request: HTTP request
    :param ids: iterable of selected objects
    :returns: localized status message
    """
    # BUGFIX: the queryset was built but never executed with any effect —
    # filter() alone is lazy and removes nothing. Call .delete() so the
    # selected records are actually cleaned, matching the returned message.
    UnknownModel.objects.filter(id__in=[x.id for x in ids]).delete()
    return _("Cleaned")
def api_data(self, request, id):
    """
    Return map data for a network segment: normalized nodes, links and
    layout positions (stored hints or freshly generated), saving the
    first-time layout back to MapSettings.
    """
    def q_mo(d):
        # Normalize a topology node dict for JSON output.
        # `mos` and `is_view` are closed over from the enclosing scope;
        # `mos` is only bound when is_view is truthy, and the conditional
        # below only reads it in that case.
        x = d.copy()
        if x["type"] == "managedobject":
            del x["mo"]
            x["external"] = x["id"] not in mos if is_view else x.get(
                "role") != "segment"
        elif d["type"] == "cloud":
            del x["link"]
            x["external"] = False
        return x
    # Find segment
    segment = self.get_object_or_404(NetworkSegment, id=id)
    if segment.managed_objects.count() > segment.max_objects:
        # Too many objects: refuse to render the map
        return {
            "id": str(segment.id),
            "name": segment.name,
            "error": _("Too many objects")
        }
    # if we set selector in segment
    is_view = segment.selector
    if is_view:
        # ids of objects inside the selector; everything else is "external"
        mos = segment.selector.managed_objects.values_list("id", flat=True)
    # Load settings (stored node/link layout hints, if any)
    settings = MapSettings.objects.filter(segment=id).first()
    node_hints = {}
    link_hints = {}
    if settings:
        self.logger.info("Using stored positions")
        for n in settings.nodes:
            node_hints[n.id] = {
                "type": n.type,
                "id": n.id,
                "x": n.x,
                "y": n.y
            }
        for ll in settings.links:
            link_hints[ll.id] = {
                # Links without vertices fall back to a straight connector
                "connector": ll.connector if len(ll.vertices) else "normal",
                "vertices": [{
                    "x": v.x,
                    "y": v.y
                } for v in ll.vertices],
            }
    else:
        self.logger.info("Generating positions")
    # Generate topology; ?force=spring forces spring layout
    topology = SegmentTopology(
        segment, node_hints, link_hints,
        force_spring=request.GET.get("force") == "spring")
    topology.layout()
    # Build output
    r = {
        "id": str(segment.id),
        "max_links": int(segment.max_shown_downlinks),
        "name": segment.name,
        "caps": list(topology.caps),
        "nodes": [q_mo(x) for x in topology.G.nodes.values()],
        "links": [topology.G[u][v] for u, v in topology.G.edges()],
    }
    # Parent info
    if segment.parent:
        r["parent"] = {
            "id": str(segment.parent.id),
            "name": segment.parent.name
        }
    # Save settings on first render so later requests reuse the layout
    if not settings:
        self.logger.debug("Saving first-time layout")
        MapSettings.load_json({
            "id": str(segment.id),
            # Only nodes that received concrete coordinates are persisted
            "nodes": [{
                "type": n["type"],
                "id": n["id"],
                "x": n["x"],
                "y": n["y"]
            } for n in r["nodes"]
              if n.get("x") is not None and n.get("y") is not None],
            "links": [{
                "type": n["type"],
                "id": n["id"],
                "vertices": n.get("vertices", []),
                "connector": n.get("connector", "normal"),
            } for n in r["links"]],
        })
    return r
class ReportMetricsDetailApplication(ExtApplication):
    """
    Load metrics detail report: queries metric sources over a date range
    and exports the result as CSV or XLSX.
    """
    menu = _("Reports") + "|" + _("Load Metrics")
    title = _("Load Metrics")
    # Report type -> metric query class
    metric_source = {
        "load_interfaces": ReportInterfaceMetrics,
        "load_cpu": ReportCPUMetrics,
        "load_memory": ReportMemoryMetrics,
        "ping": ReportPingMetrics,
    }

    @view(
        "^download/$",
        method=["GET"],
        access="launch",
        api=True,
        validate={
            "from_date": StringParameter(required=True),
            "to_date": StringParameter(required=True),
            "reporttype": StringParameter(
                required=True, choices=["load_interfaces", "load_cpu", "ping"]
            ),
            "administrative_domain": StringParameter(required=False),
            # "pool": StringParameter(required=False),
            "segment": StringParameter(required=False),
            "selector": StringParameter(required=False),
            "interface_profile": StringParameter(required=False),
            "exclude_zero": BooleanParameter(required=False),
            "filter_default": BooleanParameter(required=False),
            "columns": StringParameter(required=False),
            "o_format": StringParameter(choices=["csv", "xlsx"]),
        },
    )
    def api_report(
        self,
        request,
        reporttype=None,
        from_date=None,
        to_date=None,
        object_profile=None,
        filter_default=None,
        exclude_zero=None,
        interface_profile=None,
        selector=None,
        administrative_domain=None,
        columns=None,
        o_format=None,
        enable_autowidth=False,
        **kwargs
    ):
        """
        Build and export the metrics detail report.

        :param reporttype: one of metric_source keys (validated subset)
        :param from_date: / :param to_date: "%d.%m.%Y" strings
        :param columns: comma-separated subset of `cols` below
        :param o_format: "csv" or "xlsx"
        :returns: HttpResponse with the report file
        """
        def translate_row(row, cmap):
            # Project a full row onto the selected column indexes
            return [row[i] for i in cmap]

        # Grafana dashboard title patterns per report type
        map_table = {
            "load_interfaces": "/Interface\s\|\sLoad\s\|\s[In|Out]/",
            "load_cpu": "/[CPU|Memory]\s\|\sUsage/",
            "errors": "/Interface\s\|\s[Errors|Discards]\s\|\s[In|Out]/",
            "ping": "/Ping\s\|\sRTT/",
        }
        # All selectable columns, in canonical order
        cols = [
            "id",
            "object_name",
            "object_address",
            "object_platform",
            "object_adm_domain",
            "object_segment",
            # "object_hostname",
            # "object_status",
            # "profile_name",
            # "object_profile",
            # "object_vendor",
            "iface_name",
            "iface_description",
            "iface_speed",
            "load_in",
            "load_in_p",
            "load_out",
            "load_out_p",
            "errors_in",
            "errors_out",
            "slot",
            "cpu_usage",
            "memory_usage",
            "ping_rtt",
            "ping_attempts",
            "interface_flap",
            "interface_load_url",
        ]
        # NOTE(review): header_row has no entry for "slot", so it is one
        # item shorter than `cols`; headers for columns after "slot" may be
        # shifted when those columns are selected — confirm intended.
        header_row = [
            "ID",
            "OBJECT_NAME",
            "OBJECT_ADDRESS",
            "OBJECT_PLATFORM",
            "OBJECT_ADM_DOMAIN",
            "OBJECT_SEGMENT",
            "IFACE_NAME",
            "IFACE_DESCRIPTION",
            "IFACE_SPEED",
            "LOAD_IN",
            "LOAD_IN_P",
            "LOAD_OUT",
            "LOAD_OUT_P",
            "ERRORS_IN",
            "ERRORS_OUT",
            "CPU_USAGE",
            "MEMORY_USAGE",
            "PING_RTT",
            "PING_ATTEMPTS",
            "INTERFACE_FLAP",
            "INTERFACE_LOAD_URL",
        ]
        # Map requested column names to indexes; unknown names are skipped
        if columns:
            cmap = []
            for c in columns.split(","):
                try:
                    cmap += [cols.index(c)]
                except ValueError:
                    continue
        else:
            cmap = list(range(len(cols)))
        columns_order = columns.split(",")
        columns_filter = set(columns_order)
        r = [translate_row(header_row, cmap)]
        object_columns = [c for c in columns_order if c.startswith("object")]

        # Date Time Block: default to the last day; to_date is exclusive
        if not from_date:
            from_date = datetime.datetime.now() - datetime.timedelta(days=1)
        else:
            from_date = datetime.datetime.strptime(from_date, "%d.%m.%Y")
        if not to_date or from_date == to_date:
            to_date = from_date + datetime.timedelta(days=1)
        else:
            to_date = datetime.datetime.strptime(to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
        # interval = (to_date - from_date).days
        ts_from_date = time.mktime(from_date.timetuple())
        ts_to_date = time.mktime(to_date.timetuple())

        # Load managed objects, restricted by user access and filters
        mos = ManagedObject.objects.filter(is_managed=True)
        if not request.user.is_superuser:
            mos = mos.filter(administrative_domain__in=UserAccess.get_domains(request.user))
        if selector:
            mos = mos.filter(ManagedObjectSelector.objects.get(id=int(selector)).Q)
        if administrative_domain:
            mos = mos.filter(
                administrative_domain__in=AdministrativeDomain.get_nested_ids(
                    int(administrative_domain)
                )
            )
        if object_profile:
            mos = mos.filter(object_profile=object_profile)
        # iface_dict = {}

        # Parameters for the per-row Grafana dashboard link
        d_url = {
            "path": "/ui/grafana/dashboard/script/report.js",
            "rname": map_table[reporttype],
            "from": str(int(ts_from_date * 1000)),
            "to": str(int(ts_to_date * 1000)),
            # o.name.replace("#", "%23")
            "biid": "",
            "oname": "",
            "iname": "",
        }

        # Per-report URL template, grouping and base SELECT expressions.
        # q_select keys are (position, field, alias) tuples; values are
        # ClickHouse SELECT expressions.
        report_map = {
            "load_interfaces": {
                "url": "%(path)s?title=interface&biid=%(biid)s"
                "&obj=%(oname)s&iface=%(iname)s&from=%(from)s&to=%(to)s",
                "q_group": ["interface"],
                "q_select": {
                    (0, "managed_object", "id"): "managed_object",
                    (1, "path", "iface_name"): "arrayStringConcat(path)",
                },
            },
            # NOTE(review): "errors" has no q_select and is not in the
            # validated reporttype choices — unreachable as written
            "errors": {
                "url": """%(path)s?title=errors&biid=%(biid)s&obj=%(oname)s&iface=%(iname)s&from=%(from)s&to=%(to)s""",
                "q_group": ["interface"],
            },
            "load_cpu": {
                "url": """%(path)s?title=cpu&biid=%(biid)s&obj=%(oname)s&from=%(from)s&to=%(to)s""",
                "q_select": {
                    (0, "managed_object", "id"): "managed_object",
                    (1, "path", "slot"): "arrayStringConcat(path)",
                },
            },
            "ping": {
                "url": """%(path)s?title=ping&biid=%(biid)s&obj=%(oname)s&from=%(from)s&to=%(to)s""",
                "q_select": {(0, "managed_object", "id"): "managed_object"},
            },
        }

        # Column name -> (source field, alias, aggregate expression)
        query_map = {
            # "iface_description": ('', 'iface_description', "''"),
            "iface_description": (
                "",
                "iface_description",
                "dictGetString('interfaceattributes','description' , (managed_object, arrayStringConcat(path)))",
            ),
            "iface_speed": (
                "speed",
                "iface_speed",
                "if(max(speed) = 0, dictGetUInt64('interfaceattributes', 'in_speed', "
                "(managed_object, arrayStringConcat(path))), max(speed))",
            ),
            "load_in": ("load_in", "l_in", "round(quantile(0.90)(load_in), 0)"),
            "load_in_p": (
                "load_in",
                "l_in_p",
                "replaceOne(toString(round(quantile(0.90)(load_in) / "
                "if(max(speed) = 0, dictGetUInt64('interfaceattributes', 'in_speed', "
                "(managed_object, arrayStringConcat(path))), max(speed)), 4) * 100), '.', ',')",
            ),
            "load_out": ("load_out", "l_out", "round(quantile(0.90)(load_out), 0)"),
            "load_out_p": (
                "load_out",
                "l_out_p",
                "replaceOne(toString(round(quantile(0.90)(load_out) / "
                "if(max(speed) = 0, dictGetUInt64('interfaceattributes', 'in_speed', "
                "(managed_object, arrayStringConcat(path))), max(speed)), 4) * 100), '.', ',')",
            ),
            "errors_in": ("errors_in", "err_in", "quantile(0.90)(errors_in)"),
            "errors_out": ("errors_out", "err_out", "quantile(0.90)(errors_out)"),
            "cpu_usage": ("usage", "cpu_usage", "quantile(0.90)(usage)"),
            "ping_rtt": ("rtt", "ping_rtt", "round(quantile(0.90)(rtt) / 1000, 2)"),
            "ping_attempts": ("attempts", "ping_attempts", "avg(attempts)"),
        }

        # Base query fields come from the report's mandatory q_select keys
        query_fields = []
        for c in report_map[reporttype]["q_select"]:
            query_fields += [c[2]]
        field_shift = len(query_fields)  # deny replacing field
        # Append selected metric columns after the mandatory fields
        for c in columns.split(","):
            if c not in query_map:
                continue
            field, alias, func = query_map[c]
            report_map[reporttype]["q_select"][
                (columns_order.index(c) + field_shift, field, alias)
            ] = func
            query_fields += [c]
        metrics_attrs = namedtuple("METRICSATTRs", query_fields)
        mo_attrs = namedtuple("MOATTRs", [c for c in cols if c.startswith("object")])

        # bi_id -> object attribute tuple for row enrichment
        moss = {}
        for row in mos.values_list(
            "bi_id", "name", "address", "platform", "administrative_domain__name", "segment"
        ):
            moss[row[0]] = mo_attrs(
                *[
                    row[1],
                    row[2],
                    str(Platform.get_by_id(row[3]) if row[3] else ""),
                    row[4],
                    str(NetworkSegment.get_by_id(row[5])) if row[5] else "",
                ]
            )
        url = report_map[reporttype].get("url", "")
        report_metric = self.metric_source[reporttype](
            tuple(sorted(moss)), from_date, to_date, columns=None
        )
        report_metric.SELECT_QUERY_MAP = report_map[reporttype]["q_select"]
        if exclude_zero and reporttype == "load_interfaces":
            report_metric.CUSTOM_FILTER["having"] += ["max(load_in) != 0 AND max(load_out) != 0"]
        if interface_profile:
            interface_profile = InterfaceProfile.objects.filter(id=interface_profile).first()
            report_metric.CUSTOM_FILTER["having"] += [
                "dictGetString('interfaceattributes', 'profile', "
                "(managed_object, arrayStringConcat(path))) = '%s'" % interface_profile.name
            ]
        # OBJECT_PLATFORM, ADMIN_DOMAIN, SEGMENT, OBJECT_HOSTNAME
        for row in report_metric.do_query():
            mm = metrics_attrs(*row)
            mo = moss[int(mm.id)]
            res = []
            for y in columns_order:
                if y in object_columns:
                    res += [getattr(mo, y)]
                else:
                    res += [getattr(mm, y)]
            if "interface_load_url" in columns_filter:
                d_url["biid"] = mm.id
                # NOTE(review): mo[2] is object_platform in MOATTRs field
                # order; an object *name* looks intended here — confirm
                d_url["oname"] = mo[2].replace("#", "%23")
                # res += [url % d_url, interval]
                res.insert(columns_order.index("interface_load_url"), url % d_url)
            r += [res]

        filename = "metrics_detail_report_%s" % datetime.datetime.now().strftime("%Y%m%d")
        if o_format == "csv":
            response = HttpResponse(content_type="text/csv")
            response["Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
            writer = csv.writer(response, dialect="excel", delimiter=",", quoting=csv.QUOTE_MINIMAL)
            writer.writerows(r)
            return response
        elif o_format == "xlsx":
            response = StringIO()
            wb = xlsxwriter.Workbook(response)
            cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
            ws = wb.add_worksheet("Alarms")
            # Track longest cell per column (keyed by header) for autowidth
            max_column_data_length = {}
            for rn, x in enumerate(r):
                for cn, c in enumerate(x):
                    if rn and (
                        r[0][cn] not in max_column_data_length
                        or len(str(c)) > max_column_data_length[r[0][cn]]
                    ):
                        max_column_data_length[r[0][cn]] = len(str(c))
                    ws.write(rn, cn, c, cf1)
            ws.autofilter(0, 0, rn, cn)
            ws.freeze_panes(1, 0)
            for cn, c in enumerate(r[0]):
                # Set column width
                width = get_column_width(c)
                if enable_autowidth and width < max_column_data_length[c]:
                    width = max_column_data_length[c]
                ws.set_column(cn, cn, width=width)
            wb.close()
            response.seek(0)
            response = HttpResponse(response.getvalue(), content_type="application/vnd.ms-excel")
            response["Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
            response.close()
            return response
class Calculator(CalculatorBase):
    """CIR/CAR (bps) to burst-rate calculator."""
    name = "CIR2CBR"
    title = _("CIR/CAR(bps) to Burst-rate")
    description = _(
        "Recommended way for policy-map values calculating is: "
        "normal-burst-bytes=bits-per-second*Tc/8."
        "Recommended way for rate-limit values calculating is: "
        "burst-normal=bits-per-second*Tc/8, burst-max=2*burst-normal")
    form_class = CalculatorForm
    # Configuration snippet templates
    template_ios_policy = (
        "policy-map shape-%(value)d\n"
        " class class-default\n"
        " police %(value)d %(v)d %(v)d "
        "conform-action transmit exceed-action drop violate-action drop\n")
    template_ios_rate = (
        "rate-limit input %(value)d %(v)d %(v1)d conform-action transmit exceed-action drop"
    )
    template_junos_policy = ("policer policer-%(value)d {\n"
                             " if-exceeding {\n"
                             "  bandwidth-limit %(value)d;\n"
                             "  burst-size-limit %(v)d;\n"
                             " }\n"
                             " then {\n"
                             "  discard;\n"
                             " }\n"
                             "}\n")

    def escape(self, s):
        """Wrap a rendered snippet in a <pre> block and mark it HTML-safe."""
        return mark_safe("<pre>%s</pre>" % s)

    def calculate_ios_policy(self, value, v):
        """Build result rows for an IOS policy-map."""
        snippet = self.template_ios_policy % {"value": value, "v": v}
        rows = [
            ("CIR(bps)", value),
            ("normal-burst-bytes, extended-burst-bytes", v),
            ("Policy-map example", self.escape(snippet)),
        ]
        return rows

    def calculate_ios_rate(self, value, v):
        """Build result rows for an IOS rate-limit."""
        v1 = 2 * v
        snippet = self.template_ios_rate % {"value": value, "v": v, "v1": v1}
        rows = [
            ("CIR(bps)", value),
            ("burst-normal", v),
            ("burst-max", v1),
            ("Rate-limit example", self.escape(snippet)),
        ]
        return rows

    def calculate_junos_policy(self, value, v):
        """Build result rows for a Junos policer."""
        snippet = self.template_junos_policy % {"value": value, "v": v}
        rows = [
            ("CIR(bps)", value),
            ("burst-rate", v),
            ("Policer example", self.escape(snippet)),
        ]
        return rows

    def calculate(self, value, Tc, calculation):
        """Dispatch to calculate_<calculation> with burst v = value*Tc/8."""
        burst = int(value * Tc / 8)
        handler = getattr(self, "calculate_%s" % calculation)
        return handler(value, burst)
# --------------------------------------------------------------------- # Settings for "ip" module # --------------------------------------------------------------------- # Copyright (C) 2007-2016 The NOC Project # See LICENSE for details # --------------------------------------------------------------------- # NOC modules from noc.core.translation import ugettext as _ MODULE_NAME = _("IPAM")
def get_data(self, request, report_type=None, **kwargs):
    """
    Build the Managed Objects Summary dataset, grouped according to
    report_type (profile / domain / domain-profile / label / platform /
    version). Non-superusers are restricted to their administrative
    domains via the WHERE/AND fragments interpolated into the SQL below.

    :raises Exception: on an unknown report_type
    """
    # (fragment, params) pairs interpolated into the queries with `% wr`;
    # for multi-domain users str(tuple) yields a "(1, 2, ...)" list
    wr = ("", "")
    # wr_and = ("", "",)
    wr_and2 = ("", "")
    # id -> display-name lookup tables (mongo-backed references)
    platform = {
        str(p["_id"]): p["name"]
        for p in Platform.objects.all().as_pymongo().scalar("id", "name")
    }
    version = {
        str(p["_id"]): p["version"]
        for p in Firmware.objects.all().as_pymongo().scalar(
            "id", "version")
    }
    profile = {
        str(p["_id"]): p["name"]
        for p in Profile.objects.all().as_pymongo().scalar("id", "name")
    }
    if not request.user.is_superuser:
        ad = tuple(UserAccess.get_domains(request.user))
        wr = ("WHERE administrative_domain_id in ", ad)
        # wr_and = ("AND sam.administrative_domain_id in ", ad)
        wr_and2 = ("AND administrative_domain_id in ", ad)
        if len(ad) == 1:
            # Single-element tuple would render as "(1,)"; interpolate
            # the lone id directly instead
            wr = ("WHERE administrative_domain_id in (%s)" % ad, "")
            # wr_and = ("AND sam.administrative_domain_id in (%s)" % ad, "")
            wr_and2 = ("AND administrative_domain_id in (%s)" % ad, "")
    # By Profile
    if report_type == "profile":
        columns = [_("Profile")]
        query = ("""SELECT profile,COUNT(*)
                FROM sa_managedobject
                %s%s
                GROUP BY 1
                ORDER BY 2 DESC""" % wr)
    # By Administrative Domain
    elif report_type == "domain":
        columns = [_("Administrative Domain")]
        query = ("""SELECT a.name,COUNT(*)
                FROM sa_managedobject o JOIN sa_administrativedomain a ON (o.administrative_domain_id=a.id)
                %s%s
                GROUP BY 1
                ORDER BY 2 DESC""" % wr)
    # By Profile and Administrative Domains
    elif report_type == "domain-profile":
        columns = [_("Administrative Domain"), _("Profile")]
        query = ("""SELECT d.name,profile,COUNT(*)
                FROM sa_managedobject o JOIN sa_administrativedomain d ON (o.administrative_domain_id=d.id)
                %s%s
                GROUP BY 1,2
                """ % wr)
    # By Labels
    elif report_type == "label":
        columns = [_("Label")]
        query = ("""
          SELECT t.label, COUNT(*)
          FROM (
            SELECT unnest(labels) AS label
            FROM sa_managedobject
            WHERE labels IS NOT NULL %s%s AND array_length(labels, 1) > 0
          ) t
          GROUP BY 1
          ORDER BY 2 DESC;
        """ % wr_and2)
    elif report_type == "platform":
        # NOTE(review): column headers say (Platform, Profile) but the
        # SELECT order and row mapping below are (profile, platform) —
        # header labels look swapped; confirm before changing
        columns = [_("Platform"), _("Profile")]
        query = ("""select sam.profile, sam.platform, COUNT(platform)
                from sa_managedobject sam %s%s group by 1,2 order by count(platform) desc;""" % wr)
    elif report_type == "version":
        columns = [_("Profile"), _("Version")]
        query = ("""select sam.profile, sam.version, COUNT(version)
                from sa_managedobject sam %s%s group by 1,2 order by count(version) desc;""" % wr)
    else:
        raise Exception("Invalid report type: %s" % report_type)
    # Append the human-readable report-type name to the title
    for r, t in report_types:
        if r == report_type:
            title = self.title + ": " + t
            break
    columns += [
        TableColumn(_("Quantity"), align="right", total="sum", format="integer")
    ]
    cursor = self.cursor()
    cursor.execute(query, ())
    data = []
    # Translate reference ids to display names per report type
    for c in cursor.fetchall():
        if report_type == "profile":
            data += [(profile.get(c[0]), c[1])]
        elif report_type == "domain-profile":
            data += [(c[0], profile.get(c[1]), c[2])]
        elif report_type == "platform":
            data += [(profile.get(c[0]), platform.get(c[1]), c[2])]
        elif report_type == "version":
            data += [(profile.get(c[0]), version.get(c[1]), c[2])]
        else:
            data += [c]
    return self.from_dataset(title=title, columns=columns, data=data, enumerate=True)
def get_data(self, request, pool=None, obj_profile=None, filter_ignore_iface=True, **kwargs):
    """
    Build the pending-links report: per-object interface discovery
    problems plus summary sections of frequently-missing remote peers.
    """
    # Extracts chassis id / system name from the stored repr-like
    # remote_id string of "Remote object is not found" problems
    rn = re.compile(
        r"'remote_chassis_id': u'(?P<rem_ch_id>\S+)'.+'remote_system_name': u'(?P<rem_s_name>\S+)'",
        re.IGNORECASE,
    )
    # Problem text -> short direction marker shown in the report
    problem = {
        "Not found iface on remote": "->",
        "Not found local iface on remote": "<-",
        "Remote object is not found": "X",
    }
    data = []
    # MAC, hostname, count
    not_found = defaultdict(int)
    # Name, IP, count
    local_on_remote = defaultdict(int)
    # Get all managed objects
    mos = ManagedObject.objects.filter(is_managed=True, pool=pool)
    if not request.user.is_superuser:
        mos = mos.filter(
            administrative_domain__in=UserAccess.get_domains(request.user))
    if obj_profile:
        mos = mos.filter(object_profile=obj_profile)
    mos_id = dict((mo.id, mo) for mo in mos)
    # Interfaces with discovery policy "I" (ignore) are excluded
    report = ReportPendingLinks(
        list(six.iterkeys(mos_id)),
        ignore_profiles=list(
            InterfaceProfile.objects.filter(discovery_policy="I")),
    )
    problems = report.out
    for mo_id in problems:
        # Fall back to a direct lookup for objects filtered out above
        mo = mos_id.get(mo_id, ManagedObject.get_by_id(mo_id))
        for iface in problems[mo_id]:
            data += [(
                mo.name,
                mo.address,
                mo.profile.name,
                mo.administrative_domain.name,
                iface,
                problem[problems[mo_id][iface]["problem"]],
                problems[mo_id][iface]["remote_id"],
            )]
            # Tally peers for the summary sections below
            if problems[mo_id][iface][
                    "problem"] == "Remote object is not found":
                match = rn.findall(problems[mo_id][iface]["remote_id"])
                if match:
                    not_found[match[0]] += 1
            elif problems[mo_id][iface][
                    "problem"] == "Not found iface on remote":
                local_on_remote[(mo.name, mo.address)] += 1
    # Summary: peers referenced more than 4 times
    data += [SectionRow(name="Summary information on u_object")]
    for c in not_found:
        if not_found[c] > 4:
            data += [c]
    data += [SectionRow(name="Summary information on agg")]
    for c in local_on_remote:
        if local_on_remote[c] > 4:
            data += [c]
    return self.from_dataset(
        title=self.title,
        columns=[
            _("Managed Object"),
            _("Address"),
            _("Profile"),
            _("Administrative domain"),
            _("Interface"),
            _("Direction"),
            _("Remote Object")
            # _("Discovery"), _("Error")
        ],
        data=data,
    )
# See LICENSE for details # --------------------------------------------------------------------- # Third-party modules from django import forms # NOC modules from noc.lib.app.simplereport import SimpleReport, TableColumn, PredefinedReport from noc.sa.models.useraccess import UserAccess from noc.sa.models.profile import Profile from noc.inv.models.platform import Platform from noc.inv.models.firmware import Firmware from noc.core.translation import ugettext as _ report_types = [ ("profile", _("By Profile")), ("domain", _("By Administrative Domain")), ("domain-profile", _("By Administrative Domain and Profile")), ("label", _("By Labels")), ("platform", _("By Platform")), ("version", _("By Version")), ] class ReportForm(forms.Form): report_type = forms.ChoiceField(label=_("Report Type"), choices=report_types) class ReportObjectsSummary(SimpleReport): title = _("Managed Objects Summary")
class ReportForm(forms.Form):
    """Report parameter form: selects the report grouping."""
    report_type = forms.ChoiceField(
        label=_("Report Type"),
        choices=report_types,
    )
class Meta(object):
    """
    Django model metadata for the Prefix model: table name, app label
    and the (vrf, afi, prefix) uniqueness constraint.
    """
    verbose_name = _("Prefix")
    verbose_name_plural = _("Prefixes")
    db_table = "ip_prefix"
    app_label = "ip"
    # A prefix is unique within a VRF and address family
    unique_together = [("vrf", "afi", "prefix")]
class Prefix(NOCModel): """ Allocated prefix """ class Meta(object): verbose_name = _("Prefix") verbose_name_plural = _("Prefixes") db_table = "ip_prefix" app_label = "ip" unique_together = [("vrf", "afi", "prefix")] parent = models.ForeignKey( "self", related_name="children_set", verbose_name=_("Parent"), null=True, blank=True, on_delete=models.CASCADE, ) vrf = CachedForeignKey(VRF, verbose_name=_("VRF"), default=VRF.get_global, on_delete=models.CASCADE) afi = models.CharField(_("Address Family"), max_length=1, choices=AFI_CHOICES) prefix = CIDRField(_("Prefix")) name = models.CharField(_("Name"), max_length=255, null=True, blank=True) profile = DocumentReferenceField(PrefixProfile, null=False, blank=False) asn = CachedForeignKey( AS, verbose_name=_("AS"), help_text=_("Autonomous system granted with prefix"), null=True, blank=True, on_delete=models.CASCADE, ) project = CachedForeignKey( Project, verbose_name="Project", on_delete=models.SET_NULL, null=True, blank=True, related_name="prefix_set", ) vc = models.ForeignKey( VC, verbose_name=_("VC"), null=True, blank=True, on_delete=models.SET_NULL, help_text=_("VC bound to prefix"), ) description = models.TextField(_("Description"), blank=True, null=True) tags = TagsField("Tags", null=True, blank=True) tt = models.IntegerField("TT", blank=True, null=True, help_text=_("Ticket #")) state = DocumentReferenceField(State, null=True, blank=True) allocated_till = models.DateField( _("Allocated till"), null=True, blank=True, help_text=_("Prefix temporary allocated till the date"), ) ipv6_transition = models.OneToOneField( "self", related_name="ipv4_transition", null=True, blank=True, limit_choices_to={"afi": "6"}, on_delete=models.SET_NULL, ) prefix_discovery_policy = models.CharField( _("Prefix Discovery Policy"), max_length=1, choices=[("P", "Profile"), ("E", "Enable"), ("D", "Disable")], default="P", blank=False, null=False, ) address_discovery_policy = models.CharField( _("Address Discovery Policy"), max_length=1, 
choices=[("P", "Profile"), ("E", "Enable"), ("D", "Disable")], default="P", blank=False, null=False, ) source = models.CharField( "Source", max_length=1, choices=[("M", "Manual"), ("i", "Interface"), ("w", "Whois"), ("n", "Neighbor")], null=False, blank=False, default="M", ) csv_ignored_fields = ["parent"] _id_cache = cachetools.TTLCache(maxsize=1000, ttl=60) def __str__(self): return "%s(%s): %s" % (self.vrf.name, self.afi, self.prefix) @classmethod @cachetools.cachedmethod(operator.attrgetter("_id_cache"), lock=lambda _: id_lock) def get_by_id(cls, id): mo = Prefix.objects.filter(id=id)[:1] if mo: return mo[0] else: return None def get_absolute_url(self): return site.reverse("ip:ipam:vrf_index", self.vrf.id, self.afi, self.prefix) @property def has_transition(self): """ Check prefix has ipv4/ipv6 transition :return: """ if self.is_ipv4: return bool(self.ipv6_transition) else: try: # pylint: disable=pointless-statement self.ipv4_transition # noqa return True except Prefix.DoesNotExist: return False @classmethod def get_parent(cls, vrf, afi, prefix): """ Get nearest closing prefix """ r = Prefix.objects.filter(vrf=vrf, afi=str(afi)).extra( select={"masklen": "masklen(prefix)"}, where=["prefix >> %s"], params=[str(prefix)], order_by=["-masklen"], )[:1] if r: return r[0] return None @property def is_ipv4(self): return self.afi == "4" @property def is_ipv6(self): return self.afi == "6" @property def is_root(self): """ Returns true if the prefix is a root of VRF """ return (self.is_ipv4 and self.prefix == "0.0.0.0/0") or (self.is_ipv6 and self.prefix == "::/0") def clean(self): """ Field validation """ super(Prefix, self).clean() # Set defaults self.afi = "6" if ":" in self.prefix else "4" # Check prefix is of AFI type if self.is_ipv4: check_ipv4_prefix(self.prefix) elif self.is_ipv6: check_ipv6_prefix(self.prefix) # Set defaults if not self.vrf: self.vrf = VRF.get_global() if not self.is_root: # Set proper parent self.parent = Prefix.get_parent(self.vrf, self.afi, 
self.prefix) # Check root prefix have no parent if self.is_root and self.parent: raise ValidationError("Root prefix cannot have parent") def save(self, *args, **kwargs): """ Save prefix """ self.clean() super(Prefix, self).save(*args, **kwargs) # Rebuild tree if necessary # Reconnect children children prefixes c = connection.cursor() c.execute( """ UPDATE %s SET parent_id=%%s WHERE vrf_id=%%s AND afi=%%s AND prefix << %%s AND parent_id=%%s """ % Prefix._meta.db_table, [ self.id, self.vrf.id, self.afi, self.prefix, self.parent.id if self.parent else None ], ) # Reconnect children addresses c.execute( """ UPDATE %s SET prefix_id=%%s WHERE prefix_id=%%s AND address << %%s """ % Address._meta.db_table, [self.id, self.parent.id if self.parent else None, self.prefix], ) def delete(self, *args, **kwargs): """ Delete prefix """ if self.is_root and not getattr(self, "_disable_delete_protection", False): raise ValidationError("Cannot delete root prefix") # Reconnect children prefixes self.children_set.update(parent=self.parent) # Reconnect children addresses self.address_set.update(prefix=self.parent) # Unlink dual-stack allocations # self.clear_transition() # Remove bookmarks self.prefixbookmark_set.all().delete() # Finally delete super(Prefix, self).delete(*args, **kwargs) def delete_recursive(self): """ Delete prefix and all descendancies """ # Unlink dual-stack allocations # self.clear_transition() # Recursive delete # Get nested prefixes ids = (Prefix.objects.filter(vrf=self.vrf, afi=self.afi).extra( where=["prefix <<= %s"], params=[self.prefix]).values_list("id", flat=True)) # # Delete nested addresses Address.objects.filter(prefix__in=ids).delete() # Delete nested prefixes Prefix.objects.filter(id__in=ids).delete() # Delete permissions PrefixAccess.objects.filter(vrf=self.vrf, afi=self.afi).extra( where=["prefix <<= %s"], params=[self.prefix]) @property def maintainers(self): """ List of persons having write access @todo: PostgreSQL-independent implementation """ 
return User.objects.raw( """ SELECT id,username,first_name,last_name FROM %s u WHERE is_active=TRUE AND (is_superuser=TRUE OR EXISTS(SELECT id FROM %s a WHERE user_id=u.id AND vrf_id=%%s AND afi=%%s AND prefix>>=%%s AND can_change=TRUE )) ORDER BY username """ % (User._meta.db_table, PrefixAccess._meta.db_table), [self.vrf.id, self.afi, self.prefix], ) @property def short_description(self): """ Returns first line of description :return: """ if self.description: return self.description.split("\n", 1)[0].strip() return "" @property def netmask(self): """ returns Netmask for IPv4 :return: """ if self.is_ipv4: return IPv4(self.prefix).netmask.address return None @property def broadcast(self): """ Returns Broadcast for IPv4 :return: """ if self.is_ipv4: return IPv4(self.prefix).last.address return None @property def wildcard(self): """ Returns Cisco wildcard for IPv4 :return: """ if self.is_ipv4: return IPv4(self.prefix).wildcard.address return "" @property def size(self): """ Returns IPv4 prefix size :return: """ if self.is_ipv4: return IPv4(self.prefix).size return None def can_view(self, user): """ Returns True if user has view access :param user: :return: """ return PrefixAccess.user_can_view(user, self.vrf, self.afi, self.prefix) def can_change(self, user): """ Returns True if user has change access :param user: :return: """ return PrefixAccess.user_can_change(user, self.vrf, self.afi, self.prefix) def has_bookmark(self, user): """ Check the user has bookmark on prefix :param user: :return: """ from .prefixbookmark import PrefixBookmark # noqa try: PrefixBookmark.objects.get(user=user, prefix=self) return True except PrefixBookmark.DoesNotExist: return False def toggle_bookmark(self, user): """ Toggle user bookmark. 
        Returns new bookmark state
        :param user: User toggling the bookmark
        :return: True if a bookmark was created, False if an existing one was removed
        """
        from .prefixbookmark import PrefixBookmark  # noqa

        b, created = PrefixBookmark.objects.get_or_create(user=user, prefix=self)
        if created:
            return True
        b.delete()
        return False

    def get_index(self):
        """
        Full-text search document for this prefix
        :return: dict with id/title/content/card (and tags when set)
        """
        content = [self.prefix]
        card = "Prefix %s" % self.prefix
        if self.description:
            content += [self.description]
            card += " (%s)" % self.description
        r = {
            "id": "ip.prefix:%s" % self.id,
            "title": self.prefix,
            "content": "\n".join(content),
            "card": card,
        }
        if self.tags:
            r["tags"] = self.tags
        return r

    @classmethod
    def get_search_result_url(cls, obj_id):
        # Card URL for a full-text search hit
        return "/api/card/view/prefix/%s/" % obj_id

    def get_path(self):
        # Ids of this prefix and all covering prefixes
        # (postgres ``>>=`` containment), ordered by prefix
        return (Prefix.objects.filter(vrf=self.vrf, afi=self.afi).extra(
            where=["prefix >>= %s"],
            params=[self.prefix]).order_by("prefix").values_list("id", flat=True))

    @property
    def address_ranges(self):
        """
        All prefix-related address ranges
        :return: list of AddressRange overlapping this prefix
        """
        return list(
            AddressRange.objects.raw(
                """
                SELECT *
                FROM ip_addressrange
                WHERE
                    vrf_id=%s
                    AND afi=%s
                    AND is_active=TRUE
                    AND (
                        from_address << %s
                        OR to_address << %s
                        OR %s BETWEEN from_address AND to_address
                    )
                ORDER BY from_address, to_address
                """,
                [self.vrf.id, self.afi, self.prefix, self.prefix, self.prefix],
            ))

    def rebase(self, vrf, new_prefix):
        """
        Rebase prefix and all nested prefixes/addresses to a new location
        :param vrf: Target VRF
        :param new_prefix: Target prefix
        :return: Rebased Prefix instance (re-fetched from database)
        :raises ValueError: on rebase to self, AFI change, or smaller target prefix
        """
        #
        b = IP.prefix(self.prefix)
        nb = IP.prefix(new_prefix)
        # Validation
        if vrf == self.vrf and self.prefix == new_prefix:
            raise ValueError("Cannot rebase to self")
        if b.afi != nb.afi:
            raise ValueError("Cannot change address family during rebase")
        if b.mask < nb.mask:
            raise ValueError("Cannot rebase to prefix of lesser size")
        # Rebase prefix and all nested prefixes
        # Parents are left untouched
        for p in Prefix.objects.filter(vrf=self.vrf, afi=self.afi).extra(
                where=["prefix <<= %s"], params=[self.prefix]):
            np = IP.prefix(p.prefix).rebase(b, nb).prefix
            # Prefix.objects.filter(pk=p.pk).update(prefix=np, vrf=vrf)
            p.prefix = np
            p.vrf = vrf
            # .save() (not bulk .update()) so that model signals/events fire
            p.save()  # Raise events
        # Rebase addresses
        # Parents are left untouched
        for a in Address.objects.filter(vrf=self.vrf, afi=self.afi).extra(
                where=["address <<= %s"], params=[self.prefix]):
            na = IP.prefix(a.address).rebase(b, nb).address
            # Address.objects.filter(pk=a.pk).update(address=na, vrf=vrf)
            a.address = na
            a.vrf = vrf
            a.save()  # Raise events
        # Rebase permissions
        # move all permissions to the nested blocks
        for pa in PrefixAccess.objects.filter(vrf=self.vrf).extra(
                where=["prefix <<= %s"], params=[self.prefix]):
            np = IP.prefix(pa.prefix).rebase(b, nb).prefix
            PrefixAccess.objects.filter(pk=pa.pk).update(prefix=np, vrf=vrf)
        # create permissions for covered blocks
        for pa in PrefixAccess.objects.filter(vrf=self.vrf).extra(
                where=["prefix >> %s"], params=[self.prefix]):
            PrefixAccess(
                user=pa.user,
                vrf=vrf,
                afi=pa.afi,
                prefix=new_prefix,
                can_view=pa.can_view,
                can_change=pa.can_change,
            ).save()
        # @todo: Rebase bookmarks
        # @todo: Update caches
        # Return rebased prefix
        return Prefix.objects.get(pk=self.pk)  # Updated object

    @property
    def nested_prefix_set(self):
        """
        Queryset returning all nested prefixes inside the prefix
        """
        return Prefix.objects.filter(vrf=self.vrf, afi=self.afi).extra(
            where=["prefix <<= %s"], params=[self.prefix])

    @property
    def nested_address_set(self):
        """
        Queryset returning all nested addresses inside the prefix
        """
        return Address.objects.filter(vrf=self.vrf, afi=self.afi).extra(
            where=["address <<= %s"], params=[self.prefix])

    def iter_free(self):
        """
        Generator returning all available free prefixes inside
        (the gaps between direct children)
        :return: yields prefix strings
        """
        for fp in IP.prefix(self.prefix).iter_free(
                [p.prefix for p in self.children_set.all()]):
            yield str(fp)

    @property
    def effective_address_discovery(self):
        # "P" means "inherit from profile"
        if self.address_discovery_policy == "P":
            return self.profile.address_discovery_policy
        return self.address_discovery_policy

    @property
    def effective_prefix_discovery(self):
        # "P" means "inherit from profile"
        if self.prefix_discovery_policy == "P":
            return self.profile.prefix_discovery_policy
        return self.prefix_discovery_policy

    @property
    def effective_prefix_special_address(self):
        # NOTE(review): always taken from the profile; no per-prefix
        # override is consulted here -- confirm this is intended
        return self.profile.prefix_special_address_policy

    @property
    def usage(self):
        """
        Prefix usage percent (addresses + nested prefixes), IPv4 only.
        Returns None for non-IPv4 prefixes.
        """
        if self.is_ipv4:
            usage = getattr(self, "_usage_cache", None)
            if usage is not None:
                # Use update_prefixes_usage results
                return usage
            size = IPv4(self.prefix).size
            if not size:
                return 100.0
            n_ips = Address.objects.filter(prefix=self).count()
            if n_ips and size > 2 and self.effective_prefix_special_address == "X":
                # Exclude special addresses
                size -= len(IPv4(self.prefix).special_addresses)
            n_pfx = sum(
                IPv4(p).size for p in Prefix.objects.filter(parent=self).only(
                    "prefix").values_list("prefix", flat=True))
            return float(n_ips + n_pfx) * 100.0 / float(size)
        return None

    @property
    def usage_percent(self):
        # usage formatted as "NN.NN%"; empty string when usage is None
        u = self.usage
        if u is None:
            return ""
        return "%.2f%%" % u

    @staticmethod
    def update_prefixes_usage(prefixes):
        """
        Bulk calculate and update prefixes usages
        (fills _usage_cache/_address_usage_cache consumed by `usage`
        and `address_usage`)
        :param prefixes: List of Prefix instances
        :return: None
        """
        # Filter IPv4 only
        ipv4_prefixes = [p for p in prefixes if p.is_ipv4]
        # Calculate nested prefixes
        usage = defaultdict(int)
        address_usage = defaultdict(int)
        for parent, prefix in Prefix.objects.filter(
                parent__in=ipv4_prefixes).values_list("parent", "prefix"):
            ln = int(prefix.split("/")[1])
            usage[parent] += 2**(32 - ln)
        # Calculate nested addresses
        has_address = set()
        for parent, count in (Address.objects.filter(
                prefix__in=ipv4_prefixes).values("prefix").annotate(
                    count=models.Count("prefix")).values_list("prefix", "count")):
            usage[parent] += count
            has_address.add(parent)
            address_usage[parent] += count
        # Update usage cache
        for p in ipv4_prefixes:
            ln = int(p.prefix.split("/")[1])
            size = 2**(32 - ln)
            if p.id in has_address and size > 2:  # Not /31 or /32
                if p.effective_prefix_special_address == "X":
                    size -= 2  # Exclude broadcast and network
                p._address_usage_cache = float(
                    address_usage[p.id]) * 100.0 / float(size)
            p._usage_cache = float(usage[p.id]) * 100.0 / float(size)

    @property
    def address_usage(self):
        """
        Percent of addresses used inside the prefix (IPv4 only);
        None for non-IPv4 prefixes.
        """
        if self.is_ipv4:
            usage = getattr(self, "_address_usage_cache", None)
            if usage is not None:
                # Use update_prefixes_usage results
                return usage
            size = IPv4(self.prefix).size
            if not size:
                return 100.0
            n_ips = (Address.objects.filter(vrf=self.vrf, afi=self.afi).extra(
                where=["address <<= %s"], params=[str(self.prefix)]).count())
            if self.effective_prefix_special_address == "X":
                # Exclude special addresses of every nested prefix
                n_pfx = (Prefix.objects.filter(
                    vrf=self.vrf, afi=self.afi).extra(
                        where=["prefix <<= %s"],
                        params=[str(self.prefix)]).count())
                size -= len(IPv4(self.prefix).special_addresses) * n_pfx
            return float(n_ips) * 100.0 / float(size) if n_ips else 0.0
        else:
            return None

    @property
    def address_usage_percent(self):
        # address_usage formatted as "NN.NN%"; empty string when None
        u = self.address_usage
        if u is None:
            return ""
        return "%.2f%%" % u

    def is_empty(self):
        """
        Check prefix is empty and does not contain nested prefixes
        and addresses
        :return: True when no nested prefixes/addresses exist
        """
        if Prefix.objects.filter(parent=self).count() > 0:
            return False
        if Address.objects.filter(prefix=self).count() > 0:
            return False
        return True

    def disable_delete_protection(self):
        """
        Disable root delete protection
        :return: None
        """
        self._disable_delete_protection = True

    def get_effective_as(self):
        """
        Return effective AS (first found upwards)
        :return: AS instance or None
        """
        if self.asn:
            return self.asn
        if not self.parent:
            return None
        return self.parent.get_effective_as()
    def get_data(
        self,
        request,
        pool=None,
        obj_profile=None,
        selector=None,
        avail_status=None,
        profile_check_only=None,
        failed_scripts_only=None,
        filter_pending_links=None,
        filter_none_objects=None,
        filter_view_other=None,
        **kwargs
    ):
        """
        Build the discovery-problems report dataset.

        :param request: HTTP request, used for access restriction
        :param pool: Pool id to report on; the first pool is used when empty
        :param obj_profile: Restrict to a ManagedObjectProfile
        :param selector: Restrict to a ManagedObjectSelector (overrides pool filter)
        :param avail_status: Passed through as avail_only to ReportDiscoveryProblem
        :param profile_check_only: Show only profile/credential detection problems
        :param failed_scripts_only: Show only non-credential script failures
        :param filter_pending_links: Exclude link discovery methods (lldp/lacp/cdp/huawei_ndp)
        :param filter_none_objects: Skip entries with empty problem text
        :param filter_view_other: Only Generic-profile objects whose object profile has ping disabled
        :return: SimpleReport dataset
        """
        data = []
        match = None
        # Job error code -> human-readable explanation
        code_map = {
            "1": "Unknown error",
            "10000": "Unspecified CLI error",
            "10005": "Connection refused",
            "10001": "Authentication failed",
            "10002": "No super command defined",
            "10003": "No super privileges",
            "10004": "SSH Protocol error",
        }
        if pool:
            pool = Pool.get_by_id(pool)
        else:
            # No pool given -- fall back to the first one
            pool = Pool.objects.filter()[0]
        data += [SectionRow(name="Report by %s" % pool.name)]
        if selector:
            mos = ManagedObject.objects.filter(selector.Q)
        else:
            mos = ManagedObject.objects.filter(pool=pool, is_managed=True)
        if not request.user.is_superuser:
            # Restrict to administrative domains visible to the user
            mos = mos.filter(administrative_domain__in=UserAccess.get_domains(request.user))
        if obj_profile:
            mos = mos.filter(object_profile=obj_profile)
        if filter_view_other:
            mnp_in = list(ManagedObjectProfile.objects.filter(enable_ping=False))
            mos = mos.filter(profile=Profile.objects.get(name=GENERIC_PROFILE)).exclude(
                object_profile__in=mnp_in
            )
        # Build the MongoDB match expression for the discovery job scan
        if profile_check_only:
            match = {
                "$or": [
                    {"job.problems.suggest_cli": {"$exists": True}},
                    {"job.problems.suggest_snmp": {"$exists": True}},
                    {"job.problems.profile.": {"$regex": "Cannot detect profile"}},
                    {"job.problems.version.": {"$regex": "Remote error code 1000[1234]"}},
                ]
            }
        elif failed_scripts_only:
            match = {
                "$and": [
                    {"job.problems": {"$exists": "true", "$ne": {}}},
                    {"job.problems.suggest_snmp": {"$exists": False}},
                    {"job.problems.suggest_cli": {"$exists": False}},
                ]
            }
        elif filter_view_other:
            match = {"job.problems.suggest_snmp": {"$exists": False}}
        rdp = ReportDiscoveryProblem(mos, avail_only=avail_status, match=match)
        exclude_method = []
        if filter_pending_links:
            exclude_method += ["lldp", "lacp", "cdp", "huawei_ndp"]
        for discovery in rdp:
            mo = ManagedObject.get_by_id(discovery["key"])
            for method in [x for x in discovery["job"][0]["problems"] if x not in exclude_method]:
                problem = discovery["job"][0]["problems"][method]
                if filter_none_objects and not problem:
                    continue
                if isinstance(problem, dict) and "" in problem:
                    problem = problem.get("", "")
                if "Remote error code" in problem:
                    # Translate trailing numeric error code to readable message
                    problem = code_map.get(problem.split(" ")[-1], problem)
                if isinstance(problem, six.string_types):
                    # Flatten multi-line problem text into a single row cell
                    problem = problem.replace("\n", " ").replace("\r", " ")
                data += [
                    (
                        mo.name,
                        mo.address,
                        mo.profile.name,
                        mo.administrative_domain.name,
                        _("Yes") if mo.get_status() else _("No"),
                        discovery["st"].strftime("%d.%m.%Y %H:%M") if "st" in discovery else "",
                        method,
                        problem,
                    )
                ]
        return self.from_dataset(
            title=self.title,
            columns=[
                _("Managed Object"),
                _("Address"),
                _("Profile"),
                _("Administrative Domain"),
                _("Avail"),
                _("Last successful discovery"),
                _("Discovery"),
                _("Error"),
            ],
            data=data,
        )
class VCApplication(ExtModelApplication):
    """
    VC application
    """
    title = _("VC")
    menu = _("Virtual Circuits")
    model = VC
    query_fields = ["name", "description"]
    query_condition = "icontains"
    int_query_fields = ["l1", "l2"]
    implied_permissions = {"read": ["vc:vcdomain:lookup", "main:style:lookup"]}

    def get_vc_domain_objects(self, vc_domain):
        # All managed objects belonging to the VC domain
        return vc_domain.managedobject_set.all()

    def lookup_vcfilter(self, q, name, value):
        """
        Resolve __vcflter lookups
        :param q: query dict to be amended in place (SQL snippets under the None key)
        :param name: field name
        :param value: VCFilter id
        :return: None
        """
        value = ModelParameter(VCFilter).clean(value)
        x = value.to_sql(name)
        try:
            q[None] += [x]
        except KeyError:
            q[None] = [x]

    @cachedmethod(key="vc-interface-count-%s")
    def get_vc_interfaces_count(self, vc_id):
        """
        Count BRIDGE subinterfaces carrying the VC's l1 VLAN
        (untagged, tagged or by vlan_ids) in the VC domain. Cached.
        """
        vc = VC.get_by_id(vc_id)
        if not vc:
            return 0
        objects = vc.vc_domain.managedobject_set.values_list("id", flat=True)
        l1 = vc.l1
        n = SubInterface.objects.filter(
            Q(managed_object__in=objects)
            & (Q(untagged_vlan=l1, enabled_afi=["BRIDGE"])
               | Q(tagged_vlans=l1, enabled_afi=["BRIDGE"])
               | Q(vlan_ids=l1))).count()
        return n

    @cachedmethod(key="vc-prefixes-%s")
    def get_vc_prefixes(self, vc_id):
        """
        Collect IPv4/IPv6 prefixes configured on L3 subinterfaces
        carrying the VC's l1 VLAN. Cached.
        :return: list of prefix strings, IPv4 first
        """
        vc = VC.get_by_id(vc_id)
        if not vc:
            return []
        objects = vc.vc_domain.managedobject_set.values_list("id", flat=True)
        ipv4 = set()
        ipv6 = set()
        # @todo: Exact match on vlan_ids
        for si in SubInterface.objects.filter(
                Q(managed_object__in=objects) & Q(vlan_ids=vc.l1)
                & (Q(enabled_afi=["IPv4"]) | Q(enabled_afi=["IPv6"]))).only(
                    "enabled_afi", "ipv4_addresses", "ipv6_addresses"):
            if "IPv4" in si.enabled_afi:
                ipv4.update([IP.prefix(ip).first for ip in si.ipv4_addresses])
            if "IPv6" in si.enabled_afi:
                ipv6.update([IP.prefix(ip).first for ip in si.ipv6_addresses])
        p = [str(x.first) for x in sorted(ipv4)]
        p += [str(x.first) for x in sorted(ipv6)]
        return p

    def field_interfaces_count(self, obj):
        # Computed grid column: interface count
        return self.get_vc_interfaces_count(obj.id)

    def field_prefixes(self, obj):
        # Computed grid column: comma-separated prefixes or "-"
        p = self.get_vc_prefixes(obj.id)
        if p:
            return ", ".join(p)
        else:
            return "-"

    def field_row_class(self, o):
        # CSS class from the VC's style, if any
        return o.style.css_class_name if o.style else ""

    @view(url="^find_free/$", method=["GET"], access="read", api=True,
          validate={
              "vc_domain": ModelParameter(VCDomain),
              "vc_filter": ModelParameter(VCFilter)
          })
    def api_find_free(self, request, vc_domain, vc_filter, **kwargs):
        """Return first free VC label in vc_domain matching vc_filter"""
        return vc_domain.get_free_label(vc_filter)

    @view(url="^bulk/import/", method=["POST"], access="import", api=True,
          validate={
              "vc_domain": ModelParameter(VCDomain),
              "items": ListOfParameter(element=DictParameter(attrs={
                  "l1": IntParameter(),
                  "l2": IntParameter(),
                  "name": StringParameter(),
                  "description": StringParameter(default="")
              }))
          })
    def api_bulk_import(self, request, vc_domain, items):
        """Bulk-create VCs, skipping (vc_domain, l1, l2) that already exist"""
        n = 0
        for i in items:
            if not VC.objects.filter(
                    vc_domain=vc_domain, l1=i["l1"], l2=i["l2"]).exists():
                # Add only not-existing
                VC(vc_domain=vc_domain, l1=i["l1"], l2=i["l2"],
                   name=i["name"], description=i["description"]).save()
                n += 1
        return {"status": True, "imported": n}

    @view(url=r"^(?P<vc_id>\d+)/interfaces/$", method=["GET"], access="read", api=True)
    def api_interfaces(self, request, vc_id):
        """
        Returns a dict of {untagged: ..., tagged: ...., l3: ...}
        :param request:
        :param vc_id:
        :return:
        """
        vc = self.get_object_or_404(VC, id=int(vc_id))
        l1 = vc.l1
        # Managed objects in VC domain
        objects = set(
            vc.vc_domain.managedobject_set.values_list("id", flat=True))
        # Find untagged interfaces
        si_objects = defaultdict(list)
        for si in SubInterface.objects.filter(managed_object__in=objects,
                                              untagged_vlan=l1,
                                              enabled_afi="BRIDGE"):
            si_objects[si.managed_object] += [{"name": si.name}]
        untagged = [{
            "managed_object_id": o.id,
            "managed_object_name": o.name,
            "interfaces": sorted(si_objects[o], key=lambda x: x["name"])
        } for o in si_objects]
        # Find tagged interfaces
        si_objects = defaultdict(list)
        for si in SubInterface.objects.filter(managed_object__in=objects,
                                              tagged_vlans=l1,
                                              enabled_afi="BRIDGE"):
            si_objects[si.managed_object] += [{"name": si.name}]
        tagged = [{
            "managed_object_id": o.id,
            "managed_object_name": o.name,
            "interfaces": sorted(si_objects[o], key=lambda x: x["name"])
        } for o in si_objects]
        # Find l3 interfaces
        si_objects = defaultdict(list)
        for si in SubInterface.objects.filter(managed_object__in=objects,
                                              vlan_ids=l1):
            si_objects[si.managed_object] += [{
                "name": si.name,
                "ipv4_addresses": si.ipv4_addresses,
                "ipv6_addresses": si.ipv6_addresses
            }]
        l3 = [{
            "managed_object_id": o.id,
            "managed_object_name": o.name,
            "interfaces": sorted(si_objects[o], key=lambda x: x["name"])
        } for o in si_objects]
        # Update caches
        # NOTE(review): `ic` is computed but never used or stored --
        # looks like a leftover of a cache-update step; confirm and remove
        ic = sum(len(x["interfaces"]) for x in untagged)
        ic += sum(len(x["interfaces"]) for x in tagged)
        ic += sum(len(x["interfaces"]) for x in l3)
        #
        return {
            "untagged": sorted(untagged, key=lambda x: x["managed_object_name"]),
            "tagged": sorted(tagged, key=lambda x: x["managed_object_name"]),
            "l3": sorted(l3, key=lambda x: x["managed_object_name"])
        }
class Span(Model):
    """
    ClickHouse model for distributed tracing spans.
    Partitioned by date, ordered by (server, service, ts, in_label).
    """
    class Meta:
        db_table = "span"
        engine = MergeTree("date", ("server", "service", "ts", "in_label"))

    date = DateField(description=_("Date"))
    ts = DateTimeField(description=_("Created"))
    ctx = UInt64Field(description=_("Span context"))
    id = UInt64Field(description=_("Span id"))
    parent = UInt64Field(description=_("Span parent"))
    server = StringField(description=_("Called service"))
    service = StringField(description=_("Called function"))
    client = StringField(description=_("Caller service"))
    duration = UInt64Field(description=_("Duration (us)"))
    sample = Int32Field(description=_("Sampling rate"))
    error_code = UInt32Field(description=_("Error code"))
    error_text = StringField(description=_("Error text"))
    in_label = StringField(description=_("Input arguments"))
    out_label = StringField(description=_("Output results"))

    @classmethod
    def transform_query(cls, query, user):
        # No per-user query restrictions: pass the query through unchanged
        return query
class VLANApplication(ExtDocApplication):
    """
    VLAN application
    """
    title = "VLAN"
    menu = [_("VLAN")]
    model = VLAN
    query_fields = ["name", "description"]
    query_condition = "icontains"
    int_query_fields = ["vlan"]

    def field_row_class(self, o):
        # CSS class from the VLAN profile's style, if any
        return o.profile.style.css_class_name if o.profile and o.profile.style else ""

    def clean_list_data(self, data):
        # No additional post-processing of list data
        return data

    @view(url=r"^(?P<vlan_id>[0-9a-f]{24})/interfaces/$", method=["GET"], access="read", api=True)
    def api_interfaces(self, request, vlan_id):
        """
        Returns a dict of {untagged: ..., tagged: ...., l3: ...}
        :param request:
        :param vlan_id:
        :return:
        """
        vlan = self.get_object_or_404(VLAN, id=vlan_id)
        # Managed objects in VC domain
        objects = NetworkSegment.get_vlan_domain_object_ids(vlan.segment)
        # Find untagged interfaces
        si_objects = defaultdict(list)
        for si in SubInterface.objects.filter(managed_object__in=objects,
                                              untagged_vlan=vlan.vlan,
                                              enabled_afi="BRIDGE"):
            si_objects[si.managed_object] += [{"name": si.name}]
        untagged = [{
            "managed_object_id": o.id,
            "managed_object_name": o.name,
            "interfaces": sorted(si_objects[o], key=lambda x: x["name"])
        } for o in si_objects]
        # Find tagged interfaces
        si_objects = defaultdict(list)
        for si in SubInterface.objects.filter(managed_object__in=objects,
                                              tagged_vlans=vlan.vlan,
                                              enabled_afi="BRIDGE"):
            si_objects[si.managed_object] += [{"name": si.name}]
        tagged = [{
            "managed_object_id": o.id,
            "managed_object_name": o.name,
            "interfaces": sorted(si_objects[o], key=lambda x: x["name"])
        } for o in si_objects]
        # Find l3 interfaces
        si_objects = defaultdict(list)
        for si in SubInterface.objects.filter(managed_object__in=objects,
                                              vlan_ids=vlan.vlan):
            si_objects[si.managed_object] += [{
                "name": si.name,
                "ipv4_addresses": si.ipv4_addresses,
                "ipv6_addresses": si.ipv6_addresses
            }]
        l3 = [{
            "managed_object_id": o.id,
            "managed_object_name": o.name,
            "interfaces": sorted(si_objects[o], key=lambda x: x["name"])
        } for o in si_objects]
        #
        return {
            "untagged": sorted(untagged, key=lambda x: x["managed_object_name"]),
            "tagged": sorted(tagged, key=lambda x: x["managed_object_name"]),
            "l3": sorted(l3, key=lambda x: x["managed_object_name"])
        }
class MonmapApplication(ExtApplication):
    """
    Monmap application: menu entry that opens the monmap card view.
    """
    title = _("Monmap")
    menu = _("Monmap")
    glyph = "globe"
    # Card the menu item navigates to
    link = "/api/card/view/monmap/1/"
class ReportEscalationsApplication(SimpleReport):
    """
    Report of alarms escalated to trouble-ticket systems.
    """
    title = _("Escalations")
    form = ReportForm
    predefined_reports = {
        "1d": PredefinedReport(_("Escalations (1 day)"), {"interval": 1}),
        "7d": PredefinedReport(_("Escalations (7 days)"), {"interval": 7}),
        "30d": PredefinedReport(_("Escalations (30 day)"), {"interval": 30}),
    }

    def get_data(self, request, interval, from_date=None, to_date=None, **kwargs):
        """
        :param request: HTTP request (for access restriction)
        :param interval: Look-back interval in days; takes priority over dates
        :param from_date: Report start, "%d.%m.%Y"
        :param to_date: Report end, "%d.%m.%Y" (inclusive)
        :return: SimpleReport dataset

        NOTE(review): any non-zero interval makes from_date/to_date
        ignored -- the explicit date range is only used when interval
        evaluates falsy; confirm this precedence is intended.
        """
        interval = int(interval)
        if not from_date:
            interval = 1
        if interval:
            ts = datetime.datetime.now() - datetime.timedelta(days=interval)
            q = {"timestamp": {"$gte": ts}}
        else:
            t0 = datetime.datetime.strptime(from_date, "%d.%m.%Y")
            if not to_date:
                t1 = datetime.datetime.now()
            else:
                # Include the whole to_date day
                t1 = datetime.datetime.strptime(
                    to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
            q = {"timestamp": {"$gte": t0, "$lte": t1}}
        # Only alarms that have been escalated
        q["escalation_tt"] = {"$exists": True}
        if not request.user.is_superuser:
            q["adm_path"] = {"$in": UserAccess.get_domains(request.user)}
        data = []
        # Scan both active and archived alarms
        for ac in (ActiveAlarm, ArchivedAlarm):
            for d in ac._get_collection().find(q):
                mo = ManagedObject.get_by_id(d["managed_object"])
                if not mo:
                    continue
                data += [(
                    d["timestamp"].strftime("%Y-%m-%d %H:%M:%S"),
                    d["escalation_ts"].strftime("%Y-%m-%d %H:%M:%S"),
                    mo.name.split("#", 1)[0],
                    mo.address,
                    mo.platform,
                    mo.segment.name,
                    d["escalation_tt"],
                    sum(ss["summary"] for ss in d["total_objects"]),
                    sum(ss["summary"] for ss in d["total_subscribers"]),
                )]
        # Chronological order by alarm timestamp
        data = sorted(data, key=operator.itemgetter(0))
        return self.from_dataset(
            title=self.title,
            columns=[
                _("Timestamp"),
                _("Escalation Timestamp"),
                _("Managed Object"),
                _("Address"),
                _("Platform"),
                _("Segment"),
                _("TT"),
                _("Objects"),
                _("Subscribers"),
            ],
            data=data,
            enumerate=True,
        )
class ReportFilterApplication(SimpleReport):
    """
    Report of objects with failed discovery, grouped by failure class.
    """
    title = _("Failed Discovery")
    # Resolved at import time; falls back to the first pool when
    # no pool named "default" exists
    try:
        default_pool = Pool.objects.get(name="default")
    except Exception:
        default_pool = Pool.objects.all()[0]
    predefined_reports = {
        "default": PredefinedReport(_("Failed Discovery (default)"),
                                    {"pool": default_pool})
    }

    def get_form(self):
        """Build the report parameter form (pool and selector choices)"""
        class ReportForm(forms.Form):
            pool = forms.ChoiceField(
                label=_("Managed Objects Pool"),
                required=False,
                help_text="Pool for choice",
                choices=list(
                    Pool.objects.order_by("name").scalar("id", "name"))
                + [(None, "-" * 9)],
            )
            selector = forms.ModelChoiceField(
                label=_("Managed Objects Selector"),
                required=False,
                help_text="Selector for choice",
                queryset=ManagedObjectSelector.objects.order_by("name"),
            )

        return ReportForm

    @staticmethod
    def decode_problem(problems):
        """
        Map the first non-empty problem message to a human-readable
        failure class.
        :param problems:
        :type problems: collections.namedtuple
        :return: decoded message, raw message when unknown, "" when empty
        """
        decode_map = {
            "Cannot detect profile": "SNMP Timeout",
            "Remote error code 10000": "CLI Problem: Unspecified CLI error",
            "Remote error code 10001": "CLI Problem: Authentication failed",
            "Remote error code 10002": "CLI Problem: No super command defined",
            "Remote error code 10003": "CLI Problem: No super privileges",
            "Remote error code 10004": "CLI Problem: SSH Protocol error",
            "Remote error code 10005": "CLI Problem: Connection refused",
            "Remote error code 10200": "SNMP Problem",
            "Remote error code 10201": "SNMP Timeout",
            "Remote error code 599": "HTTP Error: Connection Timeout",
            "Remote error code 1": "Adapter failed",
        }
        decode, message = None, ""
        if not problems:
            return message
        # Stop at the first non-empty message
        # NOTE(review): `index` is unused -- plain iteration would do
        for index, message in enumerate(problems):
            if not message:
                continue
            decode = decode_map.get(str(message))
            break
        if decode is None:
            decode = message
        return decode

    def get_data(self, request, pool=None, selector=None, report_type=None, **kwargs):
        """
        Build the failed-discovery dataset.
        NOTE(review): `selector` and `report_type` are accepted but never
        used in this body -- confirm whether selector filtering was intended.
        """
        data = []
        columns, columns_desr = [], []
        # (description, ReportModelFilter expression) pairs
        r_map = [
            (_("Not Available"), "2is1.3isp1.3is1"),
            (_("Failed to guess CLI credentials"), "2is1.6is1.3isp0.2isp1"),
            (_("Failed to guess SNMP community"), "2is1.6is1.3isp1.3is2.1isp1"),
        ]
        for x, y in r_map:
            columns += [y]
            columns_desr += [x]
        mos = ManagedObject.objects.filter()
        if pool:
            pool = Pool.get_by_id(pool)
            mos = mos.filter(pool=pool)
            data += [SectionRow(name=pool.name)]
        if not request.user.is_superuser:
            mos = mos.filter(
                administrative_domain__in=UserAccess.get_domains(request.user))
        mos = list(mos.values_list("id", flat=True).order_by("id"))
        mos_s = set(mos)
        report = ReportModelFilter()
        result = report.proccessed(",".join(columns))
        mo_hostname = ReportObjectsHostname1(sync_ids=mos)
        mo_hostname = mo_hostname.get_dictionary()
        d_result = ReportDiscoveryResult(sync_ids=mos)
        d_result = d_result.get_dictionary()
        for col in columns:
            for mo_id in result[col.strip()].intersection(mos_s):
                mo = ManagedObject.get_by_id(mo_id)
                problem = self.decode_problem(d_result.get(mo_id))
                if not problem and mo_id not in d_result:
                    problem = "Discovery disabled"
                data += [(
                    mo.name,
                    mo.address,
                    mo.administrative_domain.name,
                    mo.profile.name,
                    mo_hostname.get(mo.id, ""),
                    mo.auth_profile if mo.auth_profile else "",
                    mo.auth_profile.user if mo.auth_profile else mo.user,
                    mo.auth_profile.snmp_ro if mo.auth_profile else mo.snmp_ro,
                    _("No") if not mo.get_status() else _("Yes"),
                    columns_desr[columns.index(col)],
                    problem,
                )]
        return self.from_dataset(
            title=self.title,
            columns=[
                _("Managed Object"),
                _("Address"),
                _("Administrative Domain"),
                _("Profile"),
                _("Hostname"),
                _("Auth Profile"),
                _("Username"),
                _("SNMP Community"),
                _("Avail"),
                _("Error"),
                _("Error Detail"),
            ],
            data=data,
        )
class MapApplication(ExtApplication):
    """
    inv.net application: interactive network map (segment topology,
    object/link info, statuses and metrics).
    """
    title = _("Network Map")
    menu = _("Network Map")
    glyph = "globe"
    implied_permissions = {"launch": ["inv:networksegment:lookup"]}

    # Object statuses (bit-combinable with ST_MAINTENANCE)
    ST_UNKNOWN = 0  # Object state is unknown
    ST_OK = 1  # Object is OK
    ST_ALARM = 2  # Object is reachable, Active alarms
    ST_UNREACH = 3  # Object is unreachable due to other's object failure
    ST_DOWN = 4  # Object is down
    ST_MAINTENANCE = 32  # Maintenance bit

    @view(r"^(?P<id>[0-9a-f]{24})/data/$", method=["GET"], access="read", api=True)
    def api_data(self, request, id):
        """
        Return topology data (nodes, links, layout) for a segment.
        Stores a first-time layout when no MapSettings exist yet.
        """
        def q_mo(d):
            # Normalize a topology node for the response, marking
            # nodes external to the viewed segment/selector
            x = d.copy()
            if x["type"] == "managedobject":
                del x["mo"]
                x["external"] = x["id"] not in mos if is_view else x.get(
                    "role") != "segment"
            elif d["type"] == "cloud":
                del x["link"]
                x["external"] = False
            return x

        # Find segment
        segment = self.get_object_or_404(NetworkSegment, id=id)
        if segment.managed_objects.count() > segment.max_objects:
            # Too many objects
            return {
                "id": str(segment.id),
                "name": segment.name,
                "error": _("Too many objects")
            }
        # if we set selector in segment
        is_view = segment.selector
        if is_view:
            mos = segment.selector.managed_objects.values_list("id", flat=True)
        # Load settings
        settings = MapSettings.objects.filter(segment=id).first()
        node_hints = {}
        link_hints = {}
        if settings:
            self.logger.info("Using stored positions")
            for n in settings.nodes:
                node_hints[n.id] = {
                    "type": n.type,
                    "id": n.id,
                    "x": n.x,
                    "y": n.y
                }
            for ll in settings.links:
                link_hints[ll.id] = {
                    "connector": ll.connector if len(ll.vertices) else "normal",
                    "vertices": [{
                        "x": v.x,
                        "y": v.y
                    } for v in ll.vertices],
                }
        else:
            self.logger.info("Generating positions")
        # Generate topology
        topology = SegmentTopology(
            segment, node_hints, link_hints,
            force_spring=request.GET.get("force") == "spring")
        topology.layout()
        # Build output
        r = {
            "id": str(segment.id),
            "max_links": int(segment.max_shown_downlinks),
            "name": segment.name,
            "caps": list(topology.caps),
            "nodes": [q_mo(x) for x in topology.G.nodes.values()],
            "links": [topology.G[u][v] for u, v in topology.G.edges()],
        }
        # Parent info
        if segment.parent:
            r["parent"] = {
                "id": str(segment.parent.id),
                "name": segment.parent.name
            }
        # Save settings
        if not settings:
            self.logger.debug("Saving first-time layout")
            MapSettings.load_json({
                "id": str(segment.id),
                "nodes": [{
                    "type": n["type"],
                    "id": n["id"],
                    "x": n["x"],
                    "y": n["y"]
                } for n in r["nodes"]
                          if n.get("x") is not None and n.get("y") is not None],
                "links": [{
                    "type": n["type"],
                    "id": n["id"],
                    "vertices": n.get("vertices", []),
                    "connector": n.get("connector", "normal"),
                } for n in r["links"]],
            })
        return r

    @view(r"^(?P<id>[0-9a-f]{24})/data/$", method=["POST"], access="write", api=True)
    def api_save(self, request, id):
        """Persist user-edited map layout for a segment"""
        self.get_object_or_404(NetworkSegment, id=id)
        data = self.deserialize(request.body)
        data["id"] = id
        MapSettings.load_json(data, request.user.username)
        return {"status": True}

    @view(url=r"^(?P<id>[0-9a-f]{24})/info/segment/$", method=["GET"], access="read", api=True)
    def api_info_segment(self, request, id):
        """Short info card for a segment"""
        segment = self.get_object_or_404(NetworkSegment, id=id)
        r = {
            "name": segment.name,
            "description": segment.description,
            "objects": segment.managed_objects.count(),
        }
        return r

    @view(
        url=r"^(?P<id>[0-9a-f]{24})/info/managedobject/(?P<mo_id>\d+)/$",
        method=["GET"],
        access="read",
        api=True,
    )
    def api_info_managedobject(self, request, id, mo_id):
        """Short info card for a managed object shown on the map"""
        segment = self.get_object_or_404(NetworkSegment, id=id)
        obj = self.get_object_or_404(ManagedObject, id=int(mo_id))
        # Scheme code -> console URL scheme
        s = {1: "telnet", 2: "ssh", 3: "http", 4: "https"}[obj.scheme]
        r = {
            "id": obj.id,
            "name": obj.name,
            "description": obj.description,
            "address": obj.address,
            "platform": obj.platform.full_name if obj.platform else "",
            "profile": obj.profile.name,
            "external": obj.segment.id != segment.id,
            "external_segment": {
                "id": str(obj.segment.id),
                "name": obj.segment.name
            },
            "caps": obj.get_caps(),
            "console_url": "%s://%s/" % (s, obj.address),
        }
        return r

    @view(
        url=r"^(?P<id>[0-9a-f]{24})/info/link/(?P<link_id>[0-9a-f]{24})/$",
        method=["GET"],
        access="read",
        api=True,
    )
    def api_info_link(self, request, id, link_id):
        """Info card for a link: member interfaces and load utilisation"""
        self.get_object_or_404(NetworkSegment, id=id)
        link = self.get_object_or_404(Link, id=link_id)
        r = {
            "id": str(link.id),
            "name": link.name or None,
            "description": link.description or None,
            "objects": [],
            "method": link.discovery_method,
        }
        # Group link interfaces by managed object
        o = defaultdict(list)
        for i in link.interfaces:
            o[i.managed_object] += [i]
        for mo in sorted(o, key=lambda x: x.name):
            r["objects"] += [{
                "id": mo.id,
                "name": mo.name,
                "interfaces": [{
                    "name": i.name,
                    "description": i.description or None,
                    "status": i.status
                } for i in sorted(o[mo], key=lambda x: alnum_key(x.name))],
            }]
        # Get link bandwidth
        mo_in = defaultdict(float)
        mo_out = defaultdict(float)
        mos = [ManagedObject.get_by_id(mo["id"]) for mo in r["objects"]]
        metric_map, last_ts = get_interface_metrics(list(o))
        for mo in o:
            if mo not in metric_map:
                continue
            for i in o[mo]:
                if i.name not in metric_map[mo]:
                    continue
                mo_in[mo] += metric_map[mo][i.name]["Interface | Load | In"]
                mo_out[mo] += metric_map[mo][i.name]["Interface | Load | Out"]
        if len(mos) == 2:
            # Point-to-point link: utilisation per direction
            mo1, mo2 = mos
            r["utilisation"] = [
                int(max(mo_in[mo1], mo_out[mo2])),
                int(max(mo_in[mo2], mo_out[mo1])),
            ]
        else:
            mv = list(mo_in.values()) + list(mo_out.values())
            if mv:
                r["utilisation"] = [int(max(mv))]
            else:
                # NOTE(review): other branches return a list here; scalar 0
                # is kept for backward compatibility -- confirm client handles it
                r["utilisation"] = 0
        return r

    @view(
        url=r"^(?P<id>[0-9a-f]{24})/info/cloud/(?P<link_id>[0-9a-f]{24})/$",
        method=["GET"],
        access="read",
        api=True,
    )
    def api_info_cloud(self, request, id, link_id):
        """Info card for a cloud node: member interfaces only"""
        self.get_object_or_404(NetworkSegment, id=id)
        link = self.get_object_or_404(Link, id=link_id)
        r = {
            "id": str(link.id),
            "name": link.name or None,
            "description": link.description or None,
            "objects": [],
            "method": link.discovery_method,
        }
        o = defaultdict(list)
        for i in link.interfaces:
            o[i.managed_object] += [i]
        for mo in sorted(o, key=lambda x: x.name):
            r["objects"] += [{
                "id": mo.id,
                "name": mo.name,
                "interfaces": [{
                    "name": i.name,
                    "description": i.description or None,
                    "status": i.status
                } for i in sorted(o[mo], key=lambda x: alnum_key(x.name))],
            }]
        return r

    @view(
        url=r"^objects_statuses/$",
        method=["POST"],
        access="read",
        api=True,
        validate={"objects": ListOfParameter(IntParameter())},
    )
    def api_objects_statuses(self, request, objects: List[int]):
        """
        Resolve the map status code (ST_*) for each requested object id,
        OR-ing in ST_MAINTENANCE when the object is under maintenance.
        """
        def get_alarms(objects: List[int]) -> Set[int]:
            """
            Returns a set of objects with alarms
            """
            alarms: Set[int] = set()
            coll = ActiveAlarm._get_collection()
            # Chunk ids to keep each $in clause bounded
            while objects:
                chunk, objects = objects[:500], objects[500:]
                a = coll.aggregate([
                    {
                        "$match": {
                            "managed_object": {
                                "$in": chunk
                            }
                        }
                    },
                    {
                        "$group": {
                            "_id": "$managed_object",
                            "count": {
                                "$sum": 1
                            }
                        }
                    },
                ])
                alarms.update(d["_id"] for d in a)
            return alarms

        def get_maintenance(objects: List[int]) -> Set[int]:
            """
            Returns a set of objects currently in maintenance
            :param objects:
            :return:
            """
            now = datetime.datetime.now()
            so = set(objects)
            mnt_objects = set()
            pipeline = [
                {
                    "$match": {
                        "affected_objects.object": {
                            "$in": list(so)
                        }
                    }
                },
                {
                    "$unwind": "$affected_objects"
                },
                {
                    "$lookup": {
                        "from": "noc.maintenance",
                        "as": "m",
                        # NOTE(review): `let` value "_id" is a literal, not a
                        # field reference ("$_id"), and the `m.`-prefixed paths
                        # below look suspect inside a $lookup pipeline -- verify
                        # against actual documents
                        "let": {
                            "maintenance": "_id"
                        },
                        "pipeline": [{
                            "$match": {
                                "m.is_completed": False,
                                "m.start": {
                                    "$lte": now
                                },
                                # Fixed: was {"gte": now} (missing "$"), which
                                # is an equality match against a literal
                                # subdocument and could never match
                                "m.stop": {
                                    "$gte": now
                                },
                            },
                        }],
                    },
                },
                {
                    "$project": {
                        "_id": 0,
                        "object": "$affected_objects.object",
                    }
                },
                {
                    "$group": {
                        "_id": "$object"
                    }
                },
            ]
            mnt_objects |= so & {
                x["_id"]
                for x in AffectedObjects._get_collection().aggregate(pipeline)
            }
            return mnt_objects

        # Mark all as unknown
        r = {o: self.ST_UNKNOWN for o in objects}
        sr = ObjectStatus.get_statuses(objects)
        sa = get_alarms(objects)
        mo = get_maintenance(objects)
        for o in sr:
            if sr[o]:
                # Check for alarms
                if o in sa:
                    r[o] = self.ST_ALARM
                else:
                    r[o] = self.ST_OK
            else:
                r[o] = self.ST_DOWN
            if o in mo:
                r[o] |= self.ST_MAINTENANCE
        return r

    @classmethod
    @cachedmethod(key="managedobject-name-to-id-%s", lock=lambda _: tags_lock)
    def managedobject_name_to_id(cls, name):
        """Resolve managed object name to id (cached); None when not found"""
        r = ManagedObject.objects.filter(name=name).values_list("id")
        if r:
            return r[0][0]
        return None

    @classmethod
    @cachedmethod(key="interface-tags-to-id-%s-%s", lock=lambda _: tags_lock)
    def interface_tags_to_id(cls, object_name, interface_name):
        """Resolve (object name, interface name) to Interface id (cached)"""
        mo = cls.managedobject_name_to_id(object_name)
        i = Interface._get_collection().find_one({
            "managed_object": mo,
            "name": interface_name
        })
        if i:
            return i["_id"]
        return None

    @view(
        url=r"^metrics/$",
        method=["POST"],
        access="read",
        api=True,
        validate={
            "metrics": DictListParameter(
                attrs={
                    "id": StringParameter(),
                    "metric": StringParameter(),
                    "tags": DictParameter(),
                })
        },
    )
    def api_metrics(self, request, metrics):
        """
        Resolve requested interface metrics to current statuses and
        Interface | Load | In/Out values, keyed by the caller-supplied id.
        """
        # Build query
        tag_id = {}  # object, interface -> id
        if_ids = {}  # id -> port id
        mlst = []  # (metric, object, interface)
        for m in metrics:
            if "object" in m["tags"] and "interface" in m["tags"]:
                if not m["tags"]["object"]:
                    continue
                try:
                    if_ids[self.interface_tags_to_id(
                        m["tags"]["object"], m["tags"]["interface"])] = m["id"]
                    mo_obj = ManagedObject.objects.get(
                        name=m["tags"]["object"])
                    tag_id[mo_obj, m["tags"]["interface"]] = m["id"]
                    mlst += [(m["metric"], mo_obj, m["tags"]["interface"])]
                except KeyError:
                    # NOTE(review): ManagedObject.objects.get may also raise
                    # DoesNotExist, which is not caught here -- confirm
                    pass
        # @todo: Get last values from cache
        if not mlst:
            return {}
        r = {}
        # Apply interface statuses
        for d in Interface._get_collection().find(
                {"_id": {
                    "$in": list(if_ids)
                }}, {
                    "_id": 1,
                    "admin_status": 1,
                    "oper_status": 1
                }):
            r[if_ids[d["_id"]]] = {
                "admin_status": d.get("admin_status", True),
                "oper_status": d.get("oper_status", True),
            }
        metric_map, last_ts = get_interface_metrics([m[1] for m in mlst])
        # Apply metrics
        for rq_mo, rq_iface in tag_id:
            pid = tag_id.get((rq_mo, rq_iface))
            if not pid:
                continue
            if pid not in r:
                r[pid] = {}
            if rq_mo not in metric_map:
                continue
            if rq_iface not in metric_map[rq_mo]:
                continue
            r[pid]["Interface | Load | In"] = metric_map[rq_mo][rq_iface][
                "Interface | Load | In"]
            r[pid]["Interface | Load | Out"] = metric_map[rq_mo][rq_iface][
                "Interface | Load | Out"]
        return r

    @view(r"^(?P<id>[0-9a-f]{24})/data/$", method=["DELETE"], access="write", api=True)
    def api_reset(self, request, id):
        """Drop stored layout for a segment (forces re-layout on next load)"""
        self.get_object_or_404(NetworkSegment, id=id)
        MapSettings.objects.filter(segment=id).delete()
        return {"status": True}

    @view(
        url=r"^stp/status/$",
        method=["POST"],
        access="read",
        api=True,
        validate={"objects": ListOfParameter(IntParameter())},
    )
    def api_objects_stp_status(self, request, objects):
        """
        Poll spanning-tree status from the given objects in parallel,
        returning root object ids and blocked link ids.
        """
        def get_stp_status(object_id):
            # Runs in a worker thread: query one object via its script
            roots = set()
            blocked = set()
            mo = ManagedObject.get_by_id(object_id)
            sr = mo.scripts.get_spanning_tree()
            for instance in sr["instances"]:
                ro = DiscoveryID.find_object(instance["root_id"])
                if ro:
                    roots.add(ro)
                for i in instance["interfaces"]:
                    if i["state"] == "discarding" and i["role"] == "alternate":
                        iface = mo.get_interface(i["interface"])
                        if iface:
                            link = iface.link
                            if link:
                                blocked.add(str(link.id))
            return object_id, roots, blocked

        r = {"roots": [], "blocked": []}
        futures = []
        with ThreadPoolExecutor(max_workers=10) as executor:
            for o in objects:
                futures += [executor.submit(get_stp_status, o)]
            for future in as_completed(futures):
                try:
                    obj, roots, blocked = future.result()
                    for ro in roots:
                        if ro.id not in r["roots"]:
                            r["roots"] += [ro.id]
                    r["blocked"] += blocked
                except Exception as e:
                    # A failing device must not break the whole poll
                    self.logger.error("[stp] Exception: %s", e)
        return r
def get_data(self, request, pool=None, selector=None, report_type=None, **kwargs):
    """
    Build the "discovery problems" dataset: managed objects that are
    unavailable or failed CLI/SNMP credential guessing, with the
    discovery error detail per object.

    :param request: HTTP request (used for access filtering)
    :param pool: optional pool id; restricts to that pool and adds a
        section header row
    :param selector: accepted for API compatibility; not used here
    :param report_type: accepted for API compatibility; not used here
    :returns: result of self.from_dataset()
    """
    data = []
    columns, columns_desr = [], []
    # (human description, ReportModelFilter problem-class code)
    r_map = [
        (_("Not Available"), "2is1.3isp1.3is1"),
        (_("Failed to guess CLI credentials"), "2is1.6is1.3isp0.2isp1"),
        (_("Failed to guess SNMP community"), "2is1.6is1.3isp1.3is2.1isp1"),
    ]
    for x, y in r_map:
        columns += [y]
        columns_desr += [x]
    mos = ManagedObject.objects.filter()
    if pool:
        pool = Pool.get_by_id(pool)
        mos = mos.filter(pool=pool)
        data += [SectionRow(name=pool.name)]
    # Non-superusers only see objects in their administrative domains
    if not request.user.is_superuser:
        mos = mos.filter(
            administrative_domain__in=UserAccess.get_domains(request.user))
    mos = list(mos.values_list("id", flat=True).order_by("id"))
    mos_s = set(mos)
    report = ReportModelFilter()
    # Resolve each problem-class code to the set of affected object ids
    result = report.proccessed(",".join(columns))
    mo_hostname = ReportObjectsHostname1(sync_ids=mos)
    mo_hostname = mo_hostname.get_dictionary()
    d_result = ReportDiscoveryResult(sync_ids=mos)
    d_result = d_result.get_dictionary()
    for col in columns:
        # Only objects both matching the problem class and visible to the user
        for mo_id in result[col.strip()].intersection(mos_s):
            mo = ManagedObject.get_by_id(mo_id)
            problem = self.decode_problem(d_result.get(mo_id))
            if not problem and mo_id not in d_result:
                problem = "Discovery disabled"
            data += [(
                mo.name,
                mo.address,
                mo.administrative_domain.name,
                mo.profile.name,
                mo_hostname.get(mo.id, ""),
                mo.auth_profile if mo.auth_profile else "",
                # Credentials come from the auth profile when one is set,
                # otherwise from the object itself
                mo.auth_profile.user if mo.auth_profile else mo.user,
                mo.auth_profile.snmp_ro if mo.auth_profile else mo.snmp_ro,
                _("No") if not mo.get_status() else _("Yes"),
                columns_desr[columns.index(col)],
                problem,
            )]
    return self.from_dataset(
        title=self.title,
        columns=[
            _("Managed Object"),
            _("Address"),
            _("Administrative Domain"),
            _("Profile"),
            _("Hostname"),
            _("Auth Profile"),
            _("Username"),
            _("SNMP Community"),
            _("Avail"),
            _("Error"),
            _("Error Detail"),
        ],
        data=data,
    )
def get_data(self, request, pool=None, filter_dup_macs=False, **kwargs):
    """
    Build report rows for managed objects sharing the same chassis MAC.

    Aggregates DiscoveryID records by MAC, keeps MACs that occur on more
    than one record, then lists the affected objects under a section row
    naming the MAC and the likely cause.

    :param request: HTTP request (part of the report API, unused here)
    :param pool: optional pool name; only groups containing at least one
        object from this pool are reported
    :param filter_dup_macs: when True, skip groups whose MAC is on the
        "duplicated" MAC blacklist
    :returns: result of self.from_dataset()
    """
    data = []
    # Find objects with equal chassis MACs:
    #   $unwind macs -> $group by mac counting holders -> keep count > 1
    #   -> $group by holder list, collecting the shared MACs
    find = DiscoveryID._get_collection().aggregate(
        [
            {"$unwind": "$macs"},
            {"$group": {"_id": "$macs", "count": {"$sum": 1},
                        "mo": {"$push": "$object"}}},
            {"$match": {"count": {"$gt": 1}}},
            {"$group": {"_id": "$mo", "macs": {"$push": "$_id"}}},
        ],
        allowDiskUse=True,
    )
    for f in find:
        if not f["_id"]:
            # Empty DiscoveryID
            continue
        data_c = []
        pool_c = set()
        reason = "Other"
        for mo in ManagedObject.objects.filter(id__in=f["_id"]):
            pool_c.add(mo.pool.name)
            data_c.append((mo.name, mo.address, mo.profile.name,
                           mo.pool.name, mo.is_managed))
        # Need at least two resolved objects before comparing the first
        # two rows. The original `len(data_c) > 0` guard raised IndexError
        # on data_c[1] when only one aggregated id still resolved to a
        # live ManagedObject.
        if len(data_c) > 1:
            if data_c[0][1] == data_c[1][1]:
                # Same IP address -> the same box registered twice
                reason = _("Duplicate MO")
            elif data_c[0][4] != data_c[1][4]:
                # is_managed differs -> object was moved
                reason = _("MO is move")
        if pool and pool not in pool_c:
            continue
        if reason == "Other" and MACBlacklist.is_banned_mac(
                f["macs"][0], is_duplicated=True):
            if filter_dup_macs:
                continue
            data += [
                SectionRow(name="%s %s (%s)" % (
                    MAC(f["macs"][0]), reason, "On duplicated"))
            ]
        else:
            data += [
                SectionRow(name="%s %s" % (MAC(f["macs"][0]), reason))
            ]
        data += data_c
    return self.from_dataset(
        title=self.title,
        columns=[
            _("Managed Object"), _("Address"), _("Profile"), _("Pool"),
            _("is managed")
        ],
        data=data,
    )
def humanize_timedelta(delta):
    """
    Convert a datetime.timedelta into an approximate, human-readable
    English phrase: "less than a minute", "30 minutes", "about 2 hours",
    "3 days", "about 1 month", "4 months", "over 1 year",
    "almost 3 years", ...

    :param delta: datetime.timedelta (assumed non-negative)
    :returns: translated phrase
    """
    def _round(x):
        # Round half up (renamed from `round` to stop shadowing the builtin)
        return int(x + 0.5)

    d = delta.days
    s = delta.seconds
    if not d:
        if s < 30:
            return _("less than a minute")
        elif s < 90:  # up to 1:30
            return _("1 minute")
        elif s < 2670:  # up to 44:30
            return _("%d minutes") % _round(float(s) / 60.0)
        elif s < 5370:  # up to 1:29:30
            return _("about 1 hour")
        elif s < 86370:  # up to 23:59:30
            return _("about %d hours") % _round(float(s) / 3600.0)
        else:
            return _("1 day")
    else:
        # 23:59:30 is 86370 seconds. The original compared against 84600
        # (= 23:30:00), so e.g. 1 day 23:40 fell through every branch and
        # came out as "0 months".
        if d == 1 and s < 86370:  # 1D23:59:30
            return _("1 day")
        elif d < 30 and s < 86370:  # 29D23:59:30
            return _("%d days") % _round((float(d) * 86400.0 + s) / 86400)
        elif d < 60 and s < 86370:  # 59D23:59:30
            return _("about 1 month")
        elif d < 365:
            return _("%d months") % _round(float(d) / 30)
        elif d < 446:  # 1 year + ~81 days
            return _("about 1 year")
        elif d < 626:  # 1 year + ~261 days
            return _("over 1 year")
        elif d < 730:  # 2 years
            return _("almost 2 years")
        else:
            # Generalize the 1-2 year banding to n years: the remainder
            # of days within the current year picks about/over/almost.
            # The original computed dd = d - n * 356 (a 356-day "year")
            # and compared it against the absolute thresholds 446/626,
            # which made the "over"/"almost" branches unreachable for
            # small n. 81 and 261 mirror d < 446 / d < 626 for n == 1.
            n = d // 365
            dd = d - n * 365
            if dd < 81:
                return _("about %d years") % n
            elif dd < 261:
                return _("over %d years") % n
            else:
                return _("almost %d years") % (n + 1)
class LanguageApplication(ExtModelApplication):
    """
    Language application: CRUD UI over the Language model
    (Setup | Languages).
    """
    title = _("Languages")
    model = Language
    menu = [_("Setup"), _("Languages")]
    # Grid quick-search matches against name or native name (substring)
    query_fields = ["name__icontains", "native_name__icontains"]
def humanize_distance(d):
    """
    Render how long ago *d* was as a human-readable phrase via
    humanize_timedelta().

    A value that cannot be subtracted from datetime.now() (e.g. None)
    yields the translated string "Never".
    """
    try:
        delta = datetime.datetime.now() - d
    except TypeError:
        # d is missing / not a datetime -- treat as never seen
        return _("Never")
    return humanize_timedelta(delta)
def get_menu(self):
    # Menu path for this report: Reports | <report title>
    return [_("Reports"), smart_text(self.title)]
class ReportInterfaceStatusApplication(ExtApplication):
    """
    Interface Status report: exports per-port admin/oper status, speed,
    duplex and VLANs for selected managed objects as CSV or XLSX.
    """
    menu = _("Reports") + "|" + _("Interface Status")
    title = _("Interface Status")

    @view(
        "^download/$",
        method=["GET"],
        access="launch",
        api=True,
        validate={
            "administrative_domain": StringParameter(required=False),
            "interface_profile": StringParameter(required=False),
            "selector": StringParameter(required=False),
            "zero": StringParameter(required=False),
            "def_profile": StringParameter(required=False),
            "columns": StringParameter(required=False),
            "o_format": StringParameter(choices=["csv", "xlsx"]),
        },
    )
    def api_report(
        self,
        request,
        o_format,
        administrative_domain=None,
        selector=None,
        interface_profile=None,
        zero=None,
        def_profile=None,
        columns=None,
        enable_autowidth=False,
    ):
        """
        Build and stream the Interface Status report.

        :param o_format: output format, "csv" or "xlsx"
        :param administrative_domain: restrict to this domain (and children)
        :param selector: restrict to a ManagedObjectSelector id
        :param interface_profile: passed through to ReportInterfaceStatus
        :param zero: passed through to ReportInterfaceStatus
        :param def_profile: passed through to ReportInterfaceStatus
        :param columns: comma-separated internal column names to include;
            unknown names are silently ignored; empty means all columns
        :param enable_autowidth: XLSX only -- widen columns to fit data
        :returns: HttpResponse with the rendered file as attachment
        """
        def humanize_speed(speed):
            # Interface speed -> short human form ("1G", "2.50M", "100k");
            # "-" for empty/zero speed
            if not speed:
                return "-"
            for t, n in [(1000000, "G"), (1000, "M"), (1, "k")]:
                if speed >= t:
                    if speed // t * t == speed:
                        return "%d%s" % (speed // t, n)
                    else:
                        return "%.2f%s" % (float(speed) / t, n)
            return str(speed)

        def row(row):
            def qe(v):
                # Normalize one cell: None -> "", datetime -> formatted,
                # text -> utf-8 bytes, everything else -> str.
                # NOTE(review): encoding text to bytes here looks like
                # py2-era behaviour; under py3 this puts bytes into the
                # CSV/XLSX writers -- confirm
                if v is None:
                    return ""
                if isinstance(v, text_type):
                    return v.encode("utf-8")
                elif isinstance(v, datetime.datetime):
                    return v.strftime("%Y-%m-%d %H:%M:%S")
                elif not isinstance(v, str):
                    return str(v)
                else:
                    return v

            return [qe(x) for x in row]

        def translate_row(row, cmap):
            # Project a full row onto the user-selected columns
            return [row[i] for i in cmap]

        # Internal column ids (request "columns" values) ...
        cols = [
            "object_name",
            "object_address",
            "object_model",
            "object_software",
            "object_port_name",
            "object_port_profile_name",
            "object_port_status",
            "object_link_status",
            "object_port_speed",
            "object_port_duplex",
            "object_port_untagged_vlan",
            "object_port_tagged_vlans",
        ]
        # ... and the header captions emitted in the same order
        header_row = [
            "MANAGED_OBJECT",
            "OBJECT_ADDRESS",
            "OBJECT_MODEL",
            "OBJECT_SOFTWARE",
            "PORT_NAME",
            "PORT_PROFILE_NAME",
            "PORT_STATUS",
            "LINK_STATUS",
            "PORT_SPEED",
            "PORT_DUPLEX",
            "PORT_UNTAGGED_VLAN",
            "PORT_TAGGED_VLANS",
        ]
        if columns:
            # Map requested column names to indexes; ignore unknown names
            cmap = []
            for c in columns.split(","):
                try:
                    cmap += [cols.index(c)]
                except ValueError:
                    continue
        else:
            cmap = list(range(len(cols)))
        r = [translate_row(header_row, cmap)]
        mo = {}  # managed object id -> display attributes
        if_p = {}  # interface profile id -> {"name": ...}
        DUPLEX = {True: "Full", False: "Half"}
        for ifp in InterfaceProfile.objects.filter():
            if_p[ifp.id] = {"name": ifp.name}
        mos = ManagedObject.objects.filter(is_managed=True)
        # NOTE(review): this branch re-assigns the identical queryset --
        # looks redundant; confirm before removing
        if (request.user.is_superuser and not administrative_domain
                and not selector and not interface_profile):
            mos = ManagedObject.objects.filter(is_managed=True)
        if not request.user.is_superuser:
            mos = mos.filter(
                administrative_domain__in=UserAccess.get_domains(request.user))
        if administrative_domain:
            ads = AdministrativeDomain.get_nested_ids(
                int(administrative_domain))
            mos = mos.filter(administrative_domain__in=ads)
        if selector:
            selector = ManagedObjectSelector.get_by_id(int(selector))
            mos = mos.filter(selector.Q)
        for o in mos:
            mo[o.id] = {
                "type": "managedobject",
                "id": str(o.id),
                "name": o.name,
                "status": o.is_managed,
                "address": o.address,
                "vendor": o.vendor,
                "version": o.version,
                "platform": o.platform,
            }
        mos_id = list(mos.values_list("id", flat=True))
        rld = ReportInterfaceStatus(mos_id, zero, def_profile,
                                    interface_profile)
        for i in rld.out:
            untag, tagged = "", ""
            if i["subs"]:
                # VLANs come from the first subinterface
                untag = i["subs"][0].get("untagged_vlan", "")
                tagged = list_to_ranges(i["subs"][0].get("tagged_vlans", []))
            r += [
                translate_row(
                    row([
                        mo[i["managed_object"]]["name"],
                        mo[i["managed_object"]]["address"],
                        "%s %s" % (
                            str(mo[i["managed_object"]]["vendor"]),
                            str(mo[i["managed_object"]]["platform"]),
                        ),
                        str(mo[i["managed_object"]]["version"]),
                        i["name"],
                        if_p[i["profile"]]["name"],
                        "UP" if i["admin_status"] is True else "Down",
                        "UP" if "oper_status" in i
                        and i["oper_status"] is True else "Down",
                        humanize_speed(i["in_speed"])
                        if "in_speed" in i else "-",
                        DUPLEX.get(i["full_duplex"])
                        if "full_duplex" in i and "in_speed" in i else "-",
                        untag,
                        tagged,
                    ]),
                    cmap,
                )
            ]
        filename = "interface_status_report_%s" % datetime.datetime.now(
        ).strftime("%Y%m%d")
        if o_format == "csv":
            response = HttpResponse(content_type="text/csv")
            response[
                "Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
            writer = csv.writer(response, dialect="excel", delimiter=";")
            writer.writerows(r)
            return response
        elif o_format == "xlsx":
            response = StringIO()
            wb = xlsxwriter.Workbook(response)
            cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
            ws = wb.add_worksheet("Objects")
            # Track widest cell per header for optional auto-width
            max_column_data_length = {}
            for rn, x in enumerate(r):
                for cn, c in enumerate(x):
                    if rn and (r[0][cn] not in max_column_data_length
                               or len(str(c)) > max_column_data_length[r[0][cn]]):
                        max_column_data_length[r[0][cn]] = len(str(c))
                    ws.write(rn, cn, c, cf1)
            ws.autofilter(0, 0, rn, cn)
            ws.freeze_panes(1, 0)
            for cn, c in enumerate(r[0]):
                # Set column width
                width = get_column_width(c)
                if enable_autowidth and width < max_column_data_length[c]:
                    width = max_column_data_length[c]
                ws.set_column(cn, cn, width=width)
            wb.close()
            response.seek(0)
            response = HttpResponse(response.getvalue(),
                                    content_type="application/vnd.ms-excel")
            # response = HttpResponse(
            #     content_type="application/x-ms-excel")
            response[
                "Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
            # NOTE(review): closing the response before returning it looks
            # suspicious for a streaming HttpResponse -- confirm
            response.close()
            return response
class RefBookAppplication(Application):
    """
    Reference Books application: browse, search, edit, add and delete
    records of RefBook instances. Built-in refbooks are read-only.
    """
    title = _("Reference Books")

    @view(url=r"^$", url_name="index", menu=[_("Setup"), _("Reference Books")],
          access="view")
    def view_index(self, request):
        """
        Render list of refbooks
        :param request:
        :return:
        """
        ref_books = RefBook.objects.filter(is_enabled=True).order_by("name")
        return self.render(request, "index.html", ref_books=ref_books)

    @view(url=r"^(?P<refbook_id>\d+)/$", url_name="view", access="view")
    def view_view(self, request, refbook_id):
        """
        Refbook preview
        :param request:
        :param refbook_id:
        :return:
        """
        rb = get_object_or_404(RefBook, id=int(refbook_id))
        # Built-in refbooks are never editable
        can_edit = not rb.is_builtin and Permission.has_perm(
            request.user, "main.change_refbookdata"
        )
        queryset = rb.refbookdata_set.all()
        # Search
        if request.GET and "query" in request.GET and request.GET["query"]:
            query = request.GET["query"]
            # Build query clause: OR together the SQL fragments produced
            # by every searchable field of this refbook
            w = []
            p = []
            for f in rb.refbookfield_set.filter(search_method__isnull=False):
                x = f.get_extra(query)
                if not x:
                    continue
                w += x["where"]
                p += x["params"]
            w = " OR ".join(["(%s)" % xx for xx in w])
            queryset = queryset.extra(where=["(%s)" % w], params=p)
        else:
            query = ""
        # Use generic view for final result
        request._gv_queryset = queryset
        request._gv_ctx = {"rb": rb, "can_edit": can_edit, "query": query,
                           "app": self}
        return RefBookList().get(request)

    @view(url=r"^(?P<refbook_id>\d+)/(?P<record_id>\d+)/$", url_name="item",
          access="view")
    def view_item(self, request, refbook_id, record_id):
        """
        Item preview
        :param request:
        :param refbook_id:
        :param record_id:
        :return:
        """
        rb = get_object_or_404(RefBook, id=int(refbook_id))
        rbr = get_object_or_404(RefBookData, id=int(record_id), ref_book=rb)
        can_edit = not rb.is_builtin and Permission.has_perm(
            request.user, "main.change_refbookdata"
        )
        return self.render(request, "item.html",
                           {"rb": rb, "record": rbr, "can_edit": can_edit})

    @view(url=r"^(?P<refbook_id>\d+)/(?P<record_id>\d+)/edit/$",
          url_name="edit", access="change")
    def view_edit(self, request, refbook_id, record_id=0):
        """
        Edit item
        :param request:
        :param refbook_id:
        :param record_id:
        :return:
        """
        rb = get_object_or_404(RefBook, id=int(refbook_id))
        rbr = get_object_or_404(RefBookData, id=int(record_id), ref_book=rb)
        can_edit = not rb.is_builtin and Permission.has_perm(
            request.user, "main.change_refbookdata"
        )
        if not can_edit:
            return self.response_forbidden("Read-only refbook")
        if request.POST:
            # Edit refbook
            if not can_edit:
                return self.response_forbidden("Read-only refbook")
            # Retrieve record data: "field_<n>" POST keys hold the values,
            # missing indexes become empty strings
            fns = [int(k[6:]) for k in request.POST if k.startswith("field_")]
            data = ["" for i in range(max(fns) + 1)]
            for i in fns:
                data[i] = request.POST["field_%d" % i]
            rbr.value = data
            rbr.save()
            self.message_user(request, "Record updated successfully")
            return self.response_redirect("main:refbook:item", rb.id, rbr.id)
        return self.render(request, "edit.html", {"rb": rb, "record": rbr})

    @view(
        url=r"^(?P<refbook_id>\d+)/(?P<record_id>\d+)/delete/$",
        url_name="delete", access="delete"
    )
    def view_delete(self, request, refbook_id, record_id):
        """
        Delete refbook record
        :param request:
        :param refbook_id:
        :param record_id:
        :return:
        """
        rb = get_object_or_404(RefBook, id=int(refbook_id))
        can_edit = not rb.is_builtin and Permission.has_perm(
            request.user, "main.change_refbookdata"
        )
        if not can_edit:
            return self.response_forbidden()
        rbd = get_object_or_404(RefBookData, ref_book=rb, id=int(record_id))
        rbd.delete()
        self.message_user(request, "Record deleted")
        return self.response_redirect("main:refbook:view", rb.id)

    @view(url=r"^(?P<refbook_id>\d+)/new/$", url_name="new", access="add")
    def view_new(self, request, refbook_id):
        """
        Create refbook record
        :param request:
        :param refbook_id:
        :return:
        """
        rb = get_object_or_404(RefBook, id=int(refbook_id))
        can_edit = not rb.is_builtin and Permission.has_perm(
            request.user, "main.change_refbookdata"
        )
        if not can_edit:
            return self.response_forbidden("Read-only refbook")
        if request.POST:
            # Edit refbook
            if not can_edit:
                return self.response_forbidden("Read-only refbook")
            # Retrieve record data (same "field_<n>" convention as view_edit)
            fns = [int(k[6:]) for k in request.POST if k.startswith("field_")]
            data = ["" for i in range(max(fns) + 1)]
            for i in fns:
                data[i] = request.POST["field_%d" % i]
            rbr = RefBookData(ref_book=rb, value=data)
            rbr.save()
            self.message_user(request, "Record added")
            return self.response_redirect("main:refbook:item", rb.id, rbr.id)
        return self.render(request, "new.html", {"rb": rb})
# --------------------------------------------------------------------- # Copyright (C) 2007-2016 The NOC Project # See LICENSE for details # --------------------------------------------------------------------- # Third-party modules from django import forms # NOC modules from noc.lib.app.simplereport import SimpleReport, TableColumn, PredefinedReport from noc.sa.models.useraccess import UserAccess from noc.sa.models.profile import Profile from noc.inv.models.platform import Platform from noc.inv.models.firmware import Firmware from noc.core.translation import ugettext as _ report_types = [("profile", _("By Profile")), ("domain", _("By Administrative Domain")), ("domain-profile", _("By Administrative Domain and Profile")), ("tag", _("By Tags")), ("platform", _("By Platform")), ("version", _("By Version"))] class ReportForm(forms.Form): report_type = forms.ChoiceField(label=_("Report Type"), choices=report_types) class ReportObjectsSummary(SimpleReport): title = _("Managed Objects Summary") form = ReportForm predefined_reports = {