def get_validator(self, field):
    """
    Map a Django model field onto a cleaning Parameter instance.

    :param field: Model field to build a validator for
    :type field: Field
    :return: Parameter instance, or None when the field needs no cleaning
    """
    from noc.core.model.fields import AutoCompleteTagsField

    # Checked in a fixed order; first matching field class wins
    if isinstance(field, BooleanField):
        return BooleanParameter()
    if isinstance(field, IntegerField):
        return IntParameter()
    if isinstance(field, FloatField):
        return FloatParameter()
    if isinstance(field, AutoCompleteTagsField):
        return TagsParameter(required=not field.null)
    if isinstance(field, TextArrayField):
        return StringListParameter(required=not field.null)
    if isinstance(field, related.ForeignKey):
        # Remember the related model so FK values can be resolved later
        self.fk_fields[field.name] = field.remote_field.model
        return ModelParameter(field.remote_field.model, required=not field.null)
    # No cleaning required for any other field type
    return None
def init_plugin(self):
    """
    Register the conduits plugin HTTP endpoints on application startup.
    """
    super(ConduitsPlugin, self).init_plugin()
    # GET: list neighbor objects available for conduit linking
    self.add_view(
        "api_plugin_%s_get_neighbors" % self.name,
        self.api_get_neighbors,
        url="^(?P<id>[0-9a-f]{24})/plugin/%s/get_neighbors/$" % self.name,
        method=["GET"]
    )
    # POST: create ducts; payload is validated as a list of duct
    # descriptors, each with a target object, a projected distance and
    # a list of conduit slots (grid position n/x/y plus used status)
    self.add_view(
        "api_plugin_%s_create_ducts" % self.name,
        self.api_create_ducts,
        url="^(?P<id>[0-9a-f]{24})/plugin/%s/$" % self.name,
        method=["POST"],
        validate={
            "ducts": DictListParameter(attrs={
                "target": DocumentParameter(Object),
                "project_distance": FloatParameter(),
                "conduits": DictListParameter(attrs={
                    "id": DocumentParameter(Object, required=False),
                    "n": IntParameter(),
                    "x": IntParameter(),
                    "y": IntParameter(),
                    "status": BooleanParameter()
                })
            })
        }
    )
    # self.conduits_model = ObjectModel.objects.filter(name=self.CONDUITS_MODEL).first()
def __init__(self, model):
    """
    Bind the inline to *model* and precompute its field cleaners,
    default query fields and custom field handlers.
    """
    self.model = model
    self.app = None
    self.pk_field_name = "id"
    # Per-field cleaning parameters (name -> Parameter), seeded from
    # the class-level defaults
    self.clean_fields = self.clean_fields.copy()
    for fname, fld in six.iteritems(self.model._fields):
        if isinstance(fld, BooleanField):
            cleaner = BooleanParameter()
        elif isinstance(fld, IntField):
            cleaner = IntParameter()
        elif isinstance(fld, PlainReferenceField):
            cleaner = DocumentParameter(fld.document_type)
        else:
            continue  # No cleaning needed for other field types
        self.clean_fields[fname] = cleaner
    # Default quick-search fields to every unique string field
    if not self.query_fields:
        self.query_fields = [
            "%s__%s" % (fname, self.query_condition)
            for fname, fld in six.iteritems(self.model._fields)
            if fld.unique and isinstance(fld, StringField)
        ]
    # Collect callable field_<name> attributes as custom field handlers
    self.custom_fields = {}
    for attr_name in dir(self):
        if not attr_name.startswith("field_"):
            continue
        handler = getattr(self, attr_name)
        if callable(handler):
            self.custom_fields[attr_name[6:]] = handler
def init_plugin(self):
    """
    Register the map plugin HTTP endpoints on application startup.
    """
    super().init_plugin()
    # GET: render a single map layer
    self.add_view(
        "api_plugin_%s_get_layer" % self.name,
        self.api_get_layer,
        url=r"^plugin/%s/layers/(?P<layer>\S+)/$" % self.name,
        method=["GET"],
    )
    # GET: geo data for a single object
    self.add_view(
        "api_plugin_%s_object_data" % self.name,
        self.api_object_data,
        url="^(?P<id>[0-9a-f]{24})/plugin/%s/object_data/$" % self.name,
        method=["GET"],
    )
    # POST: move an object to a new point in the given spatial
    # reference system (srid) with x/y coordinates
    self.add_view(
        "api_plugin_%s_set_geopoint" % self.name,
        self.api_set_geopoint,
        url="^(?P<id>[0-9a-f]{24})/plugin/%s/set_geopoint/$" % self.name,
        method=["POST"],
        validate={
            "srid": StringParameter(),
            "x": FloatParameter(),
            "y": FloatParameter()
        },
    )
    # POST: toggle visibility of a layer
    self.add_view(
        "api_plugin_%s_set_layer_visibility" % self.name,
        self.api_set_layer_visibility,
        url="^plugin/%s/layer_visibility/$" % self.name,
        method=["POST"],
        validate={
            "layer": StringParameter(),
            "status": BooleanParameter()
        },
    )
    # POST: create a new object of the given model at x/y
    self.add_view(
        "api_plugin_%s_create" % self.name,
        self.api_create,
        url="^plugin/%s/$" % self.name,
        method=["POST"],
        validate={
            "model": DocumentParameter(ObjectModel),
            "name": UnicodeParameter(),
            "srid": StringParameter(),
            "x": FloatParameter(),
            "y": FloatParameter(),
        },
    )
def cleaned_query(self, q):
    """
    Clean raw request parameters into a lookup dict.

    Drops ignored/service parameters, merges numbered ``<field>N__in``
    parameters into a single ``<field>__in`` list, applies per-field
    cleaners, strips ``*__referred`` pseudo-parameters and translates
    the ``is_builtin`` flag into a uuid constraint for JSON collections.

    :param q: Raw query parameters (name -> value)
    :return: Cleaned copy of q
    """
    q = q.copy()
    # Drop application-specific ignored parameters
    for p in self.ignored_params:
        if p in q:
            del q[p]
    # Drop ExtJS service parameters (paging, sorting, formatting)
    for p in (
        self.limit_param,
        self.page_param,
        self.start_param,
        self.format_param,
        self.sort_param,
        self.query_param,
        self.only_param,
    ):
        if p in q:
            del q[p]
    # Extract IN
    # extjs not working with same parameter name in query, so
    # x1__in/x2__in/... are merged into a single x__in list
    for p in list(q.keys()):
        if p.endswith("__in") and self.rx_oper_splitter.match(p):
            field = self.rx_oper_splitter.match(p).group("field") + "__in"
            if field not in q:
                q[field] = [q[p]]
            else:
                q[field] += [q[p]]
            del q[p]
    # Normalize parameters
    for p in q:
        if p.endswith("__exists"):
            q[p] = BooleanParameter().clean(q[p])
        elif p in self.clean_fields:
            q[p] = self.clean_fields[p].clean(q[p])
    # @todo: correct __ lookups
    # Fix: delete every *__referred key. Previously this was
    # `if any(...): del q[p]`, which deleted whichever key the loop
    # variable `p` last held (the genexp variable does not leak).
    for p in [p for p in q if p.endswith("__referred")]:
        del q[p]
    # builtin
    is_builtin = q.pop("is_builtin", None)
    if self.json_collection and is_builtin in ("true", "false"):
        builtins = [
            uuid.UUID(x) for x in Collection.get_builtins(self.json_collection)
        ]
        if is_builtin == "true":
            q["uuid__in"] = builtins
        else:
            q["uuid__nin"] = builtins
    return q
def cleaned_query(self, q):
    """
    Clean raw request parameters into a lookup dict.

    Drops ignored/service parameters, applies per-field cleaners and
    strips ``*__referred`` pseudo-parameters.

    :param q: Raw query parameters (name -> value)
    :return: Cleaned copy of q
    """
    q = q.copy()
    # Drop application-specific ignored parameters
    for p in self.ignored_params:
        if p in q:
            del q[p]
    # Drop ExtJS service parameters (paging, sorting, formatting)
    for p in (
        self.limit_param,
        self.page_param,
        self.start_param,
        self.format_param,
        self.sort_param,
        self.query_param,
        self.only_param
    ):
        if p in q:
            del q[p]
    # Normalize parameters
    for p in q:
        if p.endswith("__exists"):
            q[p] = BooleanParameter().clean(q[p])
        elif p in self.clean_fields:
            q[p] = self.clean_fields[p].clean(q[p])
    # @todo: correct __ lookups
    # Fix: delete every *__referred key. Previously this was
    # `if any(...): del q[p]`, which deleted whichever key the loop
    # variable `p` last held (the genexp variable does not leak).
    for p in [p for p in q if p.endswith("__referred")]:
        del q[p]
    return q
def cleaned_query(self, q):
    """
    Clean raw request parameters into a lookup dict.

    Drops ignored/service parameters, merges numbered ``<field>N__in``
    parameters into a single ``<field>__in`` list, applies per-field
    cleaners and strips ``*__referred`` pseudo-parameters.

    :param q: Raw query parameters (name -> value)
    :return: Cleaned copy of q
    """
    q = q.copy()
    # Drop application-specific ignored parameters
    for p in self.ignored_params:
        if p in q:
            del q[p]
    # Drop ExtJS service parameters (paging, sorting, formatting)
    for p in (
        self.limit_param,
        self.page_param,
        self.start_param,
        self.format_param,
        self.sort_param,
        self.query_param,
        self.only_param,
    ):
        if p in q:
            del q[p]
    # Extract IN
    # extjs not working with same parameter name in query, so
    # x1__in/x2__in/... are merged into a single x__in list.
    # Fix: iterate over a snapshot — the original `for p in q:` deleted
    # keys from q during iteration, raising RuntimeError on Python 3.
    for p in list(q.keys()):
        if p.endswith("__in") and self.rx_oper_splitter.match(p):
            field = self.rx_oper_splitter.match(p).group("field") + "__in"
            if field not in q:
                q[field] = [q[p]]
            else:
                q[field] += [q[p]]
            del q[p]
    # Normalize parameters
    for p in q:
        if p.endswith("__exists"):
            q[p] = BooleanParameter().clean(q[p])
        elif p in self.clean_fields:
            q[p] = self.clean_fields[p].clean(q[p])
    # @todo: correct __ lookups
    # Fix: delete every *__referred key. Previously this was
    # `if any(...): del q[p]`, which deleted whichever key the loop
    # variable `p` last held (the genexp variable does not leak).
    for p in [p for p in q if p.endswith("__referred")]:
        del q[p]
    return q
RequestFrom = ObjectPointer | InterfacePointer | ServicePointer # to: section RequestTo = ObjectPointer | LevelPointer | InterfacePointer | ServicePointer # config: section RequestConfig = DictParameter( attrs={ "max_depth": IntParameter(default=MAX_DEPTH_DEFAULT), "n_shortest": IntParameter(default=N_SHORTEST_DEFAULT), }, required=False, ) # constraints: section RequestVLANConstraint = DictParameter( attrs={ "vlan": VLANIDParameter(required=False), "interface_untagged": BooleanParameter(required=False), "strict": BooleanParameter(default=False), }, required=False, ) RequestConstraints = DictParameter( attrs={ "vlan": RequestVLANConstraint, "upwards": BooleanParameter(default=False) }, required=False, ) Request = DictParameter( attrs={ "from": RequestFrom, "to": RequestTo,
class ManagedObjectApplication(ExtModelApplication):
    """
    ManagedObject application
    """
    title = _("Managed Objects")
    menu = _("Managed Objects")
    model = ManagedObject
    # Operator and fields used by quick search
    query_condition = "icontains"
    query_fields = ["name", "description"]
    # Fields masked from non-privileged output
    secret_fields = {"password", "super_password", "snmp_ro", "snmp_rw"}
    # Inlines
    attrs = ModelInline(ManagedObjectAttribute)
    cfg = RepoInline("config", access="config")
    extra_permissions = ["alarm", "change_interface"]
    implied_permissions = {
        "read": ["inv:networksegment:lookup", "main:handler:lookup"]
    }
    diverged_permissions = {"config": "read", "console": "script"}
    # SQL ORDER BY expressions per sortable column; profile/platform/
    # version sort by display name via a generated CASE expression
    # NOTE(review): evaluated once at import time — ordering goes stale
    # when profiles/platforms/firmware are added later; confirm intended
    order_map = {
        "address": " cast_test_to_inet(address) ",
        "-address": " cast_test_to_inet(address) ",
        "profile": "CASE %s END" % " ".join([
            "WHEN %s='%s' THEN %s" % ("profile", pk, i)
            for i, pk in enumerate(
                Profile.objects.filter().order_by("name").values_list("id"))
        ]),
        "-profile": "CASE %s END" % " ".join([
            "WHEN %s='%s' THEN %s" % ("profile", pk, i)
            for i, pk in enumerate(
                Profile.objects.filter().order_by("-name").values_list("id"))
        ]),
        "platform": "CASE %s END" % " ".join([
            "WHEN %s='%s' THEN %s" % ("platform", pk, i)
            for i, pk in enumerate(Platform.objects.filter().order_by(
                "name").values_list("id"))
        ]),
        "-platform": "CASE %s END" % " ".join([
            "WHEN %s='%s' THEN %s" % ("platform", pk, i)
            for i, pk in enumerate(Platform.objects.filter().order_by(
                "-name").values_list("id"))
        ]),
        "version": "CASE %s END" % " ".join([
            "WHEN %s='%s' THEN %s" % ("version", pk, i)
            for i, pk in enumerate(Firmware.objects.filter().order_by(
                "version").values_list("id"))
        ]),
        "-version": "CASE %s END" % " ".join([
            "WHEN %s='%s' THEN %s" % ("version", pk, i)
            for i, pk in enumerate(Firmware.objects.filter().order_by(
                "-version").values_list("id"))
        ]),
    }
    # Fields expanded to/collapsed from {"group": ..} lists in the API
    resource_group_fields = [
        "static_service_groups",
        "effective_service_groups",
        "static_client_groups",
        "effective_client_groups",
    ]
    # (name, job class path) pairs of schedulable discovery jobs
    DISCOVERY_JOBS = [
        ("box", "noc.services.discovery.jobs.box.job.BoxDiscoveryJob"),
        ("periodic",
         "noc.services.discovery.jobs.periodic.job.PeriodicDiscoveryJob"),
    ]

    def field_row_class(self, o):
        # CSS class of the grid row, taken from the object profile style
        return o.object_profile.style.css_class_name if o.object_profile.style else ""

    def bulk_field_interface_count(self, data):
        """
        Apply interface_count fields
        :param data: list of instance dicts (must carry "id")
        :return: same list with "interface_count" filled in
        """
        mo_ids = [x["id"] for x in data]
        if not mo_ids:
            return data
        # Collect interface counts (physical only) in a single aggregate
        r = Interface._get_collection().aggregate([
            {
                "$match": {
                    "managed_object": {
                        "$in": mo_ids
                    },
                    "type": "physical"
                }
            },
            {
                "$group": {
                    "_id": "$managed_object",
                    "total": {
                        "$sum": 1
                    }
                }
            },
        ])
        ifcount = dict((x["_id"], x["total"]) for x in r)
        # Apply interface counts
        for x in data:
            x["interface_count"] = ifcount.get(x["id"]) or 0
        return data

    def bulk_field_link_count(self, data):
        """
        Apply link_count fields
        :param data: list of instance dicts (must carry "id")
        :return: same list with "link_count" filled in
        """
        mo_ids = [x["id"] for x in data]
        if not mo_ids:
            return data
        # Collect link counts per object in a single aggregate
        r = Link._get_collection().aggregate([
            {
                "$match": {
                    "linked_objects": {
                        "$in": mo_ids
                    }
                }
            },
            {
                "$unwind": "$linked_objects"
            },
            {
                "$group": {
                    "_id": "$linked_objects",
                    "total": {
                        "$sum": 1
                    }
                }
            },
        ])
        links_count = dict((x["_id"], x["total"]) for x in r)
        # Apply link counts
        for x in data:
            x["link_count"] = links_count.get(x["id"]) or 0
        return data

    def instance_to_dict(self, o, fields=None):
        # Serialize instance, expanding resource group ids to
        # {"group": id, "group__label": name} pairs for the UI
        def sg_to_list(items):
            return [{
                "group": x,
                "group__label": unicode(ResourceGroup.get_by_id(x))
            } for x in items]

        data = super(ManagedObjectApplication, self).instance_to_dict(o, fields)
        # Expand resource groups fields
        for fn in self.resource_group_fields:
            data[fn] = sg_to_list(data.get(fn) or [])
        return data

    def clean(self, data):
        # Clean resource groups: effective_* fields are computed and must
        # not be written; static_* fields are collapsed back to id lists
        for fn in self.resource_group_fields:
            if fn.startswith("effective_") and fn in data:
                del data[fn]
                continue
            # NOTE(review): an effective_* field absent from data falls
            # through and is set to [] here — confirm this is intended
            data[fn] = [x["group"] for x in (data.get(fn) or [])]
        # Clean other
        return super(ManagedObjectApplication, self).clean(data)

    def cleaned_query(self, q):
        # Translate pseudo-filters (administrative_domain, selector)
        # into ORM constraints before the generic cleaning
        if "administrative_domain" in q:
            ad = AdministrativeDomain.get_nested_ids(
                int(q["administrative_domain"]))
            if ad:
                del q["administrative_domain"]
        else:
            ad = None
        if "selector" in q:
            s = self.get_object_or_404(ManagedObjectSelector,
                                       id=int(q["selector"]))
            del q["selector"]
        else:
            s = None
        r = super(ManagedObjectApplication, self).cleaned_query(q)
        if s:
            r["id__in"] = ManagedObject.objects.filter(s.Q)
        if ad:
            r["administrative_domain__in"] = ad
        return r

    def get_Q(self, request, query):
        # Extend the default search Q with ManagedObject full search
        q = super(ManagedObjectApplication, self).get_Q(request, query)
        sq = ManagedObject.get_search_Q(query)
        if sq:
            q |= sq
        return q

    def queryset(self, request, query=None):
        # Restrict to user-accessible objects and hide objects queued
        # for wiping (renamed to "wiping-<id>" by api_delete)
        qs = super(ManagedObjectApplication, self).queryset(request, query)
        if not request.user.is_superuser:
            qs = qs.filter(UserAccess.Q(request.user))
        qs = qs.exclude(name__startswith="wiping-")
        return qs

    @view(url=r"^(?P<id>\d+)/links/$", method=["GET"], access="read", api=True)
    def api_links(self, request, id):
        # Return the object's links as (local interface, remote interface)
        # pairs with discovery metadata
        o = self.get_object_or_404(ManagedObject, id=id)
        if not o.has_access(request.user):
            return self.response_forbidden("Access denied")
        # Get links
        result = []
        for link in Link.object_links(o):
            # Split link interfaces into local (ifaces) and remote (r)
            ifaces = []
            r = []
            for i in link.interfaces:
                if i.managed_object.id == o.id:
                    ifaces += [i]
                else:
                    r += [i]
            for li, ri in zip(ifaces, r):
                result += [{
                    "link_id": str(link.id),
                    "local_interface": str(li.id),
                    "local_interface__label": li.name,
                    "remote_object": ri.managed_object.id,
                    "remote_object__label": ri.managed_object.name,
                    "remote_platform": ri.managed_object.platform.name if ri.managed_object.platform else "",
                    "remote_interface": str(ri.id),
                    "remote_interface__label": ri.name,
                    "discovery_method": link.discovery_method,
                    "local_description": li.description,
                    "remote_description": ri.description,
                    "first_discovered": link.first_discovered.isoformat() if link.first_discovered else None,
                    "last_seen": link.last_seen.isoformat() if link.last_seen else None,
                }]
        return result

    @view(url=r"^(?P<id>\d+)/discovery/$", method=["GET"], access="read", api=True)
    def api_discovery(self, request, id):
        # Return per-discovery-job status (ping pseudo-job plus each
        # entry of DISCOVERY_JOBS) for the discovery panel
        from noc.core.scheduler.job import Job
        o = self.get_object_or_404(ManagedObject, id=id)
        if not o.has_access(request.user):
            return self.response_forbidden("Access denied")
        # NOTE(review): link_count is computed but never used below —
        # looks like dead code; confirm before removing
        link_count = defaultdict(int)
        for link in Link.object_links(o):
            m = link.discovery_method or ""
            if "+" in m:
                m = m.split("+")[0]
            link_count[m] += 1
        r = [{
            "name": "ping",
            "enable_profile": o.object_profile.enable_ping,
            "status": o.get_status(),
            "last_run": None,
            "last_status": None,
            "next_run": None,
            "jcls": None,
        }]
        for name, jcls in self.DISCOVERY_JOBS:
            job = Job.get_job_data("discovery", jcls=jcls, key=o.id,
                                   pool=o.pool.name) or {}
            d = {
                "name": name,
                "enable_profile": getattr(o.object_profile,
                                          "enable_%s_discovery" % name),
                "status": job.get(Job.ATTR_STATUS),
                "last_run": self.to_json(job.get(Job.ATTR_LAST)),
                "last_status": job.get(Job.ATTR_LAST_STATUS),
                "next_run": self.to_json(job.get(Job.ATTR_TS)),
                "jcls": jcls,
            }
            r += [d]
        return r

    @view(
        url=r"^actions/set_managed/$",
        method=["POST"],
        access="create",
        api=True,
        validate={
            "ids": ListOfParameter(element=ModelParameter(ManagedObject), convert=True)
        },
    )
    def api_action_set_managed(self, request, ids):
        # Bulk action: mark accessible objects as managed
        for o in ids:
            if not o.has_access(request.user):
                continue
            o.is_managed = True
            o.save()
        return "Selected objects set to managed state"

    @view(
        url=r"^actions/set_unmanaged/$",
        method=["POST"],
        access="create",
        api=True,
        validate={
            "ids": ListOfParameter(element=ModelParameter(ManagedObject), convert=True)
        },
    )
    def api_action_set_unmanaged(self, request, ids):
        # Bulk action: mark accessible objects as unmanaged
        for o in ids:
            if not o.has_access(request.user):
                continue
            o.is_managed = False
            o.save()
        return "Selected objects set to unmanaged state"

    @view(url=r"^(?P<id>\d+)/discovery/run/$", method=["POST"],
          access="change_discovery", api=True)
    def api_run_discovery(self, request, id):
        # Submit the discovery jobs named in the request body
        o = self.get_object_or_404(ManagedObject, id=id)
        if not o.has_access(request.user):
            return self.response_forbidden("Access denied")
        r = ujson.loads(request.body).get("names", [])
        for name, jcls in self.DISCOVERY_JOBS:
            if name not in r:
                continue
            if not getattr(o.object_profile, "enable_%s_discovery" % name):
                continue  # Disabled by profile
            Job.submit("discovery", jcls, key=o.id, pool=o.pool.name)
        return {"success": True}

    @view(url=r"^(?P<id>\d+)/discovery/stop/$", method=["POST"],
          access="change_discovery", api=True)
    def api_stop_discovery(self, request, id):
        # Remove the scheduled discovery jobs named in the request body
        o = self.get_object_or_404(ManagedObject, id=id)
        if not o.has_access(request.user):
            return self.response_forbidden("Access denied")
        r = ujson.loads(request.body).get("names", [])
        for name, jcls in self.DISCOVERY_JOBS:
            if name not in r:
                continue
            if not getattr(o.object_profile, "enable_%s_discovery" % name):
                continue  # Disabled by profile
            Job.remove("discovery", jcls, key=o.id, pool=o.pool.name)
        return {"success": True}

    @view(url=r"^(?P<id>\d+)/interface/$", method=["GET"], access="read", api=True)
    def api_interface(self, request, id):
        """
        GET interfaces
        :param managed_object:
        :return: dict with l1/lag/l2/l3 interface lists, each sorted by
            natural (alphanumeric) interface name
        """
        def sorted_iname(s):
            return sorted(s, key=lambda x: split_alnum(x["name"]))

        def get_style(i):
            # Row CSS class from the interface profile style, memoized
            # per profile id in style_cache
            profile = i.profile
            if profile:
                try:
                    return style_cache[profile.id]
                except KeyError:
                    pass
                if profile.style:
                    s = profile.style.css_class_name
                else:
                    s = ""
                style_cache[profile.id] = s
                return s
            else:
                return ""

        def get_link(i):
            # Build {"id", "label"} describing the far end(s) of the link
            link = i.link
            if not link:
                return None
            if link.is_ptp:
                # ptp
                o = link.other_ptp(i)
                label = "%s:%s" % (o.managed_object.name, o.name)
            elif link.is_lag:
                # unresolved LAG
                o = [ii for ii in link.other(i)
                     if ii.managed_object.id != i.managed_object.id]
                label = "LAG %s: %s" % (o[0].managed_object.name,
                                        ", ".join(ii.name for ii in o))
            else:
                # Broadcast
                label = ", ".join("%s:%s" % (ii.managed_object.name, ii.name)
                                  for ii in link.other(i))
            return {"id": str(link.id), "label": label}

        # Get object
        o = self.get_object_or_404(ManagedObject, id=int(id))
        if not o.has_access(request.user):
            return self.response_forbidden("Permission denied")
        # Physical interfaces
        # @todo: proper ordering
        default_state = ResourceState.get_default()
        style_cache = {}  # profile_id -> css_style
        l1 = [{
            "id": str(i.id),
            "name": i.name,
            "description": i.description,
            "status": i.status,
            "mac": i.mac,
            "ifindex": i.ifindex,
            "lag": (i.aggregated_interface.name if i.aggregated_interface else ""),
            "link": get_link(i),
            "profile": str(i.profile.id) if i.profile else None,
            "profile__label": unicode(i.profile) if i.profile else None,
            "enabled_protocols": i.enabled_protocols,
            "project": i.project.id if i.project else None,
            "project__label": unicode(i.project) if i.project else None,
            "state": i.state.id if i.state else default_state.id,
            "state__label": unicode(i.state if i.state else default_state),
            "vc_domain": i.vc_domain.id if i.vc_domain else None,
            "vc_domain__label": unicode(i.vc_domain) if i.vc_domain else None,
            "row_class": get_style(i),
        } for i in Interface.objects.filter(managed_object=o.id, type="physical")]
        # LAG
        lag = [{
            "id": str(i.id),
            "name": i.name,
            "description": i.description,
            "profile": str(i.profile.id) if i.profile else None,
            "profile__label": unicode(i.profile) if i.profile else None,
            "members": [
                j.name for j in Interface.objects.filter(managed_object=o.id,
                                                         aggregated_interface=i.id)
            ],
            "row_class": get_style(i),
        } for i in Interface.objects.filter(managed_object=o.id, type="aggregated")]
        # L2 interfaces
        l2 = [{
            "name": i.name,
            "description": i.description,
            "untagged_vlan": i.untagged_vlan,
            "tagged_vlans": i.tagged_vlans,
        } for i in SubInterface.objects.filter(managed_object=o.id,
                                               enabled_afi="BRIDGE")]
        # L3 interfaces
        q = MQ(enabled_afi="IPv4") | MQ(enabled_afi="IPv6")
        l3 = [{
            "name": i.name,
            "description": i.description,
            "ipv4_addresses": i.ipv4_addresses,
            "ipv6_addresses": i.ipv6_addresses,
            "enabled_protocols": i.enabled_protocols,
            "vlan": i.vlan_ids,
            "vrf": i.forwarding_instance.name if i.forwarding_instance else "",
            "mac": i.mac,
        } for i in SubInterface.objects.filter(managed_object=o.id).filter(q)]
        return {
            "l1": sorted_iname(l1),
            "lag": sorted_iname(lag),
            "l2": sorted_iname(l2),
            "l3": sorted_iname(l3),
        }
@view(url=r"^(?P<id>\d+)/interface/$", method=["POST"], access="change_interface", api=True) def api_set_interface(self, request, id): def get_or_none(c, v): if not v: return None return c.objects.get(id=v) o = self.get_object_or_404(ManagedObject, id=int(id)) if not o.has_access(request.user): return self.response_forbidden("Access denied") d = ujson.loads(request.body) if "id" in d: i = self.get_object_or_404(Interface, id=d["id"]) if i.managed_object.id != o.id: return self.response_not_found() # Set profile if "profile" in d: p = get_or_none(InterfaceProfile, d["profile"]) i.profile = p if p: i.profile_locked = True # Project if "project" in d: i.project = get_or_none(Project, d["project"]) # State if "state" in d: i.state = get_or_none(ResourceState, d["state"]) # VC Domain if "vc_domain" in d: i.vc_domain = get_or_none(VCDomain, d["vc_domain"]) # i.save() return {"success": True} @view(method=["DELETE"], url=r"^(?P<id>\d+)/?$", access="delete", api=True) def api_delete(self, request, id): """ Override default method :param request: :param id: :return: """ try: o = self.queryset(request).get(id=int(id)) except self.model.DoesNotExist: return self.render_json({ "status": False, "message": "Not found" }, status=self.NOT_FOUND) if not o.has_access(request.user): return self.response_forbidden("Access denied") # Run sa.wipe_managed_object job instead o.name = "wiping-%d" % o.id o.is_managed = False o.description = "Wiping! Do not touch!" 
o.save() call_later("noc.sa.wipe.managedobject.wipe", o=o.id) return HttpResponse(status=self.DELETED) @view( url=r"^actions/run_discovery/$", method=["POST"], access="launch", api=True, validate={ "ids": ListOfParameter(element=ModelParameter(ManagedObject), convert=True) }, ) def api_action_run_discovery(self, request, ids): d = 0 for o in ids: if not o.has_access(request.user): continue o.run_discovery(delta=d) d += 1 return "Discovery processes has been scheduled" def get_nested_inventory(self, o): rev = o.get_data("asset", "revision") if rev == "None": rev = "" r = { "id": str(o.id), "serial": o.get_data("asset", "serial"), "revision": rev or "", "description": o.model.description, "model": o.model.name, } children = [] for n in o.model.connections: if n.direction == "i": c, r_object, _ = o.get_p2p_connection(n.name) if c is None: children += [{ "id": None, "name": n.name, "leaf": True, "serial": None, "description": "--- EMPTY ---", "model": None, }] else: cc = self.get_nested_inventory(r_object) cc["name"] = n.name children += [cc] elif n.direction == "s": children += [{ "id": None, "name": n.name, "leaf": True, "serial": None, "description": n.description, "model": ", ".join(n.protocols), }] if children: to_expand = "Transceiver" not in o.model.name r["children"] = children r["expanded"] = to_expand else: r["leaf"] = True return r @view(url=r"^(?P<id>\d+)/inventory/$", method=["GET"], access="read", api=True) def api_inventory(self, request, id): o = self.get_object_or_404(ManagedObject, id=id) if not o.has_access(request.user): return self.response_forbidden("Access denied") r = [] for p in o.get_inventory(): c = self.get_nested_inventory(p) c["name"] = p.name or o.name r += [c] return {"expanded": True, "children": r} @view(url=r"^(?P<id>\d+)/confdb/$", method=["GET"], access="config", api=True) def api_confdb(self, request, id): o = self.get_object_or_404(ManagedObject, id=id) if not o.has_access(request.user): return self.response_forbidden("Access 
denied") cleanup = True if "cleanup" in request.GET: c = request.GET["cleanup"].strip().lower() cleanup = c not in ("no", "false", "0") cdb = o.get_confdb(cleanup=cleanup) return self.render_plain_text(cdb.dump("json"), content_type="text/json") @view( url=r"^(?P<id>\d+)/confdb/$", method=["POST"], validate={ "query": StringParameter(), "cleanup": BooleanParameter(default=True), "dump": BooleanParameter(default=False), }, access="config", api=True, ) def api_confdb_query(self, request, id, query="", cleanup=True, dump=False): o = self.get_object_or_404(ManagedObject, id=id) if not o.has_access(request.user): return self.response_forbidden("Access denied") cdb = o.get_confdb(cleanup=cleanup) try: r = list(cdb.query(query)) result = {"status": True, "result": r} if dump: result["confdb"] = ujson.loads(cdb.dump("json")) except SyntaxError as e: result = {"status": False, "error": str(e)} return result @view(url=r"^(?P<id>\d+)/job_log/(?P<job>\S+)/$", method=["GET"], access="read", api=True) def api_job_log(self, request, id, job): o = self.get_object_or_404(ManagedObject, id=id) if not o.has_access(request.user): return self.response_forbidden("Access denied") # fs = gridfs.GridFS(get_db(), "noc.joblog") key = "discovery-%s-%s" % (job, o.id) d = get_db()["noc.joblog"].find_one({"_id": key}) if d and d["log"]: return self.render_plain_text(zlib.decompress(str(d["log"]))) else: return self.render_plain_text("No data") @view(url=r"^(?P<id>\d+)/interactions/$", method=["GET"], access="interactions", api=True) def api_interactions(self, request, id): o = self.get_object_or_404(ManagedObject, id=id) if not o.has_access(request.user): return self.response_forbidden("Access denied") return [{ "ts": self.to_json(i.timestamp), "op": i.op, "user": i.user, "text": i.text } for i in InteractionLog.objects.filter( object=o.id).order_by("-timestamp")] @view(url=r"^(?P<id>\d+)/scripts/$", method=["GET"], access="script", api=True) def api_scripts(self, request, id): o = 
self.get_object_or_404(ManagedObject, id=id) if not o.has_access(request.user): return self.response_forbidden("Access denied") r = [] for s in o.scripts: sn = o.profile.name + "." + s script = script_loader.get_script(sn) if not script: self.logger.error("Failed to load script: %s", sn) continue interface = script.interface() ss = { "name": s, "has_input": any(interface.gen_parameters()), "require_input": interface.has_required_params, "form": interface.get_form(), "preview": interface.preview or "NOC.sa.managedobject.scripts.JSONPreview", } r += [ss] return r @view(url=r"^(?P<id>\d+)/scripts/(?P<name>[^/]+)/$", method=["POST"], access="script", api=True) def api_run_script(self, request, id, name): o = self.get_object_or_404(ManagedObject, id=id) if not o.has_access(request.user): return {"error": "Access denied"} if name not in o.scripts: return {"error": "Script not found: %s" % name} params = self.deserialize(request.body) try: result = o.scripts[name](**params) except Exception as e: return {"error": str(e)} return {"result": result} @view(url=r"^(?P<id>\d+)/console/$", method=["POST"], access="console", api=True) def api_console_command(self, request, id): o = self.get_object_or_404(ManagedObject, id=id) if not o.has_access(request.user): return {"error": "Access denied"} if "commands" not in o.scripts: return {"error": "Script not found: commands"} params = self.deserialize(request.body) try: result = o.scripts.commands(**params) except Exception as e: return {"error": str(e)} return {"result": result} @view(url=r"(?P<id>\d+)/caps/$", method=["GET"], access="read", api=True) def api_get_caps(self, request, id): o = self.get_object_or_404(ManagedObject, id=id) if not o.has_access(request.user): return self.response_forbidden("Access denied") r = [] oc = ObjectCapabilities.objects.filter(object=o).first() if oc: for c in oc.caps: r += [{ "capability": c.capability.name, "description": c.capability.description, "type": c.capability.type, "value": c.value, 
"source": c.source, }] return sorted(r, key=lambda x: x["capability"]) @view(url=r"(?P<id>\d+)/facts/$", method=["GET"], access="read", api=True) def api_get_facts(self, request, id): o = self.get_object_or_404(ManagedObject, id=id) if not o.has_access(request.user): return self.response_forbidden("Access denied") return sorted( ({ "cls": f.cls, "label": f.label, "attrs": [{ "name": a, "value": f.attrs[a] } for a in f.attrs], "introduced": f.introduced.isoformat(), "changed": f.changed.isoformat(), } for f in ObjectFact.objects.filter(object=o.id)), key=lambda x: (x["cls"], x["label"]), ) @view(url=r"(?P<id>\d+)/revalidate/$", method=["POST"], access="read", api=True) def api_revalidate(self, request, id): def revalidate(o): engine = Engine(o) engine.check() return self.response({"status": True}, self.OK) o = self.get_object_or_404(ManagedObject, id=id) if not o.has_access(request.user): return self.response_forbidden("Access denied") return self.submit_slow_op(request, revalidate, o) @view(url=r"(?P<id>\d+)/actions/(?P<action>\S+)/$", method=["POST"], access="action", api=True) def api_action(self, request, id, action): def execute(o, a, args): return a.execute(o, **args) o = self.get_object_or_404(ManagedObject, id=id) if not o.has_access(request.user): return self.response_forbidden("Access denied") a = self.get_object_or_404(Action, name=action) # @todo: Check access body = request.body if body: args = ujson.loads(body) else: args = {} return self.submit_slow_op(request, execute, o, a, args) @view(url=r"^link/fix/(?P<link_id>[0-9a-f]{24})/$", method=["POST"], access="change_link") def api_fix_links(self, request, link_id): def get_mac(arp, ip): for r in arp: if r["ip"] == ip: return r["mac"] return None def get_interface(macs, mac): for m in macs: if m["mac"] == mac: return m["interfaces"][0] return None def error_status(message, *args): self.logger.error(message, *args) return {"status": False, "message": message % args} def success_status(message, *args): 
self.logger.error(message, *args) return {"status": True, "message": message % args} link = self.get_object_or_404(Link, id=link_id) if len(link.interfaces) != 2: return error_status("Cannot fix link: Not P2P") mo1 = link.interfaces[0].managed_object mo2 = link.interfaces[1].managed_object if mo1.id == mo2.id: return error_status("Cannot fix circular links") # Ping each other self.logger.info("[%s] Pinging %s", mo1.name, mo2.address) r1 = mo1.scripts.ping(address=mo2.address) if not r1["success"]: return error_status("Failed to ping %s", mo2.name) self.logger.info("[%s] Pinging %s", mo2.name, mo1.address) r2 = mo2.scripts.ping(address=mo1.address) if not r2["success"]: return error_status("Failed to ping %s", mo1.name) # Get ARPs mac2 = get_mac(mo1.scripts.get_arp(), mo2.address) if not mac2: return error_status("[%s] ARP cache is not filled properly", mo1.name) self.logger.info("[%s] MAC=%s", mo2.name, mac2) mac1 = get_mac(mo2.scripts.get_arp(), mo1.address) if not mac1: return error_status("[%s] ARP cache is not filled properly", mo2.name) self.logger.info("[%s] MAC=%s", mo1.name, mac1) # Get MACs r1 = mo1.scripts.get_mac_address_table(mac=mac2) self.logger.info("[%s] MACS=%s", mo1.name, r1) r2 = mo2.scripts.get_mac_address_table(mac=mac1) self.logger.info("[%s] MACS=%s", mo2.name, r2) # mo1: Find mo2 i1 = get_interface(r1, mac2) if not i1: return error_status("[%s] Cannot find %s in the MAC address table", mo1.name, mo2.name) # mo2: Find mo1 i2 = get_interface(r2, mac1) if not i1: return error_status("[%s] Cannot find %s in the MAC address table", mo2.name, mo1.name) self.logger.info("%s:%s -- %s:%s", mo1.name, i1, mo2.name, i2) if link.interfaces[0].name == i1 and link.interfaces[1].name == i2: return success_status("Linked properly") # Get interfaces iface1 = mo1.get_interface(i1) if not iface1: return error_status("[%s] Interface not found: %s", mo1.name, i1) iface2 = mo2.get_interface(i2) if not iface2: return error_status("[%s] Interface not found: %s", 
mo2.name, i2) # Check we can relink if_ids = [i.id for i in link.interfaces] if iface1.id not in if_ids and iface1.is_linked: return error_status("[%s] %s is already linked", mo1.name, i1) if iface2.id not in if_ids and iface2.is_linked: return error_status("[%s] %s is already linked", mo2.name, i2) # Relink self.logger.info("Relinking") link.delete() iface1.link_ptp(iface2, method="macfix") return success_status("Relinked") @view(url=r"^(?P<id>\d+)/cpe/$", method=["GET"], access="read", api=True) def api_cpe(self, request, id): """ GET CPEs :param request: :param id: :return: """ def sorted_iname(s): return sorted(s, key=lambda x: split_alnum(x["name"])) # Get object o = self.get_object_or_404(ManagedObject, id=int(id)) if not o.has_access(request.user): return self.response_forbidden("Permission denied") # CPE # @todo: proper ordering # default_state = ResourceState.get_default() # style_cache = {} # profile_id -> css_style l1 = [ { "global_id": str(c.global_id), "name": c.name or "", "interface": c.interface, "local_id": c.local_id, "serial": c.serial or "", "status": c.status, "description": c.description or "", "address": c.ip or "", "model": c.model or "", "version": c.version or "", "mac": c.mac or "", "location": c.location or "", "distance": str(c.distance) # "row_class": get_style(i) } for c in CPEStatus.objects.filter(managed_object=o.id) ] return {"cpe": sorted_iname(l1)}
def __init__(self, *args, **kwargs):
    """
    Build the ExtDoc application around its mongoengine model:
    detect the primary key and UUID support, derive per-field input
    cleaners, default quick-search fields and unique fields, and
    install the JSON/share API views when the model supports them.
    """
    super(ExtDocApplication, self).__init__(*args, **kwargs)
    self.pk = "id"  # @todo: detect properly
    self.has_uuid = False
    # Prepare field converters: model field name -> Parameter instance
    self.clean_fields = self.clean_fields.copy()  # name -> Parameter
    for name, f in six.iteritems(self.model._fields):
        if isinstance(f, BooleanField):
            self.clean_fields[name] = BooleanParameter()
        elif isinstance(f, GeoPointField):
            self.clean_fields[name] = GeoPointParameter()
        elif isinstance(f, ForeignKeyField):
            # Key consistently by the iterated attribute name
            # (mongoengine sets f.name to the same value; the model-based
            # __init__ variant already uses `name` throughout)
            self.clean_fields[name] = ModelParameter(f.document_type, required=f.required)
        elif isinstance(f, ListField):
            # Only lists of embedded documents get a cleaner;
            # plain lists are passed through untouched
            if isinstance(f.field, EmbeddedDocumentField):
                self.clean_fields[name] = ListOfParameter(
                    element=EmbeddedDocumentParameter(f.field.document_type)
                )
        elif isinstance(f, ReferenceField):
            dt = f.document_type_obj
            if dt == "self":
                # Self-referencing document type
                dt = self.model
            self.clean_fields[name] = DocumentParameter(dt, required=f.required)
        if f.primary_key:
            self.pk = name
        if name == "uuid":
            self.has_uuid = True
    # Default quick-search fields: every unique string field
    if not self.query_fields:
        self.query_fields = [
            "%s__%s" % (n, self.query_condition)
            for n, f in six.iteritems(self.model._fields)
            if f.unique and isinstance(f, StringField)
        ]
    self.unique_fields = [n for n, f in six.iteritems(self.model._fields) if f.unique]
    # Install JSON API call when necessary
    self.json_collection = self.model._meta.get("json_collection")
    if (
        self.has_uuid
        and hasattr(self.model, "to_json")
        and not hasattr(self, "api_to_json")
        and not hasattr(self, "api_json")
    ):
        self.add_view(
            "api_json",
            self._api_to_json,
            url=r"^(?P<id>[0-9a-f]{24})/json/$",
            method=["GET"],
            access="read",
            api=True,
        )
        self.add_view(
            "api_share_info",
            self._api_share_info,
            url=r"^(?P<id>[0-9a-f]{24})/share_info/$",
            method=["GET"],
            access="read",
            api=True,
        )
    if self.json_collection:
        self.bulk_fields += [self._bulk_field_is_builtin]
    # Find field_* methods and populate custom fields
    self.custom_fields = {}
    for fn in [n for n in dir(self) if n.startswith("field_")]:
        h = getattr(self, fn)
        if callable(h):
            self.custom_fields[fn[6:]] = h
class ReportObjectDetailApplication(ExtApplication):
    """
    "Object Detail" report: exports a per-managed-object table
    (status, platform, versions, links, etc.) as CSV or XLSX.
    """

    menu = _("Reports") + "|" + _("Object Detail")
    title = _("Object Detail")
    SEGMENT_PATH_DEPTH = 7
    CONTAINER_PATH_DEPTH = 7

    def get_report_object(self, user=None, is_managed=None, adm=None, selector=None,
                          pool=None, segment=None, ids=None):
        """
        Build the ManagedObject queryset for the report.

        NOTE(review): the `ids` and `is_managed` branches start a fresh
        queryset (`ManagedObject.objects.filter(...)`), discarding any
        filter applied by earlier branches — confirm this reset is
        intentional before relying on combined filters.
        """
        mos = ManagedObject.objects.filter()
        if user.is_superuser and not adm and not selector and not segment:
            mos = ManagedObject.objects.filter()
        if ids:
            # `ids` arrives as a single value and is wrapped in a list
            mos = ManagedObject.objects.filter(id__in=[ids])
        if is_managed is not None:
            mos = ManagedObject.objects.filter(is_managed=is_managed)
        if pool:
            p = Pool.get_by_name(pool or "default")
            mos = mos.filter(pool=p)
        if not user.is_superuser:
            # Non-superusers only see their administrative domains
            mos = mos.filter(
                administrative_domain__in=UserAccess.get_domains(user))
        if adm:
            ads = AdministrativeDomain.get_nested_ids(int(adm))
            mos = mos.filter(administrative_domain__in=ads)
        if selector:
            selector = ManagedObjectSelector.get_by_id(int(selector))
            mos = mos.filter(selector.Q)
        if segment:
            segment = NetworkSegment.objects.filter(id=segment).first()
            if segment:
                mos = mos.filter(segment__in=segment.get_nested_ids())
        return mos

    @view(
        "^download/$",
        method=["GET"],
        access="launch",
        api=True,
        validate={
            "administrative_domain": StringParameter(required=False),
            "pool": StringParameter(required=False),
            "segment": StringParameter(required=False),
            "selector": StringParameter(required=False),
            "ids": StringParameter(required=False),
            "detail_stat": StringParameter(required=False),
            "is_managed": BooleanParameter(required=False),
            "avail_status": BooleanParameter(required=False),
            "columns": StringParameter(required=False),
            "o_format": StringParameter(choices=["csv", "xlsx"]),
        },
    )
    def api_report(
        self,
        request,
        o_format,
        is_managed=None,
        administrative_domain=None,
        selector=None,
        pool=None,
        segment=None,
        avail_status=False,
        columns=None,
        ids=None,
        detail_stat=None,
        enable_autowidth=False,
    ):
        """
        Render the object detail report as CSV or XLSX.

        NOTE(review): `columns` is declared optional in validate, but
        `columns.split(",")` below fails on None — in practice callers
        always pass it; confirm or add a default.
        """
        def row(row):
            # Normalize a cell to a csv/xlsx-writable value
            def qe(v):
                if v is None:
                    return ""
                if isinstance(v, unicode):
                    # Python 2 text -> utf-8 bytes
                    return v.encode("utf-8")
                elif isinstance(v, datetime.datetime):
                    return v.strftime("%Y-%m-%d %H:%M:%S")
                elif not isinstance(v, str):
                    return str(v)
                else:
                    return v

            return [qe(x) for x in row]

        def translate_row(row, cmap):
            # Project a full row onto the requested column indexes
            return [row[i] for i in cmap]

        type_columns = ["Up/10G", "Up/1G", "Up/100M", "Up/10M", "Down/-", "-"]
        cols = [
            "id",
            "object_name",
            "object_address",
            "object_hostname",
            "object_status",
            "profile_name",
            "object_profile",
            "object_vendor",
            "object_platform",
            "object_attr_hwversion",
            "object_version",
            "object_attr_bootprom",
            "object_serial",
            "object_attr_patch",
            "auth_profile",
            "avail",
            "admin_domain",
            "container",
            "segment",
            "phys_interface_count",
            "link_count",
            "last_config_ts"
            # "discovery_problem"
            # "object_tags"
            # "sorted_tags"
            # "object_caps"
            # "interface_type_count"
        ]
        header_row = [
            "ID",
            "OBJECT_NAME",
            "OBJECT_ADDRESS",
            "OBJECT_HOSTNAME",
            "OBJECT_STATUS",
            "PROFILE_NAME",
            "OBJECT_PROFILE",
            "OBJECT_VENDOR",
            "OBJECT_PLATFORM",
            "OBJECT_HWVERSION",
            "OBJECT_VERSION",
            "OBJECT_BOOTPROM",
            "OBJECT_SERIAL",
            "OBJECT_ATTR_PATCH",
            "AUTH_PROFILE",
            "AVAIL",
            "ADMIN_DOMAIN",
            "CONTAINER",
            "SEGMENT",
            "PHYS_INTERFACE_COUNT",
            "LINK_COUNT",
            "LAST_CONFIG_TS",
        ]
        # "DISCOVERY_PROBLEM"
        # "ADM_PATH
        # "DISCOVERY_PROBLEM"
        # "OBJECT_TAGS"
        # "SORTED_TAGS"
        # "OBJECT_CAPS"
        # "INTERFACE_TYPE_COUNT"
        if columns:
            # Keep only known column names, silently dropping unknowns
            cmap = []
            for c in columns.split(","):
                try:
                    cmap += [cols.index(c)]
                except ValueError:
                    continue
        else:
            cmap = list(range(len(cols)))
        r = [translate_row(header_row, cmap)]
        mos = self.get_report_object(request.user, is_managed, administrative_domain,
                                     selector, pool, segment, ids)
        columns_filter = set(columns.split(","))
        mos_id = tuple(mos.order_by("id").values_list("id", flat=True))
        mos_filter = None
        if detail_stat:
            ref = ReportModelFilter()
            ids = list(six.itervalues(ref.proccessed(detail_stat)))
            mos_filter = set(mos_id).intersection(ids[0])
            mos_id = sorted(mos_filter)
        avail = {}
        if "avail" in columns_filter:
            avail = ObjectStatus.get_statuses(mos_id)
        # Per-object data streams; each must be advanced exactly once
        # per object below, in the same id order
        link_count = iter(ReportObjectLinkCount(mos_id))
        iface_count = iter(ReportObjectIfacesTypeStat(mos_id))
        if "container" in columns_filter:
            container_lookup = iter(ReportContainerData(mos_id))
        else:
            container_lookup = None
        if "object_serial" in columns_filter:
            container_serials = iter(ReportContainer(mos_id))
        else:
            container_serials = None
        if "interface_type_count" in columns_filter:
            iss = iter(ReportObjectIfacesStatusStat(mos_id))
        else:
            iss = None
        if "object_attr_patch" in columns_filter or "object_serial" in columns_filter:
            roa = iter(ReportObjectAttributes(mos_id))
        else:
            roa = None
        hn = iter(ReportObjectsHostname1(mos_id))
        rc = iter(ReportObjectConfig(mos_id))
        # ccc = iter(ReportObjectCaps(mos_id))
        # Optional extra header columns appended to the header row
        if "adm_path" in columns_filter:
            ad_path = ReportAdPath()
            r[-1].extend([_("ADM_PATH1"), _("ADM_PATH1"), _("ADM_PATH1")])
        if "interface_type_count" in columns_filter:
            r[-1].extend(type_columns)
        if "object_caps" in columns_filter:
            object_caps = ReportObjectCaps(mos_id)
            caps_columns = list(six.itervalues(object_caps.ATTRS))
            ccc = iter(object_caps)
            r[-1].extend(caps_columns)
        if "object_tags" in columns_filter:
            r[-1].extend([_("OBJECT_TAGS")])
        if "sorted_tags" in columns_filter:
            # One header column per distinct tag (template tags with "{" excluded)
            tags = set()
            for s in (ManagedObject.objects.filter().exclude(
                    tags=None).values_list("tags", flat=True).distinct()):
                tags.update(set(s))
            tags_o = sorted([t for t in tags if "{" not in t])
            r[-1].extend(tags_o)
        if "discovery_problem" in columns.split(","):
            discovery_result = ReportDiscoveryResult(mos_id)
            discovery_result.safe_output = True
            discovery_result.unknown_value = ([""] * len(discovery_result.ATTRS), )
            dp_columns = discovery_result.ATTRS
            dp = iter(discovery_result)
            r[-1].extend(dp_columns)
        for (
            mo_id,
            name,
            address,
            is_managed,
            sa_profile,
            o_profile,
            auth_profile,
            ad,
            m_segment,
            vendor,
            platform,
            version,
            tags,
        ) in (mos.values_list(
            "id",
            "name",
            "address",
            "is_managed",
            "profile",
            "object_profile__name",
            "auth_profile__name",
            "administrative_domain__name",
            "segment",
            "vendor",
            "platform",
            "version",
            "tags",
        ).order_by("id").iterator()):
            if (mos_filter and mo_id not in mos_filter) or not mos_id:
                continue
            if container_serials:
                mo_serials = next(container_serials)
            else:
                mo_serials = [{}]
            if container_lookup:
                mo_continer = next(container_lookup)
            else:
                mo_continer = ("", )
            if roa:
                serial, hw_ver, boot_prom, patch = next(roa)[0]  # noqa
            else:
                serial, hw_ver, boot_prom, patch = "", "", "", ""  # noqa
            r.append(
                translate_row(
                    row([
                        mo_id,
                        name,
                        address,
                        next(hn)[0],
                        "managed" if is_managed else "unmanaged",
                        Profile.get_by_id(sa_profile),
                        o_profile,
                        Vendor.get_by_id(vendor) if vendor else "",
                        Platform.get_by_id(platform) if platform else "",
                        hw_ver,
                        Firmware.get_by_id(version) if version else "",
                        boot_prom,
                        # Serial: container-provided serial wins over attribute
                        mo_serials[0].get("serial", "") or serial,
                        patch or "",
                        auth_profile,
                        _("Yes") if avail.get(mo_id, None) else _("No"),
                        ad,
                        mo_continer[0],
                        NetworkSegment.get_by_id(m_segment) if m_segment else "",
                        next(iface_count)[0],
                        next(link_count)[0],
                        next(rc)[0],
                    ]),
                    cmap,
                ))
            # Optional extra columns appended beyond the cmap projection
            if "adm_path" in columns_filter:
                r[-1].extend([ad] + list(ad_path[ad]))
            if "interface_type_count" in columns_filter:
                r[-1].extend(next(iss)[0])
            if "object_caps" in columns_filter:
                r[-1].extend(next(ccc)[0])
            if "object_tags" in columns_filter:
                r[-1].append(",".join(tags if tags else []))
            if "sorted_tags" in columns_filter:
                out_tags = [""] * len(tags_o)
                try:
                    if tags:
                        for m in tags:
                            out_tags[tags_o.index(m)] = m
                except ValueError:
                    # NOTE(review): `m` may be unbound here if `tags` is falsy
                    logger.warning("Bad value for tag: %s", m)
                r[-1].extend(out_tags)
            if "discovery_problem" in columns_filter:
                r[-1].extend(next(dp)[0])
        filename = "mo_detail_report_%s" % datetime.datetime.now().strftime(
            "%Y%m%d")
        if o_format == "csv":
            response = HttpResponse(content_type="text/csv")
            response[
                "Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
            writer = csv.writer(response, dialect="excel", delimiter=";", quotechar='"')
            writer.writerows(r)
            return response
        elif o_format == "xlsx":
            # Build the workbook fully in memory
            response = StringIO()
            wb = xlsxwriter.Workbook(response)
            cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
            ws = wb.add_worksheet("Objects")
            max_column_data_length = {}
            for rn, x in enumerate(r):
                for cn, c in enumerate(x):
                    # Track the widest cell per header for autowidth
                    if rn and (r[0][cn] not in max_column_data_length or
                               len(str(c)) > max_column_data_length[r[0][cn]]):
                        max_column_data_length[r[0][cn]] = len(str(c))
                    ws.write(rn, cn, c, cf1)
            # for
            ws.autofilter(0, 0, rn, cn)
            ws.freeze_panes(1, 0)
            for cn, c in enumerate(r[0]):
                # Set column width
                width = get_column_width(c)
                if enable_autowidth and width < max_column_data_length[c]:
                    width = max_column_data_length[c]
                ws.set_column(cn, cn, width=width)
            wb.close()
            response.seek(0)
            response = HttpResponse(response.getvalue(),
                                    content_type="application/vnd.ms-excel")
            # response = HttpResponse(
            #     content_type="application/x-ms-excel")
            response[
                "Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
            response.close()
            return response
RequestFrom = ObjectPointer | InterfacePointer | ServicePointer # to: section RequestTo = ObjectPointer | LevelPointer | InterfacePointer | ServicePointer # config: section RequestConfig = DictParameter( attrs={ "max_depth": IntParameter(default=MAX_DEPTH_DEFAULT), "n_shortest": IntParameter(default=N_SHORTEST_DEFAULT), }, required=False, ) # constraints: section RequestVLANConstraint = DictParameter( attrs={ "vlan": VLANIDParameter(required=False), "interface_untagged": BooleanParameter(required=False), "strict": BooleanParameter(default=False), }, required=False, ) RequestConstraints = DictParameter( attrs={"vlan": RequestVLANConstraint, "upwards": BooleanParameter(default=False)}, required=False, ) Request = DictParameter( attrs={ "from": RequestFrom, "to": RequestTo, "config": RequestConfig, "constraints": RequestConstraints, }
from noc.sa.interfaces.base import DictListParameter, ObjectIdParameter, BooleanParameter
from noc.core.bi.decorator import bi_sync
from noc.ip.models.prefixprofile import PrefixProfile
from noc.ip.models.addressprofile import AddressProfile
from noc.vc.models.vpnprofile import VPNProfile
from noc.main.models.extstorage import ExtStorage
from noc.main.models.template import Template
from noc.core.datastream.decorator import datastream
from noc.cm.models.objectvalidationpolicy import ObjectValidationPolicy
from .authprofile import AuthProfile
from .capsprofile import CapsProfile

# Validation schema for a list of per-metric configuration dicts
m_valid = DictListParameter(
    attrs={
        "metric_type": ObjectIdParameter(required=True),
        "enable_box": BooleanParameter(default=False),
        "enable_periodic": BooleanParameter(default=True),
        "is_stored": BooleanParameter(default=True),
        "threshold_profile": ObjectIdParameter(required=False),
    })

# Guards lazy id-based lookups on the decorated profile class
id_lock = Lock()


@on_init
@on_save
@bi_sync
@datastream
@on_delete_check(check=[
    ("sa.ManagedObject", "object_profile"),
    ("sa.ManagedObjectProfile", "cpe_profile"),
class ReportLinkDetailApplication(ExtApplication):
    """
    "Link Detail" report: exports one row per point-to-point link
    (both endpoints' names/addresses/interfaces) as CSV or XLSX.
    """

    menu = _("Reports") + "|" + _("Link Detail")
    # Fixed copy-paste defect: title previously said "Object Detail"
    # while the menu entry says "Link Detail"
    title = _("Link Detail")
    SEGMENT_PATH_DEPTH = 7
    CONTAINER_PATH_DEPTH = 7

    @view("^download/$", method=["GET"], access="launch", api=True,
          validate={
              "administrative_domain": StringParameter(required=False),
              "pool": StringParameter(required=False),
              "segment": StringParameter(required=False),
              "selector": StringParameter(required=False),
              "ids": StringParameter(required=False),
              "is_managed": BooleanParameter(required=False),
              "avail_status": BooleanParameter(required=False),
              "columns": StringParameter(required=False),
              "o_format": StringParameter(choices=["csv", "xlsx"])})
    def api_report(self, request, o_format, is_managed=None,
                   administrative_domain=None, selector=None, pool=None,
                   segment=None, avail_status=False, columns=None, ids=None):
        """
        Render the link detail report in the requested format.

        :param o_format: "csv" or "xlsx"
        :param columns: comma-separated column names to include
        """
        def row(row):
            # Normalize a cell value to something csv/xlsx can write
            def qe(v):
                if v is None:
                    return ""
                if isinstance(v, unicode):
                    # Python 2 text -> utf-8 bytes
                    return v.encode("utf-8")
                elif isinstance(v, datetime.datetime):
                    return v.strftime("%Y-%m-%d %H:%M:%S")
                elif not isinstance(v, str):
                    return str(v)
                else:
                    return v

            return [qe(x) for x in row]

        def translate_row(row, cmap):
            # Project a full row onto the requested column indexes
            return [row[i] for i in cmap]

        type_columns = ["Up/10G", "Up/1G", "Up/100M", "Down/-", "-"]
        cols = [
            "admin_domain",
            # "id",
            "object1_name",
            "object1_address",
            "object1_iface",
            "object2_name",
            "object2_address",
            "object2_iface",
            "link_proto",
            "last_seen"
        ]
        header_row = [
            "ADMIN_DOMAIN",
            "OBJECT1_NAME",
            "OBJECT1_ADDRESS",
            "OBJECT1_IFACE",
            "OBJECT2_NAME",
            "OBJECT2_ADDRESS",
            "OBJECT2_IFACE",
            "LINK_PROTO",
            "LAST_SEEN"
        ]
        if columns:
            # Keep only known column names, silently dropping unknowns
            cmap = []
            for c in columns.split(","):
                try:
                    cmap += [cols.index(c)]
                except ValueError:
                    continue
        else:
            cmap = list(range(len(cols)))
        r = [translate_row(header_row, cmap)]
        if "interface_type_count" in columns.split(","):
            r[-1].extend(type_columns)
        # self.logger.info(r)
        # self.logger.info("---------------------------------")
        # print("-----------%s------------%s" % (administrative_domain, columns))
        p = Pool.get_by_name(pool or "default")
        mos = ManagedObject.objects.filter()
        # NOTE(review): the ids/is_managed branches below restart the
        # queryset, discarding earlier filters — mirrored from the
        # sibling report applications
        if request.user.is_superuser and not administrative_domain and not selector and not segment:
            mos = ManagedObject.objects.filter(pool=p)
        if ids:
            mos = ManagedObject.objects.filter(id__in=[ids])
        if is_managed is not None:
            mos = ManagedObject.objects.filter(is_managed=is_managed)
        if pool:
            mos = mos.filter(pool=p)
        if not request.user.is_superuser:
            mos = mos.filter(administrative_domain__in=UserAccess.get_domains(request.user))
        if administrative_domain:
            ads = AdministrativeDomain.get_nested_ids(int(administrative_domain))
            mos = mos.filter(administrative_domain__in=ads)
        if selector:
            selector = ManagedObjectSelector.get_by_id(int(selector))
            mos = mos.filter(selector.Q)
        if segment:
            segment = NetworkSegment.objects.filter(id=segment).first()
            if segment:
                mos = mos.filter(segment__in=segment.get_nested_ids())
        mos_id = list(mos.values_list("id", flat=True))
        rld = ReportLinksDetail(mos_id)
        # id -> (admin domain, name, address) for endpoint resolution
        mo_resolv = dict((mo[0], mo[1:]) for mo in ManagedObject.objects.filter().values_list(
            "id", "administrative_domain__name", "name", "address"))
        for link in rld.out:
            if len(rld.out[link]) != 2:
                # Multilink or bad link
                continue
            s1, s2 = rld.out[link]
            r += [translate_row(row([
                mo_resolv[s1["mo"][0]][0],
                mo_resolv[s1["mo"][0]][1],
                mo_resolv[s1["mo"][0]][2],
                s1["iface_n"][0],
                mo_resolv[s2["mo"][0]][1],
                mo_resolv[s2["mo"][0]][2],
                s2["iface_n"][0],
                s1.get("dis_method", ""),
                s1.get("last_seen", "")
            ]), cmap)]
        filename = "links_detail_report_%s" % datetime.datetime.now().strftime("%Y%m%d")
        if o_format == "csv":
            response = HttpResponse(content_type="text/csv")
            response[
                "Content-Disposition"] = "attachment; filename=\"%s.csv\"" % filename
            writer = csv.writer(response, dialect='excel', delimiter=';')
            writer.writerows(r)
            return response
        elif o_format == "xlsx":
            # Render to a temp file, then stream its bytes back
            with tempfile.NamedTemporaryFile(mode="wb") as f:
                wb = xlsxwriter.Workbook(f.name)
                ws = wb.add_worksheet("Objects")
                for rn, x in enumerate(r):
                    for cn, c in enumerate(x):
                        ws.write(rn, cn, c)
                ws.autofilter(0, 0, rn, cn)
                wb.close()
                response = HttpResponse(
                    content_type="application/x-ms-excel")
                response[
                    "Content-Disposition"] = "attachment; filename=\"%s.xlsx\"" % filename
                with open(f.name) as ff:
                    response.write(ff.read())
                return response
class ReportMovedMacApplication(ExtApplication):
    """
    "Moved MACs" report: lists MAC addresses that migrated between
    interfaces within a date range, sourced from ClickHouse, as
    CSV, zipped CSV or XLSX.
    """

    menu = _("Reports") + "|" + _("Moved MACs")
    title = _("Moved MACs")
    SEGMENT_PATH_DEPTH = 7
    CONTAINER_PATH_DEPTH = 7

    def get_report_object(
        self, user=None, is_managed=None, adm=None, selector=None, pool=None, segment=None
    ):
        """
        Build the ManagedObject queryset for the report.

        NOTE(review): the is_managed branch restarts the queryset,
        discarding earlier filters — mirrored from sibling reports.
        """
        mos = ManagedObject.objects.filter()
        if user.is_superuser and not adm and not selector and not segment:
            mos = ManagedObject.objects.filter()
        if is_managed is not None:
            mos = ManagedObject.objects.filter(is_managed=is_managed)
        if pool:
            p = Pool.get_by_name(pool or "default")
            mos = mos.filter(pool=p)
        if not user.is_superuser:
            # Non-superusers only see their administrative domains
            mos = mos.filter(administrative_domain__in=UserAccess.get_domains(user))
        if adm:
            ads = AdministrativeDomain.get_nested_ids(int(adm))
            mos = mos.filter(administrative_domain__in=ads)
        if selector:
            selector = ManagedObjectSelector.get_by_id(int(selector))
            mos = mos.filter(selector.Q)
        if segment:
            segment = NetworkSegment.objects.filter(id=segment).first()
            if segment:
                mos = mos.filter(segment__in=segment.get_nested_ids())
        return mos

    @view(
        r"^download/$",
        method=["GET"],
        access="launch",
        api=True,
        validate={
            "from_date": StringParameter(required=True),
            "to_date": StringParameter(required=True),
            "administrative_domain": StringParameter(required=False),
            # "pool": StringParameter(required=False),
            "segment": StringParameter(required=False),
            "selector": StringParameter(required=False),
            "interface_profile": StringParameter(required=False),
            "exclude_serial_change": BooleanParameter(default=False),
            "columns": StringParameter(required=False),
            "o_format": StringParameter(choices=["csv", "csv_zip", "xlsx"]),
        },
    )
    def api_report(
        self,
        request,
        reporttype=None,
        from_date=None,
        to_date=None,
        object_profile=None,
        filter_default=None,
        exclude_zero=None,
        interface_profile=None,
        selector=None,
        administrative_domain=None,
        columns=None,
        o_format=None,
        enable_autowidth=False,
        exclude_serial_change=False,
        **kwargs,
    ):
        """
        Render the moved-MACs report.

        Dates are accepted as DD.MM.YYYY; `exclude_serial_change`
        drops objects whose serial number changed in the interval
        (i.e. likely hardware replacement rather than a MAC move).
        """
        def translate_row(row, cmap):
            # Project a full row onto the requested column indexes
            return [row[i] for i in cmap]

        cols = [
            "object_name",
            "object_address",
            "object_adm_domain",
            "event_type",
            "sn_changed",
            "vendor_mac",
            "mac",
            "migrate_ts",
            "from_iface_name",
            "from_iface_down",
            "to_iface_name",
            "to_iface_down",
        ]
        header_row = [
            "OBJECT_NAME",
            "OBJECT_ADDRESS",
            "OBJECT_ADM_DOMAIN",
            "EVENT_TYPE",
            "SN_CHANGED",
            "VENDOR_MAC",
            "MAC",
            "MIGRATE_TS",
            "FROM_IFACE_NAME",
            "FROM_IFACE_DOWN",
            "TO_IFACE_NAME",
            "TO_IFACE_DOWN",
        ]
        if columns:
            # Keep only known column names, silently dropping unknowns
            cmap = []
            for c in columns.split(","):
                try:
                    cmap += [cols.index(c)]
                except ValueError:
                    continue
        else:
            cmap = list(range(len(cols)))
        r = [translate_row(header_row, cmap)]
        # Date Time Block
        if not from_date:
            from_date = datetime.datetime.now() - datetime.timedelta(days=1)
        else:
            from_date = datetime.datetime.strptime(from_date, "%d.%m.%Y")
        if not to_date or from_date == to_date:
            to_date = from_date + datetime.timedelta(days=1)
        else:
            # End date is exclusive: push one day past the requested day
            to_date = datetime.datetime.strptime(to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
        # interval = (to_date - from_date).days
        # ts_from_date = time.mktime(from_date.timetuple())
        # ts_to_date = time.mktime(to_date.timetuple())
        mos = self.get_report_object(
            user=request.user, adm=administrative_domain, selector=selector
        )
        # ClickHouse rows are keyed by bi_id, not the relational id
        mos_id = set(mos.order_by("bi_id").values_list("bi_id", flat=True))
        if interface_profile:
            interface_profile = InterfaceProfile.objects.get(id=interface_profile)
            iface_filter = (
                "dictGetString('interfaceattributes', 'profile', (managed_object, interface)) == '%s'"
                % interface_profile.name
            )
        else:
            iface_filter = "is_uni = 1"
        serials_changed = {}
        ch = connection()
        # Objects whose serial changed in the interval (hardware swap)
        for row in ch.execute(
            DEVICE_MOVED_QUERY
            % (
                from_date.date().isoformat(),
                (to_date.date() - datetime.timedelta(days=1)).isoformat(),
            )
        ):
            serials_changed[int(row[0])] = row[1]
        for (
            mo,
            mac,
            mo_name,
            mo_address,
            mo_adm_domain,
            ifaces,
            migrate_ifaces,
            migrate_count,
        ) in ch.execute(
            MAC_MOVED_QUERY % (iface_filter, from_date.date().isoformat(), to_date.date().isoformat())
        ):
            if int(mo) not in mos_id:
                continue
            if exclude_serial_change and int(mo) in serials_changed:
                continue
            iface_from, iface_to, migrate = get_interface(ifaces)
            event_type = _("Migrate")
            # Same port number on a different interface name suggests the
            # device itself was replaced rather than the cable moved
            if (
                rx_port_num.search(iface_from).group() == rx_port_num.search(iface_to).group()
                and iface_from != iface_to
            ):
                event_type = _("Migrate (Device Changed)")
            r += [
                translate_row(
                    [
                        mo_name,
                        mo_address,
                        mo_adm_domain,
                        event_type,
                        _("Yes") if int(mo) in serials_changed else _("No"),
                        MACVendor.get_vendor(mac),
                        mac,
                        datetime.datetime.fromtimestamp(migrate[1]).isoformat(sep=" "),  # TS
                        iface_from,
                        "--",
                        iface_to,
                        "--",
                    ],
                    cmap,
                )
            ]
        filename = "macs_move_report_%s" % datetime.datetime.now().strftime("%Y%m%d")
        if o_format == "csv":
            response = HttpResponse(content_type="text/csv")
            response["Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
            writer = csv.writer(response, dialect="excel", delimiter=",", quoting=csv.QUOTE_MINIMAL)
            writer.writerows(r)
            return response
        elif o_format == "csv_zip":
            # Write CSV into a temp file, then zip it into the response
            response = BytesIO()
            f = TextIOWrapper(TemporaryFile(mode="w+b"), encoding="utf-8")
            writer = csv.writer(f, dialect="excel", delimiter=";", quotechar='"')
            writer.writerows(r)
            f.seek(0)
            with ZipFile(response, "w", compression=ZIP_DEFLATED) as zf:
                zf.writestr("%s.csv" % filename, f.read())
                zf.filename = "%s.csv.zip" % filename
            # response = HttpResponse(content_type="text/csv")
            response.seek(0)
            response = HttpResponse(response.getvalue(), content_type="application/zip")
            response["Content-Disposition"] = 'attachment; filename="%s.csv.zip"' % filename
            return response
        elif o_format == "xlsx":
            # Build the workbook fully in memory
            response = BytesIO()
            wb = xlsxwriter.Workbook(response)
            cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
            ws = wb.add_worksheet("Alarms")
            max_column_data_length = {}
            for rn, x in enumerate(r):
                for cn, c in enumerate(x):
                    # Track the widest cell per header for autowidth
                    if rn and (
                        r[0][cn] not in max_column_data_length
                        or len(str(c)) > max_column_data_length[r[0][cn]]
                    ):
                        max_column_data_length[r[0][cn]] = len(str(c))
                    ws.write(rn, cn, c, cf1)
            ws.autofilter(0, 0, rn, cn)
            ws.freeze_panes(1, 0)
            for cn, c in enumerate(r[0]):
                # Set column width
                width = get_column_width(c)
                if enable_autowidth and width < max_column_data_length[c]:
                    width = max_column_data_length[c]
                ws.set_column(cn, cn, width=width)
            wb.close()
            response.seek(0)
            response = HttpResponse(response.getvalue(), content_type="application/vnd.ms-excel")
            response["Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
            response.close()
            return response
class ReportMetricsDetailApplication(ExtApplication):
    """
    "Load Metrics" report: per-interface/CPU/ping load percentiles
    over a date range, queried from ClickHouse, as CSV or XLSX.
    """

    menu = _("Reports") + "|" + _("Load Metrics")
    title = _("Load Metrics")

    # reporttype -> metric query class
    # NOTE(review): "load_memory" is listed here but is absent from the
    # reporttype validate choices below — confirm whether it is reachable
    metric_source = {
        "load_interfaces": ReportInterfaceMetrics,
        "load_cpu": ReportCPUMetrics,
        "load_memory": ReportMemoryMetrics,
        "ping": ReportPingMetrics,
    }

    @view(
        "^download/$",
        method=["GET"],
        access="launch",
        api=True,
        validate={
            "from_date": StringParameter(required=True),
            "to_date": StringParameter(required=True),
            "reporttype": StringParameter(
                required=True, choices=["load_interfaces", "load_cpu", "ping"]
            ),
            "administrative_domain": StringParameter(required=False),
            # "pool": StringParameter(required=False),
            "segment": StringParameter(required=False),
            "selector": StringParameter(required=False),
            "interface_profile": StringParameter(required=False),
            "exclude_zero": BooleanParameter(required=False),
            "filter_default": BooleanParameter(required=False),
            "columns": StringParameter(required=False),
            "o_format": StringParameter(choices=["csv", "xlsx"]),
        },
    )
    def api_report(
        self,
        request,
        reporttype=None,
        from_date=None,
        to_date=None,
        object_profile=None,
        filter_default=None,
        exclude_zero=None,
        interface_profile=None,
        selector=None,
        administrative_domain=None,
        columns=None,
        o_format=None,
        enable_autowidth=False,
        **kwargs
    ):
        """
        Render the load metrics report.

        The selected `columns` drive both the output projection and the
        generated ClickHouse SELECT expressions (via query_map).
        NOTE(review): `columns.split(",")` requires `columns` to be
        provided although validate marks it optional.
        """
        def translate_row(row, cmap):
            # Project a full row onto the requested column indexes
            return [row[i] for i in cmap]

        # reporttype -> Grafana dashboard regexp (rname URL parameter)
        map_table = {
            "load_interfaces": "/Interface\s\|\sLoad\s\|\s[In|Out]/",
            "load_cpu": "/[CPU|Memory]\s\|\sUsage/",
            "errors": "/Interface\s\|\s[Errors|Discards]\s\|\s[In|Out]/",
            "ping": "/Ping\s\|\sRTT/",
        }
        cols = [
            "id",
            "object_name",
            "object_address",
            "object_platform",
            "object_adm_domain",
            "object_segment",
            # "object_hostname",
            # "object_status",
            # "profile_name",
            # "object_profile",
            # "object_vendor",
            "iface_name",
            "iface_description",
            "iface_speed",
            "load_in",
            "load_in_p",
            "load_out",
            "load_out_p",
            "errors_in",
            "errors_out",
            "slot",
            "cpu_usage",
            "memory_usage",
            "ping_rtt",
            "ping_attempts",
            "interface_flap",
            "interface_load_url",
        ]
        header_row = [
            "ID",
            "OBJECT_NAME",
            "OBJECT_ADDRESS",
            "OBJECT_PLATFORM",
            "OBJECT_ADM_DOMAIN",
            "OBJECT_SEGMENT",
            "IFACE_NAME",
            "IFACE_DESCRIPTION",
            "IFACE_SPEED",
            "LOAD_IN",
            "LOAD_IN_P",
            "LOAD_OUT",
            "LOAD_OUT_P",
            "ERRORS_IN",
            "ERRORS_OUT",
            "CPU_USAGE",
            "MEMORY_USAGE",
            "PING_RTT",
            "PING_ATTEMPTS",
            "INTERFACE_FLAP",
            "INTERFACE_LOAD_URL",
        ]
        if columns:
            # Keep only known column names, silently dropping unknowns
            cmap = []
            for c in columns.split(","):
                try:
                    cmap += [cols.index(c)]
                except ValueError:
                    continue
        else:
            cmap = list(range(len(cols)))
        columns_order = columns.split(",")
        columns_filter = set(columns_order)
        r = [translate_row(header_row, cmap)]
        object_columns = [c for c in columns_order if c.startswith("object")]
        # Date Time Block
        if not from_date:
            from_date = datetime.datetime.now() - datetime.timedelta(days=1)
        else:
            from_date = datetime.datetime.strptime(from_date, "%d.%m.%Y")
        if not to_date or from_date == to_date:
            to_date = from_date + datetime.timedelta(days=1)
        else:
            # End date is exclusive: push one day past the requested day
            to_date = datetime.datetime.strptime(to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
        # interval = (to_date - from_date).days
        # Millisecond timestamps for the Grafana dashboard links
        ts_from_date = time.mktime(from_date.timetuple())
        ts_to_date = time.mktime(to_date.timetuple())
        # Load managed objects
        mos = ManagedObject.objects.filter(is_managed=True)
        if not request.user.is_superuser:
            mos = mos.filter(administrative_domain__in=UserAccess.get_domains(request.user))
        if selector:
            mos = mos.filter(ManagedObjectSelector.objects.get(id=int(selector)).Q)
        if administrative_domain:
            mos = mos.filter(
                administrative_domain__in=AdministrativeDomain.get_nested_ids(
                    int(administrative_domain)
                )
            )
        if object_profile:
            mos = mos.filter(object_profile=object_profile)
        # iface_dict = {}
        # Template values for the per-row dashboard URL
        d_url = {
            "path": "/ui/grafana/dashboard/script/report.js",
            "rname": map_table[reporttype],
            "from": str(int(ts_from_date * 1000)),
            "to": str(int(ts_to_date * 1000)),
            # o.name.replace("#", "%23")
            "biid": "",
            "oname": "",
            "iname": "",
        }
        # Per-reporttype URL template and base SELECT column map.
        # q_select keys are (position, CH field, output alias) tuples.
        # NOTE(review): "errors" has no "q_select" entry, so selecting it
        # would raise KeyError below — it is also not in the validate
        # choices, so presumably unreachable
        report_map = {
            "load_interfaces": {
                "url": "%(path)s?title=interface&biid=%(biid)s"
                "&obj=%(oname)s&iface=%(iname)s&from=%(from)s&to=%(to)s",
                "q_group": ["interface"],
                "q_select": {
                    (0, "managed_object", "id"): "managed_object",
                    (1, "path", "iface_name"): "arrayStringConcat(path)",
                },
            },
            "errors": {
                "url": """%(path)s?title=errors&biid=%(biid)s&obj=%(oname)s&iface=%(iname)s&from=%(from)s&to=%(to)s""",
                "q_group": ["interface"],
            },
            "load_cpu": {
                "url": """%(path)s?title=cpu&biid=%(biid)s&obj=%(oname)s&from=%(from)s&to=%(to)s""",
                "q_select": {
                    (0, "managed_object", "id"): "managed_object",
                    (1, "path", "slot"): "arrayStringConcat(path)",
                },
            },
            "ping": {
                "url": """%(path)s?title=ping&biid=%(biid)s&obj=%(oname)s&from=%(from)s&to=%(to)s""",
                "q_select": {(0, "managed_object", "id"): "managed_object"},
            },
        }
        # column name -> (CH field, output alias, CH aggregate expression)
        query_map = {
            # "iface_description": ('', 'iface_description', "''"),
            "iface_description": (
                "",
                "iface_description",
                "dictGetString('interfaceattributes','description' , (managed_object, arrayStringConcat(path)))",
            ),
            "iface_speed": (
                "speed",
                "iface_speed",
                "if(max(speed) = 0, dictGetUInt64('interfaceattributes', 'in_speed', "
                "(managed_object, arrayStringConcat(path))), max(speed))",
            ),
            "load_in": ("load_in", "l_in", "round(quantile(0.90)(load_in), 0)"),
            "load_in_p": (
                "load_in",
                "l_in_p",
                "replaceOne(toString(round(quantile(0.90)(load_in) / "
                "if(max(speed) = 0, dictGetUInt64('interfaceattributes', 'in_speed', "
                "(managed_object, arrayStringConcat(path))), max(speed)), 4) * 100), '.', ',')",
            ),
            "load_out": ("load_out", "l_out", "round(quantile(0.90)(load_out), 0)"),
            "load_out_p": (
                "load_out",
                "l_out_p",
                "replaceOne(toString(round(quantile(0.90)(load_out) / "
                "if(max(speed) = 0, dictGetUInt64('interfaceattributes', 'in_speed', "
                "(managed_object, arrayStringConcat(path))), max(speed)), 4) * 100), '.', ',')",
            ),
            "errors_in": ("errors_in", "err_in", "quantile(0.90)(errors_in)"),
            "errors_out": ("errors_out", "err_out", "quantile(0.90)(errors_out)"),
            "cpu_usage": ("usage", "cpu_usage", "quantile(0.90)(usage)"),
            "ping_rtt": ("rtt", "ping_rtt", "round(quantile(0.90)(rtt) / 1000, 2)"),
            "ping_attempts": ("attempts", "ping_attempts", "avg(attempts)"),
        }
        # Base select aliases (id, iface_name/slot) always come first
        query_fields = []
        for c in report_map[reporttype]["q_select"]:
            query_fields += [c[2]]
        field_shift = len(query_fields)  # deny replacing field
        # Append one SELECT expression per requested metric column,
        # positioned after the base fields in user column order
        for c in columns.split(","):
            if c not in query_map:
                continue
            field, alias, func = query_map[c]
            report_map[reporttype]["q_select"][
                (columns_order.index(c) + field_shift, field, alias)
            ] = func
            query_fields += [c]
        metrics_attrs = namedtuple("METRICSATTRs", query_fields)
        mo_attrs = namedtuple("MOATTRs", [c for c in cols if c.startswith("object")])
        # bi_id -> object attribute tuple for row decoration
        moss = {}
        for row in mos.values_list(
            "bi_id", "name", "address", "platform", "administrative_domain__name", "segment"
        ):
            moss[row[0]] = mo_attrs(
                *[
                    row[1],
                    row[2],
                    str(Platform.get_by_id(row[3]) if row[3] else ""),
                    row[4],
                    str(NetworkSegment.get_by_id(row[5])) if row[5] else "",
                ]
            )
        url = report_map[reporttype].get("url", "")
        report_metric = self.metric_source[reporttype](
            tuple(sorted(moss)), from_date, to_date, columns=None
        )
        report_metric.SELECT_QUERY_MAP = report_map[reporttype]["q_select"]
        if exclude_zero and reporttype == "load_interfaces":
            report_metric.CUSTOM_FILTER["having"] += ["max(load_in) != 0 AND max(load_out) != 0"]
        if interface_profile:
            interface_profile = InterfaceProfile.objects.filter(id=interface_profile).first()
            report_metric.CUSTOM_FILTER["having"] += [
                "dictGetString('interfaceattributes', 'profile', "
                "(managed_object, arrayStringConcat(path))) = '%s'" % interface_profile.name
            ]
        # OBJECT_PLATFORM, ADMIN_DOMAIN, SEGMENT, OBJECT_HOSTNAME
        for row in report_metric.do_query():
            mm = metrics_attrs(*row)
            mo = moss[int(mm.id)]
            res = []
            for y in columns_order:
                if y in object_columns:
                    res += [getattr(mo, y)]
                else:
                    res += [getattr(mm, y)]
            if "interface_load_url" in columns_filter:
                d_url["biid"] = mm.id
                # NOTE(review): mo[2] is the platform field of MOATTRs,
                # yet it is used as the object-name URL parameter —
                # looks like an off-by-one; verify against the dashboard
                d_url["oname"] = mo[2].replace("#", "%23")
                # res += [url % d_url, interval]
                res.insert(columns_order.index("interface_load_url"), url % d_url)
            r += [res]
        filename = "metrics_detail_report_%s" % datetime.datetime.now().strftime("%Y%m%d")
        if o_format == "csv":
            response = HttpResponse(content_type="text/csv")
            response["Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
            writer = csv.writer(response, dialect="excel", delimiter=",", quoting=csv.QUOTE_MINIMAL)
            writer.writerows(r)
            return response
        elif o_format == "xlsx":
            # Build the workbook fully in memory
            response = StringIO()
            wb = xlsxwriter.Workbook(response)
            cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
            ws = wb.add_worksheet("Alarms")
            max_column_data_length = {}
            for rn, x in enumerate(r):
                for cn, c in enumerate(x):
                    # Track the widest cell per header for autowidth
                    if rn and (
                        r[0][cn] not in max_column_data_length
                        or len(str(c)) > max_column_data_length[r[0][cn]]
                    ):
                        max_column_data_length[r[0][cn]] = len(str(c))
                    ws.write(rn, cn, c, cf1)
            ws.autofilter(0, 0, rn, cn)
            ws.freeze_panes(1, 0)
            for cn, c in enumerate(r[0]):
                # Set column width
                width = get_column_width(c)
                if enable_autowidth and width < max_column_data_length[c]:
                    width = max_column_data_length[c]
                ws.set_column(cn, cn, width=width)
            wb.close()
            response.seek(0)
            response = HttpResponse(response.getvalue(), content_type="application/vnd.ms-excel")
            response["Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
            response.close()
            return response
class ReportLinkDetailApplication(ExtApplication):
    """
    Report: detailed list of links between managed objects, exported as
    CSV, zipped CSV or XLSX.
    """

    menu = _("Reports") + "|" + _("Link Detail")
    title = _("Link Detail")
    SEGMENT_PATH_DEPTH = 7
    CONTAINER_PATH_DEPTH = 7

    @view(
        "^download/$",
        method=["GET"],
        access="launch",
        api=True,
        validate={
            "administrative_domain": StringParameter(required=False),
            "pool": StringParameter(required=False),
            "segment": StringParameter(required=False),
            "selector": StringParameter(required=False),
            "ids": StringParameter(required=False),
            "is_managed": BooleanParameter(required=False),
            "avail_status": BooleanParameter(required=False),
            "columns": StringParameter(required=False),
            "o_format": StringParameter(choices=["csv", "csv_zip", "xlsx"]),
        },
    )
    def api_report(
        self,
        request,
        o_format,
        is_managed=None,
        administrative_domain=None,
        selector=None,
        pool=None,
        segment=None,
        avail_status=False,
        columns=None,
        ids=None,
        enable_autowidth=False,
    ):
        """
        Build the link-detail report and return it as an HTTP attachment.

        :param o_format: output format: "csv", "csv_zip" or "xlsx"
        :param columns: comma-separated list of column names to include;
            unknown names are silently skipped
        :returns: HttpResponse with the rendered report
        """

        def row(row):
            # Normalize every cell of a row to a text-friendly value.
            def qe(v):
                if v is None:
                    return ""
                if isinstance(v, str):
                    return smart_text(v)
                elif isinstance(v, datetime.datetime):
                    return v.strftime("%Y-%m-%d %H:%M:%S")
                elif not isinstance(v, str):
                    return str(v)
                else:
                    return v

            return [qe(x) for x in row]

        def translate_row(row, cmap):
            # Project a full row onto the user-selected columns.
            return [row[i] for i in cmap]

        type_columns = ["Up/10G", "Up/1G", "Up/100M", "Down/-", "-"]
        # Canonical column order; cmap indexes into this list
        cols = [
            "object1_admin_domain",
            # "id",
            "object1_name",
            "object1_address",
            "object1_platform",
            "object1_segment",
            "object1_tags",
            "object1_iface",
            "object1_descr",
            "object1_speed",
            "object2_admin_domain",
            "object2_name",
            "object2_address",
            "object2_platform",
            "object2_segment",
            "object2_tags",
            "object2_iface",
            "object2_descr",
            "object2_speed",
            "link_proto",
            "last_seen",
        ]
        header_row = [
            "OBJECT1_ADMIN_DOMAIN",
            "OBJECT1_NAME",
            "OBJECT1_ADDRESS",
            "OBJECT1_PLATFORM",
            "OBJECT1_SEGMENT",
            "OBJECT1_TAGS",
            "OBJECT1_IFACE",
            "OBJECT1_DESCR",
            "OBJECT1_SPEED",
            "OBJECT2_ADMIN_DOMAIN",
            "OBJECT2_NAME",
            "OBJECT2_ADDRESS",
            "OBJECT2_PLATFORM",
            "OBJECT2_SEGMENT",
            "OBJECT2_TAGS",
            "OBJECT2_IFACE",
            "OBJECT2_DESCR",
            "OBJECT2_SPEED",
            "LINK_PROTO",
            "LAST_SEEN",
        ]
        if columns:
            cmap = []
            for c in columns.split(","):
                try:
                    cmap += [cols.index(c)]
                except ValueError:
                    # Unknown column name — skip silently
                    continue
        else:
            cmap = list(range(len(cols)))
        r = [translate_row(header_row, cmap)]
        # NOTE(review): columns may be None here if the caller omitted it —
        # columns.split(",") would raise AttributeError; confirm callers
        # always pass "columns" when this branch matters.
        if "interface_type_count" in columns.split(","):
            r[-1].extend(type_columns)
        # self.logger.info(r)
        # self.logger.info("---------------------------------")
        # print("-----------%s------------%s" % (administrative_domain, columns))
        # Build the ManagedObject queryset from the request filters
        p = Pool.get_by_name(pool or "default")
        mos = ManagedObject.objects.filter()
        if request.user.is_superuser and not administrative_domain and not selector and not segment:
            mos = ManagedObject.objects.filter(pool=p)
        if ids:
            # NOTE(review): ids is a single string wrapped in a list, not a
            # parsed list of ids — presumably only one id is supported; verify.
            mos = ManagedObject.objects.filter(id__in=[ids])
        if is_managed is not None:
            mos = ManagedObject.objects.filter(is_managed=is_managed)
        if pool:
            mos = mos.filter(pool=p)
        if not request.user.is_superuser:
            mos = mos.filter(administrative_domain__in=UserAccess.get_domains(request.user))
        if administrative_domain:
            ads = AdministrativeDomain.get_nested_ids(int(administrative_domain))
            mos = mos.filter(administrative_domain__in=ads)
        if selector:
            selector = ManagedObjectSelector.get_by_id(int(selector))
            mos = mos.filter(selector.Q)
        if segment:
            segment = NetworkSegment.objects.filter(id=segment).first()
            if segment:
                mos = mos.filter(segment__in=segment.get_nested_ids())
        mos_id = list(mos.values_list("id", flat=True))
        rld = ReportLinksDetail(mos_id)
        # id -> (admin_domain, name, address, segment, platform, labels)
        mo_resolv = {
            mo[0]: mo[1:]
            for mo in ManagedObject.objects.filter().values_list(
                "id",
                "administrative_domain__name",
                "name",
                "address",
                "segment",
                "platform",
                "labels",
            )
        }
        for link in rld.out:
            if len(rld.out[link]) != 2:
                # Multilink or bad link
                continue
            s1, s2 = rld.out[link]
            seg1, seg2 = None, None
            if "object1_segment" in columns.split(",") or "object2_segment" in columns.split(","):
                seg1, seg2 = mo_resolv[s1["mo"][0]][3], mo_resolv[s2["mo"][0]][3]
            plat1, plat2 = None, None
            if "object1_platform" in columns.split(",") or "object2_platform" in columns.split(","):
                plat1, plat2 = mo_resolv[s1["mo"][0]][4], mo_resolv[s2["mo"][0]][4]
            r += [
                translate_row(
                    row([
                        mo_resolv[s1["mo"][0]][0],
                        mo_resolv[s1["mo"][0]][1],
                        mo_resolv[s1["mo"][0]][2],
                        "" if not plat1 else Platform.get_by_id(plat1),
                        "" if not seg1 else NetworkSegment.get_by_id(seg1),
                        ";".join(mo_resolv[s1["mo"][0]][5] or []),
                        s1["iface_n"][0],
                        s1.get("iface_descr")[0] if s1.get("iface_descr") else "",
                        s1.get("iface_speed")[0] if s1.get("iface_speed") else 0,
                        mo_resolv[s2["mo"][0]][0],
                        mo_resolv[s2["mo"][0]][1],
                        mo_resolv[s2["mo"][0]][2],
                        "" if not plat2 else Platform.get_by_id(plat2),
                        "" if not seg2 else NetworkSegment.get_by_id(seg2),
                        ";".join(mo_resolv[s2["mo"][0]][5] or []),
                        s2["iface_n"][0],
                        s2.get("iface_descr")[0] if s2.get("iface_descr") else "",
                        s2.get("iface_speed")[0] if s2.get("iface_speed") else 0,
                        s2.get("dis_method", ""),
                        s2.get("last_seen", ""),
                    ]),
                    cmap,
                )
            ]
        filename = "links_detail_report_%s" % datetime.datetime.now().strftime("%Y%m%d")
        # Render in the requested output format
        if o_format == "csv":
            response = HttpResponse(content_type="text/csv")
            response["Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
            writer = csv.writer(response, dialect="excel", delimiter=",", quoting=csv.QUOTE_MINIMAL)
            writer.writerows(r)
            return response
        elif o_format == "csv_zip":
            response = BytesIO()
            f = TextIOWrapper(TemporaryFile(mode="w+b"), encoding="utf-8")
            writer = csv.writer(f, dialect="excel", delimiter=";", quotechar='"')
            writer.writerows(r)
            f.seek(0)
            with ZipFile(response, "w", compression=ZIP_DEFLATED) as zf:
                zf.writestr("%s.csv" % filename, f.read())
                zf.filename = "%s.csv.zip" % filename
            # response = HttpResponse(content_type="text/csv")
            response.seek(0)
            response = HttpResponse(response.getvalue(), content_type="application/zip")
            response["Content-Disposition"] = 'attachment; filename="%s.csv.zip"' % filename
            return response
        elif o_format == "xlsx":
            response = BytesIO()
            wb = xlsxwriter.Workbook(response)
            cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
            ws = wb.add_worksheet("Objects")
            # Track widest cell per column (keyed by header text) for autowidth
            max_column_data_length = {}
            for rn, x in enumerate(r):
                for cn, c in enumerate(x):
                    if rn and (r[0][cn] not in max_column_data_length
                               or len(str(c)) > max_column_data_length[r[0][cn]]):
                        max_column_data_length[r[0][cn]] = len(str(c))
                    ws.write(rn, cn, c, cf1)
            ws.autofilter(0, 0, rn, cn)
            ws.freeze_panes(1, 0)
            for cn, c in enumerate(r[0]):
                # Set column width
                width = get_column_width(c)
                if enable_autowidth and width < max_column_data_length[c]:
                    width = max_column_data_length[c]
                ws.set_column(cn, cn, width=width)
            wb.close()
            response.seek(0)
            response = HttpResponse(response.getvalue(), content_type="application/vnd.ms-excel")
            # response = HttpResponse(
            #     content_type="application/x-ms-excel")
            response["Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
            response.close()
            return response
class ReportMaxMetricsmaxDetailApplication(ExtApplication):
    """
    Report: maximum/average interface load metrics per managed object,
    optionally joined with the uplink interface metrics, exported as
    CSV, zipped CSV or XLSX.
    """

    menu = _("Reports") + "|" + _("Load Metrics max")
    title = _("Load Metrics max")

    @view(
        r"^download/$",
        method=["GET"],
        access="launch",
        api=True,
        validate={
            "from_date": StringParameter(required=True),
            "to_date": StringParameter(required=True),
            "administrative_domain": StringParameter(required=False),
            # "pool": StringParameter(required=False),
            "segment": StringParameter(required=False),
            "selector": StringParameter(required=False),
            "object_profile": StringParameter(required=False),
            "interface_profile": StringParameter(required=False),
            "exclude_zero": BooleanParameter(required=False),
            "filter_default": BooleanParameter(required=False),
            "columns": StringParameter(required=False),
            "description": StringParameter(required=False),
            "o_format": StringParameter(choices=["csv", "csv_zip", "xlsx"]),
        },
    )
    def api_report(
        self,
        request,
        reporttype=None,
        from_date=None,
        to_date=None,
        object_profile=None,
        filter_default=None,
        exclude_zero=True,
        interface_profile=None,
        selector=None,
        administrative_domain=None,
        columns=None,
        description=None,
        o_format=None,
        enable_autowidth=False,
        **kwargs,
    ):
        """
        Build the max-load metrics report and return it as an HTTP attachment.

        :param from_date: report interval start, "%d.%m.%Y"
        :param to_date: report interval end, "%d.%m.%Y"
        :param columns: comma-separated column names; also drives which
            extra data (containers, uplinks) is gathered
        :param o_format: "csv", "csv_zip" or "xlsx"
        :returns: HttpResponse with the rendered report
        """

        def load(mo_ids):
            """Map managed object id -> {neighbor id -> local iface name} from noc.links."""
            # match = {"links.mo": {"$in": mo_ids}}
            match = {"int.managed_object": {"$in": mo_ids}}
            group = {
                "_id": "$_id",
                "links": {
                    "$push": {
                        "iface_n": "$int.name",
                        # "iface_id": "$int._id",
                        # "iface_descr": "$int.description",
                        # "iface_speed": "$int.in_speed",
                        # "dis_method": "$discovery_method",
                        # "last_seen": "$last_seen",
                        "mo": "$int.managed_object",
                        "linked_obj": "$linked_objects",
                    }
                },
            }
            # Read from a secondary replica: report query, staleness acceptable
            value = (get_db()["noc.links"].with_options(
                read_preference=ReadPreference.SECONDARY_PREFERRED).aggregate(
                    [
                        {"$unwind": "$interfaces"},
                        {
                            "$lookup": {
                                "from": "noc.interfaces",
                                "localField": "interfaces",
                                "foreignField": "_id",
                                "as": "int",
                            }
                        },
                        {"$match": match},
                        {"$group": group},
                    ],
                    allowDiskUse=True,
                ))
            res = defaultdict(dict)
            for v in value:
                if v["_id"]:
                    for vv in v["links"]:
                        # Only plain point-to-point links
                        if len(vv["linked_obj"]) == 2:
                            mo = vv["mo"][0]
                            iface = vv["iface_n"]
                            for i in vv["linked_obj"]:
                                if mo != i:
                                    res[mo][i] = iface[0]
            return res

        def translate_row(row, cmap):
            # Project a full row onto the user-selected columns.
            return [row[i] for i in cmap]

        def str_to_float(str):
            # NOTE(review): parameter name shadows the builtin `str`
            # Round to 3 decimal places via string formatting
            return float("{0:.3f}".format(float(str)))

        # Canonical column order; cmap indexes into this list
        cols = [
            "object_id",
            "object_name",
            "object_address",
            "object_platform",
            "object_adm_domain",
            "object_segment",
            "object_container",
            # "object_hostname",
            # "object_status",
            # "profile_name",
            # "object_profile",
            # "object_vendor",
            "iface_name",
            "iface_description",
            "iface_speed",
            "max_load_in",
            "max_load_in_time",
            "max_load_out",
            "max_load_out_time",
            "avg_load_in",
            "avg_load_out",
            "total_in",
            "total_out",
            "uplink_iface_name",
            "uplink_iface_description",
            "uplink_iface_speed",
            "uplink_max_load_in",
            "uplink_max_load_in_time",
            "uplink_max_load_out",
            "uplink_max_load_out_time",
            "uplink_avg_load_in",
            "uplink_avg_load_out",
            "uplink_total_in",
            "uplink_total_out",
        ]
        header_row = [
            "ID",
            _("OBJECT_NAME"),
            _("OBJECT_ADDRESS"),
            _("OBJECT_PLATFORM"),
            _("OBJECT_ADMDOMAIN"),
            _("OBJECT_SEGMENT"),
            _("CONTAINER_ADDRESS"),
            _("IFACE_NAME"),
            _("IFACE_DESCRIPTION"),
            _("IFACE_SPEED"),
            _("MAX_LOAD_IN, Mbps"),
            _("MAX_LOAD_IN_TIME"),
            _("MAX_LOAD_OUT, Mbps"),
            _("MAX_LOAD_OUT_TIME"),
            _("AVG_LOAD_IN, Mbps"),
            _("AVG_LOAD_OUT, Mbps"),
            _("TOTAL_IN, Mbyte"),
            _("TOTAL_OUT, Mbyte"),
            _("UPLINK_IFACE_NAME"),
            _("UPLINK_IFACE_DESCRIPTION"),
            _("UPLINK_IFACE_SPEED"),
            _("UPLINK_MAX_LOAD_IN, Mbps"),
            _("UPLINK_MAX_TIME_IN"),
            _("UPLINK_MAX_LOAD_OUT, Mbps"),
            _("UPLINK_MAX_TIME_OUT"),
            _("UPLINK_AVG_LOAD_IN, Mbps"),
            _("UPLINK_AVG_LOAD_OUT, Mbps"),
            _("UPLINK_TOTAL_IN, Mbyte"),
            _("UPLINK_TOTAL_OUT, Mbyte"),
        ]
        if columns:
            cmap = []
            for c in columns.split(","):
                try:
                    cmap += [cols.index(c)]
                except ValueError:
                    # Unknown column name — skip silently
                    continue
        else:
            cmap = list(range(len(cols)))
        columns_order = columns.split(",")
        columns_filter = set(columns_order)
        r = [translate_row(header_row, cmap)]
        # Date Time Block
        if not from_date:
            from_date = datetime.datetime.now() - datetime.timedelta(days=1)
        else:
            from_date = datetime.datetime.strptime(from_date, "%d.%m.%Y")
        if not to_date or from_date == to_date:
            # Degenerate or missing end date: one-day window
            to_date = from_date + datetime.timedelta(days=1)
        else:
            to_date = datetime.datetime.strptime(to_date, "%d.%m.%Y") + datetime.timedelta(days=1)
        diff = to_date - from_date
        # Load managed objects
        mos = ManagedObject.objects.filter(is_managed=True)
        if not request.user.is_superuser:
            mos = mos.filter(administrative_domain__in=UserAccess.get_domains(request.user))
        if selector:
            mos = mos.filter(ManagedObjectSelector.objects.get(id=int(selector)).Q)
        if administrative_domain:
            mos = mos.filter(
                administrative_domain__in=AdministrativeDomain.get_nested_ids(int(administrative_domain)))
        if object_profile:
            mos = mos.filter(object_profile=object_profile)
        if interface_profile:
            # Replace the id string with the InterfaceProfile document
            interface_profile = InterfaceProfile.objects.filter(id=interface_profile).first()
        mo_attrs = namedtuple("MOATTRs", [c for c in cols if c.startswith("object")])
        containers_address = {}
        if "object_container" in columns_filter:
            containers_address = ReportContainerData(set(mos.values_list("id", flat=True)))
            containers_address = dict(list(containers_address.extract()))
        # bi_id -> MOATTRs tuple with resolved names
        moss = {}
        for row in mos.values_list("bi_id", "name", "address", "platform",
                                   "administrative_domain__name", "segment", "id"):
            moss[row[0]] = mo_attrs(*[
                row[6],
                row[1],
                row[2],
                smart_text(Platform.get_by_id(row[3]) if row[3] else ""),
                row[4],
                smart_text(NetworkSegment.get_by_id(row[5])) if row[5] else "",
                containers_address.get(row[6], "") if containers_address and row[6] else "",
            ])
        report_metric = ReportInterfaceMetrics(tuple(sorted(moss)), from_date, to_date, columns=None)
        # ClickHouse select expressions keyed by (position, field, alias)
        report_metric.SELECT_QUERY_MAP = {
            (0, "managed_object", "id"): "managed_object",
            (1, "path", "iface_name"): "arrayStringConcat(path)",
            (
                2,
                "",
                "iface_description",
            ): "dictGetString('interfaceattributes','description' , (managed_object, arrayStringConcat(path)))",
            (
                3,
                "",
                "profile",
            ): "dictGetString('interfaceattributes', 'profile', (managed_object, arrayStringConcat(path)))",
            (
                4,
                "speed",
                "iface_speed",
            ): "dictGetUInt64('interfaceattributes', 'in_speed', (managed_object, arrayStringConcat(path)))",
            (5, "load_in_max", "load_in_max"): "divide(max(load_in),1048576)",
            (6, "load_out_max", "load_out_max"): "divide(max(load_out),1048576)",
            (7, "max_load_in_time", "max_load_in_time"): "argMax(ts,load_in)",
            (8, "max_load_out_time", "max_load_out_time"): "argMax(ts,load_out)",
            (9, "avg_load_in", "avg_load_in"): "divide(avg(load_in),1048576)",
            (10, "avg_load_out", "avg_load_out"): "divide(avg(load_out),1048576)",
        }
        # mo bi_id -> iface name -> metrics dict
        ifaces_metrics = defaultdict(dict)
        for row in report_metric.do_query():
            avg_in = str_to_float(row[9])
            avg_out = str_to_float(row[10])
            # Mbps average * seconds / 8 -> Mbyte totals
            total_in = avg_in * diff.total_seconds() / 8
            total_out = avg_out * diff.total_seconds() / 8
            ifaces_metrics[row[0]][row[1]] = {
                "description": row[2],
                "profile": row[3],
                "bandwidth": row[4],
                "max_load_in": str_to_float(row[5]),
                "max_load_out": str_to_float(row[6]),
                "max_load_in_time": row[7],
                "max_load_out_time": row[8],
                "avg_load_in": avg_in,
                "avg_load_out": avg_out,
                "total_in": float("{0:.1f}".format(total_in)),
                "total_out": float("{0:.1f}".format(total_out)),
            }
        # find uplinks
        links = {}
        # Only gather uplink data when some uplink_* column (index > 17) is requested
        if cmap[-1] > 17:
            mos_id = list(mos.values_list("id", flat=True))
            uplinks = {obj: [] for obj in mos_id}
            for d in ObjectData._get_collection().find(
                    {"_id": {"$in": mos_id}}, {"_id": 1, "uplinks": 1}):
                uplinks[d["_id"]] = d.get("uplinks", [])
            rld = load(mos_id)
            for mo in uplinks:
                for uplink in uplinks[mo]:
                    if rld[mo]:
                        if mo in links:
                            links[mo] += [rld[mo][uplink]]
                        else:
                            links[mo] = [rld[mo][uplink]]
        for mo_bi in ifaces_metrics:
            mo_id = moss[int(mo_bi)]
            mo_ids = getattr(mo_id, "object_id")
            for i in ifaces_metrics[mo_bi]:
                # NOTE(review): this guard looks inverted — it skips zero-load
                # rows when exclude_zero is False; confirm intended semantics.
                if not exclude_zero:
                    if (ifaces_metrics[mo_bi][i]["max_load_in"] == 0
                            and ifaces_metrics[mo_bi][i]["max_load_out"] == 0):
                        continue
                if description:
                    if description not in ifaces_metrics[mo_bi][i]["description"]:
                        continue
                if interface_profile:
                    if interface_profile.name not in ifaces_metrics[mo_bi][i]["profile"]:
                        continue
                # Columns 0-17: object + interface metrics; 18-28: uplink slots
                row2 = [
                    mo_ids,
                    getattr(mo_id, "object_name"),
                    getattr(mo_id, "object_address"),
                    getattr(mo_id, "object_platform"),
                    getattr(mo_id, "object_adm_domain"),
                    getattr(mo_id, "object_segment"),
                    getattr(mo_id, "object_container"),
                    i,
                    ifaces_metrics[mo_bi][i]["description"],
                    ifaces_metrics[mo_bi][i]["bandwidth"],
                    ifaces_metrics[mo_bi][i]["max_load_in"],
                    ifaces_metrics[mo_bi][i]["max_load_in_time"],
                    ifaces_metrics[mo_bi][i]["max_load_out"],
                    ifaces_metrics[mo_bi][i]["max_load_out_time"],
                    ifaces_metrics[mo_bi][i]["avg_load_in"],
                    ifaces_metrics[mo_bi][i]["avg_load_out"],
                    ifaces_metrics[mo_bi][i]["total_in"],
                    ifaces_metrics[mo_bi][i]["total_out"],
                    "",
                    "",
                    "",
                    "",
                    "",
                    "",
                    "",
                    "",
                    "",
                    "",
                    "",
                ]
                ss = True
                if mo_ids in links:
                    # Emit one row per uplink interface that has metrics
                    for ifname_uplink in links[mo_ids]:
                        if ifname_uplink in ifaces_metrics[mo_bi]:
                            row2[18] = ifname_uplink
                            row2[19] = ifaces_metrics[mo_bi][ifname_uplink]["description"]
                            row2[20] = ifaces_metrics[mo_bi][ifname_uplink]["bandwidth"]
                            row2[21] = ifaces_metrics[mo_bi][ifname_uplink]["max_load_in"]
                            row2[22] = ifaces_metrics[mo_bi][ifname_uplink]["max_load_in_time"]
                            row2[23] = ifaces_metrics[mo_bi][ifname_uplink]["max_load_out"]
                            row2[24] = ifaces_metrics[mo_bi][ifname_uplink]["max_load_out_time"]
                            row2[25] = ifaces_metrics[mo_bi][ifname_uplink]["avg_load_in"]
                            row2[26] = ifaces_metrics[mo_bi][ifname_uplink]["avg_load_out"]
                            row2[27] = ifaces_metrics[mo_bi][ifname_uplink]["total_in"]
                            row2[28] = ifaces_metrics[mo_bi][ifname_uplink]["total_out"]
                            r += [translate_row(row2, cmap)]
                            ss = False
                if ss:
                    # No uplink row was emitted — emit the plain row once
                    r += [translate_row(row2, cmap)]
        filename = "metrics_detail_report_%s" % datetime.datetime.now().strftime("%Y%m%d")
        # Render in the requested output format
        if o_format == "csv":
            response = HttpResponse(content_type="text/csv")
            response["Content-Disposition"] = 'attachment; filename="%s.csv"' % filename
            writer = csv.writer(response, dialect="excel", delimiter=",", quoting=csv.QUOTE_MINIMAL)
            writer.writerows(r)
            return response
        elif o_format == "csv_zip":
            response = BytesIO()
            f = TextIOWrapper(TemporaryFile(mode="w+b"), encoding="utf-8")
            writer = csv.writer(f, dialect="excel", delimiter=";", quotechar='"')
            writer.writerows(r)
            f.seek(0)
            with ZipFile(response, "w", compression=ZIP_DEFLATED) as zf:
                zf.writestr("%s.csv" % filename, f.read())
                zf.filename = "%s.csv.zip" % filename
            # response = HttpResponse(content_type="text/csv")
            response.seek(0)
            response = HttpResponse(response.getvalue(), content_type="application/zip")
            response["Content-Disposition"] = 'attachment; filename="%s.csv.zip"' % filename
            return response
        elif o_format == "xlsx":
            response = BytesIO()
            wb = xlsxwriter.Workbook(response)
            cf1 = wb.add_format({"bottom": 1, "left": 1, "right": 1, "top": 1})
            ws = wb.add_worksheet("Metrics")
            # Track widest cell per column (keyed by header text) for autowidth
            max_column_data_length = {}
            for rn, x in enumerate(r):
                for cn, c in enumerate(x):
                    if rn and (r[0][cn] not in max_column_data_length
                               or len(str(c)) > max_column_data_length[r[0][cn]]):
                        max_column_data_length[r[0][cn]] = len(str(c))
                    ws.write(rn, cn, c, cf1)
            ws.autofilter(0, 0, rn, cn)
            ws.freeze_panes(1, 0)
            for cn, c in enumerate(r[0]):
                # Set column width
                width = get_column_width(c)
                if enable_autowidth and width < max_column_data_length[c]:
                    width = max_column_data_length[c]
                ws.set_column(cn, cn, width=width)
            wb.close()
            response.seek(0)
            response = HttpResponse(response.getvalue(), content_type="application/vnd.ms-excel")
            response["Content-Disposition"] = 'attachment; filename="%s.xlsx"' % filename
            response.close()
            return response
class InvApplication(ExtApplication):
    """
    inv.inv application: inventory tree browsing, object grouping,
    connection crossing proposals and physical connect operations.
    """

    title = _("Inventory")
    menu = _("Inventory")

    # Undeletable nodes
    UNDELETABLE = {
        # Global Lost&Found
        "b0fae773-b214-4edf-be35-3468b53b03f2"
    }

    def __init__(self, *args, **kwargs):
        ExtApplication.__init__(self, *args, **kwargs)
        # Load plugins
        from .plugins.base import InvPlugin

        # name -> plugin instance; discovered from the plugins directory
        self.plugins = {}
        for f in os.listdir("services/web/apps/inv/inv/plugins/"):
            if not f.endswith(".py") or f == "base.py" or f.startswith("_"):
                continue
            mn = "noc.services.web.apps.inv.inv.plugins.%s" % f[:-3]
            m = __import__(mn, {}, {}, "*")
            for on in dir(m):
                o = getattr(m, on)
                # Only InvPlugin subclasses defined in this very module
                if inspect.isclass(o) and issubclass(o, InvPlugin) and o.__module__.startswith(mn):
                    assert o.name
                    self.plugins[o.name] = o(self)

    def get_plugin_data(self, name):
        """Return UI descriptor {name, xtype} for a loaded plugin."""
        return {"name": name, "xtype": self.plugins[name].js}

    @view("^node/$", method=["GET"], access="read", api=True)
    def api_node(self, request):
        """
        Return child nodes of the requested container (or the root
        containers) for the inventory tree widget.
        """
        children = []
        if request.GET and "node" in request.GET:
            container = request.GET["node"]
            if is_objectid(container):
                container = Object.get_by_id(container)
                if not container:
                    return self.response_not_found()
                children = [(o.name, o) for o in Object.objects.filter(container=container.id)]
                # Collect inner connections
                children += [(name, o) for name, o, _ in container.get_inner_connections()]
            elif container == "root":
                # Top-level: all container-capable models without a parent
                cmodels = [
                    d["_id"]
                    for d in ObjectModel._get_collection().find(
                        {"data.container.container": True}, {"_id": 1})
                ]
                children = [(o.name, o) for o in Object.objects.filter(__raw__={
                    "container": None,
                    "model": {"$in": cmodels}
                })]
            else:
                return self.response_bad_request()
        r = []
        # Build node interface
        for name, o in children:
            m_plugins = o.model.plugins or []
            # Plugin names prefixed with "-" are disabled for this model
            disabled_plugins = set(p[1:] for p in m_plugins if p.startswith("-"))
            n = {
                "id": str(o.id),
                "name": name,
                "plugins": [],
                "can_add": bool(o.get_data("container", "container")),
                "can_delete": str(o.model.uuid) not in self.UNDELETABLE,
            }
            if o.get_data("container", "container") or o.has_inner_connections():
                # n["expanded"] = Object.objects.filter(container=o.id).count() == 1
                n["expanded"] = False
            else:
                n["leaf"] = True
            # Attach plugins based on model capabilities
            if o.get_data("rack", "units"):
                n["plugins"] += [self.get_plugin_data("rack")]
            if o.model.connections:
                n["plugins"] += [self.get_plugin_data("inventory")]
            if o.get_data("geopoint", "layer"):
                n["plugins"] += [self.get_plugin_data("map")]
            if o.get_data("management", "managed_object"):
                n["plugins"] += [self.get_plugin_data("managedobject")]
            if o.get_data("contacts", "has_contacts"):
                n["plugins"] += [self.get_plugin_data("contacts")]
            # Append model's plugins
            for p in m_plugins:
                if not p.startswith("-"):
                    n["plugins"] += [self.get_plugin_data(p)]
            # Common plugins, always present
            n["plugins"] += [
                self.get_plugin_data("data"),
                self.get_plugin_data("comment"),
                self.get_plugin_data("file"),
                self.get_plugin_data("log"),
            ]
            # Process disabled plugins
            n["plugins"] = [p for p in n["plugins"] if p["name"] not in disabled_plugins]
            r += [n]
        return r

    @view(
        "^add_group/$",
        method=["POST"],
        access="create_group",
        api=True,
        validate={
            "container": ObjectIdParameter(required=False),
            "type": ObjectIdParameter(),
            "name": UnicodeParameter(),
            "serial": UnicodeParameter(required=False),
        },
    )
    def api_add_group(self, request, type, name, container=None, serial=None):
        """
        Create a new inventory object of model *type* named *name*,
        optionally inside *container* and with an asset *serial*.
        Returns the new object id as a string.
        """
        if is_objectid(container):
            c = Object.get_by_id(container)
            if not c:
                return self.response_not_found()
            c = c.id
        elif container:
            # Non-empty but not a valid ObjectId
            return self.response_bad_request()
        else:
            c = None
        m = ObjectModel.get_by_id(type)
        if not m:
            return self.response_not_found()
        o = Object(name=name, model=m, container=c)
        if serial and m.get_data("asset", "part_no0"):
            o.set_data("asset", "serial", serial)
        o.save()
        o.log("Created", user=request.user.username, system="WEB", op="CREATE")
        return str(o.id)

    @view(
        "^remove_group/$",
        method=["DELETE"],
        access="remove_group",
        api=True,
        validate={"container": ObjectIdParameter(required=True)},
    )
    def api_remove_group(self, request, container=None):
        """Delete the object given by *container* id; 404 if missing."""
        c = self.get_object_or_404(Object, id=container)
        c.delete()
        return True

    @view(
        "^insert/$",
        method=["POST"],
        access="reorder",
        api=True,
        validate={
            "container": ObjectIdParameter(required=False),
            "objects": ListOfParameter(element=ObjectIdParameter()),
            "position": StringParameter(),
        },
    )
    def api_insert(self, request, container, objects, position):
        """
        Move *objects* relative to *container*.

        :param request:
        :param container: ObjectID after/in that insert
        :param objects: List ObjectID for insert
        :param position: 'append', 'before', 'after'
        :return: True on success
        """
        c = self.get_object_or_404(Object, id=container)
        o = []
        for r in objects:
            o += [self.get_object_or_404(Object, id=r)]
        if position == "append":
            for x in o:
                x.put_into(c)
        elif position in ("before", "after"):
            # Insert as siblings: put into the container's own parent
            cc = self.get_object_or_404(Object, id=c.container.id) if c.container else None
            for x in o:
                x.put_into(cc)
        return True

    @view("^(?P<id>[0-9a-f]{24})/path/$", method=["GET"], access="read", api=True)
    def api_get_path(self, request, id):
        """Return the container path from the root down to object *id*."""
        o = self.get_object_or_404(Object, id=id)
        path = [{"id": str(o.id), "name": o.name}]
        while o.container:
            o = o.container
            path.insert(0, {"id": str(o.id), "name": o.name})
        return path

    @view(
        "^crossing_proposals/$",
        method=["GET"],
        access="read",
        api=True,
        validate={
            "o1": ObjectIdParameter(required=True),
            "o2": ObjectIdParameter(required=False),
            "left_filter": UnicodeParameter(required=False),
            "right_filter": UnicodeParameter(required=False),
            "cable_filter": UnicodeParameter(required=False),
        },
    )
    def api_get_crossing_proposals(
        self,
        request,
        o1,
        o2=None,
        left_filter: Optional[str] = None,
        right_filter: Optional[str] = None,
        cable_filter: Optional[str] = None,
    ):
        """
        API for the connection form.
        1) If cable_filter is set, check every connection for compatibility with the cable.
        2) If left_filter is set, check against the remote object.
        :param request:
        :param o1: left object id
        :param o2: right object id (optional)
        :param left_filter: connection name on the left object
        :param right_filter: connection name on the right object
        :param cable_filter: cable model name
        :return: dict with left/right connection proposals and cable list
        """
        self.logger.info(
            "Crossing proposals: %s:%s, %s:%s. Cable: %s",
            o1,
            left_filter,
            o2,
            right_filter,
            cable_filter,
        )
        lo: Object = self.get_object_or_404(Object, id=o1)
        ro: Optional[Object] = None
        if o2:
            ro = self.get_object_or_404(Object, id=o2)
        lcs: List[Dict[str, Any]] = []
        cable: Optional[ObjectModel] = None
        # Getting cable
        cables = ObjectModel.objects.filter(data__length__length__gte=0)
        if cable_filter:
            cable = ObjectModel.get_by_name(cable_filter)
        for c in lo.model.connections:
            valid, disable_reason = True, ""
            if cable_filter:
                # If select cable_filter - check every connection to cable
                cable_connections = [
                    c for c in lo.model.get_connection_proposals(c.name) if c[0] == cable.id
                ]
                valid = bool(cable_connections)
            elif ro and right_filter:
                rc = ro.model.get_model_connection(right_filter)
                if not rc:
                    # NOTE(review): bare `raise` outside an except block raises
                    # RuntimeError at runtime — probably meant an explicit error
                    raise
                valid, disable_reason = lo.model.check_connection(c, rc)
            elif ro:
                valid = bool([
                    c for c in lo.model.get_connection_proposals(c.name) if c[0] == ro.model.id
                ])
            oc, oo, _ = lo.get_p2p_connection(c.name)
            lcs += [{
                "name": c.name,
                "type": str(c.type.id),
                "type__label": c.type.name,
                "gender": c.gender,
                "direction": c.direction,
                "protocols": c.protocols,
                "free": not bool(oc),
                "valid": valid,
                "disable_reason": disable_reason,
            }]
        rcs: List[Dict[str, Any]] = []
        if ro:
            # Mirror of the left-side loop for the right object
            for c in ro.model.connections:
                valid, disable_reason = True, ""
                if cable_filter:
                    cable_connections = [
                        c for c in ro.model.get_connection_proposals(c.name) if c[0] == cable.id
                    ]
                    valid = bool(cable_connections)
                elif left_filter:
                    lc = lo.model.get_model_connection(left_filter)
                    if not lc:
                        # NOTE(review): same bare `raise` issue as above
                        raise
                    valid, disable_reason = lo.model.check_connection(c, lc)
                else:
                    valid = bool([
                        c for c in ro.model.get_connection_proposals(c.name) if c[0] == lo.model.id
                    ])
                oc, oo, _ = ro.get_p2p_connection(c.name)
                rcs += [{
                    "name": c.name,
                    "type": str(c.type.id),
                    "type__label": c.type.name,
                    "gender": c.gender,
                    "direction": c.direction,
                    "protocols": c.protocols,
                    "free": not bool(oc),
                    "valid": valid,
                    "disable_reason": disable_reason,
                }]
        # Forming cable
        return {
            "left": {
                "connections": lcs
            },
            "right": {
                "connections": rcs
            },
            "cable": [{
                "name": c.name,
                "available": True
            } for c in cables],
            "valid": lcs and rcs and left_filter and right_filter,
        }

    @view(
        "^connect/$",
        method=["POST"],
        access="connect",
        api=True,
        validate={
            "object": ObjectIdParameter(required=True),
            "name": StringParameter(required=True),
            "remote_object": ObjectIdParameter(required=True),
            "remote_name": StringParameter(required=True),
            # "cable": ObjectIdParameter(required=False),
            "cable": StringParameter(required=False),
            "reconnect": BooleanParameter(default=False, required=False),
        },
    )
    def api_connect(
        self,
        request,
        object,
        name,
        remote_object,
        remote_name,
        cable: Optional[str] = None,
        reconnect=False,
    ):
        """
        Connect connection *name* of *object* to *remote_name* of
        *remote_object*, optionally through a newly created wire of model
        *cable*. Returns True, or {"status": False, "text": ...} on
        ConnectionError.
        """
        lo: Object = self.get_object_or_404(Object, id=object)
        ro: Object = self.get_object_or_404(Object, id=remote_object)
        cable_o: Optional[Object] = None
        if cable:
            cable = ObjectModel.get_by_name(cable)
            cable_o = Object(
                name="Wire %s:%s <-> %s:%s" % (lo.name, name, ro.name, remote_name),
                model=cable,
                container=lo.container.id,
            )
            cable_o.save()
        # NOTE(review): leftover debug print — consider removing
        print(lo, ro, cable_o)
        try:
            if cable_o:
                # Wire in between: object <-> cable <-> remote object
                c1, c2 = cable_o.model.connections[:2]
                # NOTE(review): extra positional args to logger.debug — the
                # format string has no placeholders for c1/c2
                self.logger.debug("Wired connect c1:c2", c1, c2)
                lo.connect_p2p(name, cable_o, c1.name, {}, reconnect=reconnect)
                ro.connect_p2p(remote_name, cable_o, c2.name, {}, reconnect=reconnect)
                lo.save()
                ro.save()
            else:
                lo.connect_p2p(name, ro, remote_name, {}, reconnect=reconnect)
        except ConnectionError as e:
            self.logger.warning("Connection Error: %s", str(e))
            return self.render_json({"status": False, "text": str(e)})
        return True
# NOC modules from error import ModelDataError from noc.lib.utils import deep_copy from noc.lib.escape import json_escape as q from noc.sa.interfaces.base import (StringParameter, BooleanParameter, FloatParameter, IntParameter, StringListParameter) id_lock = Lock() T_MAP = { "str": StringParameter(), "int": IntParameter(), "float": FloatParameter(), "bool": BooleanParameter(), "strlist": StringListParameter() } A_TYPE = ["str", "int", "float", "bool", "objectid", "ref", "strlist"] class ModelInterfaceAttr(EmbeddedDocument): meta = { "strict": False, "auto_create_index": False } name = StringField() type = StringField(choices=[(t, t) for t in A_TYPE]) description = StringField() required = BooleanField(default=False)
def test_boolean_parameter(raw, config, expected):
    """BooleanParameter built from *config* must clean *raw* into *expected*."""
    parameter = BooleanParameter(**config)
    cleaned = parameter.clean(raw)
    assert cleaned == expected
class ValidationPolicySettingsApplication(ExtDocApplication):
    """
    ValidationPolicySettings application: read and save per-object
    validation policy assignments.
    """

    title = _("Validation Policy Settings")
    model = ValidationPolicySettings

    # Models that may carry validation policy settings
    # NOTE(review): values are all 2; their meaning is not evident from this
    # file — only key membership is checked below
    MODEL_SCOPES = {
        "sa.ManagedObject": 2,
        "sa.ManagedObjectProfile": 2,
        "inv.Interface": 2,
        "inv.InterfaceProfile": 2,
    }

    @view(
        "^(?P<model_id>[^/]+)/(?P<object_id>[^/]+)/settings/$",
        method=["GET"],
        access="read",
        api=True,
    )
    def api_get_settings(self, request, model_id, object_id):
        """
        Return the list of policy assignments for (model_id, object_id).
        NOTE(review): returns {} (not []) when no settings exist — confirm
        the client handles both shapes.
        """
        if model_id not in self.MODEL_SCOPES:
            return self.response_not_found("Invalid model")
        o = ValidationPolicySettings.objects.filter(
            model_id=model_id, object_id=object_id).first()
        if o:
            # Policy settings
            return [{
                "policy": str(p.policy.id),
                "policy__label": p.policy.name,
                "is_active": p.is_active,
            } for p in o.policies]
        else:
            return {}

    @view(
        "^(?P<model_id>[^/]+)/(?P<object_id>[^/]+)/settings/$",
        method=["POST"],
        # NOTE(review): a mutating endpoint guarded by access="read" — verify
        access="read",
        api=True,
        validate={
            "policies": DictListParameter(
                attrs={
                    "policy": DocumentParameter(ValidationPolicy),
                    "is_active": BooleanParameter(),
                })
        },
    )
    def api_save_settings(self, request, model_id, object_id, policies):
        """
        Replace the policy assignments for (model_id, object_id) with
        *policies*, dropping duplicates by policy id, and save
        asynchronously via submit_slow_op.
        """
        def save_settings(o):
            o.save()
            return self.response({"status": True}, self.OK)

        o = ValidationPolicySettings.objects.filter(
            model_id=model_id, object_id=object_id).first()
        seen = set()
        ps = []
        for p in policies:
            # First occurrence of each policy wins
            if p["policy"].id in seen:
                continue
            ps += [
                ValidationPolicyItem(policy=p["policy"], is_active=p["is_active"])
            ]
            seen.add(p["policy"].id)
        if o:
            o.policies = ps
        else:
            o = ValidationPolicySettings(model_id=model_id, object_id=object_id, policies=ps)
        self.submit_slow_op(request, save_settings, o)
def test_boolean_parameter_error(raw, config):
    """BooleanParameter(**config).clean(raw) must raise InterfaceTypeError.

    Fix: the original wrapped the call in `assert`, which is dead code inside
    a `pytest.raises` block — the assertion is never meaningfully evaluated
    (if clean() raises, control leaves the block; if it returns a falsy value,
    the AssertionError would mask the real failure mode). The bare call is
    the correct idiom.
    """
    with pytest.raises(InterfaceTypeError):
        BooleanParameter(**config).clean(raw)
class MetricSettingsApplication(ExtDocApplication):
    """
    MetricSettings application.

    GET/POST endpoints to read and replace the metric sets attached to a
    (model_id, object_id) pair, plus a trace endpoint for effective
    settings resolution.
    """
    title = "Metric Settings"
    model = MetricSettings

    @view("^(?P<model_id>[^/]+)/(?P<object_id>[^/]+)/settings/$",
          method=["GET"], access="read", api=True)
    def api_get_settings(self, request, model_id, object_id):
        """
        Return configured metric sets as a list of
        {metric_set, metric_set__label, is_active} dicts ([] when unset).
        """
        o = MetricSettings.objects.filter(model_id=model_id,
                                          object_id=object_id).first()
        if o:
            return [{
                "metric_set": str(ms.metric_set.id),
                "metric_set__label": ms.metric_set.name,
                "is_active": ms.is_active
            } for ms in o.metric_sets]
        else:
            return []

    @view("^(?P<model_id>[^/]+)/(?P<object_id>[^/]+)/settings/$",
          # NOTE(review): mutating POST with access="read" — likely should
          # be "write"; confirm against permission model.
          method=["POST"], access="read", api=True,
          validate={
              "metric_sets": DictListParameter(
                  attrs={
                      "metric_set": DocumentParameter(MetricSet),
                      "is_active": BooleanParameter()
                  })
          })
    def api_save_settings(self, request, model_id, object_id, metric_sets):
        """
        Replace the object's metric set list with *metric_sets*, dropping
        duplicate metric set ids (first occurrence wins), then persist
        asynchronously via submit_slow_op.
        """
        def save_settings(o):
            # Deferred save executed by the slow-op machinery
            o.save()
            return self.response({"status": True}, self.OK)

        o = MetricSettings.objects.filter(model_id=model_id,
                                          object_id=object_id).first()
        seen = set()  # metric set ids already added
        mset = []
        for ms in metric_sets:
            if ms["metric_set"].id in seen:
                continue  # skip duplicate metric set references
            mset += [
                MetricSettingsItem(metric_set=ms["metric_set"],
                                   is_active=ms["is_active"])
            ]
            seen.add(ms["metric_set"].id)
        if o:
            o.metric_sets = mset
        else:
            # No settings document yet — create one
            o = MetricSettings(model_id=model_id, object_id=object_id,
                               metric_sets=mset)
        self.submit_slow_op(request, save_settings, o)

    @view("^(?P<model_id>[^/]+)/(?P<object_id>[^/]+)/effective/trace/$",
          method=["GET"], access="read", api=True)
    def api_trace_effective(self, request, model_id, object_id):
        """
        Trace the effective metric settings for the referenced object,
        returning one flat dict per metric across all effective settings.
        """
        # get_object() resolves the actual referenced object from
        # (model_id, object_id); None when it no longer exists
        o = MetricSettings(model_id=model_id, object_id=object_id).get_object()
        if not o:
            return self.response_not_found()
        r = []
        for es in MetricSettings.get_effective_settings(o, trace=True,
                                                        recursive=True):
            for m in es.metrics:
                r += [{
                    "metric": m.metric or None,
                    "metric_type": m.metric_type.name,
                    "is_active": es.is_active,
                    "probe": es.probe.name if es.probe else None,
                    "interval": es.interval if es.interval else None,
                    "thresholds": m.thresholds,
                    "handler": es.handler,
                    "config": es.config,
                    "errors": es.errors,
                    "traces": es.traces
                }]
        return r
def cleaned_query(self, q):
    """
    Normalize an ExtJS-style request query dict *q* into ORM lookup
    kwargs *nq*.

    Handles: merging of repeated ``__in`` parameters, ``__exists`` ->
    negated ``__isnull`` translation, skipping of pagination/control
    params, ``__referred`` raw-SQL subquery lookups, custom
    ``lookup_<op>`` hooks, foreign-key dereferencing and per-field value
    cleaning. Relies on instance attributes (in_param,
    rx_oper_splitter, ignored_params, fk_fields, clean_fields, ...)
    defined on the enclosing application class.
    """
    nq = {}
    # Work on a copy: keys are merged/deleted below
    q = q.copy()
    # Extract IN
    # extjs not working with same parameter name in query
    # Collapse "<field><suffix><in_param>" variants into a single
    # comma-joined "<field><in_param>" value
    for p in list(q.keys()):
        if p.endswith(self.in_param):
            match = self.rx_oper_splitter.match(p)
            if match:
                field = self.rx_oper_splitter.match(p).group("field") + self.in_param
                if field not in q:
                    q[field] = "%s" % (q[p])
                else:
                    q[field] += ",%s" % (q[p])
                del q[p]
    for p in q:
        if p.endswith("__exists"):
            # "__exists" is the logical negation of "__isnull"
            v = BooleanParameter().clean(q[p])
            nq[p.replace("__exists", "__isnull")] = not v
            continue
        if "__" in p:
            np, lt = p.split("__", 1)  # field name, lookup type
        else:
            np, lt = p, None
        # Skip ignored params
        if np in self.ignored_params or p in (
            self.limit_param,
            self.page_param,
            self.start_param,
            self.format_param,
            self.sort_param,
            self.group_param,
            self.query_param,
            self.only_param,
        ):
            continue
        v = q[p]
        if self.in_param in p:
            # IN lookups arrive as a comma-separated string
            v = v.split(",")
        if v == "\x00":
            # NUL sentinel means "no value" — presumably sent by the UI
            # for empty combo selections; TODO confirm
            v = None
        # Pass through interface cleaners
        if lt == "referred":
            # Unroll __referred: value is "<app>__<field>"; restrict to
            # pks referenced from that model's column via raw SQL
            app, fn = v.split("__", 1)
            model = self.site.apps[app].model
            if not is_document(model):
                extra_where = '%s."%s" IN (SELECT "%s" FROM %s)' % (
                    self.model._meta.db_table,
                    self.model._meta.pk.name,
                    model._meta.get_field(fn).attname,
                    model._meta.db_table,
                )
                # Raw WHERE clauses accumulate under the None key
                if None in nq:
                    nq[None] += [extra_where]
                else:
                    nq[None] = [extra_where]
            continue
        elif lt and hasattr(self, "lookup_%s" % lt):
            # Custom lookup
            getattr(self, "lookup_%s" % lt)(nq, np, v)
            continue
        elif np in self.fk_fields and lt:
            # dereference
            try:
                nq[np] = self.fk_fields[np].objects.get(**{lt: v})
            except self.fk_fields[np].DoesNotExist:
                nq[np] = 0  # False search
            continue
        elif np in self.clean_fields and self.in_param in p:
            # Clean each element of the IN list with the field's cleaner
            v = ListOfParameter(self.clean_fields[np]).clean(v)
        elif np in self.clean_fields:
            # @todo: Check for valid lookup types
            v = self.clean_fields[np].clean(v)
        # Write back
        nq[p] = v
    return nq
class WorkflowApplication(ExtDocApplication):
    """
    Workflow application.

    Provides endpoints to read a workflow's full configuration (states +
    transitions), save it back in one POST, and clone a workflow with
    all of its states and transitions.
    """
    title = _("Workflows")
    menu = [_("Setup"), _("Workflow")]
    model = Workflow

    # Placeholder id meaning "create a new workflow" in api_save_config
    NEW_ID = "000000000000000000000000"

    @view(r"^(?P<id>[0-9a-f]{24})/config/", method=["GET"], access="write",
          api=True)
    def api_get_config(self, request, id):
        """
        Return the workflow's editable configuration: core fields plus
        serialized lists of its states and transitions.
        """
        wf = self.get_object_or_404(Workflow, id=id)
        r = {
            "id": str(wf.id),
            "name": wf.name,
            "is_active": wf.is_active,
            "description": wf.description,
            "states": [],
            "transitions": [],
        }
        for state in State.objects.filter(workflow=wf.id):
            sr = {
                "id": str(state.id),
                "name": state.name,
                "description": state.description,
                "is_default": state.is_default,
                "is_productive": state.is_productive,
                "update_last_seen": state.update_last_seen,
                "ttl": state.ttl,
                "update_expired": state.update_expired,
                "on_enter_handlers": state.on_enter_handlers,
                "job_handler": state.job_handler,
                "on_leave_handlers": state.on_leave_handlers,
                "bi_id": str(state.bi_id) if state.bi_id else None,
                "x": state.x,
                "y": state.y,
            }
            r["states"] += [sr]
        for t in Transition.objects.filter(workflow=wf.id):
            tr = {
                "id": str(t.id),
                # States are referenced by name in the config payload
                "from_state": t.from_state.name,
                "to_state": t.to_state.name,
                "is_active": t.is_active,
                "event": t.event,
                "label": t.label,
                "description": t.description,
                "enable_manual": t.enable_manual,
                "handlers": t.handlers,
                "vertices": [{
                    "x": v.x,
                    "y": v.y
                } for v in t.vertices],
                "bi_id": str(t.bi_id) if t.bi_id else None,
            }
            r["transitions"] += [tr]
        return r

    @view(
        r"^(?P<id>[0-9a-f]{24})/config/",
        method=["POST"],
        access="write",
        api=True,
        validate={
            "name": StringParameter(),
            "description": StringParameter(default=""),
            "is_active": BooleanParameter(default=False),
            "states": DictListParameter(
                attrs={
                    "id": StringParameter(default=""),
                    "name": StringParameter(),
                    "description": StringParameter(default=""),
                    "is_default": BooleanParameter(default=False),
                    "is_productive": BooleanParameter(default=False),
                    "update_last_seen": BooleanParameter(default=False),
                    "ttl": IntParameter(default=0),
                    "update_expired": BooleanParameter(default=False),
                    "on_enter_handlers": StringListParameter(),
                    "job_handler": StringParameter(required=False),
                    "on_leave_handlers": StringListParameter(),
                    "x": IntParameter(),
                    "y": IntParameter(),
                }),
            "transitions": DictListParameter(
                attrs={
                    "id": StringParameter(default=""),
                    "from_state": StringParameter(),
                    "to_state": StringParameter(),
                    "is_active": BooleanParameter(default=False),
                    "event": StringParameter(),
                    "label": StringParameter(),
                    "description": StringParameter(default=""),
                    "enable_manual": BooleanParameter(),
                    "handlers": StringListParameter(),
                    "vertices": DictListParameter(attrs={
                        "x": IntParameter(),
                        "y": IntParameter()
                    }),
                }),
        },
    )
    def api_save_config(self, request, id, name, description, states,
                        transitions, **kwargs):
        """
        Create (id == NEW_ID) or update a workflow and synchronize its
        states and transitions with the posted configuration, deleting
        any states/transitions not present in the payload.

        NOTE(review): "is_active" is validated but lands in **kwargs and
        is never written to wf — confirm whether it should be saved.
        """
        if id == self.NEW_ID:
            wf = Workflow()
        else:
            wf = self.get_object_or_404(Workflow, id=id)
        # Update workflow
        wf.name = name
        wf.description = description
        wf.save()
        # Get current state
        current_states = {}  # str(id) -> state
        for st in State.objects.filter(workflow=wf.id):
            current_states[str(st.id)] = st
        # Synchronize states
        seen_states = set()
        state_names = {}  # name -> state
        for s in states:
            state = None
            if s["id"]:
                # Existing state
                seen_states.add(s["id"])
                state = current_states.get(s["id"])
            # NOTE(review): s is a dict, so hasattr(s, "workflow") is
            # always False and "workflow" is always (re)assigned —
            # presumably "workflow" not in s was intended; net effect
            # today is identical. Confirm before changing.
            if not hasattr(s, "workflow"):
                s["workflow"] = wf.id
            # Update state attributes
            if not state:
                state = State()
                changed = True
            else:
                changed = False
            for k in s:
                if k in ("id", "bi_id"):
                    continue  # identity fields are never copied
                if getattr(state, k) != s[k]:
                    setattr(state, k, s[k])
                    changed = True
            if changed:
                state.save()
            state_names[state.name] = state
        # Get current transitions
        current_transitions = {}  # str(id) -> transition
        for ct in Transition.objects.filter(workflow=wf.id):
            current_transitions[str(ct.id)] = ct
        # Synchronize transitions
        seen_transitions = set()
        for t in transitions:
            transition = None
            if t["id"]:
                # Existing transitions
                seen_transitions.add(t["id"])
                transition = current_transitions.get(t["id"])
            # Update transition attributes
            if not transition:
                transition = Transition(workflow=wf)
                changed = True
            else:
                changed = False
            for k in t:
                if k in ("id", "bi_id"):
                    continue  # identity fields are never copied
                elif k in ("from_state", "to_state"):
                    # Payload references states by name; map to documents
                    # (raises KeyError for an unknown state name)
                    t[k] = state_names[t[k]]
                elif k == "vertices":
                    t[k] = [
                        TransitionVertex(x=vx["x"], y=vx["y"])
                        for vx in t[k]
                    ]
                old = getattr(transition, k)
                if old != t[k]:
                    setattr(transition, k, t[k])
                    changed = True
            if changed:
                transition.save()
        # Delete hanging transitions
        for tid in set(current_transitions) - seen_transitions:
            current_transitions[tid].delete()
        # Delete hanging state
        for sid in set(current_states) - seen_states:
            current_states[sid].delete()

    # Extracts the copy counter from names like "... (Copy #3)"
    rx_clone_name = re.compile(r"\(Copy #(\d+)\)$")

    @view(r"^(?P<id>[0-9a-f]{24})/clone/", method=["POST"], access="write",
          api=True)
    def api_clone(self, request, id):
        """
        Deep-copy a workflow with all of its states and transitions under
        a generated "<name> (Copy #N)" name; returns the new workflow id.
        """
        wf = self.get_object_or_404(Workflow, id=id)
        # Get all clone names
        m = 0  # highest copy number seen so far
        # NOTE(review): the search regex has no space before "(Copy"
        # while the generated name below is "%s (Copy #%d)" — existing
        # clones may never match, so N can restart at 1 and collide.
        # Confirm against stored names before fixing.
        for d in Workflow._get_collection().find(
            {
                "name": {
                    "$regex": re.compile(
                        r"^%s\(Copy #\d+\)$" % re.escape(wf.name))
                }
            },
            {
                "_id": 0,
                "name": 1
            },
        ):
            match = self.rx_clone_name.search(d["name"])
            if match:
                n = int(match.group(1))
                if n > m:
                    m = n
        # Generate name
        name = "%s (Copy #%d)" % (wf.name, m + 1)
        # Clone workflow
        new_wf = deepcopy(wf)
        new_wf.name = name
        # Clearing id/bi_id makes save() insert a new document
        new_wf.id = None
        new_wf.bi_id = None
        new_wf.save()
        # Clone states
        smap = {}  # old id -> new state
        for state in State.objects.filter(workflow=wf.id):
            new_state = deepcopy(state)
            new_state.workflow = new_wf
            new_state.id = None
            new_state.bi_id = None
            new_state.save()
            smap[state.id] = new_state
        # Clone transitions
        for transition in Transition.objects.filter(workflow=wf.id):
            new_transition = deepcopy(transition)
            new_transition.workflow = new_wf
            # Re-point endpoints at the cloned states
            new_transition.from_state = smap[transition.from_state.id]
            new_transition.to_state = smap[transition.to_state.id]
            new_transition.id = None
            new_transition.bi_id = None
            new_transition.save()
        #
        return {"id": str(new_wf.id)}