def get_containers_by_root(root_id=None):
    """
    Getting all containers from root object
    # @todo containers only with coordinates (Filter by models)
    # @todo containers only
    # from noc.sa.models.managedobject import ManagedObject
    # from noc.inv.models.object import Object
    # If None - all objects

    Breadth-first walk of the container hierarchy starting at ``root_id``;
    returns the set of ObjectIds of every nested object, up to 8 levels deep.

    NOTE(review): despite "If None - all objects" above, root_id=None makes
    Object.get_by_id() return None and ``root.id`` below raise
    AttributeError — confirm intended behavior.
    """
    root = Object.get_by_id(root_id)
    # Bulk read-only traversal: prefer a secondary replica when available
    coll = Object._get_collection().with_options(
        read_preference=ReadPreference.SECONDARY_PREFERRED)
    work_set = {root.id}  # current BFS frontier (ids of the last level)
    os = set()  # accumulated result: all descendant ids seen so far
    kk = None  # frontier size on the previous iteration
    for r in range(1, 9):  # hard depth limit: 8 levels
        # Next frontier: everything whose container is in the current frontier
        work_set = set(
            o["_id"] for o in coll.find({"container": {
                "$in": list(work_set)
            }}, {"_id": 1}))
        # work_set |= work_set.union(os)
        os |= work_set
        # Stop early when the frontier size stops changing between levels
        if len(work_set) == kk:
            break
        kk = len(work_set)
    return os
def connect_p2p(self, o1: Object, c1: str, o2: Object, c2: str):
    """
    Create P2P connection o1:c1 - o2:c2

    On success both endpoints get a discovery audit-log entry, and any
    pending planned disconnect for the same pair is cancelled.
    Connection errors are logged, not raised.
    """
    try:
        created = o1.connect_p2p(c1, o2, c2, {}, reconnect=True)
        if created:
            # Audit-log the connection on both ends
            for endpoint, msg in (
                (o1, "Connect %s -> %s:%s" % (c1, o2, c2)),
                (o2, "Connect %s -> %s:%s" % (c2, o1, c1)),
            ):
                endpoint.log(
                    msg,
                    system="DISCOVERY",
                    managed_object=self.object,
                    op="CONNECT",
                )
            c_name = o2.model.get_model_connection(c2)  # If internal_name use
            key = (o2, c_name.name, o1, c1)
            if key in self.to_disconnect:
                # Remove if connection on system
                self.to_disconnect.remove(key)
    except ConnectionError as e:
        self.logger.error("Failed to connect: %s", e)
def fix():
    """Re-save every object carrying a data.geopoint.x value."""
    query = {"data.geopoint.x": {"$exists": True}}
    for doc in Object._get_collection().find(query, {"_id": 1}):
        # Reload through the document layer so save() side effects run
        Object.get_by_id(doc["_id"]).save()
def connect_twinax(self, o1: Object, c1: str, o2: Object, c2: str):
    """
    Connect twinax object o1 and virtual connection c1 to o2:c2

    The virtual name c1 maps to one of two real connections
    (twinax data "connection1"/"connection2"); the first free one
    is used. A no-op when already connected to o2:c2.
    """
    # Resolve virtual name c1 to the real connection names
    real_names = [
        o1.get_data("twinax", "connection%d" % i) for i in range(1, 3)
    ]
    available = []
    for rn in real_names:
        cn, peer, peer_conn = o1.get_p2p_connection(rn)
        if not cn:
            available.append(rn)
        elif peer.id == o2.id and peer_conn == c2:
            # Already connected
            return
    if not available:
        self.logger.error("Twinax has no free connections")
        return
    # Connect first free to o2:c2
    chosen = available[0]
    self.logger.info("Using twinax connection '%s' instead of '%s'", chosen, c1)
    self.connect_p2p(o1, chosen, o2, c2)
def get_data(self, request, sel=None):
    """
    Build the report dataset: one row per managed object (or per linked
    inventory object), with vendor/platform/version/serial columns.

    :param request: HTTP request (used for access-domain filtering)
    :param sel: selector whose Q expression picks the managed objects
    :returns: dataset via self.from_dataset
    """
    qs = ManagedObject.objects
    if not request.user.is_superuser:
        # Restrict non-superusers to their administrative domains
        qs = ManagedObject.objects.filter(
            administrative_domain__in=UserAccess.get_domains(request.user))
    # Get all managed objects by selector
    mos_list = qs.filter(sel.Q)
    columns = [
        _("Managed Objects"),
        _("Address"),
        _("Vendor"),
        _("Platform"),
        _("HW Revision"),
        _("SW Version"),
        _("Serial"),
    ]
    data = []
    for mo in mos_list:
        q = Object._get_collection().count_documents(
            {"data.management.managed_object": {
                "$in": [mo.id]
            }})
        if q == 0:
            # No linked inventory object: report ManagedObject attributes.
            # Fixed: the original appended a trailing extra None here,
            # producing 8 values for the 7 declared columns.
            data += [[
                mo.name,
                mo.address,
                mo.vendor or None,
                mo.platform.full_name if mo.platform else None,
                mo.get_attr("HW version") or None,
                mo.version.version if mo.version else None,
                mo.get_attr("Serial Number") or None,
            ]]
        else:
            for x in Object._get_collection().find(
                    {"data.management.managed_object": {
                        "$in": [mo.id]
                    }}):
                data += [[
                    x["name"],
                    mo.address,
                    mo.vendor or None,
                    mo.platform.full_name if mo.platform else None,
                    mo.get_attr("HW version") or None,
                    mo.version.version if mo.version else None,
                    # Fixed: guard the nested lookup — the original raised
                    # KeyError when asset/serial was absent.
                    # NOTE(review): assumes dict-style data schema — confirm
                    x.get("data", {}).get("asset", {}).get("serial"),
                ]]
    return self.from_dataset(title=self.title, columns=columns, data=data, enumerate=True)
def fix():
    """Re-save every object with a list-style geopoint "x" data attribute."""
    query = {
        "data": {
            "$elemMatch": {
                "interface": "geopoint",
                "attr": "x"
            }
        }
    }
    for doc in Object._get_collection().find(query, {"_id": 1}):
        # Reload through the document layer so save() side effects run
        Object.get_by_id(doc["_id"]).save()
def update_name(self, object: Object):
    """
    Rename *object* to its discovered name, saving and audit-logging
    the change. A no-op when the name is empty or already current.
    """
    new_name = self.get_name(object, self.object)
    if not new_name or new_name == object.name:
        return
    object.name = new_name
    self.logger.info("Changing name to '%s'", new_name)
    object.save()
    object.log(
        "Change name to '%s'" % new_name,
        system="DISCOVERY",
        managed_object=self.object,
        op="CHANGE",
    )
def _get_asset(o: Object):
    """
    Build a JSON-serializable asset tree for object *o*.

    Recurses into inner ("i") connections via
    ManagedObjectDataStream._get_asset; slot ("s") connections carry the
    mapped interface name when one exists.
    """
    def get_asset_data(data):
        # Fold the flat attr list into {interface: {attr: value}}
        rd = {}
        for d in data:
            if d.interface not in rd:
                rd[d.interface] = {}
            rd[d.interface][d.attr] = d.value
        return rd

    rev = o.get_data("asset", "revision")
    # Normalize the literal string "None" (bad legacy data) to empty
    if rev == "None":
        rev = ""
    r = {
        "id": str(o.id),
        "model": {
            "id": str(o.model.id),
            "name": str(o.model.name),
            "vendor": {"id": str(o.model.vendor.id), "name": str(o.model.vendor.name)},
            "labels": [str(t) for t in o.model.labels or []],
            # Alias
            "tags": [str(t) for t in o.model.labels or []],
        },
        "serial": o.get_data("asset", "serial") or "",
        "revision": rev,
        "data": get_asset_data(o.data),
        "slots": [],
    }
    # Connection name -> interface name, for slot annotation below
    if_map = {c.name: c.interface_name for c in o.connections if c.interface_name}
    for n in o.model.connections:
        if n.direction == "i":
            # Inner connection: recurse into the attached object, if any
            c, r_object, _ = o.get_p2p_connection(n.name)
            r["slots"] += [
                {
                    "name": n.name,
                    "direction": n.direction,
                    "protocols": [str(p) for p in n.protocols],
                }
            ]
            if c:
                r["slots"][-1]["asset"] = ManagedObjectDataStream._get_asset(r_object)
        elif n.direction == "s":
            # Slot connection: attach the mapped interface name when known
            r["slots"] += [
                {
                    "name": n.name,
                    "direction": n.direction,
                    "protocols": [str(p) for p in n.protocols],
                }
            ]
            if n.name in if_map:
                r["slots"][-1]["interface"] = if_map[n.name]
    return r
def api_create(self, request, model=None, name=None, srid=None, x=None, y=None):
    """
    Create a new geo-positioned object at (x, y) in projection *srid*,
    attached to the most suitable nearby container.

    Cable entries snap to the nearest PoP; other models snap to the
    nearest object on their own layer; nearby areas/cities win when
    closer. Returns {"id": <new object id>}.
    """
    # Find suitable container
    to_pop = model.name == "Ducts | Cable Entry"
    p = (x, y, srid)
    if to_pop:
        # Cable entries are attached to nearest PoP
        pop_layers = list(Layer.objects.filter(code__startswith="pop_"))
        np, npd = map.find_nearest_d(p, pop_layers)
    else:
        # Or to the objects on same layer
        layer = Layer.objects.get(code=model.get_data("geopoint", "layer"))
        np, npd = map.find_nearest_d(p, layer)
    # Check nearest area
    layer = Layer.objects.get(code="areas")
    ap, apd = map.find_nearest_d(p, layer)
    if ap and (not np or apd < npd):
        np, npd = ap, apd
    # Check nearest city
    layer = Layer.objects.get(code="cities")
    ap, apd = map.find_nearest_d(p, layer)
    if ap and (not np or apd < npd):
        np, npd = ap, apd
    # Get best nearest container
    # NOTE(review): np may still be None here if no candidate was found
    # on any layer, making np.layer / np.container raise — confirm callers
    # guarantee at least one nearby object
    if to_pop and np.layer.code.startswith("pop_"):
        container = np.id
    else:
        container = np.container
    # Create object with its geopoint data attrs
    o = Object(
        name=name,
        model=model,
        container=container,
        data=[
            ObjectAttr(scope="", interface="geopoint", attr="srid", value=srid),
            ObjectAttr(scope="", interface="geopoint", attr="x", value=x),
            ObjectAttr(scope="", interface="geopoint", attr="y", value=y),
        ],
    )
    o.save()
    return {"id": str(o.id)}
def location(self, id):
    """
    Return geo address for Managed Objects

    Splits the object's address text on commas, normalizes each piece,
    and distributes the results over two roughly equal strings.

    :param id: inventory Object id
    :returns: [location_1, location_2] (both "" when no address)
    """
    def chunkIt(seq, num):
        # Split *seq* into *num* chunks of (almost) equal size
        avg = len(seq) / float(num)
        out = []
        last = 0.0
        while last < len(seq):
            out.append(seq[int(last):int(last + avg)])
            last += avg
        return out

    location = []
    address = Object.get_by_id(id).get_address_text()
    if not address:
        return ["", ""]
    for part in address.split(","):
        # Fixed: the original called .decode("utf-8") on a str, which
        # raises AttributeError on Python 3 — str is already text
        adr = normalize_division(part.strip().lower())
        if None in adr and "" in adr:
            continue
        if None in adr:
            location += [adr[1].title().strip()]
        else:
            location += [' '.join(adr).title().strip()]
    if not location:
        # Robustness: every component was filtered out — the original
        # raised IndexError on chunkIt([], 2)[0]
        return ["", ""]
    res = chunkIt(location, 2)
    location_1 = ", ".join(res[0])
    location_2 = ", ".join(res[1])
    return [location_1, location_2]
def extract(self):
    """
    Stream inventory objects for BI extraction.

    Yields (bi_id, id, name, container bi_hash, address attr) tuples.
    """
    # Bulk read-only scan: prefer a secondary replica
    o = Object._get_collection().with_options(
        read_preference=ReadPreference.SECONDARY_PREFERRED)
    for obj in o.find(
        {},
        {
            "_id": 1,
            "bi_id": 1,
            "name": 1,
            "container": 1,
            "data.address.text": 1
        },
        no_cursor_timeout=True,
    ):
        # NOTE(review): obj["data"] raises KeyError for documents with no
        # data field, and address[0] is the whole attr sub-document rather
        # than its text value — likely should be the attr's value; confirm
        # the stored data schema (list of {interface, attr, value} docs
        # vs nested dict) before relying on this output
        address = [
            a for a in obj["data"]
            if a and a["interface"] == "address" and a["attr"] == "text"
        ]
        yield (
            obj["bi_id"],
            obj["_id"],
            obj.get("name", ""),
            bi_hash(obj["container"]) if obj.get("container") else "",
            address[0] if address else "",
        )
def f_object_location(cls, object):
    """
    Returns managed object location

    Walks up the container chain collecting container names until a
    container with an address text is found; returns the reversed,
    comma-joined path, or N/A when nothing can be resolved.
    """
    from noc.inv.models.object import Object

    if not object.container:
        metrics["error", ("type", "no_such_container")] += 1
        return _("N/A")
    path = []
    c = object.container
    while c:
        # NOTE(review): treats c.data as a dict keyed by interface name —
        # confirm the container data schema (some schemas store data as a
        # list of attr documents)
        if "address" in c.data:
            if c.data["address"]["text"]:
                path += [c.data["address"]["text"]]
            # Stop climbing once an address-bearing container is reached
            break
        if c.name:
            path += [c.name]
        c = c.container
        if c:
            # Re-fetch through the cache layer to get a full document
            c = Object.get_by_id(c.id)
    if not path:
        metrics["error", ("type", "no_such_path")] += 1
        return _("N/A")
    return ", ".join(reversed(path))
def get_layer_objects(self, layer, x0, y0, x1, y1, srid):
    """
    Extract GeoJSON from bounding box

    :param layer: Layer code
    :param x0: Left bound
    :param y0: Bottom bound
    :param x1: Right bound
    :param y1: Top bound
    :param srid: Target spatial reference id
    :returns: geojson.FeatureCollection ({} when the layer is unknown)
    """
    lr = Layer.get_by_code(layer)
    if not lr:
        return {}
    bbox = self.get_bbox(x0, y0, x1, y1, srid)
    features = [
        geojson.Feature(
            id=str(d["_id"]),
            geometry=self.transform(d["point"], self.db_proj, srid),
            properties={
                "object": str(d["_id"]),
                "label": d.get("name", "")
            },
        )
        for d in Object._get_collection().find(
            {
                "layer": lr.id,
                "point": {
                    "$geoWithin": {
                        "$geometry": bbox
                    }
                }
            },
            # Fixed: project "name" — the code reads d.get("name"), but the
            # original projected the unused "label" field, leaving every
            # feature label empty
            {
                "_id": 1,
                "point": 1,
                "name": 1
            },
        )
    ]
    return geojson.FeatureCollection(features=features, crs=srid)
def get_container(self, path):
    """
    Return (and cache) the container object for a " | "-separated path,
    creating missing containers (and their parents, recursively).
    """
    if path not in self.containers:
        parts = path.split(" | ")
        # Try to resolve an existing object first
        found = Object.get_by_path(parts, hints=self.mappings)
        if found:
            self.containers[path] = found
        else:
            self.logger.debug("Create container: %s", path)
            # Ensure the parent chain exists before creating this level
            if " | " in path:
                parent = self.get_container(path.rsplit(" | ", 1)[0])
            else:
                parent = None
            new_container = Object(
                name=parts[-1],
                container=parent.id if parent else None,
                model=self.get_model(self.CONTAINER_MODEL))
            self.containers[path] = new_container
            new_container.save()
    return self.containers[path]
def api_add_group(self, request, type, name, container=None, serial=None):
    """
    Create a new object of model *type* named *name*, optionally inside
    *container* and with an asset serial.

    :returns: new object id as str, or an error response
    """
    # Fixed: c was left unbound when container was None/empty, raising
    # NameError at Object(...) below — default to a top-level object
    c = None
    if is_objectid(container):
        parent = Object.get_by_id(container)
        if not parent:
            return self.response_not_found()
        c = parent.id
    elif container:
        # Non-empty but not a valid ObjectId
        return self.response_bad_request()
    m = ObjectModel.get_by_id(type)
    if not m:
        return self.response_not_found()
    o = Object(name=name, model=m, container=c)
    # Only models with a part number accept a serial
    if serial and m.get_data("asset", "part_no0"):
        o.set_data("asset", "serial", serial)
    o.save()
    o.log("Created", user=request.user.username, system="WEB", op="CREATE")
    return str(o.id)
def sync_object(cls, obj: Object) -> None:
    """
    Synchronize sensors with object model
    :param obj:
    :return:
    """
    # Index the sensors we already track, keyed by local id
    existing: Dict[str, Sensor] = {
        s.local_id: s for s in Sensor.objects.filter(object=obj.id)
    }
    # Modbus transport type from effective data, defaulting to RTU
    m_proto = [
        d.value for d in obj.get_effective_data()
        if d.interface == "modbus" and d.attr == "type"
    ] or ["rtu"]
    # Create new sensors
    for model_sensor in obj.model.sensors:
        if model_sensor.name in existing:
            # Known sensor: refresh its "seen" mark and keep it
            existing[model_sensor.name].seen("objectmodel")
            del existing[model_sensor.name]
            continue
        # logger.info("[%s|%s] Creating new sensor '%s'", obj.name if obj else "-", "-", sensor.name)
        new_sensor = Sensor(
            profile=SensorProfile.get_default_profile(),
            object=obj,
            local_id=model_sensor.name,
            units=model_sensor.units,
            label=model_sensor.description,
        )
        # Get sensor protocol
        if model_sensor.modbus_register:
            if not m_proto:
                continue
            new_sensor.protocol = "modbus_%s" % m_proto[0].lower()
            new_sensor.modbus_register = model_sensor.modbus_register
        elif model_sensor.snmp_oid:
            new_sensor.protocol = "snmp"
            new_sensor.snmp_oid = model_sensor.snmp_oid
        else:
            logger.info(
                "[%s|%s] Unknown sensor protocol '%s'",
                obj.name if obj else "-",
                "-",
                model_sensor.name,
            )
        new_sensor.save()
        new_sensor.seen("objectmodel")
    # Notify missed sensors
    for local_id in sorted(existing):
        existing[local_id].unseen(source="objectmodel")
def get_name(obj: Object, managed_object: Optional[Any] = None) -> str:
    """
    Generate discovered object's name

    The base name comes from the managed object; a "#<member>" suffix is
    appended for stack members. Returns None when no managed object is
    given (so no base name is available).
    """
    name = None
    if managed_object:
        name = managed_object.name
    sm = obj.get_data("stack", "member")
    # Fixed: guard name — the original did `name += ...` and raised
    # TypeError when called without a managed_object on a stack member
    if sm is not None and name is not None:
        # Stack member
        name += "#%s" % sm
    return name
def merge_data(self, o: ObjectM, data: List[Dict[str, Any]]):
    """
    Merge incoming *data* attrs into object *o*.

    Unchanged attrs are left alone; changed or new attrs are written via
    set_data; scoped attrs present on the object but absent from *data*
    are reset.

    # @todo add test to merge_data
    """
    # Existing scoped attrs: (interface, attr, scope) -> value
    r = {(attr.interface, attr.attr, attr.scope): attr.value for attr in o.data if attr.scope}
    self.logger.debug("Merge data object: %s, Data: %s", o.data, data)
    for d in data:
        # NOTE(review): the key reads d["scope"] directly while set_data
        # below falls back to self.system.name — confirm callers always
        # supply "scope"
        k = (d["interface"], d["attr"], d["scope"])
        if k in r and d["value"] == r[k]:
            # Unchanged — drop from the reset set and skip
            r.pop(k)
            continue
        if k in r:
            # Fixed: the original used the "%e" (float) directive on a
            # dict, which broke this log record; also merged the two
            # identical set_data calls from both branches
            self.logger.debug("[%s] Change data: %s", o, d)
            r.pop(k)
        else:
            self.logger.debug("[%s] Set new: %s", o, d)
        o.set_data(
            interface=d["interface"],
            key=d["attr"],
            value=d["value"],
            scope=d.get("scope", self.system.name),
        )
    # Anything left was not present in the incoming data — reset it
    for d in r:
        self.logger.debug("[%s] Reset data: %s", o, d)
        o.reset_data(*d)
def api_add_group(self, request, type, name, container=None, serial=None):
    """
    Create a new object of model *type* named *name* inside *container*
    (or the root container when none is given); returns the new id.
    """
    if container is None:
        parent = self.get_root()
    else:
        parent = self.get_object_or_404(Object, id=container)
    model = self.get_object_or_404(ObjectModel, id=type)
    obj = Object(name=name, model=model, container=parent.id)
    # Only models with a part number accept a serial
    if serial and model.get_data("asset", "part_no0"):
        obj.set_data("asset", "serial", serial)
    obj.save()
    obj.log("Created", user=request.user.username, system="WEB", op="CREATE")
    return str(obj.id)
def api_connect(
    self,
    request,
    object,
    name,
    remote_object,
    remote_name,
    cable: Optional[str] = None,
    reconnect=False,
):
    """
    Connect object:name to remote_object:remote_name, optionally through
    a newly created wire object built from the *cable* model name.

    :returns: True on success, or a JSON error payload on ConnectionError
    """
    lo: Object = self.get_object_or_404(Object, id=object)
    ro: Object = self.get_object_or_404(Object, id=remote_object)
    cable_o: Optional[Object] = None
    if cable:
        cable = ObjectModel.get_by_name(cable)
        # NOTE(review): no check that the cable model exists —
        # get_by_name returning None would fail in Object(...); confirm
        # callers always pass a valid model name
        cable_o = Object(
            name="Wire %s:%s <-> %s:%s" % (lo.name, name, ro.name, remote_name),
            model=cable,
            container=lo.container.id,
        )
        cable_o.save()
    # Fixed: removed stray debug print(lo, ro, cable_o)
    try:
        if cable_o:
            # Wire both ends to the first two connections of the cable
            c1, c2 = cable_o.model.connections[:2]
            # Fixed: the original debug call had no %s placeholders for
            # its arguments, so the record was malformed
            self.logger.debug("Wired connect %s:%s", c1, c2)
            lo.connect_p2p(name, cable_o, c1.name, {}, reconnect=reconnect)
            ro.connect_p2p(remote_name, cable_o, c2.name, {}, reconnect=reconnect)
            lo.save()
            ro.save()
        else:
            lo.connect_p2p(name, ro, remote_name, {}, reconnect=reconnect)
    except ConnectionError as e:
        self.logger.warning("Connection Error: %s", str(e))
        return self.render_json({"status": False, "text": str(e)})
    return True
def get_sensor_metrics(self):
    """
    Build the SNMP metric collection config for this object's sensors.

    Skips sensors without a profile, with collection disabled, or not in
    a productive state; emits Value and Status metrics for the rest and
    registers them in self.id_metrics / self.sensors_metrics.
    """
    metrics = []
    o = Object.get_managed(self.object).values_list("id")
    for s in (Sensor._get_collection().with_options(
            read_preference=ReadPreference.SECONDARY_PREFERRED).find(
                {
                    "object": {
                        "$in": list(o)
                    },
                    "snmp_oid": {
                        "$exists": True
                    }
                },
                {
                    "local_id": 1,
                    "profile": 1,
                    "state": 1,
                    "snmp_oid": 1,
                    "labels": 1,
                    "bi_id": 1
                },
            )):
        if not s.get("profile"):
            # Fixed: "name" is not in the projection, so s["name"] raised
            # KeyError here (and below) — log the local_id instead
            self.logger.debug("[%s] Sensor has no profile. Skipping", s["local_id"])
            continue
        pm: "SensorProfile" = SensorProfile.get_by_id(s["profile"])
        if not pm.enable_collect:
            continue
        state = State.get_by_id(s["state"])
        if not state.is_productive:
            self.logger.debug(
                "[%s] Sensor is not productive state. Skipping", s["local_id"])
            continue
        for mtype in ["Sensor | Value", "Sensor | Status"]:
            m_id = next(self.id_count)
            metric = MetricType.get_by_name(mtype)
            labels = [f'noc::sensor::{s["local_id"]}'] + s.get("labels", [])
            metrics += [{
                "id": m_id,
                "metric": metric.name,
                "labels": labels,
                "oid": s["snmp_oid"],
            }]
            self.id_metrics[m_id] = MetricConfig(metric, False, True, True, None)
            self.sensors_metrics[m_id] = int(s["bi_id"])
    return metrics
def extract(self):
    """
    Stream inventory objects for BI extraction.

    Yields (bi_id, id, name, container bi_hash, address text) tuples.
    """
    # Bulk read-only scan: prefer a secondary replica
    coll = Object._get_collection().with_options(
        read_preference=ReadPreference.SECONDARY_PREFERRED)
    fields = {
        "_id": 1,
        "bi_id": 1,
        "name": 1,
        "container": 1,
        "data.address.text": 1,
    }
    for doc in coll.find({}, fields, no_cursor_timeout=True):
        data = doc.get("data", {})
        if data and "address" in data:
            address = data["address"].get("text", "")
        else:
            address = ""
        container_hash = bi_hash(doc["container"]) if doc.get("container") else ""
        yield (doc["bi_id"], doc["_id"], doc.get("name", ""), container_hash, address)
def get_linked_pops(self):
    """
    Return the set of PoPs reachable over links from this PoP's objects.

    Only links whose remote side belongs to exactly one managed object
    are followed.
    """
    linked = set()
    self_objects = set(self.get_pop_objects())
    self_interfaces = set(i.id for i in Interface.objects.filter(managed_object__in=self_objects))
    for l in Link.objects.filter(interfaces__in=self_interfaces):
        # Fixed: the original built a generator here, which is always
        # truthy, so `if ri:` never filtered anything — materialize it
        ri = [i for i in l.interfaces if i.id not in self_interfaces]
        if ri:
            # Remote link: collect the remote managed objects
            ro = {i.managed_object.id for i in ri}
            if len(ro) == 1:
                for o in Object.get_managed(ro.pop()):
                    pop = o.get_pop()
                    if pop:
                        linked.add(pop)
    return linked
def _fix_global_lf_container():
    # Ensure the Global Lost&Found object is a top-level container
    if Object.objects.get(name="Global Lost&Found").container:
        logging.info("Global Lost&Found object not valid container - fix")
        # fix
        o = Object.objects.get(name="Global Lost&Found")
        o.container = None
        o.save()
    else:
        logging.info("Global Lost&Found object container is valid")


def fix():
    """
    Ensure exactly one valid top-level "Lost&Found" container exists:
    create it if missing, validate it if unique, or merge duplicates into
    the oldest one.
    """
    # check "Lost&Found" container
    logging.info("Checking Lost&Found object")
    lostfound_model = ObjectModel.objects.get(
        uuid="b0fae773-b214-4edf-be35-3468b53b03f2")
    lf = Object.objects.filter(model=lostfound_model.id).count()
    if lf == 0:
        # Create missed "Lost&Found"
        logging.info(" ... creating missed Lost&Found")
        Object(model=lostfound_model.id, name="Global Lost&Found", container=None).save()
    elif lf == 1:
        logging.info("Global Lost&Found object found")  # OK
        # check container
        _fix_global_lf_container()
    else:
        logging.info("Global Lost&Found object found greater that one!!!!")
        # merge Lost&found
        lfs = Object.objects.filter(model=lostfound_model.id).order_by("id")
        l0 = lfs[0]
        for l in lfs[1:]:
            for ls in Object.objects.filter(container=l.id):
                # Fixed: unicode() does not exist on Python 3 — use str()
                logging.info(" ... moving %s to primary Lost&Found", str(ls))
                # NOTE(review): assigns the object itself, not l0.id —
                # other fixers in this file assign .id; confirm which the
                # container field expects
                ls.container = l0
                ls.save()
            logging.info(" ... removing duplicated Lost&Found %s", l)
            l.delete()
        _fix_global_lf_container()
def get_data(self, **kwargs):
    """
    Report: object count per model, sorted by count descending.
    """
    self.model_name = {}  # oid -> name
    # Count objects grouped by model id
    counts = list(Object._get_collection().aggregate([{
        "$group": {
            "_id": "$model",
            "total": {
                "$sum": 1
            }
        }
    }]))
    model_ids = [row["_id"] for row in counts if row["_id"]]
    om_coll = ObjectModel._get_collection()
    om_names = {}
    # Resolve model names in chunks of 500 ids
    for i in range(0, len(model_ids), 500):
        chunk = model_ids[i:i + 500]
        for doc in om_coll.find({"_id": {
                "$in": chunk
        }}, {
                "_id": 1,
                "name": 1
        }):
            om_names[doc["_id"]] = doc["name"]
    rows = sorted(
        ([om_names[row["_id"]], row["total"]] for row in counts if row["_id"] in om_names),
        key=lambda x: -x[1],
    )
    return self.from_dataset(
        title=self.title,
        columns=[
            "Model",
            TableColumn("Count", format="numeric", align="right", total="sum")
        ],
        data=rows,
        enumerate=True,
    )
def cleaned_query(self, q):
    """
    Post-process the raw query dict: extract administrative-domain,
    selector and geo-address filters, delegate the rest to the parent,
    then fold the extracted filters back in as id restrictions.
    """
    geoaddr = q.pop("__geoaddress", None)
    if "administrative_domain" in q:
        # Expand to the domain and all nested domains
        ad = AdministrativeDomain.get_nested_ids(
            int(q["administrative_domain"]))
        if ad:
            del q["administrative_domain"]
    else:
        ad = None
    if "selector" in q:
        s = self.get_object_or_404(ManagedObjectSelector, id=int(q["selector"]))
        del q["selector"]
    else:
        s = None
    r = super().cleaned_query(q)
    if s:
        r["id__in"] = ManagedObject.objects.filter(s.Q)
    if ad:
        r["administrative_domain__in"] = ad
    if geoaddr:
        # geoaddr format: "<scope>:<query>"
        scope, query = geoaddr.split(":", 1)
        geocoder = geocoder_loader.get_class(scope)()
        addr_ids = {r.id for r in geocoder.iter_recursive_query(query)}
        addr_mo = set()
        for o in Object.iter_by_address_id(list(addr_ids), scope):
            addr_mo |= set(o.iter_managed_object_id())
            # If ManagedObject has container refer to Object
            addr_mo |= set(
                ManagedObject.objects.filter(
                    container__in=o.get_nested_ids()).values_list(
                        "id", flat=True))
        # Intersect with selector expression
        if "id__in" in r:
            addr_mo &= set(r["id__in"])
        r["id__in"] = list(addr_mo)
    return r
def fix_inv_lost_and_found(self):
    """
    Ensure exactly one inventory Lost&Found object exists: create it if
    missing, or merge duplicates into the oldest one.
    """
    from noc.inv.models.object import Object
    from noc.inv.models.objectmodel import ObjectModel

    lf_model = ObjectModel.objects.get(uuid="b0fae773-b214-4edf-be35-3468b53b03f2")
    self.info("Checking inventory Lost&Found")
    rc = Object.objects.filter(model=lf_model.id).count()
    if rc == 0:
        # Create missed l&f
        self.info(" ... creating missed Lost&Found")
        Object(model=lf_model, name="Global Lost&Found").save()
    elif rc == 1:
        return  # OK
    else:
        # Merge lost&founds into the oldest one
        lfs = Object.objects.filter(model=lf_model.id).order_by("id")
        r0 = lfs[0]
        for r in lfs[1:]:
            for o in Object.objects.filter(container=r.id):
                # Fixed: unicode() does not exist on Python 3 — use str()
                self.info(" ... moving %s to primary Lost&Found", str(o))
                o.container = r0.id
                o.save()
            self.info(" ... removing duplicated lost&found %s", r.uuid)
            r.delete()
def fix_inv_root(self):
    """
    Ensure exactly one inventory Root object exists: create it if
    missing, or merge duplicates into the oldest one.
    """
    from noc.inv.models.object import Object
    from noc.inv.models.objectmodel import ObjectModel

    root_model = ObjectModel.objects.get(uuid="0f1b7c90-c611-4046-9a83-b120377eb6e0")
    self.info("Checking inventory Root")
    rc = Object.objects.filter(model=root_model.id).count()
    if rc == 0:
        # Create missed root
        self.info(" ... creating missed root")
        Object(model=root_model, name="Root").save()
    elif rc == 1:
        return  # OK
    else:
        # Merge roots into the oldest one
        roots = Object.objects.filter(model=root_model.id).order_by("id")
        r0 = roots[0]
        for r in roots[1:]:
            for o in Object.objects.filter(container=r.id):
                # Fixed: unicode() does not exist on Python 3 — use str()
                self.info(" ... moving %s to primary root", str(o))
                o.container = r0.id
                o.save()
            self.info(" ... removing duplicated root %s", r)
            r.delete()
def api_create_ducts(self, request, id, ducts=None):
    """
    Create/update duct connections (and their conduits) between manhole
    *id* and the targets described in *ducts*; remove stale ducts.

    :param ducts: list of dicts with "target", "project_distance" and
        "conduits" entries
    :returns: {"status": True}
    """
    o = self.app.get_object_or_404(Object, id=id)
    conns = {}  # target -> connection
    for c, t, _ in o.get_genderless_connections("ducts"):
        conns[t] = c
    conduits = defaultdict(list)  # target -> conduits
    for c, t, _ in o.get_genderless_connections("conduits"):
        for cc, tt, _ in t.get_genderless_connections("conduits"):
            if tt.id != o.id:
                conduits[tt] += [t]
    left = set(conns)
    for cd in ducts:
        target = cd["target"]
        if target not in left:
            # New record
            o.connect_genderless(
                "ducts", target, "ducts",
                data={"project_distance": cd["project_distance"]},
                type="ducts")
        else:
            c = conns[target]
            # Updated
            if cd["project_distance"] != c.data.get("project_distance"):
                c.data["project_distance"] = cd["project_distance"]
                c.type = "ducts"
                c.save()
            left.remove(target)
        left_conduits = set(conduits[target])
        for cc in cd["conduits"]:
            if "id" not in cc or cc["id"] not in left_conduits:
                # Create new conduit
                conduit = Object(name=str(cc["n"]), model=self.conduits_model)
                conduit.save()
                # Connect to both manholes
                o.connect_genderless(
                    "conduits", conduit, "conduits",
                    data={
                        # Conduit position
                        "plan": {"x": cc["x"], "y": cc["y"]}
                    },
                    type="conduits")
                target.connect_genderless(
                    "conduits", conduit, "conduits",
                    data={
                        # @todo: Mirror position
                        # Conduit position
                        "plan": {"x": cc["x"], "y": cc["y"]}
                    },
                    type="conduits")
            else:
                # Change.
                conduit = cc["id"]
                # Fixed: typo get_genderless_connecitons (the correctly
                # spelled method is used elsewhere in this function)
                for ccc, ro, _ in conduit.get_genderless_connections("conduits"):
                    # Fixed: the original compared against a *shallow*
                    # copy of ccc.data, so mutating the nested "plan"
                    # dict mutated the copy too and the changed-data
                    # check was always False — compare values up front
                    if (ccc.data["plan"]["x"], ccc.data["plan"]["y"]) != (cc["x"], cc["y"]):
                        ccc.data["plan"]["x"] = cc["x"]
                        ccc.data["plan"]["y"] = cc["y"]
                        ccc.save()
                if "id" in cc:
                    left_conduits.remove(cc["id"])
        # Fixed: the original indexed the set (left_conduits[t]), which
        # raises TypeError — iterate the leftover conduits directly
        for stale in left_conduits:
            # Fixed: Python 2 print statement
            # NOTE(review): stale conduits are only reported, never
            # deleted — confirm whether deletion is intended here
            print("DEL", stale)
    # Deleted ducts: drop their conduits and the duct connection itself
    for x in left:
        for c, remote, _ in conns[x].get_genderless_connections("conduits"):
            remote.delete()
        conns[x].delete()
    return {"status": True}
def get_object(self, id=None):
    """
    Set self.root to the object with *id*, or to the default-named
    object when no id is given.
    """
    if id:
        self.root = Object.get_by_id(id=id)
    else:
        # Fall back to the configured default root name
        self.root = Object.objects.get(name=self.o_default_name)