class ASSet(NOCModel):
    """
    RPSL as-set object.

    Serializes to an RPSL "as-set:" block via :meth:`get_rpsl` and keeps the
    rendered text versioned in GridVCS, refreshing it on every save.
    """

    class Meta(object):
        verbose_name = "ASSet"
        verbose_name_plural = "ASSets"
        db_table = "peer_asset"
        app_label = "peer"

    name = models.CharField("Name", max_length=32, unique=True)
    project = models.ForeignKey(
        Project,
        verbose_name="Project",
        null=True,
        blank=True,
        related_name="asset_set",
        on_delete=models.CASCADE,
    )
    description = models.CharField("Description", max_length=64)
    members = models.TextField("Members", null=True, blank=True)
    rpsl_header = models.TextField("RPSL Header", null=True, blank=True)
    rpsl_footer = models.TextField("RPSL Footer", null=True, blank=True)
    tags = TagsField("Tags", null=True, blank=True)
    rpsl = GridVCSField("rpsl_asset")

    def __str__(self):
        return self.name

    @property
    def member_list(self):
        """
        Sorted, upper-cased member tokens parsed from the ``members`` text.

        Commas and line breaks are treated as whitespace separators.
        Returns an empty list when ``members`` is not set.
        """
        if self.members is None:
            return []
        normalized = self.members
        for separator in (",", "\n", "\r"):
            normalized = normalized.replace(separator, " ")
        return sorted(normalized.upper().split())

    def get_rpsl(self):
        """Render this as-set as formatted RPSL text."""
        lines = []
        if self.rpsl_header:
            lines.extend(self.rpsl_header.split("\n"))
        lines.append("as-set: %s" % self.name)
        lines.extend("members: %s" % member for member in self.member_list)
        if self.rpsl_footer:
            # Visual separator before the free-form footer
            lines.append("remark: %s" % ("-" * 72))
            lines.extend(self.rpsl_footer.split("\n"))
        return rpsl_format("\n".join(lines))

    def touch_rpsl(self):
        """Write freshly rendered RPSL to GridVCS, but only when it changed."""
        current = self.rpsl.read()
        rendered = self.get_rpsl()
        if current != rendered:
            self.rpsl.write(rendered)

    def on_save(self):
        self.touch_rpsl()
class Person(models.Model):
    """
    RPSL contact object.

    Serialized as an RPSL "person:" or "role:" object depending on ``type``
    and kept versioned in GridVCS, refreshed on save via :meth:`on_save`.
    """

    class Meta(object):
        verbose_name = "Person"
        verbose_name_plural = "Persons"
        db_table = "peer_person"
        app_label = "peer"

    nic_hdl = models.CharField("nic-hdl", max_length=64, unique=True)
    person = models.CharField("person", max_length=128)
    type = models.CharField(
        "type",
        max_length=1,
        default="P",
        choices=[
            ("P", "Person"),
            ("R", "Role")
        ]
    )
    address = models.TextField("address")
    phone = models.TextField("phone")
    fax_no = models.TextField("fax-no", blank=True, null=True)
    email = models.TextField("email")
    # FIX: explicit on_delete for consistency with the sibling peer models
    # (ASSet, Maintainer); CASCADE matches the pre-Django-2.0 implicit default,
    # so behavior is unchanged.
    rir = models.ForeignKey(RIR, verbose_name="RIR", on_delete=models.CASCADE)
    extra = models.TextField("extra", blank=True, null=True)
    rpsl = GridVCSField("rpsl_person")

    # FIX: was __unicode__, which Python 3 never calls (instances would fall
    # back to the default repr). Sibling models in this file define __str__.
    # The leading space in the format is preserved from the original —
    # NOTE(review): looks unintentional, confirm before removing.
    def __str__(self):
        return " %s (%s)" % (self.nic_hdl, self.person)

    def get_rpsl(self):
        """
        Render this contact as formatted RPSL text.

        Emits a "role:" object when ``type`` is "R", otherwise "person:".
        Multi-line fields are expanded into repeated attributes via
        rpsl_multiple.
        """
        s = []
        if self.type == "R":
            s += ["role: %s" % self.person]
        else:
            s += ["person: %s" % self.person]
        s += ["nic-hdl: %s" % self.nic_hdl]
        s += rpsl_multiple("address", self.address)
        s += rpsl_multiple("phone", self.phone)
        s += rpsl_multiple("fax-no", self.fax_no)
        s += rpsl_multiple("email", self.email)
        if self.extra:
            s += [self.extra]
        return rpsl_format("\n".join(s))

    def touch_rpsl(self):
        """Write rendered RPSL to GridVCS only when it differs from the stored copy."""
        c_rpsl = self.rpsl.read()
        n_rpsl = self.get_rpsl()
        if c_rpsl == n_rpsl:
            return  # Not changed
        self.rpsl.write(n_rpsl)

    def on_save(self):
        self.touch_rpsl()
class Maintainer(NOCModel):
    """
    RPSL mntner object.

    Serialized to an RPSL "mntner:" block and kept versioned in GridVCS,
    refreshed on save via :meth:`on_save`.
    """

    class Meta(object):
        verbose_name = "Maintainer"
        verbose_name_plural = "Maintainers"
        db_table = "peer_maintainer"
        app_label = "peer"

    maintainer = models.CharField("mntner", max_length=64, unique=True)
    description = models.CharField("description", max_length=64)
    password = models.CharField("Password", max_length=64, null=True, blank=True)
    rir = models.ForeignKey(RIR, verbose_name="RIR", on_delete=models.CASCADE)
    admins = models.ManyToManyField(Person, verbose_name="admin-c")
    extra = models.TextField("extra", blank=True, null=True)
    rpsl = GridVCSField("rpsl_maintainer")

    def __str__(self):
        return self.maintainer

    def get_rpsl(self):
        """
        Render this maintainer as formatted RPSL text.

        The password, when set, is emitted as an MD5-PW auth line.
        """
        s = []
        s += ["mntner: %s" % self.maintainer]
        s += ["descr: %s" % self.description]
        if self.password:
            s += ["auth: MD5-PW %s" % md5crypt(self.password)]
        # FIX: the RPSL attribute for administrative contacts is "admin-c"
        # (RFC 2622); "admins" is not a valid mntner attribute. The field's
        # own verbose_name ("admin-c") confirms the intent.
        s += ["admin-c: %s" % x.nic_hdl for x in self.admins.all()]
        s += ["mnt-by: %s" % self.maintainer]
        if self.extra:
            s += [self.extra]
        return rpsl_format("\n".join(s))

    def touch_rpsl(self):
        """Write rendered RPSL to GridVCS only when it differs from the stored copy."""
        c_rpsl = self.rpsl.read()
        n_rpsl = self.get_rpsl()
        if c_rpsl == n_rpsl:
            return  # Not changed
        self.rpsl.write(n_rpsl)

    def on_save(self):
        self.touch_rpsl()
class Object(Document):
    """
    Inventory object.

    MongoEngine document representing a physical/logical inventory item
    (chassis, card, container, PoP, ...). Carries per-interface attribute
    data (``data``), optional geo position (``layer``/``point``) and
    point-to-point / genderless connections stored in ObjectConnection.
    """
    meta = {
        "collection": "noc.objects",
        "strict": False,
        "auto_create_index": False,
        "indexes": [
            "data",
            "container",
            ("name", "container"),
            ("data.interface", "data.attr", "data.value"),
        ],
    }
    name = StringField()
    model = PlainReferenceField(ObjectModel)
    data = ListField(EmbeddedDocumentField(ObjectAttr))
    # Parent container (self-reference); None for top-level objects
    container = PlainReferenceField("self", required=False)
    comment = GridVCSField("object_comment")
    # Map
    layer = PlainReferenceField(Layer)
    point = PointField(auto_index=True)
    # Additional connection data
    connections = ListField(EmbeddedDocumentField(ObjectConnectionData))
    # Labels
    labels = ListField(StringField())
    effective_labels = ListField(StringField())
    # Integration with external NRI and TT systems
    # Reference to remote system object has been imported from
    remote_system = ReferenceField(RemoteSystem)
    # Object id in remote system
    remote_id = StringField()
    # Object id in BI
    bi_id = LongField(unique=True)

    _id_cache = cachetools.TTLCache(maxsize=1000, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=1000, ttl=60)

    # Genderless connection types whose ObjectConnection documents are
    # re-saved when this object's coordinates change (see on_save)
    REBUILD_CONNECTIONS = ["links", "conduits"]

    def __str__(self):
        return smart_text(self.name or self.id)

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"), lock=lambda _: id_lock)
    def get_by_id(cls, id) -> Optional["Object"]:
        """Get Object by id; cached for 60s."""
        return Object.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"), lock=lambda _: id_lock)
    def get_by_bi_id(cls, id) -> Optional["Object"]:
        """Get Object by BI id; cached for 60s."""
        return Object.objects.filter(bi_id=id).first()

    def iter_changed_datastream(self, changed_fields=None):
        """
        Yield (datastream name, id) pairs affected by a change of this object.

        Emits the directly managed object when set; otherwise falls back to
        managed objects reachable through outer connections.
        """
        if config.datastream.enable_managedobject:
            if self.data and self.get_data("management", "managed_object"):
                yield "managedobject", self.get_data("management", "managed_object")
            else:
                for _, o, _ in self.iter_outer_connections():
                    if o.data and o.get_data("management", "managed_object"):
                        yield "managedobject", o.get_data("management", "managed_object")

    def clean(self):
        # Recalculate map position before every save
        self.set_point()

    def set_point(self):
        """Recalculate ``layer``/``point`` from geopoint data; clears both when unavailable."""
        from noc.gis.map import map

        # Reset previous data
        self.layer = None
        self.point = None
        # Get points
        x, y, srid = self.get_data_tuple("geopoint", ("x", "y", "srid"))
        if x is None or y is None:
            return  # No point data
        # Get layer
        layer_code = self.model.get_data("geopoint", "layer")
        if not layer_code:
            return
        layer = Layer.get_by_code(layer_code)
        if not layer:
            return
        # Update actual data
        self.layer = layer
        self.point = map.get_db_point(x, y, srid=srid)

    def on_save(self):
        """
        Post-save hook: propagate coordinates to nested managed objects,
        rebuild affected connection layers, refresh PoP links on container
        moves and synchronize model-declared sensors.
        """
        def get_coordless_objects(o):
            # Recursively collect ids of nested objects without own coordinates
            r = {str(o.id)}
            for co in Object.objects.filter(container=o.id):
                cx, cy = co.get_data_tuple("geopoint", ("x", "y"))
                if cx is None and cy is None:
                    r |= get_coordless_objects(co)
            return r

        x, y = self.get_data_tuple("geopoint", ("x", "y"))
        if x is not None and y is not None:
            # Rebuild connection layers
            for ct in self.REBUILD_CONNECTIONS:
                for c, _, _ in self.get_genderless_connections(ct):
                    c.save()
            # Update nested objects
            from noc.sa.models.managedobject import ManagedObject

            mos = get_coordless_objects(self)
            if mos:
                ManagedObject.objects.filter(container__in=mos).update(
                    x=x, y=y, default_zoom=self.layer.default_zoom if self.layer else DEFAULT_ZOOM)
        if self._created:
            if self.container:
                pop = self.get_pop()
                if pop:
                    pop.update_pop_links()
        # Changed container
        elif hasattr(self, "_changed_fields") and "container" in self._changed_fields:
            # Old pop
            old_container_id = getattr(self, "_old_container", None)
            old_pop = None
            if old_container_id:
                c = Object.get_by_id(old_container_id)
                while c:
                    if c.get_data("pop", "level"):
                        old_pop = c
                        break
                    c = c.container
            # New pop
            new_pop = self.get_pop()
            # Check if pop moved
            if old_pop != new_pop:
                if old_pop:
                    old_pop.update_pop_links()
                if new_pop:
                    new_pop.update_pop_links()
        if self.model.sensors:
            self._sync_sensors()

    @cachetools.cached(_path_cache, key=lambda x: str(x.id), lock=id_lock)
    def get_path(self) -> List[str]:
        """
        Returns list of parent segment ids (root first, self last).
        :return:
        """
        if self.container:
            return self.container.get_path() + [self.id]
        return [self.id]

    def get_nested_ids(self):
        """
        Return id of this and all nested object
        :return:
        """
        # $graphLookup hits 100Mb memory limit. Do not use it
        seen = {self.id}
        wave = {self.id}
        max_level = 4  # bounded BFS depth over the container tree
        coll = Object._get_collection()
        for _ in range(max_level):
            # Get next wave
            wave = (set(d["_id"] for d in coll.find({"container": {
                "$in": list(wave)
            }}, {"_id": 1})) - seen)
            if not wave:
                break
            seen |= wave
        return list(seen)

    def get_data(self, interface: str, key: str, scope: Optional[str] = None) -> Any:
        """
        Get a single data value for (interface, key), optionally bound to a scope.

        Constant attributes are resolved against the object model; variable
        ones against this object's ``data`` items. Returns None when unset.
        """
        attr = ModelInterface.get_interface_attr(interface, key)
        if attr.is_const:
            # Lookup model
            return self.model.get_data(interface, key)
        for item in self.data:
            if item.interface == interface and item.attr == key:
                if not scope or item.scope == scope:
                    return item.value
        return None

    def get_data_dict(self, interface: str, keys: Iterable,
                      scope: Optional[str] = None) -> Dict[str, Any]:
        """
        Get multiple keys from single interface. Returns dict with values
        for every given key. If key is missed, return None value

        :param interface:
        :param keys: Iterable contains key names
        :param scope:
        :return:
        """
        kset = set(keys)
        r = {k: None for k in kset}
        for item in self.data:
            if item.interface == interface and item.attr in kset:
                if not scope or item.scope == scope:
                    r[item.attr] = item.value
        return r

    def get_data_tuple(self, interface: str, keys: Union[List, Tuple],
                       scope: Optional[str] = None) -> Tuple[Any, ...]:
        """
        Get multiple keys from single interface. Returns tuple with values
        for every given key. If key is missed, return None value

        :param interface:
        :param keys: List or tuple with key names
        :param scope:
        :return:
        """
        r = self.get_data_dict(interface, keys, scope)
        return tuple(r.get(k) for k in keys)

    def get_effective_data(self) -> List[ObjectAttr]:
        """
        Return effective object data, including the model's defaults
        :return:
        """
        seen: Set[Tuple[str, str, str]] = set()  # (interface, attr, scope)
        r: List[ObjectAttr] = []
        # Object attributes
        for item in self.data:
            k = (item.interface, item.attr, item.scope or "")
            if k in seen:
                continue
            r += [item]
            seen.add(k)
        # Model attributes
        for i in self.model.data:
            for a in self.model.data[i]:
                k = (i, a, "")
                if k in seen:
                    continue
                r += [
                    ObjectAttr(interface=i, attr=a, scope="", value=self.model.data[i][a])
                ]
                seen.add(k)
        # Sort according to interface
        sorting_keys: Dict[str, str] = {}
        for ni, i in enumerate(sorted(set(x[0] for x in seen))):
            mi = ModelInterface.get_by_name(i)
            if not mi:
                continue
            for na, a in enumerate(mi.attrs):
                sorting_keys["%s.%s" % (i, a.name)] = "%06d.%06d" % (ni, na)
        # Return sorted result; unknown attrs sort last via the sentinel key
        return list(
            sorted(
                r,
                key=lambda oa: "%s.%s" % (sorting_keys.get(
                    "%s.%s" % (oa.interface, oa.attr), "999999.999999"), oa.scope),
            ))

    def set_data(self, interface: str, key: str, value: Any,
                 scope: Optional[str] = None) -> None:
        """
        Set a data value for (interface, key), updating an existing item in
        place or appending a new one.

        :raises ModelDataError: when the attribute is declared read-only.
        """
        attr = ModelInterface.get_interface_attr(interface, key)
        if attr.is_const:
            raise ModelDataError("Cannot set read-only value")
        value = attr._clean(value)
        for item in self.data:
            if item.interface == interface and item.attr == key:
                if not scope or item.scope == scope:
                    item.value = value
                    break
        else:
            # Insert new item
            self.data += [
                ObjectAttr(interface=interface, attr=attr.name, value=value, scope=scope or "")
            ]

    def reset_data(self, interface: str, key: Union[str, Iterable],
                   scope: Optional[str] = None) -> None:
        """
        Remove data items for one key or an iterable of keys on an interface.

        :raises ModelDataError: when any requested attribute is read-only.
        """
        if isinstance(key, str):
            kset = {key}
        else:
            kset = set(key)
        v = [
            ModelInterface.get_interface_attr(interface, k).is_const for k in kset
        ]
        if any(v):
            raise ModelDataError("Cannot reset read-only value")
        self.data = [
            item for item in self.data
            if item.interface != interface or (
                scope and item.scope != scope) or item.attr not in kset
        ]

    def has_connection(self, name):
        # Delegates to the object model's connection declarations
        return self.model.has_connection(name)

    def get_p2p_connection(
            self, name: str
    ) -> Tuple[Optional["ObjectConnection"], Optional["Object"], Optional[str]]:
        """
        Get neighbor for p2p connection (s and mf types)
        Returns connection, remote object, remote connection or None, None, None
        """
        c = ObjectConnection.objects.filter(__raw__={
            "connection": {
                "$elemMatch": {
                    "object": self.id,
                    "name": name
                }
            }
        }).first()
        if c:
            for x in c.connection:
                if x.object.id != self.id:
                    return c, x.object, x.name
        # Strange things happen
        return None, None, None

    def get_genderless_connections(
            self, name: str) -> List[Tuple["ObjectConnection", "Object", str]]:
        """Return all (connection, remote object, remote name) for a genderless slot."""
        r = []
        for c in ObjectConnection.objects.filter(__raw__={
            "connection": {
                "$elemMatch": {
                    "object": self.id,
                    "name": name
                }
            }
        }):
            for x in c.connection:
                if x.object.id != self.id:
                    r += [[c, x.object, x.name]]
        return r

    def disconnect_p2p(self, name: str):
        """
        Remove connection *name*
        """
        c = self.get_p2p_connection(name)[0]
        if c:
            self.log("'%s' disconnected" % name, system="CORE", op="DISCONNECT")
            c.delete()

    def connect_p2p(
        self,
        name: str,
        remote_object: "Object",
        remote_name: str,
        data: Dict[str, Any],
        reconnect: bool = False,
    ) -> Optional["ObjectConnection"]:
        """
        Create a point-to-point connection to *remote_object*.

        With ``reconnect=True`` an existing connection to the same remote end
        has its data merged in place; a connection to a different remote end
        is torn down and recreated. Without it, an existing connection raises.

        :raises ConnectionError: unknown local/remote connection, incompatible
            connection types, or already connected (when reconnect is False).
        """
        lc = self.model.get_model_connection(name)
        if lc is None:
            raise ConnectionError("Local connection not found: %s" % name)
        name = lc.name
        rc = remote_object.model.get_model_connection(remote_name)
        if rc is None:
            raise ConnectionError("Remote connection not found: %s" % remote_name)
        remote_name = rc.name
        valid, cause = self.model.check_connection(lc, rc)
        if not valid:
            raise ConnectionError(cause)
        # Check existing connections
        if lc.type.genders in ("s", "m", "f", "mf"):
            ec, r_object, r_name = self.get_p2p_connection(name)
            if ec is not None:
                # Connection exists
                if reconnect:
                    if r_object.id == remote_object.id and r_name == remote_name:
                        # Same connection exists
                        n_data = deep_merge(ec.data, data)  # Merge ObjectConnection
                        if n_data != ec.data:
                            # Update data
                            ec.data = n_data
                            ec.save()
                        return
                    self.disconnect_p2p(name)
                else:
                    raise ConnectionError("Already connected")
        # Create connection
        c = ObjectConnection(
            connection=[
                ObjectConnectionItem(object=self, name=name),
                ObjectConnectionItem(object=remote_object, name=remote_name),
            ],
            data=data,
        ).save()
        self.log("%s:%s -> %s:%s" % (self, name, remote_object, remote_name),
                 system="CORE", op="CONNECT")
        # Disconnect from container on o-connection
        if lc.direction == "o" and self.container:
            self.log("Remove from %s" % self.container, system="CORE", op="REMOVE")
            self.container = None
            self.save()
        return c

    def connect_genderless(
        self,
        name: str,
        remote_object: "Object",
        remote_name: str,
        data: Dict[str, Any] = None,
        type: Optional[str] = None,
        layer: Optional[Layer] = None,
    ):
        """
        Connect two genderless connections
        """
        lc = self.model.get_model_connection(name)
        if lc is None:
            raise ConnectionError("Local connection not found: %s" % name)
        name = lc.name
        rc = remote_object.model.get_model_connection(remote_name)
        if rc is None:
            raise ConnectionError("Remote connection not found: %s" % remote_name)
        remote_name = rc.name
        if lc.gender != "s":
            raise ConnectionError("Local connection '%s' must be genderless" % name)
        if rc.gender != "s":
            raise ConnectionError("Remote connection '%s' must be genderless" % remote_name)
        # Check for connection; update data of an existing one instead of duplicating
        for c, ro, rname in self.get_genderless_connections(name):
            if ro.id == remote_object.id and rname == remote_name:
                c.data = data or {}
                c.save()
                return
        # Normalize layer
        if layer and isinstance(layer, str):
            layer = Layer.get_by_code(layer)
        # Create connection
        ObjectConnection(
            connection=[
                ObjectConnectionItem(object=self, name=name),
                ObjectConnectionItem(object=remote_object, name=remote_name),
            ],
            data=data or {},
            type=type or None,
            layer=layer,
        ).save()
        self.log("%s:%s -> %s:%s" % (self, name, remote_object, remote_name),
                 system="CORE", op="CONNECT")

    def put_into(self, container: "Object"):
        """
        Put object into container
        """
        if container and not container.get_data("container", "container"):
            raise ValueError("Must be put into container")
        # Disconnect all o-connections
        # NOTE(review): `c` is rebound from the model connection to the
        # ObjectConnection returned by get_p2p_connection, so the following
        # `c.name` reads the rebound object's attribute — verify this is
        # intentional and that ObjectConnection exposes `name`.
        for c in self.model.connections:
            if c.direction == "o":
                c, _, _ = self.get_p2p_connection(c.name)
                if c:
                    self.disconnect_p2p(c.name)
        # Connect to parent
        self.container = container.id if container else None
        # Reset previous rack position
        self.reset_data("rackmount", ("position", "side", "shift"))
        #
        self.save()
        self.log("Insert into %s" % (container or "Root"), system="CORE", op="INSERT")

    def get_content(self) -> "Object":
        """
        Returns all items directly put into container
        """
        return Object.objects.filter(container=self.id)

    def get_local_name_path(self):
        """Return connection-name path through outer connections (empty at the root)."""
        for _, ro, rn in self.get_outer_connections():
            return ro.get_local_name_path() + [rn]
        return []

    def get_name_path(self) -> List[str]:
        """
        Return list of container names
        """
        current = self.container
        if current is None:
            # No container: try to derive the path through an outer connection
            for _, ro, rn in self.get_outer_connections():
                return ro.get_name_path() + [rn]
            return [smart_text(self)]
        np = [smart_text(self)]
        while current:
            np.insert(0, smart_text(current))
            current = current.container
        return np

    def log(self, message, user=None, system=None, managed_object=None, op=None):
        """Append an ObjectLog entry for this object; user defaults to the current session user."""
        if not user:
            user = get_user()
        if hasattr(user, "username"):
            user = user.username
        if not user:
            user = "******"
        if not isinstance(managed_object, str):
            managed_object = smart_text(managed_object)
        ObjectLog(
            object=self.id,
            user=user,
            ts=datetime.datetime.now(),
            message=message,
            system=system,
            managed_object=managed_object,
            op=op,
        ).save()

    def get_log(self):
        """Return this object's log entries ordered by timestamp."""
        return ObjectLog.objects.filter(object=self.id).order_by("ts")

    def get_lost_and_found(self) -> Optional["Object"]:
        """Find the nearest "Lost&Found" object among siblings up the container chain."""
        m = ObjectModel.get_by_name("Lost&Found")
        c = self.container
        while c:
            # Check siblings
            lf = Object.objects.filter(container=c, model=m).first()
            if lf:
                return lf
            # Up one level
            c = c.container
        return None

    @classmethod
    def detach_children(cls, sender, document, target=None):
        """
        Signal handler: move *document*'s direct children out before deletion.

        Nested containers are recursed into and deleted; plain children are
        put into *target* (defaults to the nearest Lost&Found).
        """
        if not document.get_data("container", "container"):
            return
        if not target:
            target = document.get_lost_and_found()
        for o in Object.objects.filter(container=document.id):
            if o.get_data("container", "container"):
                cls.detach_children(sender, o, target)
                o.delete()
            else:
                o.put_into(target)

    def iter_connections(
            self, direction: Optional[str]) -> Iterable[Tuple[str, "Object", str]]:
        """
        Yields connections of specified direction as tuples of
        (name, remote_object, remote_name)
        """
        ic = set(c.name for c in self.model.connections if c.direction == direction)
        for c in ObjectConnection.objects.filter(connection__object=self.id):
            sn = None
            oc = None
            for cc in c.connection:
                if cc.object.id == self.id:
                    if cc.name in ic:
                        sn = cc.name
                else:
                    oc = cc
            if sn and oc:
                yield sn, oc.object, oc.name

    def iter_inner_connections(self):
        """
        Yields inner connections as tuples of (name, remote_object, remote_name)
        """
        yield from self.iter_connections("i")

    def iter_outer_connections(self):
        """
        Yields outer connections as tuples of (name, remote_object, remote_name)
        """
        yield from self.iter_connections("o")

    def has_inner_connections(self):
        """
        Returns True if object has any inner connections
        """
        return any(self.iter_inner_connections())

    def get_inner_connections(self):
        """
        Returns a list of inner connections as (name, remote_object, remote_name)
        """
        return list(self.iter_inner_connections())

    def get_outer_connections(self):
        """
        Returns a list of outer connections as (name, remote_object, remote_name)
        """
        return list(self.iter_outer_connections())

    @classmethod
    def delete_disconnect(cls, sender, document, target=None):
        """
        Signal handler: remove *document* from every ObjectConnection it
        participates in, deleting connections left with fewer than two ends.
        """
        for c in ObjectConnection.objects.filter(connection__object=document.id):
            left = [cc for cc in c.connection if cc.object.id != document.id]
            if len(left) < 2:
                c.delete()  # Remove connection
            else:
                # Wipe object
                c.connection = left
                c.save()

    def get_pop(self) -> Optional["Object"]:
        """
        Find enclosing PoP
        :returns: PoP instance or None
        """
        c = self.container
        while c:
            if c.get_data("pop", "level"):
                return c
            c = c.container
        return None

    def get_coordinates_zoom(
            self) -> Tuple[Optional[float], Optional[float], Optional[int]]:
        """
        Get managed object's coordinates
        # @todo: Speedup?
        :returns: x (lon), y (lat), zoom level
        """
        c = self
        while c:
            if c.point and c.layer:
                x, y = c.get_data_tuple("geopoint", ("x", "y"))
                zoom = c.layer.default_zoom or DEFAULT_ZOOM
                return x, y, zoom
            if c.container:
                c = Object.get_by_id(c.container.id)
                if c:
                    continue
            break
        return None, None, None

    @classmethod
    def get_managed(cls, mo):
        """
        Get Object managed by managed object
        :param mo: Managed Object instance or id
        :returns: Objects managed by managed object, or empty list
        """
        if hasattr(mo, "id"):
            mo = mo.id
        return cls.objects.filter(data__match={
            "interface": "management",
            "attr": "managed_object",
            "value": mo
        })

    def iter_managed_object_id(self) -> Iterator[int]:
        """Yield managed object ids found on this object and everything nested below it."""
        for d in Object._get_collection().aggregate([
            {
                "$match": {
                    "_id": self.id
                }
            },
            # Get all nested objects and put them into the _path field
            {
                "$graphLookup": {
                    "from": "noc.objects",
                    "connectFromField": "_id",
                    "connectToField": "container",
                    "startWith": "$_id",
                    "as": "_path",
                    "maxDepth": 50,
                }
            },
            # Leave only _path field
            {
                "$project": {
                    "_id": 0,
                    "_path": 1
                }
            },
            # Unwind _path array to separate documents
            {
                "$unwind": {
                    "path": "$_path"
                }
            },
            # Move data one level up
            {
                "$project": {
                    "data": "$_path.data"
                }
            },
            # Unwind data
            {
                "$unwind": {
                    "path": "$data"
                }
            },
            # Convert nested data to flat document
            {
                "$project": {
                    "interface": "$data.interface",
                    "attr": "$data.attr",
                    "value": "$data.value",
                }
            },
            # Leave only management objects
            {
                "$match": {
                    "interface": "management",
                    "attr": "managed_object"
                }
            },
            # Leave only value
            {
                "$project": {
                    "value": 1
                }
            },
        ]):
            mo = d.get("value")
            if mo:
                yield mo

    @classmethod
    def get_by_path(cls, path: List[str], hints=None) -> Optional["Object"]:
        """
        Get object by given path.
        :param path: List of names following to path
        :param hints: {name: object_id} dictionary for getting object in path
        :returns: Object instance. None if not found
        """
        current = None
        for p in path:
            current = Object.objects.filter(name=p, container=current).first()
            if not current:
                return None
            if hints:
                h = hints.get(p)
                if h:
                    return Object.get_by_id(h)
        return current

    def update_pop_links(self, delay: int = 20):
        """Schedule deferred recalculation of this PoP's links."""
        call_later("noc.inv.util.pop_links.update_pop_links", delay, pop_id=self.id)

    @classmethod
    def _pre_init(cls, sender, document, values, **kwargs):
        """
        Object pre-initialization
        """
        # Store original container id
        if "container" in values and values["container"]:
            document._cache_container = values["container"]

    def get_address_text(self) -> Optional[str]:
        """
        Return first found address.text value upwards the path
        :return: Address text or None
        """
        current = self
        while current:
            addr = current.get_data("address", "text")
            if addr:
                return addr
            if current.container:
                current = Object.get_by_id(current.container.id)
            else:
                break
        return None

    def get_object_serials(self, chassis_only: bool = True) -> List[str]:
        """
        Getting object serialNumber
        :param chassis_only: When False, also collect serial numbers of inner objects
        :return:
        """
        serials = [self.get_data("asset", "serial")]
        if not chassis_only:
            for sn, oo, name in self.iter_inner_connections():
                serials += oo.get_object_serials(chassis_only=False)
        return serials

    def iter_scope(self, scope: str) -> Iterable[Tuple[PathItem, ...]]:
        """
        Yields Full physical path for all connections with given scopes
        behind the object
        :param scope: Scope name
        :return:
        """
        connections = {
            name: ro for name, ro, _ in self.iter_inner_connections()
        }
        for c in self.model.connections:
            if c.type.is_matched_scope(scope, c.protocols):
                # Yield connection
                yield PathItem(object=self, connection=c),
            elif c.name in connections:
                # Recurse into the connected inner object
                ro = connections[c.name]
                for part_path in ro.iter_scope(scope):
                    yield (PathItem(object=self, connection=c), ) + part_path

    def set_connection_interface(self, name, if_name):
        """Bind interface *if_name* to connection *name*, updating in place when present."""
        for cdata in self.connections:
            if cdata.name == name:
                cdata.interface_name = if_name
                return
        # New item
        self.connections += [
            ObjectConnectionData(name=name, interface_name=if_name)
        ]

    def reset_connection_interface(self, name):
        """Drop any interface binding for connection *name*."""
        self.connections = [c for c in self.connections if c.name != name]

    def _sync_sensors(self):
        """
        Synchronize sensors
        :return:
        """
        from .sensor import Sensor

        Sensor.sync_object(self)

    @classmethod
    def iter_by_address_id(cls, address: Union[str, List[str]],
                           scope: str = None) -> Iterable["Object"]:
        """
        Get objects by one address id or a list of address ids
        :param address:
        :param scope:
        :return:
        """
        q = {
            "interface": "address",
            "scope": scope or "",
            "attr": "id",
        }
        if isinstance(address, list):
            if len(address) == 1:
                q["value"] = address[0]
            else:
                q["value__in"] = address
        else:
            q["value"] = address
        yield from cls.objects.filter(data__match=q)

    @classmethod
    def can_set_label(cls, label):
        # Labels must be explicitly enabled for objects
        return Label.get_effective_setting(label, setting="enable_object")
class AS(models.Model):
    """
    Autonomous system.

    Serialized to an RPSL "aut-num:" object (peering import/export policy,
    contacts and maintainers) and kept versioned in GridVCS, refreshed on save.
    """

    class Meta(object):
        verbose_name = "AS"
        verbose_name_plural = "ASes"
        db_table = "peer_as"
        app_label = "peer"

    asn = models.IntegerField("ASN", unique=True)
    # as-name RPSL Field
    as_name = models.CharField("AS Name", max_length=64, null=True, blank=True)
    profile = DocumentReferenceField(ASProfile, null=False, blank=False)
    # FIX: explicit on_delete on ForeignKeys for consistency with the sibling
    # peer models; CASCADE matches the pre-Django-2.0 implicit default.
    project = models.ForeignKey(Project, verbose_name="Project", null=True,
                                blank=True, related_name="as_set",
                                on_delete=models.CASCADE)
    # RPSL descr field
    description = models.CharField("Description", max_length=64)
    organisation = models.ForeignKey(Organisation, verbose_name="Organisation",
                                     on_delete=models.CASCADE)
    administrative_contacts = models.ManyToManyField(
        Person, verbose_name="admin-c",
        related_name="as_administrative_contacts", null=True, blank=True)
    tech_contacts = models.ManyToManyField(Person, verbose_name="tech-c",
                                           related_name="as_tech_contacts",
                                           null=True, blank=True)
    maintainers = models.ManyToManyField(Maintainer,
                                         verbose_name="Maintainers",
                                         related_name="as_maintainers",
                                         null=True, blank=True)
    routes_maintainers = models.ManyToManyField(
        Maintainer, verbose_name="Routes Maintainers",
        related_name="as_route_maintainers", null=True, blank=True)
    # remarks: will be prepended automatically
    header_remarks = models.TextField("Header Remarks", null=True, blank=True)
    # remarks: will be prepended automatically
    footer_remarks = models.TextField("Footer Remarks", null=True, blank=True)
    rir = models.ForeignKey(RIR, verbose_name="RIR", on_delete=models.CASCADE)
    # source:
    tags = TagsField("Tags", null=True, blank=True)
    rpsl = GridVCSField("rpsl_as")

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _asn_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    # FIX: was __unicode__, which Python 3 never calls (instances would use
    # the default repr); sibling models in this file define __str__.
    def __str__(self):
        return "AS%d (%s)" % (self.asn, self.description)

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        """Get AS by primary key; cached for 60s."""
        asn = AS.objects.filter(id=id)[:1]
        if asn:
            return asn[0]
        return None

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_asn_cache"),
                             lock=lambda _: id_lock)
    def get_by_asn(cls, asn):
        """Get AS by AS number; cached for 60s."""
        asn = AS.objects.filter(asn=asn)[:1]
        if asn:
            return asn[0]
        return None

    def get_rpsl(self):
        """
        Render this AS as a formatted RPSL aut-num object: header remarks,
        per-peer-group import/export policy (deduplicated per peering point),
        contacts, maintainers and footer remarks.
        """
        sep = "remarks: %s" % ("-" * 72)
        s = []
        s += ["aut-num: AS%s" % self.asn]
        if self.as_name:
            s += ["as-name: %s" % self.as_name]
        if self.description:
            s += ["descr: %s" % x for x in self.description.split("\n")]
        s += ["org: %s" % self.organisation.organisation]
        # Add header remarks
        if self.header_remarks:
            s += ["remarks: %s" % x for x in self.header_remarks.split("\n")]
        # Find AS peers
        pg = {}  # Peer Group -> AS -> peering_point -> [(import, export, localpref, import_med, export_med, remark)]
        for peer in self.peer_set.filter(status="A"):
            if peer.peer_group not in pg:
                pg[peer.peer_group] = {}
            if peer.remote_asn not in pg[peer.peer_group]:
                pg[peer.peer_group][peer.remote_asn] = {}
            if peer.peering_point not in pg[peer.peer_group][peer.remote_asn]:
                pg[peer.peer_group][peer.remote_asn][peer.peering_point] = []
            to_skip = False
            e_import_med = peer.effective_import_med
            e_export_med = peer.effective_export_med
            # Skip peers whose policy duplicates one already collected
            # for the same peering point
            for R in pg[peer.peer_group][peer.remote_asn][peer.peering_point]:
                p_import, p_export, localpref, import_med, export_med, remark = R
                if (peer.import_filter == p_import and
                        peer.export_filter == p_export and
                        e_import_med == import_med and
                        e_export_med == export_med):
                    to_skip = True
                    break
            if not to_skip:
                pg[peer.peer_group][peer.remote_asn][peer.peering_point] +=\
                    [(peer.import_filter, peer.export_filter,
                      peer.effective_local_pref, e_import_med, e_export_med,
                      peer.rpsl_remark)]
        # Build RPSL
        inverse_pref = config.peer.rpsl_inverse_pref_style
        for peer_group in pg:
            s += [sep]
            s += [
                "remarks: -- %s" % x
                for x in peer_group.description.split("\n")
            ]
            s += [sep]
            for asn in sorted(pg[peer_group]):
                # Mention the peering point only when the AS peers at
                # more than one point
                add_at = len(pg[peer_group][asn]) != 1
                for pp in pg[peer_group][asn]:
                    for R in pg[peer_group][asn][pp]:
                        import_filter, export_filter, localpref, import_med, export_med, remark = R
                        # Prepend import and export with remark when given
                        if remark:
                            s += ["remarks: # %s" % remark]
                        # Build import statement
                        i_s = "import: from AS%d" % asn
                        if add_at:
                            i_s += " at %s" % pp.hostname
                        actions = []
                        if localpref:
                            pref = (65535 - localpref) if inverse_pref else localpref
                            actions += ["pref=%d;" % pref]
                        if import_med:
                            actions += ["med=%d;" % import_med]
                        if actions:
                            i_s += " action " + " ".join(actions)
                        i_s += " accept %s" % import_filter
                        s += [i_s]
                        # Build export statement
                        e_s = "export: to AS%d" % asn
                        if add_at:
                            e_s += " at %s" % pp.hostname
                        if export_med:
                            e_s += " action med=%d;" % export_med
                        e_s += " announce %s" % export_filter
                        s += [e_s]
        # Add contacts
        for c in self.administrative_contacts.order_by("nic_hdl"):
            s += ["admin-c: %s" % c.nic_hdl]
        for c in self.tech_contacts.order_by("nic_hdl"):
            s += ["tech-c: %s" % c.nic_hdl]
        # Add maintainers
        for m in self.maintainers.all():
            s += ["mnt-by: %s" % m.maintainer]
        for m in self.routes_maintainers.all():
            s += ["mnt-routes: %s" % m.maintainer]
        # Add footer remarks
        if self.footer_remarks:
            s += ["remarks: %s" % x for x in self.footer_remarks.split("\n")]
        return rpsl_format("\n".join(s))

    def touch_rpsl(self):
        """Write rendered RPSL to GridVCS only when it differs from the stored copy."""
        c_rpsl = self.rpsl.read()
        n_rpsl = self.get_rpsl()
        if c_rpsl == n_rpsl:
            return  # Not changed
        self.rpsl.write(n_rpsl)

    def on_save(self):
        self.touch_rpsl()

    @property
    def dot(self):
        """
        Graphviz dot representation of this AS's active peerings, grouped
        into uplinks (import ANY), downlinks (export ANY) and peers.
        """
        from .peer import Peer

        s = ["graph {"]
        all_peers = Peer.objects.filter(local_asn__exact=self)
        uplinks = {}
        peers = {}
        downlinks = {}
        for p in all_peers:
            if p.import_filter == "ANY" and p.export_filter != "ANY":
                uplinks[p.remote_asn] = p
            elif p.export_filter == "ANY":
                downlinks[p.remote_asn] = p
            else:
                peers[p.remote_asn] = p
        asn = "AS%d" % self.asn
        for subgraph, peers in [("uplinks", uplinks.values()),
                                ("peers", peers.values()),
                                ("downlinks", downlinks.values())]:
            s += ["subgraph %s {" % subgraph]
            for p in peers:
                attrs = [
                    "taillabel=\" %s\"" % p.import_filter,
                    "headlabel=\" %s\"" % p.export_filter
                ]
                if p.import_filter == "ANY":
                    attrs += ["arrowtail=open"]
                if p.export_filter == "ANY":
                    # FIX: was "arrothead=open" — not a graphviz attribute;
                    # "arrowhead" is the head counterpart of "arrowtail".
                    attrs += ["arrowhead=open"]
                s += [
                    " %s -- AS%d [%s];" % (asn, p.remote_asn, ",".join(attrs))
                ]
            s += ["}"]
        s += ["}"]
        return "\n".join(s)

    def update_rir_db(self):
        """Push the current RPSL to the RIR database using the first maintainer."""
        return self.rir.update_rir_db(self.rpsl, self.maintainers.all()[0])
class DNSZone(models.Model):
    """
    DNS Zone.

    Forward or reverse (IPv4/IPv6) zone. ``type`` is derived from the
    zone name in clean(); rendered zone text is versioned through the
    ``zone`` GridVCS field.
    """
    class Meta(object):
        verbose_name = _("DNS Zone")
        verbose_name_plural = _("DNS Zones")
        ordering = ["name"]
        db_table = "dns_dnszone"
        app_label = "dns"

    name = models.CharField(_("Domain"), max_length=256, unique=True)
    # One-letter zone type code; recomputed from name on clean()
    type = models.CharField(_("Type"), max_length=1, null=False, blank=False,
                            default=ZONE_FORWARD,
                            choices=[(ZONE_FORWARD, "Forward"),
                                     (ZONE_REVERSE_IPV4, "Reverse IPv4"),
                                     (ZONE_REVERSE_IPV6, "Reverse IPv6")])
    description = models.CharField(_("Description"), null=True, blank=True,
                                   max_length=64)
    project = models.ForeignKey(Project, verbose_name="Project", null=True,
                                blank=True, related_name="dnszone_set")
    # @todo: Rename to is_provisioned
    is_auto_generated = models.BooleanField(_("Auto generated?"))
    # SOA serial, maintained by set_next_serial() (YYYYMMDDnn convention)
    serial = models.IntegerField(_("Serial"), default=0)
    profile = models.ForeignKey(DNSZoneProfile, verbose_name=_("Profile"))
    notification_group = models.ForeignKey(
        NotificationGroup,
        verbose_name=_("Notification Group"),
        null=True, blank=True,
        help_text=_("Notification group to use when zone changed"))
    paid_till = models.DateField(_("Paid Till"), null=True, blank=True)
    tags = TagsField(_("Tags"), null=True, blank=True)
    # Managers
    objects = models.Manager()
    zone = GridVCSField("dnszone")
    # Caches
    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _name_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __unicode__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        """Return DNSZone by primary key (TTL-cached), or None."""
        zone = DNSZone.objects.filter(id=id)[:1]
        if zone:
            return zone[0]
        return None

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_name_cache"),
                             lock=lambda _: id_lock)
    def get_by_name(cls, name):
        """Return DNSZone by exact name (TTL-cached), or None."""
        zone = DNSZone.objects.filter(name=name)[:1]
        if zone:
            return zone[0]
        return None

    def iter_changed_datastream(self):
        # Yield (datastream name, record id) pairs affected by this change
        if config.datastream.enable_dnszone:
            yield "dnszone", self.id

    def clean(self):
        # Keep `type` consistent with the (possibly changed) zone name
        super(DNSZone, self).clean()
        self.type = self.get_type_for_zone(self.name or "")

    def save(self, *args, **kwargs):
        # Force clean() so `type` is always recalculated before saving
        self.clean()
        super(DNSZone, self).save(*args, **kwargs)

    @staticmethod
    def get_type_for_zone(name):
        """
        Zone type. One of:

        * R4 - IPv4 reverse
        * R6 - IPv6 reverse
        * F - forward zone

        :return: Zone type
        :rtype: String
        """
        nl = name.lower()
        if nl.endswith(".in-addr.arpa"):
            return ZONE_REVERSE_IPV4  # IPv4 reverse
        elif nl.endswith(".ip6.int") or nl.endswith(".ip6.arpa"):
            return ZONE_REVERSE_IPV6  # IPv6 reverse
        else:
            return ZONE_FORWARD  # Forward

    # NOTE(review): dots before "arpa" are unescaped, so the pattern also
    # matches e.g. "in-addrXarpa" — confirm whether this regex is still used
    # and should be tightened to r"in-addr\.arpa"
    rx_rzone = re.compile(r"^(\d+)\.(\d+)\.(\d+)\.in-addr.arpa$")

    @property
    def reverse_prefix(self):
        """
        Appropriative prefix for reverse zone

        :return: IPv4 or IPv6 prefix
        :rtype: String
        """
        if self.type == ZONE_REVERSE_IPV4:
            # Get IPv4 prefix covering reverse zone
            n = self.name.lower()
            if n.endswith(".in-addr.arpa"):
                # Strip suffix, reverse octets, pad with zero octets
                r = n[:-13].split(".")
                r.reverse()
                length = 4 - len(r)
                r += ["0"] * length
                ml = 32 - 8 * length
                return ".".join(r) + "/%d" % ml
        elif self.type == ZONE_REVERSE_IPV6:
            # Get IPv6 prefix covering reverse zone
            n = self.name.lower()
            if n.endswith(".ip6.int"):
                n = n[:-8]
            elif n.endswith(".ip6.arpa"):
                n = n[:-9]
            else:
                raise Exception("Invalid IPv6 zone suffix")
            # Nibbles are stored reversed in the zone name
            p = n.split(".")
            p.reverse()
            length = len(p)
            if length % 4:
                # Pad to a whole hextet
                p += [u"0"] * (4 - length % 4)
            r = ""
            for i, c in enumerate(p):
                if i and i % 4 == 0:
                    r += ":"
                r += c
            if len(p) != 32:
                r += "::"
            prefix = r + "/%d" % (length * 4)
            return IPv6(prefix).normalized.prefix

    @classmethod
    def get_reverse_for_address(cls, address):
        """
        Return reverse zone holding address

        :param address: Address (as a string)
        :return: DNSZone instance or None
        """
        if ":" in address:
            return cls._get_reverse_for_ipv6_address(address)
        return cls._get_reverse_for_ipv4_address(address)

    @classmethod
    def _get_reverse_for_ipv4_address(cls, address):
        """
        Get reverse zone holding IPv4 address

        :param address: Address (as a string)
        :return: DNSZone instance or None
        """
        # Try progressively shorter suffixes: most specific zone wins
        parts = list(reversed(address.split(".")))[1:]
        while parts:
            name = "%s.in-addr.arpa" % ".".join(parts)
            zone = DNSZone.get_by_name(name)
            if zone:
                return zone
            parts.pop(0)
        return None

    @classmethod
    def _get_reverse_for_ipv6_address(cls, address):
        """
        Get reverse zone holding IPv6 address

        :param address: Address (as a string)
        :return: DNSZone instance or None
        """
        # @todo: Implement properly
        parts = [str(x) for x in reversed(IPv6(address).iter_bits())][1:]
        while parts:
            for suffix in (".ip6.int", ".ip6.arpa"):
                name = "%s.%s" % (".".join(parts), suffix)
                zone = DNSZone.get_by_name(name)
                if zone:
                    return zone
            parts.pop(0)  # Remove first part
        return None

    def get_next_serial(self):
        """
        Next zone serial number. Next serial is greater than current one.
        Serial is built using current date to follow common practice.

        :return: Zone serial number
        :rtype: int
        """
        T = time.gmtime()
        # YYYYMMDD part of the conventional YYYYMMDDnn serial
        base = T[0] * 10000 + T[1] * 100 + T[2]
        s_base = self.serial // 100
        if s_base < base:
            return base * 100  # New day
        else:
            return self.serial + 1  # May cause future lap

    def set_next_serial(self):
        # Advance serial and persist it without firing post_save handlers
        old_serial = self.serial
        self.serial = self.get_next_serial()
        logger.info("Zone %s serial change: %s -> %s",
                    self.name, old_serial, self.serial)
        # Hack to not send post_save signal
        DNSZone.objects.filter(id=self.id).update(serial=self.serial)

    @property
    def children(self):
        """List of next-level nested zones"""
        length = len(self.name)
        s = ".%s" % self.name
        # Keep only direct children: no extra dot before our suffix
        return [
            z for z in DNSZone.objects.filter(name__iendswith=s)
            if "." not in z.name[:-length - 1]
        ]

    @classmethod
    def get_ns_name(cls, ns):
        """Add missed '.' to the end of NS name, if given as FQDN"""
        name = ns.name.strip()
        if not is_ipv4(name) and not name.endswith("."):
            return name + "."
        else:
            return name

    @property
    def ns_list(self):
        """
        Sorted list of zone NSes. NSes are properly formatted and have
        '.' at the end.

        :return: List of zone NSes
        :rtype: List of string
        """
        return sorted(
            self.get_ns_name(ns)
            for ns in self.profile.authoritative_servers)

    @property
    def masters(self):
        """
        Sorted list of zone master NSes.
        NSes are properly formatted and have '.' at the end.

        :return: List of zone master NSes
        :rtype: List of string
        """
        return sorted(
            self.get_ns_name(ns) for ns in self.profile.masters.all())

    @property
    def slaves(self):
        """
        Sorted list of zone slave NSes.
        NSes are properly formatted and have '.' at the end.

        :return: List of zone slave NSes
        :rtype: List of string
        """
        return sorted(
            self.get_ns_name(ns) for ns in self.profile.slaves.all())

    @property
    def rpsl(self):
        """
        RPSL for reverse zone. RPSL contains domain: and nserver:
        attributes

        :return: RPSL
        :rtype: String
        """
        if self.type == ZONE_FORWARD:
            return ""
        # Do not generate RPSL for private reverse zones
        if self.name.lower().endswith(".10.in-addr.arpa"):
            return ""
        # NOTE(review): this is a *string* comparison, so names such as
        # "2.172.in-addr.arpa" (public 172.2/16 space) also fall inside the
        # "16".."31" range lexicographically — confirm whether a numeric
        # check of the third-from-right octet was intended
        n1, n2, n = self.name.lower().split(".", 2)
        if "16.172.in-addr.arpa" <= n <= "31.172.in-addr.arpa":
            return ""
        n1, n = self.name.lower().split(".", 1)
        if n == "168.192.in-addr.arpa":
            return ""
        s = ["domain: %s" % self.name
             ] + ["nserver: %s" % ns for ns in self.ns_list]
        return rpsl_format("\n".join(s))

    @staticmethod
    def to_idna(n):
        # Normalize a zone name to lower-case IDNA (punycode) bytes;
        # non-string input is returned unchanged
        if isinstance(n, unicode):
            return n.lower().encode("idna")
        elif isinstance(n, six.string_types):
            return unicode(n, "utf-8").lower().encode("idna")
        else:
            return n

    @classmethod
    def get_zone(cls, name):
        """
        Resolve name (FQDN, IPv4 or IPv6 address) to the closest
        enclosing zone object.

        :return: DNSZone instance or None
        """
        def get_closest(n):
            """
            Return closest matching zone
            """
            # Strip leading labels until an existing zone is found
            while n:
                try:
                    return DNSZone.objects.get(name=n)
                except DNSZone.DoesNotExist:
                    pass
                n = ".".join(n.split(".")[1:])
            return None

        if not name:
            return None
        if is_ipv4(name):
            # IPv4 zone
            n = name.split(".")
            n.reverse()
            return get_closest("%s.in-addr.arpa" % (".".join(n[1:])))
        elif is_ipv6(name):
            # IPv6 zone
            d = IPv6(name).digits
            d.reverse()
            c = ".".join(d)
            return (get_closest("%s.ip6.arpa" % c) or
                    get_closest("%s.ip6.int" % c))
        else:
            return get_closest(name)

    def get_notification_groups(self):
        """
        Get a list of notification groups to notify about zone changes.
        Precedence: zone-level group, then profile group, then the
        system-wide "dns.change" group.

        :return: List of NotificationGroup (possibly empty)
        """
        if self.notification_group:
            return [self.notification_group]
        if self.profile.notification_group:
            return [self.profile.notification_group]
        ng = SystemNotification.get_notification_group("dns.change")
        if ng:
            return [ng]
        else:
            return []

    @property
    def is_forward(self):
        return self.type == ZONE_FORWARD

    @property
    def is_reverse_ipv4(self):
        return self.type == ZONE_REVERSE_IPV4

    @property
    def is_reverse_ipv6(self):
        return self.type == ZONE_REVERSE_IPV6
class Peer(NOCModel):
    """
    BGP Peering session
    """
    class Meta(object):
        verbose_name = "Peer"
        verbose_name_plural = "Peers"
        db_table = "peer_peer"
        app_label = "peer"

    peer_group = models.ForeignKey(PeerGroup, verbose_name="Peer Group",
                                   on_delete=models.CASCADE)
    project = models.ForeignKey(
        Project,
        verbose_name="Project",
        null=True,
        blank=True,
        related_name="peer_set",
        on_delete=models.CASCADE,
    )
    peering_point = models.ForeignKey(PeeringPoint,
                                      verbose_name="Peering Point",
                                      on_delete=models.CASCADE)
    local_asn = models.ForeignKey(AS, verbose_name="Local AS",
                                  on_delete=models.CASCADE)
    local_ip = INETField("Local IP")
    local_backup_ip = INETField("Local Backup IP", null=True, blank=True)
    remote_asn = models.IntegerField("Remote AS")
    remote_ip = INETField("Remote IP")
    remote_backup_ip = INETField("Remote Backup IP", null=True, blank=True)
    status = models.CharField(
        "Status",
        max_length=1,
        default="A",
        choices=[("P", "Planned"), ("A", "Active"), ("S", "Shutdown")],
    )
    import_filter = models.CharField("Import filter", max_length=64)
    # Override PeerGroup.local_pref
    local_pref = models.IntegerField("Local Pref", null=True, blank=True)
    # Override PeerGroup.import_med
    import_med = models.IntegerField("Import MED", blank=True, null=True)
    # Override PeerGroup.export_med
    export_med = models.IntegerField("Export MED", blank=True, null=True)
    export_filter = models.CharField("Export filter", max_length=64)
    description = models.CharField("Description", max_length=64,
                                   null=True, blank=True)
    # Peer remark to be shown in RPSL
    rpsl_remark = models.CharField("RPSL Remark", max_length=64,
                                   null=True, blank=True)
    # Trouble-ticket number
    tt = models.IntegerField("TT", blank=True, null=True)
    # In addition to PeerGroup.communities
    # and PeeringPoint.communities
    communities = models.CharField("Import Communities", max_length=128,
                                   blank=True, null=True)
    max_prefixes = models.IntegerField("Max. Prefixes", default=100)
    import_filter_name = models.CharField("Import Filter Name",
                                          max_length=64, blank=True,
                                          null=True)
    export_filter_name = models.CharField("Export Filter Name",
                                          max_length=64, blank=True,
                                          null=True)
    tags = TagsField("Tags", null=True, blank=True)
    # Versioned RPSL representation of this peering
    rpsl = GridVCSField("rpsl_peer")

    def __str__(self):
        return " %s (%s@%s)" % (self.remote_asn, self.remote_ip,
                                self.peering_point.hostname)

    def save(self, *args, **kwargs):
        """
        Save peer, normalizing blank filter names to NULL, then refresh
        the peering point's prefix-list and the stored RPSL.
        """
        # Treat whitespace-only filter names as unset
        if self.import_filter_name is not None and not self.import_filter_name.strip(
        ):
            self.import_filter_name = None
        if self.export_filter_name is not None and not self.export_filter_name.strip(
        ):
            self.export_filter_name = None
        super(Peer, self).save(*args, **kwargs)
        self.peering_point.sync_cm_prefix_list()
        self.touch_rpsl()

    @property
    def all_communities(self):
        # Merged, deduplicated, sorted communities from peering point,
        # peer group and this peer, space-separated
        r = {}
        for cl in [
            self.peering_point.communities,
            self.peer_group.communities,
            self.communities
        ]:
            if cl is None:
                continue
            for c in cl.replace(",", " ").split():
                r[c] = None
        c = sorted(r.keys())
        return " ".join(c)

    def get_rpsl(self):
        """
        Build RPSL import:/export: statements for this peering session.

        :return: RPSL text (not yet passed through rpsl_format)
        :rtype: str
        """
        s = "import: from AS%d" % self.remote_asn
        s += " at %s" % self.peering_point.hostname
        actions = []
        local_pref = self.effective_local_pref
        if local_pref:
            # Select pref meaning
            if config.peer.rpsl_inverse_pref_style:
                pref = 65535 - local_pref  # RPSL style
            else:
                pref = local_pref
            actions += ["pref=%d;" % pref]
        import_med = self.effective_import_med
        if import_med:
            actions += ["med=%d;" % import_med]
        if actions:
            s += " action " + " ".join(actions)
        s += " accept %s\n" % self.import_filter
        actions = []
        export_med = self.effective_export_med
        if export_med:
            actions += ["med=%d;" % export_med]
        s += "export: to AS%s at %s" % (self.remote_asn,
                                        self.peering_point.hostname)
        if actions:
            s += " action " + " ".join(actions)
        s += " announce %s" % self.export_filter
        return s

    @property
    def effective_max_prefixes(self):
        # Peer-specific limit wins, then peer group's, then 0 (unlimited)
        if self.max_prefixes:
            return self.max_prefixes
        if self.peer_group.max_prefixes:
            return self.peer_group.max_prefixes
        return 0

    @property
    def effective_local_pref(self):
        """
        Effective localpref: Peer specific or PeerGroup inherited
        """
        if self.local_pref is not None:
            return self.local_pref
        return self.peer_group.local_pref

    @property
    def effective_import_med(self):
        """
        Effective import med: Peer specific or PeerGroup inherited
        """
        if self.import_med is not None:
            return self.import_med
        return self.peer_group.import_med

    @property
    def effective_export_med(self):
        """
        Effective export med: Peer specific or PeerGroup inherited
        """
        if self.export_med is not None:
            return self.export_med
        return self.peer_group.export_med

    @classmethod
    def get_peer(cls, address):
        """
        Get peer by address

        :param address: Remote address
        :type address: Str
        :returns: Peer instance or None
        """
        # Match against either the primary or the backup remote address
        data = list(Peer.objects.filter().extra(
            where=["host(remote_ip)=%s OR host(remote_backup_ip)=%s"],
            params=[address, address]))
        if data:
            return data[0]
        else:
            return None

    def touch_rpsl(self):
        # Rewrite stored RPSL only when the rendered text actually changed
        c_rpsl = self.rpsl.read()
        n_rpsl = self.get_rpsl()
        if c_rpsl == n_rpsl:
            return  # Not changed
        self.rpsl.write(n_rpsl)

    def on_save(self):
        self.touch_rpsl()
class Object(Document):
    """
    Inventory object
    """
    meta = {
        "collection": "noc.objects",
        "strict": False,
        "auto_create_index": False,
        "indexes": [
            "data", "container", ("name", "container"),
            ("model", "data.asset.serial"),
            "data.management.managed_object"
        ]
    }

    name = StringField()
    model = PlainReferenceField(ObjectModel)
    # Per-interface attribute values: {interface: {key: value}}
    data = DictField()
    container = PlainReferenceField("self", required=False)
    comment = GridVCSField("object_comment")
    # Map
    layer = PlainReferenceField(Layer)
    point = PointField(auto_index=True)
    # tags = ListField(StringField())
    # Object id in BI
    bi_id = LongField(unique=True)

    _id_cache = cachetools.TTLCache(maxsize=1000, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=1000, ttl=60)
    _path_cache = cachetools.TTLCache(maxsize=1000, ttl=60)

    # Genderless connection names whose ObjectConnections are re-saved
    # when the object's coordinates change (see on_save)
    REBUILD_CONNECTIONS = [
        "links",
        "conduits"
    ]

    def __unicode__(self):
        return unicode(self.name or self.id)

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        """Return Object by primary key (TTL-cached), or None."""
        return Object.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        """Return Object by BI id (TTL-cached), or None."""
        return Object.objects.filter(bi_id=id).first()

    def clean(self):
        self.set_point()

    def set_point(self):
        """
        Refresh ``layer``/``point`` from data.geopoint, clearing them
        when no valid geodata is present.
        """
        from noc.gis.map import map
        self.layer = None
        self.point = None
        geo = self.data.get("geopoint")
        if not geo:
            return
        layer_code = self.model.get_data("geopoint", "layer")
        if not layer_code:
            return
        layer = Layer.get_by_code(layer_code)
        if not layer:
            return
        x = geo.get("x")
        y = geo.get("y")
        srid = geo.get("srid")
        if x and y:
            self.layer = layer
            self.point = map.get_db_point(x, y, srid=srid)

    def on_save(self):
        """
        Post-save hook: propagate coordinates to nested coordinate-less
        objects and refresh PoP links when the container changed.
        """
        def get_coordless_objects(o):
            # Recursively collect ids of o and its descendants that lack
            # their own x/y geopoint
            r = {str(o.id)}
            for co in Object.objects.filter(container=o.id):
                g = co.data.get("geopoint")
                if g and g.get("x") and g.get("y"):
                    continue
                else:
                    r |= get_coordless_objects(co)
            return r

        geo = self.data.get("geopoint")
        if geo and geo.get("x") and geo.get("y"):
            # Rebuild connection layers
            for ct in self.REBUILD_CONNECTIONS:
                for c, _, _ in self.get_genderless_connections(ct):
                    c.save()
            # Update nested objects
            from noc.sa.models.managedobject import ManagedObject
            mos = get_coordless_objects(self)
            if mos:
                ManagedObject.objects.filter(
                    container__in=mos
                ).update(
                    x=geo.get("x"),
                    y=geo.get("y"),
                    default_zoom=self.layer.default_zoom
                )
        if self._created:
            if self.container:
                pop = self.get_pop()
                if pop:
                    pop.update_pop_links()
        # Changed container
        elif hasattr(self, "_changed_fields") and "container" in self._changed_fields:
            # Old pop
            # NOTE(review): _pre_init stores the original container as
            # `_cache_container`, but this reads `_old_container` —
            # confirm the attribute set elsewhere, otherwise the old PoP
            # is never found here
            old_container_id = getattr(self, "_old_container", None)
            old_pop = None
            if old_container_id:
                c = Object.get_by_id(old_container_id)
                while c:
                    if c.get_data("pop", "level"):
                        old_pop = c
                        break
                    c = c.container
            # New pop
            new_pop = self.get_pop()
            # Check if pop moved
            if old_pop != new_pop:
                if old_pop:
                    old_pop.update_pop_links()
                if new_pop:
                    new_pop.update_pop_links()

    @cachetools.cachedmethod(operator.attrgetter("_path_cache"),
                             lock=lambda _: id_lock)
    def get_path(self):
        """
        Returns list of parent segment ids

        :return:
        """
        if self.container:
            return self.container.get_path() + [self.id]
        return [self.id]

    def get_data(self, interface, key):
        """
        Read a model-interface attribute: const attributes come from the
        model, the rest from this object's ``data``.
        """
        attr = ModelInterface.get_interface_attr(interface, key)
        if attr.is_const:
            # Lookup model
            return self.model.get_data(interface, key)
        else:
            v = self.data.get(interface, {})
            return v.get(key)

    def set_data(self, interface, key, value):
        """
        Set a non-const model-interface attribute on this object.

        :raises ModelDataError: when the attribute is const (read-only)
        """
        attr = ModelInterface.get_interface_attr(interface, key)
        if attr.is_const:
            raise ModelDataError("Cannot set read-only value")
        value = attr._clean(value)
        # @todo: Check interface restrictions
        if interface not in self.data:
            self.data[interface] = {}
        self.data[interface][key] = value

    def reset_data(self, interface, key):
        """
        Remove a non-const attribute value from this object's ``data``.

        :raises ModelDataError: when the attribute is const (read-only)
        """
        attr = ModelInterface.get_interface_attr(interface, key)
        if attr.is_const:
            raise ModelDataError("Cannot reset read-only value")
        if interface in self.data and key in self.data[interface]:
            del self.data[interface][key]

    def has_connection(self, name):
        # Delegate to the object model's connection catalog
        return self.model.has_connection(name)

    def get_p2p_connection(self, name):
        """
        Get neighbor for p2p connection (s and mf types)
        Returns connection, remote object, remote connection or
        None, None, None
        """
        c = ObjectConnection.objects.filter(
            __raw__={
                "connection": {
                    "$elemMatch": {
                        "object": self.id,
                        "name": name
                    }
                }
            }
        ).first()
        if c:
            for x in c.connection:
                if x.object.id != self.id:
                    return c, x.object, x.name
        # Strange things happen
        return None, None, None

    def get_genderless_connections(self, name):
        """
        Return all connections on *name* as a list of
        [connection, remote object, remote connection name].
        """
        r = []
        for c in ObjectConnection.objects.filter(
            __raw__={
                "connection": {
                    "$elemMatch": {
                        "object": self.id,
                        "name": name
                    }
                }
            }
        ):
            for x in c.connection:
                if x.object.id != self.id:
                    r += [[c, x.object, x.name]]
        return r

    def disconnect_p2p(self, name):
        """
        Remove connection *name*
        """
        c = self.get_p2p_connection(name)[0]
        if c:
            self.log(u"'%s' disconnected" % name,
                     system="CORE", op="DISCONNECT")
            c.delete()

    def connect_p2p(self, name, remote_object, remote_name, data,
                    reconnect=False):
        """
        Create a point-to-point connection to *remote_object*, validating
        gender, direction and type compatibility of both ends.

        :raises ConnectionError: on unknown or incompatible connections,
            or when already connected and *reconnect* is False
        :return: ObjectConnection, or None when an identical connection
            already existed and was merely updated
        """
        lc = self.model.get_model_connection(name)
        if lc is None:
            raise ConnectionError("Local connection not found: %s" % name)
        name = lc.name
        rc = remote_object.model.get_model_connection(remote_name)
        if rc is None:
            raise ConnectionError("Remote connection not found: %s" % remote_name)
        remote_name = rc.name
        # Check genders are compatible
        r_gender = ConnectionType.OPPOSITE_GENDER[rc.gender]
        if lc.gender != r_gender:
            raise ConnectionError("Incompatible genders: %s - %s" % (
                lc.gender, rc.gender
            ))
        # Check directions are compatible
        if ((lc.direction == "i" and rc.direction != "o") or
                (lc.direction == "o" and rc.direction != "i") or
                (lc.direction == "s" and rc.direction != "s")):
            raise ConnectionError("Incompatible directions: %s - %s" % (
                lc.direction, rc.direction))
        # Check types are compatible
        c_types = lc.type.get_compatible_types(lc.gender)
        if rc.type.id not in c_types:
            raise ConnectionError("Incompatible connection types: %s - %s" % (
                lc.type.name, rc.type.name
            ))
        # Check existing connections
        if lc.type.genders in ("s", "m", "f", "mf"):
            ec, r_object, r_name = self.get_p2p_connection(name)
            if ec is not None:
                # Connection exists
                if reconnect:
                    if r_object.id == remote_object.id and r_name == remote_name:
                        # Same connection exists
                        n_data = deep_merge(ec.data, data)
                        if n_data != ec.data:
                            # Update data
                            ec.data = n_data
                            ec.save()
                        return
                    self.disconnect_p2p(name)
                else:
                    raise ConnectionError("Already connected")
        # Create connection
        c = ObjectConnection(
            connection=[
                ObjectConnectionItem(object=self, name=name),
                ObjectConnectionItem(object=remote_object,
                                     name=remote_name)
            ],
            data=data
        ).save()
        self.log(u"%s:%s -> %s:%s" % (self, name, remote_object, remote_name),
                 system="CORE", op="CONNECT")
        # Disconnect from container on o-connection
        if lc.direction == "o" and self.container:
            self.log(u"Remove from %s" % self.container,
                     system="CORE", op="REMOVE")
            self.container = None
            self.save()
        return c

    def connect_genderless(self, name, remote_object, remote_name,
                           data=None, type=None, layer=None):
        """
        Connect two genderless connections
        """
        lc = self.model.get_model_connection(name)
        if lc is None:
            raise ConnectionError("Local connection not found: %s" % name)
        name = lc.name
        rc = remote_object.model.get_model_connection(remote_name)
        if rc is None:
            raise ConnectionError("Remote connection not found: %s" % remote_name)
        remote_name = rc.name
        if lc.gender != "s":
            raise ConnectionError("Local connection '%s' must be genderless" % name)
        if rc.gender != "s":
            raise ConnectionError("Remote connection '%s' must be genderless" % remote_name)
        # Check for connection
        for c, ro, rname in self.get_genderless_connections(name):
            if ro.id == remote_object.id and rname == remote_name:
                # Already connected: just replace the data payload
                c.data = data or {}
                c.save()
                return
        # Normalize layer
        if layer and isinstance(layer, six.string_types):
            layer = Layer.get_by_code(layer)
        # Create connection
        ObjectConnection(
            connection=[
                ObjectConnectionItem(object=self, name=name),
                ObjectConnectionItem(object=remote_object,
                                     name=remote_name)
            ],
            data=data or {},
            type=type or None,
            layer=layer
        ).save()
        self.log(u"%s:%s -> %s:%s" % (self, name, remote_object, remote_name),
                 system="CORE", op="CONNECT")

    def put_into(self, container):
        """
        Put object into container
        """
        if not container.get_data("container", "container"):
            raise ValueError("Must be put into container")
        # Disconnect all o-connections
        # NOTE(review): the loop variable `c` is rebound to the
        # ObjectConnection before `disconnect_p2p(c.name)` is called —
        # confirm ObjectConnection exposes a compatible `name` attribute,
        # otherwise this should use the original model connection's name
        for c in self.model.connections:
            if c.direction == "o":
                c, _, _ = self.get_p2p_connection(c.name)
                if c:
                    self.disconnect_p2p(c.name)
        # Connect to parent
        self.container = container.id
        # Reset previous rack position
        if self.data.get("rackmount"):
            for k in ("position", "side", "shift"):
                if k in self.data["rackmount"]:
                    del self.data["rackmount"][k]
        self.save()
        self.log(
            "Insert into %s" % container,
            system="CORE", op="INSERT")

    def get_content(self):
        """
        Returns all items directly put into container
        """
        return Object.objects.filter(container=self.id)

    def get_local_name_path(self):
        # Name path relative to the outer-connected parent object
        for _, ro, rn in self.get_outer_connections():
            return ro.get_local_name_path() + [rn]
        return []

    def get_name_path(self):
        """
        Return list of container names
        """
        current = self.container
        if current is None:
            # No container: fall back to outer connection, if any
            for _, ro, rn in self.get_outer_connections():
                return ro.get_name_path() + [rn]
            return [unicode(self)]
        np = [unicode(self)]
        while current:
            np.insert(0, unicode(current))
            current = current.container
        return np

    def log(self, message, user=None, system=None, managed_object=None,
            op=None):
        """
        Append an ObjectLog record for this object.

        :param message: Log message text
        :param user: User name or user object; defaults to current user
        :param system: Originating subsystem (e.g. "CORE")
        :param managed_object: Related managed object (stringified)
        :param op: Operation code (e.g. "CONNECT", "INSERT")
        """
        if not user:
            user = get_user()
        if hasattr(user, "username"):
            user = user.username
        if not user:
            user = "******"
        if not isinstance(managed_object, six.string_types):
            managed_object = unicode(managed_object)
        ObjectLog(
            object=self.id,
            user=user,
            ts=datetime.datetime.now(),
            message=message,
            system=system,
            managed_object=managed_object,
            op=op
        ).save()

    def get_log(self):
        """Return this object's log records ordered by timestamp."""
        return ObjectLog.objects.filter(object=self.id).order_by("ts")

    def get_lost_and_found(self):
        """
        Find the nearest "Lost&Found" container up the container chain,
        or None.
        """
        m = ObjectModel.get_by_name("Lost&Found")
        c = self.container
        while c:
            # Check siblings
            lf = Object.objects.filter(container=c, model=m).first()
            if lf:
                return lf
            # Up one level
            c = c.container
        return None

    @classmethod
    def detach_children(cls, sender, document, target=None):
        """
        Pre-delete handler: move *document*'s children to *target*
        (default: nearest Lost&Found), recursing into nested containers.
        """
        if not document.get_data("container", "container"):
            return
        if not target:
            target = document.get_lost_and_found()
        for o in Object.objects.filter(container=document.id):
            if o.get_data("container", "container"):
                cls.detach_children(sender, o, target)
                o.delete()
            else:
                o.put_into(target)

    def iter_connections(self, direction):
        """
        Yields connections of specified direction as tuples of
        (name, remote_object, remote_name)
        """
        ic = set(c.name for c in self.model.connections
                 if c.direction == direction)
        for c in ObjectConnection.objects.filter(
                connection__object=self.id):
            sn = None
            oc = None
            for cc in c.connection:
                if cc.object.id == self.id:
                    if cc.name in ic:
                        sn = cc.name
                else:
                    oc = cc
            if sn and oc:
                yield (sn, oc.object, oc.name)

    def iter_inner_connections(self):
        """
        Yields inner connections as tuples of
        (name, remote_object, remote_name)
        """
        for r in self.iter_connections("i"):
            yield r

    def iter_outer_connections(self):
        """
        Yields outer connections as tuples of
        (name, remote_object, remote_name)
        """
        for r in self.iter_connections("o"):
            yield r

    def has_inner_connections(self):
        """
        Returns True if object has any inner connections
        """
        return any(self.iter_inner_connections())

    def get_inner_connections(self):
        """
        Returns a list of inner connections as
        (name, remote_object, remote_name)
        """
        return list(self.iter_inner_connections())

    def get_outer_connections(self):
        """
        Returns a list of outer connections as
        (name, remote_object, remote_name)
        """
        return list(self.iter_outer_connections())

    @classmethod
    def delete_disconnect(cls, sender, document, target=None):
        """
        Pre-delete handler: drop *document* from every ObjectConnection,
        deleting connections that would be left with fewer than two ends.
        """
        for c in ObjectConnection.objects.filter(
                connection__object=document.id):
            left = [cc for cc in c.connection if cc.object.id != document.id]
            if len(left) < 2:
                c.delete()  # Remove connection
            else:
                # Wipe object
                c.connection = left
                c.save()

    def get_pop(self):
        """
        Find enclosing PoP

        :returns: PoP instance or None
        """
        c = self.container
        while c:
            if c.get_data("pop", "level"):
                return c
            c = c.container
        return None

    def get_coordinates_zoom(self):
        """
        Get managed object's coordinates
        # @todo: Speedup?
        :returns: x (lon), y (lat), zoom level
        """
        c = self
        while c:
            if c.point and c.layer:
                x = c.get_data("geopoint", "x")
                y = c.get_data("geopoint", "y")
                zoom = c.layer.default_zoom or 11
                return x, y, zoom
            if c.container:
                c = Object.get_by_id(c.container.id)
                if c:
                    continue
            break
        return None, None, None

    @classmethod
    def get_managed(cls, mo):
        """
        Get Object managed by managed object

        :param mo: Managed Object instance or id
        :returns: Objects managed by managed object, or empty list
        """
        if hasattr(mo, "id"):
            mo = mo.id
        return cls.objects.filter(data__management__managed_object=mo)

    @classmethod
    def get_by_path(cls, path, hints=None):
        """
        Get object by given path.

        :param path: List of names following to path
        :param hints: {name: object_id} dictionary for getting object in
            path
        :returns: Object instance. None if not found
        """
        current = None
        for p in path:
            current = Object.objects.filter(name=p,
                                            container=current).first()
            if not current:
                return None
            if hints:
                h = hints.get(p)
                if h:
                    return Object.get_by_id(h)
        return current

    def update_pop_links(self, delay=20):
        # Schedule asynchronous PoP-link rebuild via the job queue
        call_later(
            "noc.inv.util.pop_links.update_pop_links",
            delay,
            pop_id=self.id
        )

    @classmethod
    def _pre_init(cls, sender, document, values, **kwargs):
        """
        Object pre-initialization
        """
        # Store original container id
        if "container" in values and values["container"]:
            document._cache_container = values["container"]

    def get_address_text(self):
        """
        Return first found address.text value upwards the path

        :return: Address text or None
        """
        current = self
        while current:
            addr = current.get_data("address", "text")
            if addr:
                return addr
            if current.container:
                current = Object.get_by_id(current.container.id)
            else:
                break
        return None