Example #1
class Task(Document):
    meta = {'collection': 'task'}
    _id = StringField(primary_key=True)
    detail = StringField(unique=True, required=True)
    duration = LongField()
    consumed_time = LongField()
    created_at = DateTimeField()
    finished = BooleanField()
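The snippets in these examples rely on module-level setup that is not shown: the MongoEngine field imports, cachetools and operator for the cached class methods, and a shared id_lock. Below is a minimal preamble sketch, assuming a plain threading.Lock; the NOC-specific PlainReferenceField and ForeignKeyField used in later examples come from the project's own modules and are not part of standard MongoEngine.

import operator
import threading

import cachetools
from mongoengine import Document, EmbeddedDocument
from mongoengine.fields import (
    BooleanField,
    DateTimeField,
    IntField,
    ListField,
    LongField,
    StringField,
)

# Shared lock guarding the per-class TTL caches used by the
# cached get_by_id()-style class methods in the examples below.
id_lock = threading.Lock()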
Example #2
class Supplier(Document):
    meta = {
        "collection": "noc.suppliers",
        "indexes": ["name"],
        "strict": False,
        "auto_create_index": False
    }

    name = StringField()
    description = StringField()
    is_affilated = BooleanField(default=False)
    profile = PlainReferenceField(SupplierProfile)
    state = PlainReferenceField(State)
    # Integration with external NRI and TT systems
    # Reference to the remote system the object was imported from
    remote_system = PlainReferenceField(RemoteSystem)
    # Object id in remote system
    remote_id = StringField()
    # Object id in BI
    bi_id = LongField(unique=True)

    tags = ListField(StringField())

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __unicode__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return Supplier.objects.filter(id=id).first()
Example #3
class ServiceProfile(Document):
    meta = {
        "collection": "noc.serviceprofiles",
        "strict": False,
        "auto_create_index": False
    }
    name = StringField(unique=True)
    description = StringField()
    # Jinja2 service label template
    card_title_template = StringField()
    # Short service code for reporting
    code = StringField()
    # FontAwesome glyph
    glyph = StringField()
    # Glyph order in summary
    display_order = IntField(default=100)
    # Show in total summary
    show_in_summary = BooleanField(default=True)
    workflow = PlainReferenceField(Workflow)
    # Auto-assign interface profile when service binds to interface
    interface_profile = ReferenceField(InterfaceProfile)
    # Alarm weight
    weight = IntField(default=0)
    # Capabilities
    caps = ListField(EmbeddedDocumentField(CapsItem))
    # Integration with external NRI and TT systems
    # Reference to the remote system the object was imported from
    remote_system = ReferenceField(RemoteSystem)
    # Object id in remote system
    remote_id = StringField()
    # Object id in BI
    bi_id = LongField(unique=True)
    # Labels
    labels = ListField(StringField())
    effective_labels = ListField(StringField())

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __str__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return ServiceProfile.objects.filter(id=id).first()

    def on_save(self):
        if not hasattr(self, "_changed_fields"
                       ) or "interface_profile" in self._changed_fields:
            call_later(
                "noc.sa.models.serviceprofile.refresh_interface_profiles",
                sp_id=self.id,
                ip_id=self.interface_profile.id
                if self.interface_profile else None,
            )

    @classmethod
    def can_set_label(cls, label):
        return Label.get_effective_setting(label, "enable_serviceprofile")
Example #4
class AllocationGroup(Document):
    meta = {
        "collection": "allocationgroups",
        "strict": False,
        "auto_create_index": False
    }

    name = StringField(unique=True)
    description = StringField()
    tags = ListField(StringField())
    # Integration with external NRI and TT systems
    # Reference to the remote system the object was imported from
    remote_system = PlainReferenceField(RemoteSystem)
    # Object id in remote system
    remote_id = StringField()
    # Object id in BI
    bi_id = LongField(unique=True)

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __unicode__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"), lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return AllocationGroup.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"), lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        return AllocationGroup.objects.filter(bi_id=id).first()
Example #5
class SupplierProfile(Document):
    meta = {
        "collection": "noc.supplierprofiles",
        "strict": False,
        "auto_create_index": False
    }

    name = StringField(unique=True)
    description = StringField()
    workflow = PlainReferenceField(Workflow)
    style = ForeignKeyField(Style, required=False)
    tags = ListField(StringField())
    # Integration with external NRI and TT systems
    # Reference to the remote system the object was imported from
    remote_system = PlainReferenceField(RemoteSystem)
    # Object id in remote system
    remote_id = StringField()
    # Object id in BI
    bi_id = LongField(unique=True)

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __str__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return SupplierProfile.objects.filter(id=id).first()
Example #6
class Workflow(Document):
    meta = {
        "collection": "workflows",
        "strict": False,
        "auto_create_index": False
    }
    name = StringField(unique=True)
    is_active = BooleanField()
    description = StringField()
    # Integration with external NRI and TT systems
    # Reference to the remote system the object was imported from
    remote_system = ReferenceField(RemoteSystem)
    # Object id in remote system
    remote_id = StringField()
    # Object id in BI
    bi_id = LongField(unique=True)

    _id_cache = cachetools.TTLCache(maxsize=1000, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=1000, ttl=60)

    def __str__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return Workflow.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        return Workflow.objects.filter(bi_id=id).first()

    @cachetools.cached(_default_state_cache,
                       key=lambda x: str(x.id),
                       lock=id_lock)
    def get_default_state(self):
        from .state import State

        return State.objects.filter(workflow=self.id, is_default=True).first()

    def set_default_state(self, state):
        from .state import State

        logger.info("[%s] Set default state to: %s", self.name, state.name)
        for s in State.objects.filter(workflow=self.id):
            if s.is_default and s.id != state.id:
                logger.info("[%s] Removing default status from: %s", self.name,
                            s.name)
                s.is_default = False
                s.save()
        # Invalidate caches
        key = str(self.id)
        if key in _default_state_cache:
            try:
                del _default_state_cache[key]
            except KeyError:
                pass
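get_default_state above is memoized in a module-level _default_state_cache, which set_default_state invalidates by key. A sketch of the module-level objects the example assumes (the cache size and TTL values are assumptions):

import logging
import threading

import cachetools

logger = logging.getLogger(__name__)
id_lock = threading.Lock()
# Keyed by str(workflow.id); get_default_state() fills this cache and
# set_default_state() deletes the key to force a fresh lookup.
_default_state_cache = cachetools.TTLCache(maxsize=1000, ttl=60)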
Example #7
class SensorProfile(Document):
    meta = {
        "collection": "sensorprofiles",
        "strict": False,
        "auto_create_index": False
    }

    name = StringField(unique=True)
    description = StringField()
    workflow = PlainReferenceField(Workflow)
    style = ForeignKeyField(Style)
    enable_collect = BooleanField(default=False)
    # Labels
    labels = ListField(StringField())
    effective_labels = ListField(StringField())
    # BI ID
    bi_id = LongField(unique=True)

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _default_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    DEFAULT_PROFILE_NAME = "default"
    DEFAULT_WORKFLOW_NAME = "Sensor Default"

    def __str__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id) -> "SensorProfile":
        return SensorProfile.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_bi_id(cls, id) -> "SensorProfile":
        return SensorProfile.objects.filter(bi_id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_default_cache"),
                             lock=lambda _: id_lock)
    def get_default_profile(cls) -> "SensorProfile":
        sp = SensorProfile.objects.filter(
            name=cls.DEFAULT_PROFILE_NAME).first()
        if not sp:
            sp = SensorProfile(
                name=cls.DEFAULT_PROFILE_NAME,
                workflow=Workflow.objects.filter(
                    name=cls.DEFAULT_WORKFLOW_NAME).first(),
            )
            sp.save()
        return sp

    @classmethod
    def can_set_label(cls, label):
        if label.enable_sensorprofile:
            return True
        return False
Example #8
class VPN(Document):
    meta = {
        "collection": "vpns",
        "strict": False,
        "auto_create_index": False
    }

    name = StringField(unique=True)
    profile = PlainReferenceField(VPNProfile)
    description = StringField()
    state = PlainReferenceField(State)
    # Link to parent overlay
    parent = PlainReferenceField("self")
    project = ForeignKeyField(Project)
    route_target = ListField(EmbeddedDocumentField(RouteTargetItem))
    tags = ListField(StringField())
    # Integration with external NRI and TT systems
    # Reference to the remote system the object was imported from
    remote_system = PlainReferenceField(RemoteSystem)
    # Object id in remote system
    remote_id = StringField()
    # Object id in BI
    bi_id = LongField(unique=True)
    # @todo: last_seen
    # @todo: expired

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __unicode__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"), lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return VPN.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"), lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        return VPN.objects.filter(bi_id=id).first()

    def clean(self):
        super(VPN, self).clean()
        if self.id and "parent" in self._changed_fields and self.has_loop:
            raise ValidationError("Creating VPN loop")

    @property
    def has_loop(self):
        """
        Check if object creates loop
        """
        if not self.id:
            return False
        p = self.parent
        while p:
            if p.id == self.id:
                return True
            p = p.parent
        return False
Example #9
class AddressProfile(Document):
    meta = {
        "collection": "addressprofiles",
        "strict": False,
        "auto_create_index": False
    }

    name = StringField(unique=True)
    description = StringField()
    # Address workflow
    workflow = PlainReferenceField(Workflow)
    style = ForeignKeyField(Style)
    # Template.subject to render Address.name
    name_template = ForeignKeyField(Template)
    # Template.subject to render Address.fqdn
    fqdn_template = ForeignKeyField(Template)
    # Send seen event to prefix
    seen_propagation_policy = StringField(choices=[("E", "Enable"),
                                                   ("D", "Disable")],
                                          default="D")
    # Labels
    labels = ListField(StringField())
    effective_labels = ListField(StringField())
    # Integration with external NRI and TT systems
    # Reference to the remote system the object was imported from
    remote_system = PlainReferenceField(RemoteSystem)
    # Object id in remote system
    remote_id = StringField()
    # Object id in BI
    bi_id = LongField(unique=True)

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _name_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __str__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return AddressProfile.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_name_cache"),
                             lock=lambda _: id_lock)
    def get_by_name(cls, name):
        return AddressProfile.objects.filter(name=name).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        return AddressProfile.objects.filter(bi_id=id).first()

    @classmethod
    def can_set_label(cls, label):
        return Label.get_effective_setting(label,
                                           setting="enable_addressprofile")
Example #10
class PrefixProfile(Document):
    meta = {"collection": "prefixprofiles", "strict": False, "auto_create_index": False}

    name = StringField(unique=True)
    description = StringField()
    # Enable nested Address discovery
    # via ARP cache
    enable_ip_discovery = BooleanField(default=False)
    # Enable nested Addresses discovery
    # via active PING probes
    enable_ip_ping_discovery = BooleanField(default=False)
    # Enable nested prefix discovery
    enable_prefix_discovery = BooleanField(default=False)
    # Prefix workflow
    workflow = PlainReferenceField(Workflow)
    style = ForeignKeyField(Style)
    # Template.subject to render Prefix.name
    name_template = ForeignKeyField(Template)
    # Discovery policies
    prefix_discovery_policy = StringField(choices=[("E", "Enable"), ("D", "Disable")], default="D")
    address_discovery_policy = StringField(choices=[("E", "Enable"), ("D", "Disable")], default="D")
    # Send seen event to parent
    seen_propagation_policy = StringField(
        choices=[("P", "Propagate"), ("E", "Enable"), ("D", "Disable")], default="P"
    )
    # Include/Exclude broadcast & network addresses from prefix
    prefix_special_address_policy = StringField(
        choices=[("I", "Include"), ("X", "Exclude")], default="X"
    )
    # Labels
    labels = ListField(StringField())
    effective_labels = ListField(StringField())
    # Integration with external NRI and TT systems
    # Reference to the remote system the object was imported from
    remote_system = PlainReferenceField(RemoteSystem)
    # Object id in remote system
    remote_id = StringField()
    # Object id in BI
    bi_id = LongField(unique=True)

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __str__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"), lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return PrefixProfile.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"), lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        return PrefixProfile.objects.filter(bi_id=id).first()

    @classmethod
    def can_set_label(cls, label):
        return Label.get_effective_setting(label, setting="enable_prefixprofile")
Example #11
class StaticProfileResultDO(EmbeddedDocument):
    """
    Static profiling result plain object
    """

    # Number of parameters of this model
    parameters = IntField(required=True)
    # Floating point operations
    flops = LongField(required=True)
    # Memory consumption in Byte in order to load this model into GPU or CPU
    memory = LongField(required=True)
    # Memory read in Byte
    mread = LongField(required=True)
    # Memory write in Byte
    mwrite = LongField(required=True)
    # Memory readwrite in Byte
    mrw = LongField(required=True)
Example #12
class Pool(Document):
    meta = {
        "collection": "noc.pools",
        "strict": False,
        "auto_create_index": False
    }

    name = StringField(unique=True,
                       min_length=1,
                       max_length=16,
                       regex="^[0-9a-zA-Z]{1,16}$")
    description = StringField()
    discovery_reschedule_limit = IntField(default=50)
    # Object id in BI
    bi_id = LongField(unique=True)

    _id_cache = cachetools.TTLCache(1000, ttl=60)
    _bi_id_cache = cachetools.TTLCache(1000, ttl=60)
    _name_cache = cachetools.TTLCache(1000, ttl=60)
    reschedule_lock = threading.Lock()
    reschedule_ts = {}  # pool id -> timestamp

    def __unicode__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return Pool.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        return Pool.objects.filter(bi_id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_name_cache"),
                             lock=lambda _: id_lock)
    def get_by_name(cls, name):
        return Pool.objects.filter(name=name).first()

    def get_delta(self):
        """
        Get the delay before the next discovery run,
        spacing reschedules by 1 / discovery_reschedule_limit seconds
        """
        t = time.time()
        dt = 1.0 / float(self.discovery_reschedule_limit)
        with self.reschedule_lock:
            lt = self.reschedule_ts.get(self.id)
            if lt and lt > t:
                delta = lt - t
            else:
                delta = 0
            self.reschedule_ts[self.id] = t + dt
        return delta
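get_delta spaces pool discovery reschedules by advancing a per-pool timestamp: the first caller gets no delay, and callers arriving within dt of the previous one are told to wait roughly dt. A small usage sketch, assuming a pool named "default" exists:

# With the default discovery_reschedule_limit of 50, dt = 1/50 = 0.02 s:
# the first call returns 0 and any call made within 0.02 s of the previous
# one returns roughly 0.02 s, nudging rescheduled discovery jobs apart.
pool = Pool.get_by_name("default")
delays = [pool.get_delta() for _ in range(3)]  # ~[0.0, 0.02, 0.02]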
Example #13
class ResourceGroup(Document):
    """
    Technology

    Abstraction to restrict ResourceGroup links
    """

    meta = {
        "collection": "resourcegroups",
        "strict": False,
        "auto_create_index": False
    }

    # Group | Name
    name = StringField()
    technology = PlainReferenceField(Technology)
    parent = PlainReferenceField("inv.ResourceGroup")
    description = StringField()
    # @todo: FM integration
    # Integration with external NRI and TT systems
    # Reference to the remote system the object was imported from
    remote_system = PlainReferenceField(RemoteSystem)
    # Object id in remote system
    remote_id = StringField()
    # Object id in BI
    bi_id = LongField(unique=True)
    # Tags
    tags = ListField(StringField())

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __str__(self):
        return "%s (%s)" % (self.name, self.technology.name)

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return ResourceGroup.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        return ResourceGroup.objects.filter(bi_id=id).first()

    def iter_changed_datastream(self, changed_fields=None):
        if config.datastream.enable_resourcegroup:
            yield "resourcegroup", self.id

    @property
    def has_children(self):
        return bool(
            ResourceGroup.objects.filter(parent=self.id).only("id").first())
Example #14
class VPNProfile(Document):
    meta = {
        "collection": "vpnprofiles",
        "strict": False,
        "auto_create_index": False
    }

    name = StringField(unique=True)
    description = StringField()
    type = StringField(choices=[("vrf", "VRF"), ("vxlan", "VxLAN"),
                                ("vpls", "VPLS"), ("vll", "VLL"),
                                ("evpn", "EVPN"), ("ipsec", "IPSec"),
                                ("gre", "GRE"), ("ipip", "IP-IP")],
                       default="vrf")
    workflow = PlainReferenceField(Workflow)
    # Template.subject to render VPN/VRF.name
    name_template = ForeignKeyField(Template)
    #
    style = ForeignKeyField(Style)
    # For vrf type -- default prefix profile
    # Used to create AFI root prefixes
    default_prefix_profile = PlainReferenceField("ip.PrefixProfile")
    #
    tags = ListField(StringField())
    # Integration with external NRI and TT systems
    # Reference to the remote system the object was imported from
    remote_system = PlainReferenceField(RemoteSystem)
    # Object id in remote system
    remote_id = StringField()
    # Object id in BI
    bi_id = LongField(unique=True)

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __unicode__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return VPNProfile.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        return VPNProfile.objects.filter(bi_id=id).first()

    def clean(self):
        if self.type == "vrf" and not self.default_prefix_profile:
            raise ValidationError(
                "default_prefix_profile must be set for vrf type")
Example #15
class VLANProfile(Document):
    meta = {
        "collection": "vlanprofiles",
        "strict": False,
        "auto_create_index": False
    }

    name = StringField(unique=True)
    description = StringField()
    # VLAN is management VLAN
    enable_management = BooleanField(default=False)
    # VLAN is multicast VLAN
    enable_multicast = BooleanField(default=False)
    # VLAN should be automatically provisioned
    enable_provisioning = BooleanField(default=False)
    # VLAN workflow
    workflow = PlainReferenceField(Workflow)
    style = ForeignKeyField(Style)
    # Labels
    labels = ListField(StringField())
    effective_labels = ListField(StringField())
    # Integration with external NRI and TT systems
    # Reference to the remote system the object was imported from
    remote_system = PlainReferenceField(RemoteSystem)
    # Object id in remote system
    remote_id = StringField()
    # Object id in BI
    bi_id = LongField(unique=True)

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __str__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return VLANProfile.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        return VLANProfile.objects.filter(bi_id=id).first()

    @classmethod
    def can_set_label(cls, label):
        return Label.get_effective_setting(label, "enable_vlanprofile")
Example #16
class SubscriberProfile(Document):
    meta = {
        "collection": "noc.subscriberprofiles",
        "strict": False,
        "auto_create_index": False
    }

    name = StringField(unique=True)
    description = StringField()
    style = ForeignKeyField(Style, required=False)
    workflow = PlainReferenceField(Workflow)
    # FontAwesome glyph
    glyph = StringField()
    # Glyph order in summary
    display_order = IntField(default=100)
    # Show in total summary
    show_in_summary = BooleanField(default=True)
    # Labels
    labels = ListField(StringField())
    effective_labels = ListField(StringField())
    # Alarm weight
    weight = IntField(default=0)
    # Integration with external NRI and TT systems
    # Reference to the remote system the object was imported from
    remote_system = PlainReferenceField(RemoteSystem)
    # Object id in remote system
    remote_id = StringField()
    # Object id in BI
    bi_id = LongField(unique=True)

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __str__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return SubscriberProfile.objects.filter(id=id).first()

    @classmethod
    def can_set_label(cls, label):
        return Label.get_effective_setting(label,
                                           setting="enable_subscriberprofile")
Example #17
class Subscriber(Document):
    meta = {
        "collection": "noc.subscribers",
        "indexes": ["name"],
        "strict": False,
        "auto_create_index": False,
    }

    name = StringField()
    description = StringField()
    profile = PlainReferenceField(SubscriberProfile)
    state = PlainReferenceField(State)
    # Main address
    address = StringField()
    # Technical contacts
    tech_contact_person = StringField()
    tech_contact_phone = StringField()
    project = ForeignKeyField(Project)
    # Labels
    labels = ListField(StringField())
    effective_labels = ListField(StringField())
    # Integration with external NRI and TT systems
    # Reference to the remote system the object was imported from
    remote_system = PlainReferenceField(RemoteSystem)
    # Object id in remote system
    remote_id = StringField()
    # Object id in BI
    bi_id = LongField(unique=True)

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __str__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"), lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return Subscriber.objects.filter(id=id).first()

    @classmethod
    def can_set_label(cls, label):
        if label.enable_subscriber:
            return True
        return False
Example #18
class PasswordRecovery(Document):
    meta = dict(
        collection="PwdRecovery",
        allow_inheritance=False,
        indexes=[
            'user',
            # Automatically delete records as they expire.
            dict(fields=['expires'], expireAfterSeconds=0)
        ])

    user = ReferenceField(User, required=True)
    recovery_key = LongField(db_field='k', required=True)
    expires = DateTimeField(
        db_field='e',
        default=lambda: datetime.utcnow() + timedelta(minutes=15))

    @property
    def created(self):
        return self.id.generation_time

    def __repr__(self):
        return 'PasswordRecovery({0}, {1}, {2})'.format(
            self.id.generation_time.isoformat(), self.user.username,
            self.recovery_key)
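The index on expires with expireAfterSeconds=0 lets MongoDB's TTL monitor delete each record shortly after its expires datetime passes, so recovery keys self-destruct about 15 minutes after creation. A hedged usage sketch (the user lookup and key generation are illustrative):

import random

rec = PasswordRecovery(
    user=User.objects.first(),            # any existing user (illustrative)
    recovery_key=random.getrandbits(63),  # illustrative key generation
)
rec.save()
# `created` is derived from the ObjectId generation time;
# `expires` defaults to utcnow() + 15 minutes.
print(rec.created, rec.expires)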
Example #19
class Profile(Document):
    meta = {
        "collection": "noc.profiles",
        "strict": False,
        "auto_create_index": False,
        "json_collection": "sa.profiles",
        "json_unique_fields": ["name"]
    }
    name = StringField(unique=True)
    description = StringField(required=False)
    # Global ID
    uuid = UUIDField(binary=True)
    # Object id in BI
    bi_id = LongField(unique=True)

    _id_cache = cachetools.TTLCache(1000, ttl=60)
    _bi_id_cache = cachetools.TTLCache(1000, ttl=60)
    _name_cache = cachetools.TTLCache(1000, ttl=60)

    def __unicode__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return Profile.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        return Profile.objects.filter(bi_id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_name_cache"),
                             lock=lambda _: id_lock)
    def get_by_name(cls, name):
        return Profile.objects.filter(name=name).first()

    def to_json(self):
        return to_json(
            {
                "$collection": self._meta["json_collection"],
                "name": self.name,
                "uuid": self.uuid,
                "description": self.description
            },
            order=["name", "uuid", "description"])

    def get_json_path(self):
        vendor, soft = self.name.split(".")
        return os.path.join(vendor, "%s.json" % soft)

    def get_profile(self):
        return loader.get_profile(self.name)()

    @property
    def is_generic(self):
        return self.name == GENERIC_PROFILE

    @classmethod
    def get_generic_profile_id(cls):
        if not hasattr(cls, "_generic_profile_id"):
            cls._generic_profile_id = Profile.objects.filter(
                name=GENERIC_PROFILE).first().id
        return cls._generic_profile_id
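Profile names follow a "<Vendor>.<OS>" convention, so get_json_path maps a profile to a per-vendor JSON file. Illustrative values below (the profile name is only an example):

p = Profile(name="Cisco.IOS")
p.get_json_path()  # -> "Cisco/IOS.json" (POSIX path separator)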
Example #20
class MetricType(Document):
    meta = {
        "collection": "noc.metrictypes",
        "strict": False,
        "auto_create_index": False,
        "json_collection": "pm.metrictypes",
        "json_depends_on": [
            "pm.metricscopes"
        ],
        "json_unique_fields": ["name"]
    }

    # Metric type name, e.g. Interface | Load | In
    name = StringField(unique=True)
    # Global ID
    uuid = UUIDField(binary=True)
    # Metric scope reference
    scope = PlainReferenceField(MetricScope)
    # Database field name
    field_name = StringField()
    # Database field type
    field_type = StringField(
        choices=[
            ("UInt8", "UInt8"),
            ("Int8", "Int8"),
            ("UInt16", "UInt16"),
            ("Int16", "Int16"),
            ("UInt32", "UInt32"),
            ("Int32", "Int32"),
            ("UInt64", "UInt64"),
            ("Int64", "Int64"),
            ("Float32", "Float32"),
            ("Float64", "Float64"),
            ("String", "String")
        ]
    )
    # Text description
    description = StringField(required=False)
    # Measure name, like 'kbit/s'
    # Compatible to Grafana
    measure = StringField()
    # Optional required capability
    required_capability = PlainReferenceField(Capability)
    # Object id in BI, used for counter context hashing
    bi_id = LongField(unique=True)
    #
    category = ObjectIdField()

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _name_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __unicode__(self):
        return self.name

    @property
    def json_data(self):
        r = {
            "name": self.name,
            "$collection": self._meta["json_collection"],
            "uuid": self.uuid,
            "scope__name": self.scope.name,
            "field_name": self.field_name,
            "field_type": self.field_type,
            "description": self.description,
            "measure": self.measure
        }
        if self.required_capability:
            r["required_capability__name"] = self.required_capability.name
        return r

    def to_json(self):
        return to_json(
            self.json_data,
            order=[
                "name", "$collection",
                "uuid", "scope__name", "field_name", "field_type",
                "description", "measure", "vector_tag"])

    def get_json_path(self):
        p = [quote_safe_path(n.strip()) for n in self.name.split("|")]
        return os.path.join(*p) + ".json"

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"), lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return MetricType.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_name_cache"), lock=lambda _: id_lock)
    def get_by_name(cls, name):
        return MetricType.objects.filter(name=name).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"), lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        return MetricType.objects.filter(bi_id=id).first()

    def on_save(self):
        call_later(
            "noc.core.clickhouse.ensure.ensure_all_pm_scopes",
            scheduler="scheduler",
            delay=30
        )

    def clean_value(self, value):
        return getattr(self, "clean_%s" % self.field_type)(value)

    @staticmethod
    def clean_UInt8(value):
        try:
            v = int(value)
        except ValueError:
            raise
        if v < 0 or v > 255:
            raise ValueError("Value out of range")
        return v

    @staticmethod
    def clean_Int8(value):
        try:
            v = int(value)
        except ValueError:
            raise
        if v < -127 or v > 127:
            raise ValueError("Value out of range")
        return v

    @staticmethod
    def clean_UInt16(value):
        try:
            v = int(value)
        except ValueError:
            raise
        if v < 0 or v > 65535:
            raise ValueError("Value out of range")
        return v

    @staticmethod
    def clean_Int16(value):
        try:
            v = int(value)
        except ValueError:
            raise
        if v < -32767 or v > 32767:
            raise ValueError("Value out of range")
        return v

    @staticmethod
    def clean_UInt32(value):
        try:
            v = int(value)
        except ValueError:
            raise
        if v < 0 or v > 4294967295:
            raise ValueError("Value out of range")
        return v

    @staticmethod
    def clean_Int32(value):
        try:
            v = int(value)
        except ValueError:
            raise
        if v < -2147483647 or v > 2147483647:
            raise ValueError("Value out of range")
        return v

    @staticmethod
    def clean_UInt64(value):
        try:
            v = int(value)
        except ValueError:
            raise
        if v < 0 or v > 18446744073709551615:
            raise ValueError("Value out of range")
        return v

    @staticmethod
    def clean_Int64(value):
        try:
            v = int(value)
        except ValueError:
            raise
        if v < -9223372036854775807 or v > 9223372036854775807:
            raise ValueError("Value out of range")
        return v

    @staticmethod
    def clean_Float32(value):
        return float(value)

    @staticmethod
    def clean_Float64(value):
        return float(value)

    @staticmethod
    def clean_String(value):
        return str(value)
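clean_value dispatches on field_type to the matching clean_<type> static method, rejecting out-of-range values before they reach the database. A brief sketch with an illustrative, unsaved instance:

mt = MetricType(name="Interface | Load | In", field_type="UInt8")
assert mt.clean_value("42") == 42
# mt.clean_value("300") would raise ValueError("Value out of range")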
Example #21
class Transition(Document):
    meta = {
        "collection": "transitions",
        "indexes": ["from_state", "to_state"],
        "strict": False,
        "auto_create_index": False,
    }
    workflow = PlainReferenceField(Workflow)
    from_state = PlainReferenceField(State)
    to_state = PlainReferenceField(State)
    is_active = BooleanField(default=True)
    # Event name
    # Some predefined names exists:
    # seen -- discovery confirms resource usage
    # expired - TTL expired
    event = StringField()
    # Text label
    label = StringField()
    # Arbitrary description
    description = StringField()
    # Enable manual transition
    enable_manual = BooleanField(default=True)
    # Handler to be called on starting transitions
    # Any exception aborts the transition
    handlers = ListField(StringField())
    # Visual vertices
    vertices = ListField(EmbeddedDocumentField(TransitionVertex))
    # Integration with external NRI and TT systems
    # Reference to the remote system the object was imported from
    remote_system = ReferenceField(RemoteSystem)
    # Object id in remote system
    remote_id = StringField()
    # Object id in BI
    bi_id = LongField(unique=True)

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __str__(self):
        return "%s: %s -> %s [%s]" % (
            self.workflow.name,
            self.from_state.name,
            self.to_state.name,
            self.label,
        )

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return Transition.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        return Transition.objects.filter(bi_id=id).first()

    def clean(self):
        if not self.from_state or not self.to_state:
            raise ValueError("Missed state")
        if self.from_state.workflow != self.to_state.workflow:
            raise ValueError("Workflow mismatch")
        self.workflow = self.from_state.workflow

    def on_transition(self, obj):
        """
        Called during transition
        :param obj:
        :return:
        """
        if self.handlers:
            logger.debug("[%s|%s|%s] Running transition handlers", obj,
                         obj.state.name, self.label)
            for hn in self.handlers:
                try:
                    h = get_handler(str(hn))
                except ImportError as e:
                    logger.error("Error import handler: %s" % e)
                    h = None
                if h:
                    logger.debug("[%s|%s|%s] Running %s", obj, obj.state.name,
                                 self.label, hn)
                    h(obj)  # @todo: Catch exceptions
                else:
                    logger.debug(
                        "[%s|%s|%s] Invalid handler %s, skipping",
                        obj,
                        obj.state.name,
                        self.label,
                        hn,
                    )
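Each entry in handlers is a dotted path resolved with get_handler and called with the object undergoing the transition; as the comments above note, an exception aborts the transition. A minimal sketch of such a handler (the function name is an assumption; it would be referenced by its import path in Transition.handlers):

import logging

logger = logging.getLogger(__name__)


def notify_provisioned(obj):
    # `obj` is the document being moved between states; raising here aborts the transition.
    logger.info("Transition handler fired for %s (state: %s)", obj, obj.state.name)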
Example #22
class NetworkSegment(Document):
    meta = {
        "collection": "noc.networksegments",
        "strict": False,
        "auto_create_index": False,
        "indexes": ["parent", "sibling", "adm_domains"],
    }

    name = StringField(unique=True)
    parent = ReferenceField("self", required=False)
    profile = ReferenceField(NetworkSegmentProfile, required=True)
    description = StringField(required=False)
    # Management VLAN processing order
    # * d - disable management vlan
    # * e - enable management vlan and get from management_vlan field
    # * p - use profile settings
    management_vlan_policy = StringField(
        choices=[("d", "Disable"), ("p", "Profile"), ("e", "Enable")], default="p"
    )
    management_vlan = IntField(required=False, min_value=1, max_value=4095)
    # MVR VLAN processing order
    # * d - disable multicast vlan
    # * e - enable multicast vlan and get from multicast_vlan field
    # * p - use profile settings
    multicast_vlan_policy = StringField(
        choices=[("d", "Disable"), ("p", "Profile"), ("e", "Enable")], default="p"
    )
    multicast_vlan = IntField(required=False, min_value=1, max_value=4095)

    settings = DictField(default=lambda: {}.copy())
    labels = ListField(StringField())
    # Selectors for fake segments
    # Transition only, should not be used
    selector = ForeignKeyField(ManagedObjectSelector)
    # Sibling segment, if part of larger structure with
    # horizontal links
    sibling = ReferenceField("self")
    # True if segment has alternative paths
    is_redundant = BooleanField(default=False)
    # True if segment is redundant and redundancy
    # currently broken
    lost_redundancy = BooleanField(default=False)
    # VLAN namespace demarcation
    # * False - share namespace with parent VLAN
    # * True - split own namespace
    vlan_border = BooleanField(default=True)
    # VLAN translation policy when marking border
    # (vlan_border=True)
    # Dynamically recalculated and placed to VLAN.translation_rule
    # and VLAN.parent
    vlan_translation = ListField(EmbeddedDocumentField(VLANTranslation))
    # Share allocation resources with another segments
    allocation_group = PlainReferenceField(AllocationGroup)
    # Provided L2 MTU
    l2_mtu = IntField(default=1504)
    # Administrative domains which have access to segment
    # Sum of all administrative domains
    adm_domains = ListField(IntField())
    # Collapse object's downlinks on network map
    # when count is above the threshold
    max_shown_downlinks = IntField(default=1000)
    # Limit of objects shown on the network map before the "Too many objects" error is raised
    max_objects = IntField(default=300)
    # Horizontal transit policy
    horizontal_transit_policy = StringField(
        choices=[("E", "Always Enable"), ("C", "Calculate"), ("D", "Disable"), ("P", "Profile")],
        default="P",
    )
    # Horizontal transit settings
    # i.e. allow traffic flow not only from parent-to-children and
    # children-to-children, but parent-to-parent and parent-to-neighbors
    # Calculated automatically during topology research
    enable_horizontal_transit = BooleanField(default=False)
    # Objects, services and subscribers belonging to segment directly
    direct_objects = ListField(EmbeddedDocumentField(ObjectSummaryItem))
    direct_services = ListField(EmbeddedDocumentField(SummaryItem))
    direct_subscribers = ListField(EmbeddedDocumentField(SummaryItem))
    # Objects, services and subscribers belonging to all nested segments
    total_objects = ListField(EmbeddedDocumentField(ObjectSummaryItem))
    total_services = ListField(EmbeddedDocumentField(SummaryItem))
    total_subscribers = ListField(EmbeddedDocumentField(SummaryItem))
    # Integration with external NRI and TT systems
    # Reference to the remote system the object was imported from
    remote_system = ReferenceField(RemoteSystem)
    # Object id in remote system
    remote_id = StringField()
    # Object id in BI
    bi_id = LongField(unique=True)

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _border_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _vlan_domains_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _vlan_domains_mo_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    DISCOVERY_JOB = "noc.services.discovery.jobs.segment.job.SegmentDiscoveryJob"

    def __str__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"), lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return NetworkSegment.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"), lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        return NetworkSegment.objects.filter(bi_id=id).first()

    @classmethod
    def _reset_caches(cls, id):
        try:
            del cls._id_cache[
                str(id),  # Tuple
            ]
        except KeyError:
            pass

    @cachetools.cached(_path_cache, key=lambda x: str(x.id), lock=id_lock)
    def get_path(self):
        """
        Returns list of parent segment ids
        :return:
        """
        if self.parent:
            return self.parent.get_path() + [self.id]
        return [self.id]

    def clean(self):
        if self.id and "parent" in self._changed_fields and self.has_loop:
            raise ValidationError("Creating segments loop")
        if self.horizontal_transit_policy == "E":
            self.enable_horizontal_transit = True
        elif self.horizontal_transit_policy == "D":
            self.enable_horizontal_transit = False
        elif self.profile and self.horizontal_transit_policy == "P":
            if self.profile.horizontal_transit_policy == "E":
                self.enable_horizontal_transit = True
            elif self.profile.horizontal_transit_policy == "D":
                self.enable_horizontal_transit = False
        super().clean()

    @property
    def effective_settings(self):
        """
        Returns dict with effective settings
        """
        if hasattr(self, "_es"):
            return self._es
        # Build full parent stack
        sstack = [self.settings or {}]
        p = self.parent
        while p:
            sstack = [p.settings or {}] + sstack
            p = p.parent
        # Get effective settings
        es = {}
        for s in sstack:
            for k in s:
                v = s[k]
                if v:
                    # Override parent settings
                    es[k] = v
                elif k in es:
                    # Ignore parent settings
                    del es[k]
        self._es = es
        return es

    @property
    def has_loop(self):
        """
        Check if object creates loop
        """
        if not self.id:
            return False
        p = self.parent
        while p:
            if p.id == self.id:
                return True
            p = p.parent
        return False

    @property
    def managed_objects(self):
        from noc.sa.models.managedobject import ManagedObject

        if self.selector:
            return self.selector.managed_objects
        else:
            siblings = self.get_siblings()
            if len(siblings) == 1:
                q = {"segment": str(siblings.pop().id)}
            else:
                q = {"segment__in": [str(s.id) for s in siblings]}
            return ManagedObject.objects.filter(**q)

    def get_siblings(self, seen=None):
        seen = seen or set()
        ss = {self}
        seen |= ss
        if self.sibling and self.sibling not in seen:
            ss |= self.sibling.get_siblings(seen)
        seen |= ss
        for s in NetworkSegment.objects.filter(sibling=self):
            ss |= s.get_siblings(seen)
        return ss

    def run_discovery(self):
        """
        Run discovery on whole segment
        """
        for o in self.managed_objects:
            if o.is_managed:
                o.run_discovery()

    @property
    def has_children(self):
        return bool(NetworkSegment.objects.filter(parent=self.id).only("id").first())

    def set_redundancy(self, status):
        """
        Change interface redundancy status
        :param status:
        :return:
        """
        siblings = list(self.get_siblings())
        filter = {"status": {"$ne": status}}
        if len(siblings) == 1:
            filter["_id"] = self.id
        else:
            filter["_id"] = {"$in": [s.id for s in siblings]}

        set_op = {"is_redundant": status}
        if not status:
            set_op["lost_redundancy"] = False
        NetworkSegment._get_collection().update_many(filter, {"$set": set_op})

    def set_lost_redundancy(self, status):
        NetworkSegment._get_collection().update(
            {"_id": self.id}, {"$set": {"lost_redundancy": bool(status)}}
        )

    def get_direct_summary(self):
        objects = {
            d["object_profile"]: d["count"]
            for d in self.managed_objects.values("object_profile")
            .annotate(count=Count("id"))
            .order_by("count")
        }
        # Direct services
        mo_ids = self.managed_objects.values_list("id", flat=True)
        services, subscribers = ServiceSummary.get_direct_summary(mo_ids)
        return services, subscribers, objects

    def get_summary(self):
        def to_list(v):
            return [{"profile": k, "summary": v[k]} for k in sorted(v)]

        def update_dict(d1, d2):
            for kk in d2:
                if kk in d1:
                    d1[kk] += d2[kk]
                else:
                    d1[kk] = d2[kk]

        services, subscribers, objects = self.get_direct_summary()
        r = {
            "direct_services": to_list(services),
            "direct_subscribers": to_list(subscribers),
            "direct_objects": to_list(objects),
        }
        # map(lambda x: update_dict(*x), zip([services, subscribers, objects], self.get_total_summary()))
        [
            update_dict(k, v)
            for k, v in zip([services, subscribers, objects], self.get_total_summary())
        ]
        r["total_services"] = to_list(services)
        r["total_subscribers"] = to_list(subscribers)
        r["total_objects"] = to_list(objects)
        return r

    @classmethod
    def update_summary(cls, network_segment):
        """
        Update summaries
        :return:
        """
        if not hasattr(network_segment, "id"):
            network_segment = NetworkSegment.get_by_id(network_segment)
        path = network_segment.get_path()
        # Update upwards
        path.reverse()
        for ns in sorted(
            NetworkSegment.objects.filter(id__in=path), key=lambda x: path.index(x.id)
        ):
            r = ns.get_summary()
            NetworkSegment._get_collection().update_one({"_id": ns.id}, {"$set": r}, upsert=True)

    def update_access(self):
        from noc.sa.models.administrativedomain import AdministrativeDomain

        # Get all own administrative domains
        adm_domains = set(
            d["administrative_domain"]
            for d in self.managed_objects.values("administrative_domain")
            .annotate(count=Count("id"))
            .order_by("count")
        )
        p = set()
        for a in adm_domains:
            a = AdministrativeDomain.get_by_id(a)
            p |= set(a.get_path())
        adm_domains |= p
        # Merge with children's administrative domains
        for s in NetworkSegment.objects.filter(parent=self.id).only("adm_domains"):
            adm_domains |= set(s.adm_domains or [])
        # Check for changes
        if set(self.adm_domains) != adm_domains:
            self.adm_domains = sorted(adm_domains)
            self.save()
            # Propagate to parents
            if self.parent:
                self.parent.update_access()

    def update_uplinks(self):
        # if self.profile.is_persistent:
        call_later("noc.core.topology.segment.update_uplinks", 60, segment_id=self.id)

    def get_horizontal_transit_policy(self):
        if self.horizontal_transit_policy in ("E", "C"):
            return self.horizontal_transit_policy
        elif self.horizontal_transit_policy == "P" and self.profile:
            return self.profile.horizontal_transit_policy
        else:
            return "D"

    def get_management_vlan(self):
        """
        Returns Management VLAN for segment
        :return: vlan (integer) or None
        """
        if self.management_vlan_policy == "e":
            return self.management_vlan or None
        elif self.management_vlan_policy == "p":
            return self.profile.management_vlan or None
        else:
            return None

    def get_multicast_vlan(self):
        """
        Returns Multicast VLAN for segment
        :return: vlan (integer) or None
        """
        if self.multicast_vlan_policy == "e":
            return self.multicast_vlan or None
        elif self.multicast_vlan_policy == "p":
            return self.profile.multicast_vlan or None
        else:
            return None

    def get_nested_ids(self):
        """
        Return id of this and all nested segments
        :return:
        """
        # $graphLookup hits 100Mb memory limit. Do not use it
        seen = {self.id}
        wave = {self.id}
        max_level = 10
        coll = NetworkSegment._get_collection()
        for _ in range(max_level):
            # Get next wave
            wave = (
                set(d["_id"] for d in coll.find({"parent": {"$in": list(wave)}}, {"_id": 1})) - seen
            )
            if not wave:
                break
            seen |= wave
        return list(seen)

    def ensure_discovery_jobs(self):
        if self.profile and self.profile.discovery_interval > 0:
            Job.submit("scheduler", self.DISCOVERY_JOB, key=self.id, keep_ts=True)
        else:
            Job.remove("scheduler", self.DISCOVERY_JOB, key=self.id)

    def on_save(self):
        if hasattr(self, "_changed_fields") and "profile" in self._changed_fields:
            self.ensure_discovery_jobs()
        if (
            hasattr(self, "_changed_fields")
            and self.vlan_border
            and "vlan_translation" in self._changed_fields
        ):
            from noc.vc.models.vlan import VLAN

            for vlan in VLAN.objects.filter(segment=self.id):
                vlan.refresh_translation()
        if hasattr(self, "_changed_fields") and "parent" in self._changed_fields:
            self.update_access()
            self.update_links()
            if self.parent:
                self.parent.update_links()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_border_cache"), lock=lambda _: id_lock)
    def get_border_segment(cls, segment):
        """
        Proceed up until vlan border
        :return:
        """
        current = segment
        while current:
            if current.vlan_border or not current.parent:
                return current
            current = current.parent

    @classmethod
    def iter_vlan_domain_segments(cls, segment):
        """
        Get all segments related to same VLAN domains
        :param segment:
        :return:
        """

        def iter_segments(ps):
            # Return segment
            yield ps
            # Iterate and recurse over all non vlan-border children
            for s in NetworkSegment.objects.filter(parent=ps.id):
                if s.vlan_border:
                    continue
                yield from iter_segments(s)

        # Get domain root
        root = cls.get_border_segment(segment)
        # Yield all children segments
        yield from iter_segments(root)

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_vlan_domains_cache"), lock=lambda _: id_lock)
    def get_vlan_domain_segments(cls, segment):
        """
        Get list of all segments related to same VLAN domains
        :param segment:
        :return:
        """
        return list(cls.iter_vlan_domain_segments(segment))

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_vlan_domains_mo_cache"), lock=lambda _: id_lock)
    def get_vlan_domain_object_ids(cls, segment):
        """
        Get list of all managed object ids belonging to
        same VLAN domain
        :param segment:
        :return:
        """
        from noc.sa.models.managedobject import ManagedObject

        return ManagedObject.objects.filter(
            segment__in=[s.id for s in cls.get_vlan_domain_segments(segment)]
        ).values_list("id", flat=True)

    def iter_links(self):
        yield from Link.objects.filter(linked_segments__in=[self.id])

    def update_links(self):
        # @todo intersect link only
        for link in self.iter_links():
            link.save()

    def get_total_summary(self, ids=None, parent_id=None):
        """

        :param ids: Network segment ID list
        :param parent_id: Parent ID filter value
        :return:
        """
        services = {}
        subscribers = {}
        objects = {}
        pipeline = []
        # Exclude segment sibling set (sibling segments as one)
        match = {"sibling": None}
        if ids:
            # Filter by network segment
            match["_id"] = {"$in": ids}
        else:
            match["parent"] = parent_id or self.id
        if match:
            pipeline += [{"$match": match}]
        # Mark service and profile with type field
        pipeline += [
            {
                "$project": {
                    "_id": 0,
                    "service": {
                        "$map": {
                            "input": "$total_services",
                            "as": "svc",
                            "in": {
                                "type": "svc",
                                "profile": "$$svc.profile",
                                "summary": "$$svc.summary",
                            },
                        }
                    },
                    "subscriber": {
                        "$map": {
                            "input": "$total_subscribers",
                            "as": "sub",
                            "in": {
                                "type": "sub",
                                "profile": "$$sub.profile",
                                "summary": "$$sub.summary",
                            },
                        }
                    },
                    "object": {
                        "$map": {
                            "input": "$total_objects",
                            "as": "obj",
                            "in": {
                                "type": "obj",
                                "profile": "$$obj.profile",
                                "summary": "$$obj.summary",
                            },
                        }
                    },
                }
            },
            # Concatenate services and profiles
            {"$project": {"summary": {"$concatArrays": ["$service", "$subscriber", "$object"]}}},
            # Unwind *summary* array into independent records
            {"$unwind": "$summary"},
            # Group by (type, profile)
            {
                "$group": {
                    "_id": {"type": "$summary.type", "profile": "$summary.profile"},
                    "summary": {"$sum": "$summary.summary"},
                }
            },
        ]  # noqa
        try:
            for doc in NetworkSegment._get_collection().aggregate(pipeline):
                profile = doc["_id"]["profile"]
                if doc["_id"]["type"] == "svc":
                    services[profile] = services.get(profile, 0) + doc["summary"]
                elif doc["_id"]["type"] == "sub":
                    subscribers[profile] = subscribers.get(profile, 0) + doc["summary"]
                elif doc["_id"]["type"] == "obj":
                    objects[profile] = objects.get(profile, 0) + doc["summary"]
        except OperationFailure:
            # Aggregation pipeline is not supported by MongoDB older than 3.4
            pass
        return services, subscribers, objects
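A minimal usage sketch for the border lookup and the summary aggregation above, assuming a populated NOC/MongoDB instance; the segment name is hypothetical:

# Usage sketch (hypothetical segment name)
segment = NetworkSegment.objects.filter(name="Core").first()
if segment:
    border = NetworkSegment.get_border_segment(segment)
    services, subscribers, objects = segment.get_total_summary()
    print(border.name, services, subscribers, objects)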
Exemple #23
0
class Firmware(Document):
    meta = {
        "collection": "noc.firmwares",
        "strict": False,
        "auto_create_index": False,
        "json_collection": "inv.firmwares",
        "json_depends_on": ["sa.profile"],
        "json_unique_fields": ["profile", "vendor", "version"],
        "indexes": [{
            "fields": ["profile", "vendor", "version"],
            "unique": True
        }],
    }
    # Global ID
    uuid = UUIDField(binary=True)
    #
    profile = PlainReferenceField(Profile)
    vendor = PlainReferenceField(Vendor)
    version = StringField()
    description = StringField()
    download_url = StringField()
    # Full name, combined from profile and version
    full_name = StringField()
    # Object id in BI
    bi_id = LongField(unique=True)

    _id_cache = cachetools.TTLCache(1000, ttl=60)
    _bi_id_cache = cachetools.TTLCache(1000, ttl=60)
    _ensure_cache = cachetools.TTLCache(1000, ttl=60)

    def __str__(self):
        return self.full_name if self.full_name else self.version

    def clean(self):
        self.full_name = "%s %s" % (self.profile.name, self.version)
        super().clean()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return Firmware.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        return Firmware.objects.filter(bi_id=id).first()

    def to_json(self):
        return to_json(
            {
                "$collection": self._meta["json_collection"],
                "profile__name": self.profile.name,
                "vendor__code": self.vendor.code[0],
                "version": self.version,
                "uuid": self.uuid,
            },
            order=["profile__name", "vendor__code", "version", "uuid"],
        )

    def get_json_path(self):
        return os.path.join(self.vendor.code[0], self.profile.name,
                            "%s.json" % self.version.replace(os.sep, "_"))

    @classmethod
    @cachetools.cachedmethod(
        operator.attrgetter("_ensure_cache"),
        key=lambda p, v, vv: "%s-%s-%s" % (p.id, v.id, vv),
        lock=lambda _: id_lock,
    )
    def ensure_firmware(cls, profile, vendor, version):
        """
        Get or create firmware by profile, vendor and version
        :param profile:
        :param vendor:
        :param version:
        :return:
        """
        while True:
            firmware = Firmware.objects.filter(profile=profile.id,
                                               vendor=vendor.id,
                                               version=version).first()
            if firmware:
                return firmware
            try:
                firmware = Firmware(profile=profile,
                                    vendor=vendor,
                                    version=version,
                                    uuid=uuid.uuid4())
                firmware.save()
                return firmware
            except NotUniqueError:
                pass  # Already created by concurrent process, reread
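A get-or-create sketch for ensure_firmware(), assuming a populated NOC database; the profile name, vendor code and version string are hypothetical:

# Usage sketch (hypothetical profile/vendor/version values)
profile = Profile.objects.filter(name="Cisco.IOS").first()
vendor = Vendor.ensure_vendor("CISCO")
if profile and vendor:
    fw = Firmware.ensure_firmware(profile, vendor, "15.2(4)M7")
    print(fw.full_name)  # "<profile name> <version>", combined in clean()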
Exemple #24
0
class DiscoveryID(Document):
    """
    Managed Object's discovery identity
    """
    meta = {
        "collection": "noc.inv.discovery_id",
        "strict": False,
        "auto_create_index": False,
        "indexes": ["object", "hostname", "udld_id", "macs"]
    }
    object = ForeignKeyField(ManagedObject)
    chassis_mac = ListField(EmbeddedDocumentField(MACRange))
    hostname = StringField()
    router_id = StringField()
    udld_id = StringField()  # UDLD local identifier
    #
    macs = ListField(LongField())

    _mac_cache = cachetools.TTLCache(maxsize=10000, ttl=60)
    _udld_cache = cachetools.TTLCache(maxsize=1000, ttl=60)

    def __unicode__(self):
        return self.object.name

    @staticmethod
    def _macs_as_ints(ranges=None, additional=None):
        """
        Get all MAC addresses within ranges as integers
        :param ranges: list of dicts {first_chassis_mac: ..., last_chassis_mac: ...}
        :param additional: Optional list of additional macs
        :return: List of integers
        """
        ranges = ranges or []
        additional = additional or []
        # Apply ranges
        macs = set()
        for r in ranges:
            if not r:
                continue
            first = MAC(r["first_chassis_mac"])
            last = MAC(r["last_chassis_mac"])
            macs.update(range(int(first), int(last) + 1))
        # Append additional macs
        macs.update(int(MAC(m)) for m in additional)
        return sorted(macs)

    @staticmethod
    def _macs_to_ranges(macs):
        """
        Convert list of macs (as integers) to MACRange
        :param macs: List of integer
        :return: List of MACRange
        """
        ranges = []
        for mi in macs:
            if ranges and mi - ranges[-1][1] == 1:
                # Extend last range
                ranges[-1][1] = mi
            else:
                # New range
                ranges += [[mi, mi]]
        return [
            MACRange(first_mac=str(MAC(r[0])), last_mac=str(MAC(r[1])))
            for r in ranges
        ]

    @classmethod
    def submit(cls,
               object,
               chassis_mac=None,
               hostname=None,
               router_id=None,
               additional_macs=None):
        # Process ranges
        macs = cls._macs_as_ints(chassis_mac, additional_macs)
        ranges = cls._macs_to_ranges(macs)
        # Update database
        o = cls.objects.filter(object=object.id).first()
        if o:
            old_macs = set(m.first_mac for m in o.chassis_mac)
            o.chassis_mac = ranges
            o.hostname = hostname
            o.router_id = router_id
            old_macs -= set(m.first_mac for m in o.chassis_mac)
            if old_macs:
                cache.delete_many(["discoveryid-mac-%s" % m for m in old_macs])
            # MAC index
            o.macs = macs
            o.save()
        else:
            cls(object=object,
                chassis_mac=ranges,
                hostname=hostname,
                router_id=router_id,
                macs=macs).save()

    @classmethod
    @cachedmethod(operator.attrgetter("_mac_cache"),
                  key="discoveryid-mac-%s",
                  lock=lambda _: mac_lock)
    def get_by_mac(cls, mac):
        return cls._get_collection().find_one({"macs": int(MAC(mac))}, {
            "_id": 0,
            "object": 1
        })

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_udld_cache"),
                             lock=lambda _: mac_lock)
    def get_by_udld_id(cls, device_id):
        return cls._get_collection().find_one({"udld_id": device_id}, {
            "_id": 0,
            "object": 1
        })

    @classmethod
    def find_object(cls, mac=None, ipv4_address=None):
        """
        Find managed object
        :param mac:
        :param ipv4_address:
        :param cls:
        :return: Managed object instance or None
        """
        def has_ip(ip, addresses):
            x = ip + "/"
            for a in addresses:
                if a.startswith(x):
                    return True
            return False

        # Find by mac
        if mac:
            metrics["discoveryid_mac_requests"] += 1
            r = cls.get_by_mac(mac)
            if r:
                return ManagedObject.get_by_id(r["object"])
        if ipv4_address:
            metrics["discoveryid_ip_requests"] += 1
            # Try router_id
            d = DiscoveryID.objects.filter(router_id=ipv4_address).first()
            if d:
                metrics["discoveryid_ip_routerid"] += 1
                return d.object
            # Fallback to interface addresses
            o = set(
                d["managed_object"]
                for d in SubInterface._get_collection().with_options(
                    read_preference=ReadPreference.SECONDARY_PREFERRED).find(
                        {
                            "ipv4_addresses": {
                                "$gt": ipv4_address + "/",
                                "$lt": ipv4_address + "/99"
                            }
                        }, {
                            "_id": 0,
                            "managed_object": 1,
                            "ipv4_addresses": 1
                        }) if has_ip(ipv4_address, d["ipv4_addresses"]))
            if len(o) == 1:
                metrics["discoveryid_ip_interface"] += 1
                return ManagedObject.get_by_id(list(o)[0])
            metrics["discoveryid_ip_failed"] += 1
        return None

    @classmethod
    def macs_for_object(cls, object):
        """
        Get MAC addresses for object
        :param cls:
        :param object:
        :return: list of (first_mac, last_mac)
        """
        # Get discovered chassis id range
        o = DiscoveryID.objects.filter(object=object.id).first()
        if o and o.chassis_mac:
            c_macs = [(r.first_mac, r.last_mac) for r in o.chassis_mac]
        else:
            c_macs = []
        # Get interface macs
        i_macs = set(i.mac for i in Interface.objects.filter(
            managed_object=object.id, mac__exists=True).only("mac") if i.mac)
        # Enrich discovered macs with additional interface's ones
        c_macs += [(m, m) for m in i_macs
                   if not any(1 for f, t in c_macs if f <= m <= t)]
        return c_macs

    @classmethod
    def macs_for_objects(cls, objects_ids):
        """
        Get MAC addresses for objects
        :param cls:
        :param objects_ids: List of Managed Object instance IDs
        :type: list
        :return: Dictionary mapping MAC (as integer) to managed object
        :rtype: dict
        """
        if not objects_ids:
            return None
        if isinstance(objects_ids, list):
            objects = objects_ids
        else:
            objects = list(objects_ids)

        os = cls.objects.filter(object__in=objects)
        if not os:
            return None
        # Discovered chassis id range
        c_macs = {
            int(did[0][0]): did[1]
            for did in os.scalar("macs", "object") if did[0]
        }
        # c_macs = [r.macs for r in os]
        # Other interface macs
        i_macs = {
            int(MAC(i[0])): i[1]
            for i in Interface.objects.filter(managed_object__in=objects,
                                              mac__exists=True).scalar(
                                                  "mac", "managed_object")
            if i[0]
        }
        # Other subinterface macs (actual for DSLAM)
        si_macs = {
            int(MAC(i[0])): i[1]
            for i in SubInterface.objects.filter(managed_object__in=objects,
                                                 mac__exists=True).scalar(
                                                     "mac", "managed_object")
            if i[0]
        }
        c_macs.update(i_macs)
        c_macs.update(si_macs)

        return c_macs

    def on_delete(self):
        # Reset cache
        macs = set(m.first_mac for m in self.chassis_mac)
        if macs:
            cache.delete_many(["discoveryid-mac-%s" % m for m in macs])

    @classmethod
    def clean_for_object(cls, mo):
        if hasattr(mo, "id"):
            mo = mo.id
        for d in DiscoveryID.objects.filter(object=mo):
            d.delete()

    @classmethod
    def find_objects(cls, macs):
        """
        Find objects for list of macs
        :param macs: List of MAC addresses
        :return: dict of MAC -> ManagedObject for resolved MACs
        """
        r = {}
        if not macs:
            return r
        # Build list of macs to search
        mlist = sorted(int(MAC(m)) for m in macs)
        # Search for macs
        obj_ranges = {}  # (first, last) -> mo
        for d in DiscoveryID._get_collection().find({"macs": {
                "$in": mlist
        }}, {
                "_id": 0,
                "object": 1,
                "chassis_mac": 1
        }):
            mo = ManagedObject.get_by_id(d["object"])
            if mo:
                for dd in d.get("chassis_mac", []):
                    obj_ranges[int(MAC(dd["first_mac"])),
                               int(MAC(dd["last_mac"]))] = mo
        # Resolve ranges
        start = 0
        ll = len(mlist)
        for s, e in sorted(obj_ranges):
            mo = obj_ranges[s, e]
            start = bisect.bisect_left(mlist, s, start, ll)
            while start < ll and s <= mlist[start] <= e:
                r[MAC(mlist[start])] = mo
                start += 1
        return r

    @classmethod
    def update_udld_id(cls, object, local_id):
        """
        Update UDLD id if necessary
        :param object: Object for set
        :param local_id: Local UDLD id
        :return:
        """
        DiscoveryID._get_collection().update_one(
            {"object": object.id}, {"$set": {
                "udld_id": local_id
            }},
            upsert=True)
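A lookup sketch for find_object(), with hypothetical MAC and IPv4 values; the method falls back from the MAC index to router_id and finally to interface addresses, as implemented above:

# Usage sketch (hypothetical MAC / IPv4 values)
mo = DiscoveryID.find_object(mac="00:1B:21:AA:BB:CC")
if mo is None:
    mo = DiscoveryID.find_object(ipv4_address="10.0.0.1")
print(mo)  # ManagedObject instance or None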
Exemple #25
0
class NetworkSegmentProfile(Document):
    meta = {
        "collection": "noc.networksegmentprofiles",
        "strict": False,
        "auto_create_index": False
    }

    name = StringField(unique=True)
    description = StringField(required=False)
    # segment discovery interval
    discovery_interval = IntField(default=86400)
    # Segment style
    style = ForeignKeyField(Style, required=False)
    # Restrict MAC discovery to management vlan
    mac_restrict_to_management_vlan = BooleanField(default=False)
    # Management vlan, to restrict MAC search for MAC topology discovery
    management_vlan = IntField(required=False, min_value=1, max_value=4095)
    # MVR VLAN
    multicast_vlan = IntField(required=False, min_value=1, max_value=4095)
    # Detect lost redundancy condition
    enable_lost_redundancy = BooleanField(default=False)
    # Horizontal transit policy
    horizontal_transit_policy = StringField(
        choices=[
            ("E", "Always Enable"),
            ("C", "Calculate"),
            ("D", "Disable")
        ], default="D"
    )
    # Default profile for autocreated children segments
    # (i.e. during autosegmentation)
    # Copy this segment profile otherwise
    autocreated_profile = PlainReferenceField("self")
    # List of enabled topology method
    # in order of preference (most preferable first)
    topology_methods = ListField(EmbeddedDocumentField(SegmentTopologySettings))
    # Enable VLAN discovery for appropriate managed objects
    enable_vlan = BooleanField(default=False)
    # Default VLAN profile for discovered VLANs
    default_vlan_profile = PlainReferenceField("vc.VLANProfile")
    # Integration with external NRI and TT systems
    # Reference to remote system object has been imported from
    remote_system = PlainReferenceField(RemoteSystem)
    # Object id in remote system
    remote_id = StringField()
    # Object id in BI
    bi_id = LongField(unique=True)

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __unicode__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return NetworkSegmentProfile.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        return NetworkSegmentProfile.objects.filter(bi_id=id).first()

    def on_save(self):
        if hasattr(self, "_changed_fields") and "discovery_interval" in self._changed_fields:
            from .networksegment import NetworkSegment
            for ns in NetworkSegment.objects.filter(profile=self.id):
                ns.ensure_discovery_jobs()

    def get_topology_methods(self):
        ml = getattr(self, "_topology_methods", None)
        if not ml:
            ml = [m.method for m in self.topology_methods
                  if m.is_active and m.method not in ("custom", "handler")]
            self._topology_methods = ml
        return ml

    def is_preferable_method(self, m1, m2):
        """
        Returns True if m1 topology discovery method is
        preferable over m2
        """
        if m1 == m2:
            # Method can refine itself
            return True
        try:
            methods = self.get_topology_methods()
            i1 = methods.index(m1)
            i2 = methods.index(m2)
        except ValueError:
            return False
        return i1 <= i2
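A sketch of querying the method preference order, assuming a profile named "default" exists; the method names "lldp" and "mac" are hypothetical entries of topology_methods:

# Usage sketch (hypothetical profile and method names)
profile = NetworkSegmentProfile.objects.filter(name="default").first()
if profile:
    # True when "lldp" is listed before "mac" in topology_methods
    print(profile.is_preferable_method("lldp", "mac"))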
Exemple #26
0
class Sensor(Document):
    meta = {
        "collection": "sensors",
        "strict": False,
        "auto_create_index": False
    }

    profile = PlainReferenceField(SensorProfile,
                                  default=SensorProfile.get_default_profile)
    object = PlainReferenceField(Object)
    managed_object = ForeignKeyField(ManagedObject)
    local_id = StringField()
    state = PlainReferenceField(State)
    units = PlainReferenceField(MeasurementUnits)
    label = StringField()
    dashboard_label = StringField()
    # Sources that discovered the sensor
    sources = ListField(StringField(choices=list(SOURCES)))
    # Timestamp of last seen
    last_seen = DateTimeField()
    # Timestamp expired
    expired = DateTimeField()
    # Timestamp of first discovery
    first_discovered = DateTimeField(default=datetime.datetime.now)
    protocol = StringField(
        choices=["modbus_rtu", "modbus_ascii", "modbus_tcp", "snmp", "ipmi"])
    modbus_register = IntField()
    snmp_oid = StringField()
    ipmi_id = StringField()
    bi_id = LongField(unique=True)
    # Labels
    labels = ListField(StringField())
    effective_labels = ListField(StringField())

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __str__(self):
        if self.object:
            return f"{self.object}: {self.local_id}"
        elif self.managed_object:
            return f"{self.managed_object}: {self.local_id}"
        return f"{self.units}: {self.local_id}"

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return Sensor.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        return Sensor.objects.filter(bi_id=id).first()

    def seen(self, source: Optional[str] = None):
        """
        Mark sensor as seen from *source*
        """
        if source and source in SOURCES:
            self.sources = list(set(self.sources or []).union(set([source])))
            self._get_collection().update_one(
                {"_id": self.id}, {"$addToSet": {
                    "sources": source
                }})
        self.fire_event("seen")
        self.touch()  # Refresh workflow expiration

    def unseen(self, source: Optional[str] = None):
        """
        Mark sensor as no longer seen from *source*
        """
        logger.info(
            "[%s|%s] Sensor is missed '%s'",
            self.object.name if self.object else "-",
            "-",
            self.local_id,
        )
        if source and source in SOURCES:
            self.sources = list(set(self.sources or []) - set([source]))
            self._get_collection().update_one({"_id": self.id},
                                              {"$pull": {
                                                  "sources": source
                                              }})
        elif not source:
            # No source given, clear all sources
            self.sources = []
            self._get_collection().update_one({"_id": self.id},
                                              {"$set": {
                                                  "sources": []
                                              }})
        if not self.sources:
            # No sources left, mark sensor as missed
            self.fire_event("missed")
            self.touch()

    @classmethod
    def sync_object(cls, obj: Object) -> None:
        """
        Synchronize sensors with object model
        :param obj:
        :return:
        """
        # Get existing sensors
        obj_sensors: Dict[str, Sensor] = {
            s.local_id: s
            for s in Sensor.objects.filter(object=obj.id)
        }
        m_proto = [
            d.value for d in obj.get_effective_data()
            if d.interface == "modbus" and d.attr == "type"
        ] or ["rtu"]
        # Create new sensors
        for sensor in obj.model.sensors:
            if sensor.name in obj_sensors:
                obj_sensors[sensor.name].seen("objectmodel")
                del obj_sensors[sensor.name]
                continue
            #
            logger.info("[%s|%s] Creating new sensor '%s'",
                        obj.name if obj else "-", "-", sensor.name)
            s = Sensor(
                profile=SensorProfile.get_default_profile(),
                object=obj,
                local_id=sensor.name,
                units=sensor.units,
                label=sensor.description,
            )
            # Get sensor protocol
            if sensor.modbus_register:
                if not m_proto:
                    continue
                s.protocol = "modbus_%s" % m_proto[0].lower()
                s.modbus_register = sensor.modbus_register
            elif sensor.snmp_oid:
                s.protocol = "snmp"
                s.snmp_oid = sensor.snmp_oid
            else:
                logger.info(
                    "[%s|%s] Unknown sensor protocol '%s'",
                    obj.name if obj else "-",
                    "-",
                    sensor.name,
                )
            s.save()
            s.seen("objectmodel")
        # Notify missed sensors
        for s in sorted(obj_sensors):
            sensor = obj_sensors[s]
            sensor.unseen(source="objectmodel")

    @classmethod
    def can_set_label(cls, label):
        return Label.get_effective_setting(label, setting="enable_sensor")

    @classmethod
    def iter_effective_labels(cls, instance: "Sensor") -> Iterable[List[str]]:
        yield (instance.labels or []) + (instance.profile.labels or [])
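A synchronization sketch, assuming an inventory object with sensors exists under a hypothetical name; sync_object() creates missing sensors and marks absent ones as unseen:

# Usage sketch (hypothetical object name)
obj = Object.objects.filter(name="Controller-1").first()
if obj and obj.model.sensors:
    Sensor.sync_object(obj)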
Exemple #27
0
class VLAN(Document):
    meta = {
        "collection": "vlans",
        "strict": False,
        "auto_create_index": False,
        "indexes": [{
            "fields": ["segment", "vlan"],
            "unique": True
        }, "expired"],
    }

    name = StringField()
    profile = PlainReferenceField(VLANProfile)
    vlan = IntField(min_value=1, max_value=4095)
    segment = PlainReferenceField(NetworkSegment)
    description = StringField()
    state = PlainReferenceField(State)
    project = ForeignKeyField(Project)
    # Link to gathering VPN
    vpn = PlainReferenceField(VPN)
    # VxLAN VNI
    vni = IntField()
    # Translation rules when passing border
    translation_rule = StringField(choices=[
        # Rewrite tag to parent vlan's
        ("map", "map"),
        # Append parent tag as S-VLAN
        ("push", "push"),
    ])
    #
    parent = PlainReferenceField("self")
    # Automatically apply segment translation rule
    apply_translation = BooleanField(default=True)
    # Labels
    labels = ListField(StringField())
    effective_labels = ListField(StringField())
    # Integration with external NRI and TT systems
    # Reference to remote system object has been imported from
    remote_system = PlainReferenceField(RemoteSystem)
    # Object id in remote system
    remote_id = StringField()
    # Object id in BI
    bi_id = LongField(unique=True)
    # Discovery integration
    # Timestamp when object first discovered
    first_discovered = DateTimeField()
    # Timestamp when object last seen by discovery
    last_seen = DateTimeField()
    # Timestamp when send "expired" event
    expired = DateTimeField()

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __str__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return VLAN.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        return VLAN.objects.filter(bi_id=id).first()

    def clean(self):
        super().clean()
        self.segment = NetworkSegment.get_border_segment(self.segment)
        if self.translation_rule and not self.parent:
            self.translation_rule = None

    def refresh_translation(self):
        """
        Set VLAN translation according to segment settings
        :return:
        """
        if not self.apply_translation:
            return
        # Find matching rule
        for vt in self.segment.vlan_translation:
            if vt.filter.check(self.vlan):
                logger.debug(
                    "[%s|%s|%s] Matching translation rule <%s|%s|%s>",
                    self.segment.name,
                    self.name,
                    self.vlan,
                    vt.filter.expression,
                    vt.rule,
                    vt.parent_vlan.vlan,
                )
                if self.parent != vt.parent_vlan or self.translation_rule != vt.rule:
                    self.modify(parent=vt.parent_vlan,
                                translation_rule=vt.rule)
                return
        # No matching rule, reset translation
        if self.parent or self.translation_rule:
            logger.debug(
                "[%s|%s|%s] No matching translation rule, resetting",
                self.segment.name,
                self.name,
                self.vlan,
            )
            self.modify(parent=None, translation_rule=None)

    def on_save(self):
        self.refresh_translation()

    @classmethod
    def can_set_label(cls, label):
        if label.enable_vlan:
            return True
        return False
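A creation sketch, assuming a populated NOC database and hypothetical segment/profile names and ignoring side concerns such as BI id assignment; clean() pins the VLAN to its vlan-border segment and on_save() applies the segment translation rules:

# Usage sketch (hypothetical names)
seg = NetworkSegment.objects.filter(name="Access-1").first()
prof = VLANProfile.objects.filter(name="default").first()
if seg and prof:
    vlan = VLAN(name="VLAN100", vlan=100, segment=seg, profile=prof)
    vlan.save()  # clean() replaces segment with its VLAN border, then refresh_translation() runs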
Exemple #28
0
class InterfaceProfile(Document):
    """
    Interface SLA profile and settings
    """

    meta = {
        "collection": "noc.interface_profiles",
        "strict": False,
        "auto_create_index": False
    }
    name = StringField(unique=True)
    description = StringField()
    style = ForeignKeyField(Style, required=False)
    # Interface-level events processing
    link_events = StringField(
        required=True,
        choices=[
            ("I", "Ignore Events"),
            ("L", "Log events, do not raise alarms"),
            ("A", "Raise alarms"),
        ],
        default="A",
    )
    # Discovery settings
    discovery_policy = StringField(
        choices=[("I", "Ignore"), ("O", "Create new"), ("R", "Replace"),
                 ("C", "Add to cloud")],
        default="R",
    )
    # Collect mac addresses on interface
    mac_discovery_policy = StringField(
        choices=[
            ("d", "Disabled"),
            ("m", "Management VLAN"),
            ("e", "Transit"),
            ("i", "Direct Downlink"),
            ("c", "Chained Downlink"),
            ("u", "Direct Uplink"),
            ("C", "Cloud Downlink"),
        ],
        default="d",
    )
    # Collect and keep interface status
    status_discovery = BooleanField(default=False)
    #
    allow_lag_mismatch = BooleanField(default=False)
    # Send up/down notifications
    status_change_notification = ForeignKeyField(NotificationGroup,
                                                 required=False)
    # Interface profile metrics
    metrics = ListField(EmbeddedDocumentField(InterfaceProfileMetrics))
    # Alarm weight
    weight = IntField(default=0)
    # User network interface
    # MAC discovery can be restricted to UNI
    is_uni = BooleanField(default=False)
    # Allow automatic segmentation
    allow_autosegmentation = BooleanField(default=False)
    # Allow collecting metrics from subinterfaces
    allow_subinterface_metrics = BooleanField(default=False)
    #
    allow_vacuum_bulling = BooleanField(default=False)
    # Validation policy
    interface_validation_policy = PlainReferenceField(
        InterfaceValidationPolicy)
    #
    ifdesc_patterns = PlainReferenceField(IfDescPatterns)
    ifdesc_handler = PlainReferenceField(Handler)
    # Enable abduct detection on interface
    enable_abduct_detection = BooleanField(default=False)
    # Integration with external NRI and TT systems
    # Reference to remote system object has been imported from
    remote_system = ReferenceField(RemoteSystem)
    # Object id in remote system
    remote_id = StringField()
    # Object id in BI
    bi_id = LongField(unique=True)

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _name_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _default_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _status_discovery_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    DEFAULT_PROFILE_NAME = "default"

    def __str__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return InterfaceProfile.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        return InterfaceProfile.objects.filter(bi_id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_name_cache"),
                             lock=lambda _: id_lock)
    def get_by_name(cls, name):
        return InterfaceProfile.objects.filter(name=name).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_default_cache"),
                             lock=lambda _: id_lock)
    def get_default_profile(cls):
        return InterfaceProfile.objects.filter(
            name=cls.DEFAULT_PROFILE_NAME).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_status_discovery_cache"),
                             lock=lambda _: id_lock)
    def get_with_status_discovery(cls):
        """
        Get list of interface profile ids with status_discovery = True
        :return:
        """
        return list(x["_id"] for x in InterfaceProfile._get_collection().find(
            {"status_discovery": True}, {"_id": 1}))
Exemple #29
0
class Vendor(Document):
    """
    Equipment vendor
    """

    meta = {
        "collection": "noc.vendors",
        "strict": False,
        "auto_create_index": False,
        "json_collection": "inv.vendors",
        "json_unique_fields": ["name", "code"],
    }
    # Short vendor name, included as first part of platform
    name = StringField(unique=True)
    # Full vendor name
    full_name = StringField()
    # Unique id
    uuid = UUIDField(binary=True)
    # List of vendor codes to be searched via .get_by_code()
    code = ListField(StringField(), unique=True)
    # Vendor's site
    site = URLField(required=False)
    # Object id in BI
    bi_id = LongField(unique=True)

    _id_cache = cachetools.TTLCache(1000, ttl=60)
    _bi_id_cache = cachetools.TTLCache(1000, ttl=60)
    _code_cache = cachetools.TTLCache(1000, ttl=60)
    _ensure_cache = cachetools.TTLCache(1000, ttl=60)

    def __str__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"), lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return Vendor.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"), lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        return Vendor.objects.filter(bi_id=id).first()

    @classmethod
    def _get_by_code(cls, code):
        """
        Uncached version of get_by_code
        :param code:
        :return:
        """
        code = code.upper()
        return Vendor.objects.filter(code=code).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_code_cache"), lock=lambda _: id_lock)
    def get_by_code(cls, code):
        return cls._get_by_code(code)

    def clean(self):
        # Convert code to list
        if isinstance(self.code, str):
            self.code = [self.code]
        # Uppercase code
        self.code = [c.upper() for c in self.code]
        # Fill full name if not set
        if not self.full_name:
            self.full_name = self.name
        #
        super().clean()

    def on_save(self):
        if not hasattr(self, "_changed_fields") or "name" in self._changed_fields:
            from .platform import Platform

            for p in Platform.objects.filter(vendor=self.id):
                p.save()  # Rebuild full name

    def to_json(self):
        return to_json(
            {
                "name": self.name,
                "$collection": self._meta["json_collection"],
                "full_name": self.full_name,
                "code": self.code,
                "site": self.site,
                "uuid": self.uuid,
            },
            order=["name", "uuid", "full_name", "code", "site"],
        )

    def get_json_path(self):
        return "%s.json" % self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_ensure_cache"), lock=lambda _: id_lock)
    def ensure_vendor(cls, code):
        """
        Get or create vendor by code
        :param code:
        :return:
        """
        while True:
            vendor = Vendor._get_by_code(code)
            if vendor:
                return vendor
            try:
                vendor = Vendor(name=code, full_name=code, code=[code], uuid=uuid.uuid4())
                vendor.save()
                return vendor
            except NotUniqueError:
                pass  # Already created by concurrent process, reread

    @classmethod
    def iter_lazy_labels(cls, vendor: "Vendor"):
        yield f"noc::vendor::{vendor.name}::="
Exemple #30
0
class Object(Document):
    """
    Inventory object
    """

    meta = {
        "collection":
        "noc.objects",
        "strict":
        False,
        "auto_create_index":
        False,
        "indexes": [
            "data",
            "container",
            ("name", "container"),
            ("data.interface", "data.attr", "data.value"),
        ],
    }

    name = StringField()
    model = PlainReferenceField(ObjectModel)
    data = ListField(EmbeddedDocumentField(ObjectAttr))
    container = PlainReferenceField("self", required=False)
    comment = GridVCSField("object_comment")
    # Map
    layer = PlainReferenceField(Layer)
    point = PointField(auto_index=True)
    # Additional connection data
    connections = ListField(EmbeddedDocumentField(ObjectConnectionData))
    # Labels
    labels = ListField(StringField())
    effective_labels = ListField(StringField())
    # Integration with external NRI and TT systems
    # Reference to remote system object has been imported from
    remote_system = ReferenceField(RemoteSystem)
    # Object id in remote system
    remote_id = StringField()
    # Object id in BI
    bi_id = LongField(unique=True)

    _id_cache = cachetools.TTLCache(maxsize=1000, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=1000, ttl=60)

    REBUILD_CONNECTIONS = ["links", "conduits"]

    def __str__(self):
        return smart_text(self.name or self.id)

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id) -> Optional["Object"]:
        return Object.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_bi_id(cls, id) -> Optional["Object"]:
        return Object.objects.filter(bi_id=id).first()

    def iter_changed_datastream(self, changed_fields=None):
        if config.datastream.enable_managedobject:
            if self.data and self.get_data("management", "managed_object"):
                yield "managedobject", self.get_data("management",
                                                     "managed_object")
            else:
                for _, o, _ in self.iter_outer_connections():
                    if o.data and o.get_data("management", "managed_object"):
                        yield "managedobject", o.get_data(
                            "management", "managed_object")

    def clean(self):
        self.set_point()

    def set_point(self):
        from noc.gis.map import map

        # Reset previous data
        self.layer = None
        self.point = None
        # Get points
        x, y, srid = self.get_data_tuple("geopoint", ("x", "y", "srid"))
        if x is None or y is None:
            return  # No point data
        # Get layer
        layer_code = self.model.get_data("geopoint", "layer")
        if not layer_code:
            return
        layer = Layer.get_by_code(layer_code)
        if not layer:
            return
        # Update actual data
        self.layer = layer
        self.point = map.get_db_point(x, y, srid=srid)

    def on_save(self):
        def get_coordless_objects(o):
            r = {str(o.id)}
            for co in Object.objects.filter(container=o.id):
                cx, cy = co.get_data_tuple("geopoint", ("x", "y"))
                if cx is None and cy is None:
                    r |= get_coordless_objects(co)
            return r

        x, y = self.get_data_tuple("geopoint", ("x", "y"))
        if x is not None and y is not None:
            # Rebuild connection layers
            for ct in self.REBUILD_CONNECTIONS:
                for c, _, _ in self.get_genderless_connections(ct):
                    c.save()
            # Update nested objects
            from noc.sa.models.managedobject import ManagedObject

            mos = get_coordless_objects(self)
            if mos:
                ManagedObject.objects.filter(container__in=mos).update(
                    x=x,
                    y=y,
                    default_zoom=self.layer.default_zoom
                    if self.layer else DEFAULT_ZOOM)
        if self._created:
            if self.container:
                pop = self.get_pop()
                if pop:
                    pop.update_pop_links()
        # Changed container
        elif hasattr(
                self,
                "_changed_fields") and "container" in self._changed_fields:
            # Old pop
            old_container_id = getattr(self, "_old_container", None)
            old_pop = None
            if old_container_id:
                c = Object.get_by_id(old_container_id)
                while c:
                    if c.get_data("pop", "level"):
                        old_pop = c
                        break
                    c = c.container
            # New pop
            new_pop = self.get_pop()
            # Check if pop moved
            if old_pop != new_pop:
                if old_pop:
                    old_pop.update_pop_links()
                if new_pop:
                    new_pop.update_pop_links()
        if self.model.sensors:
            self._sync_sensors()

    @cachetools.cached(_path_cache, key=lambda x: str(x.id), lock=id_lock)
    def get_path(self) -> List[str]:
        """
        Returns the list of container ids from the root down to this object (inclusive)
        :return:
        """
        if self.container:
            return self.container.get_path() + [self.id]
        return [self.id]

    def get_nested_ids(self):
        """
        Return ids of this and all nested objects
        :return:
        """
        # $graphLookup hits 100Mb memory limit. Do not use it
        seen = {self.id}
        wave = {self.id}
        max_level = 4
        coll = Object._get_collection()
        for _ in range(max_level):
            # Get next wave
            wave = (set(d["_id"]
                        for d in coll.find({"container": {
                            "$in": list(wave)
                        }}, {"_id": 1})) - seen)
            if not wave:
                break
            seen |= wave
        return list(seen)

    def get_data(self,
                 interface: str,
                 key: str,
                 scope: Optional[str] = None) -> Any:
        attr = ModelInterface.get_interface_attr(interface, key)
        if attr.is_const:
            # Lookup model
            return self.model.get_data(interface, key)
        for item in self.data:
            if item.interface == interface and item.attr == key:
                if not scope or item.scope == scope:
                    return item.value
        return None

    def get_data_dict(self,
                      interface: str,
                      keys: Iterable,
                      scope: Optional[str] = None) -> Dict[str, Any]:
        """
        Get multiple keys from single interface. Returns dict with values for every given key.
        If a key is missing, its value is None

        :param interface:
        :param keys: Iterable contains key names
        :param scope:
        :return:
        """
        kset = set(keys)
        r = {k: None for k in kset}
        for item in self.data:
            if item.interface == interface and item.attr in kset:
                if not scope or item.scope == scope:
                    r[item.attr] = item.value
        return r

    def get_data_tuple(self,
                       interface: str,
                       keys: Union[List, Tuple],
                       scope: Optional[str] = None) -> Tuple[Any, ...]:
        """
        Get multiple keys from single interface. Returns tuple with values for every given key.
        If a key is missing, its value is None

        :param interface:
        :param keys: List or tuple with key names
        :param scope:
        :return:
        """
        r = self.get_data_dict(interface, keys, scope)
        return tuple(r.get(k) for k in keys)

    def get_effective_data(self) -> List[ObjectAttr]:
        """
        Return effective object data, including the model's defaults
        :return:
        """
        seen: Set[Tuple[str, str, str]] = set()  # (interface, attr, scope)
        r: List[ObjectAttr] = []
        # Object attributes
        for item in self.data:
            k = (item.interface, item.attr, item.scope or "")
            if k in seen:
                continue
            r += [item]
            seen.add(k)
        # Model attributes
        for i in self.model.data:
            for a in self.model.data[i]:
                k = (i, a, "")
                if k in seen:
                    continue
                r += [
                    ObjectAttr(interface=i,
                               attr=a,
                               scope="",
                               value=self.model.data[i][a])
                ]
                seen.add(k)
        # Sort according to interface
        sorting_keys: Dict[str, str] = {}
        for ni, i in enumerate(sorted(set(x[0] for x in seen))):
            mi = ModelInterface.get_by_name(i)
            if not mi:
                continue
            for na, a in enumerate(mi.attrs):
                sorting_keys["%s.%s" % (i, a.name)] = "%06d.%06d" % (ni, na)
        # Return sorted result
        return list(
            sorted(
                r,
                key=lambda oa: "%s.%s" % (sorting_keys.get(
                    "%s.%s" %
                    (oa.interface, oa.attr), "999999.999999"), oa.scope),
            ))

    def set_data(self,
                 interface: str,
                 key: str,
                 value: Any,
                 scope: Optional[str] = None) -> None:
        attr = ModelInterface.get_interface_attr(interface, key)
        if attr.is_const:
            raise ModelDataError("Cannot set read-only value")
        value = attr._clean(value)
        for item in self.data:
            if item.interface == interface and item.attr == key:
                if not scope or item.scope == scope:
                    item.value = value
                    break
        else:
            # Insert new item
            self.data += [
                ObjectAttr(interface=interface,
                           attr=attr.name,
                           value=value,
                           scope=scope or "")
            ]

    def reset_data(self,
                   interface: str,
                   key: Union[str, Iterable],
                   scope: Optional[str] = None) -> None:
        if isinstance(key, str):
            kset = {key}
        else:
            kset = set(key)
        v = [
            ModelInterface.get_interface_attr(interface, k).is_const
            for k in kset
        ]
        if any(v):
            raise ModelDataError("Cannot reset read-only value")
        self.data = [
            item for item in self.data if item.interface != interface or (
                scope and item.scope != scope) or item.attr not in kset
        ]

    def has_connection(self, name):
        return self.model.has_connection(name)

    def get_p2p_connection(
        self, name: str
    ) -> Tuple[Optional["ObjectConnection"], Optional["Object"],
               Optional[str]]:
        """
        Get neighbor for p2p connection (s and mf types)
        Returns (connection, remote object, remote connection name) or
        (None, None, None)
        """
        c = ObjectConnection.objects.filter(__raw__={
            "connection": {
                "$elemMatch": {
                    "object": self.id,
                    "name": name
                }
            }
        }).first()
        if c:
            for x in c.connection:
                if x.object.id != self.id:
                    return c, x.object, x.name
        # Strange things happen
        return None, None, None

    def get_genderless_connections(
            self, name: str) -> List[Tuple["ObjectConnection", "Object", str]]:
        r = []
        for c in ObjectConnection.objects.filter(__raw__={
                "connection": {
                    "$elemMatch": {
                        "object": self.id,
                        "name": name
                    }
                }
        }):
            for x in c.connection:
                if x.object.id != self.id:
                    r += [[c, x.object, x.name]]
        return r

    def disconnect_p2p(self, name: str):
        """
        Remove connection *name*
        """
        c = self.get_p2p_connection(name)[0]
        if c:
            self.log("'%s' disconnected" % name,
                     system="CORE",
                     op="DISCONNECT")
            c.delete()

    def connect_p2p(
        self,
        name: str,
        remote_object: "Object",
        remote_name: str,
        data: Dict[str, Any],
        reconnect: bool = False,
    ) -> Optional["ObjectConnection"]:
        lc = self.model.get_model_connection(name)
        if lc is None:
            raise ConnectionError("Local connection not found: %s" % name)
        name = lc.name
        rc = remote_object.model.get_model_connection(remote_name)
        if rc is None:
            raise ConnectionError("Remote connection not found: %s" %
                                  remote_name)
        remote_name = rc.name
        valid, cause = self.model.check_connection(lc, rc)
        if not valid:
            raise ConnectionError(cause)
        # Check existing connections
        if lc.type.genders in ("s", "m", "f", "mf"):
            ec, r_object, r_name = self.get_p2p_connection(name)
            if ec is not None:
                # Connection exists
                if reconnect:
                    if r_object.id == remote_object.id and r_name == remote_name:
                        # Same connection exists
                        n_data = deep_merge(ec.data,
                                            data)  # Merge ObjectConnection
                        if n_data != ec.data:
                            # Update data
                            ec.data = n_data
                            ec.save()
                        return
                    self.disconnect_p2p(name)
                else:
                    raise ConnectionError("Already connected")
        # Create connection
        c = ObjectConnection(
            connection=[
                ObjectConnectionItem(object=self, name=name),
                ObjectConnectionItem(object=remote_object, name=remote_name),
            ],
            data=data,
        ).save()
        self.log("%s:%s -> %s:%s" % (self, name, remote_object, remote_name),
                 system="CORE",
                 op="CONNECT")
        # Disconnect from container on o-connection
        if lc.direction == "o" and self.container:
            self.log("Remove from %s" % self.container,
                     system="CORE",
                     op="REMOVE")
            self.container = None
            self.save()
        return c

    def connect_genderless(
        self,
        name: str,
        remote_object: "Object",
        remote_name: str,
        data: Dict[str, Any] = None,
        type: Optional[str] = None,
        layer: Optional[Layer] = None,
    ):
        """
        Connect two genderless connections
        """
        lc = self.model.get_model_connection(name)
        if lc is None:
            raise ConnectionError("Local connection not found: %s" % name)
        name = lc.name
        rc = remote_object.model.get_model_connection(remote_name)
        if rc is None:
            raise ConnectionError("Remote connection not found: %s" %
                                  remote_name)
        remote_name = rc.name
        if lc.gender != "s":
            raise ConnectionError("Local connection '%s' must be genderless" %
                                  name)
        if rc.gender != "s":
            raise ConnectionError("Remote connection '%s' must be genderless" %
                                  remote_name)
        # Check for connection
        for c, ro, rname in self.get_genderless_connections(name):
            if ro.id == remote_object.id and rname == remote_name:
                c.data = data or {}
                c.save()
                return
        # Normalize layer
        if layer and isinstance(layer, str):
            layer = Layer.get_by_code(layer)
        # Create connection
        ObjectConnection(
            connection=[
                ObjectConnectionItem(object=self, name=name),
                ObjectConnectionItem(object=remote_object, name=remote_name),
            ],
            data=data or {},
            type=type or None,
            layer=layer,
        ).save()
        self.log("%s:%s -> %s:%s" % (self, name, remote_object, remote_name),
                 system="CORE",
                 op="CONNECT")

    def put_into(self, container: "Object"):
        """
        Put object into container
        """
        if container and not container.get_data("container", "container"):
            raise ValueError("Must be put into container")
        # Disconnect all o-connections
        for c in self.model.connections:
            if c.direction == "o":
                c, _, _ = self.get_p2p_connection(c.name)
                if c:
                    self.disconnect_p2p(c.name)
        # Connect to parent
        self.container = container.id if container else None
        # Reset previous rack position
        self.reset_data("rackmount", ("position", "side", "shift"))
        #
        self.save()
        self.log("Insert into %s" % (container or "Root"),
                 system="CORE",
                 op="INSERT")

    def get_content(self) -> "Object":
        """
        Returns all items directly put into container
        """
        return Object.objects.filter(container=self.id)

    def get_local_name_path(self):
        for _, ro, rn in self.get_outer_connections():
            return ro.get_local_name_path() + [rn]
        return []

    def get_name_path(self) -> List[str]:
        """
        Return list of container names
        """
        current = self.container
        if current is None:
            for _, ro, rn in self.get_outer_connections():
                return ro.get_name_path() + [rn]
            return [smart_text(self)]
        np = [smart_text(self)]
        while current:
            np.insert(0, smart_text(current))
            current = current.container
        return np

    def log(self,
            message,
            user=None,
            system=None,
            managed_object=None,
            op=None):
        if not user:
            user = get_user()
        if hasattr(user, "username"):
            user = user.username
        if not user:
            user = "******"
        if not isinstance(managed_object, str):
            managed_object = smart_text(managed_object)
        ObjectLog(
            object=self.id,
            user=user,
            ts=datetime.datetime.now(),
            message=message,
            system=system,
            managed_object=managed_object,
            op=op,
        ).save()

    def get_log(self):
        return ObjectLog.objects.filter(object=self.id).order_by("ts")

    def get_lost_and_found(self) -> Optional["Object"]:
        m = ObjectModel.get_by_name("Lost&Found")
        c = self.container
        while c:
            # Check siblings
            lf = Object.objects.filter(container=c, model=m).first()
            if lf:
                return lf
            # Up one level
            c = c.container
        return None

    @classmethod
    def detach_children(cls, sender, document, target=None):
        if not document.get_data("container", "container"):
            return
        if not target:
            target = document.get_lost_and_found()
        for o in Object.objects.filter(container=document.id):
            if o.get_data("container", "container"):
                cls.detach_children(sender, o, target)
                o.delete()
            else:
                o.put_into(target)

    def iter_connections(
            self,
            direction: Optional[str]) -> Iterable[Tuple[str, "Object", str]]:
        """
        Yields connections of specified direction as tuples of
        (name, remote_object, remote_name)
        """
        ic = set(c.name for c in self.model.connections
                 if c.direction == direction)
        for c in ObjectConnection.objects.filter(connection__object=self.id):
            sn = None
            oc = None
            for cc in c.connection:
                if cc.object.id == self.id:
                    if cc.name in ic:
                        sn = cc.name
                else:
                    oc = cc
            if sn and oc:
                yield sn, oc.object, oc.name

    def iter_inner_connections(self):
        """
        Yields inner connections as tuples of
        (name, remote_object, remote_name)
        """
        yield from self.iter_connections("i")

    def iter_outer_connections(self):
        """
        Yields outer connections as tuples of
        (name, remote_object, remote_name)
        """
        yield from self.iter_connections("o")

    def has_inner_connections(self):
        """
        Returns True if object has any inner connections
        """
        return any(self.iter_inner_connections())

    def get_inner_connections(self):
        """
        Returns a list of inner connections as
        (name, remote_object, remote_name)
        """
        return list(self.iter_inner_connections())

    def get_outer_connections(self):
        """
        Returns a list of outer connections as
        (name, remote_object, remote_name)
        """
        return list(self.iter_outer_connections())
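
    # Usage sketch: walking connections of an object (the `chassis` variable is
    # hypothetical). Each tuple is (local connection name, remote object,
    # remote connection name):
    #
    #     for name, remote, remote_name in chassis.iter_inner_connections():
    #         print(name, "->", remote, remote_name)
    #     if chassis.has_inner_connections():
    #         ...  # e.g. refuse to delete a populated chassis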

    @classmethod
    def delete_disconnect(cls, sender, document, target=None):
        """
        Drop or trim connections of an object being deleted
        """
        for c in ObjectConnection.objects.filter(
                connection__object=document.id):
            left = [cc for cc in c.connection if cc.object.id != document.id]
            if len(left) < 2:
                c.delete()  # Remove the connection entirely
            else:
                # Remove this object from the connection, keep the rest
                c.connection = left
                c.save()
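
    # detach_children() and delete_disconnect() are written as signal handlers;
    # they are most likely wired to MongoEngine's pre_delete signal elsewhere in
    # the module (not shown in this listing). A hedged wiring sketch:
    #
    #     from mongoengine import signals
    #     signals.pre_delete.connect(Object.detach_children, sender=Object)
    #     signals.pre_delete.connect(Object.delete_disconnect, sender=Object)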

    def get_pop(self) -> Optional["Object"]:
        """
        Find enclosing PoP
        :returns: PoP instance or None
        """
        c = self.container
        while c:
            if c.get_data("pop", "level"):
                return c
            c = c.container
        return None

    def get_coordinates_zoom(
            self) -> Tuple[Optional[float], Optional[float], Optional[int]]:
        """
        Get managed object's coordinates
        # @todo: Speedup?
        :returns: x (lon), y (lat), zoom level
        """
        c = self
        while c:
            if c.point and c.layer:
                x, y = c.get_data_tuple("geopoint", ("x", "y"))
                zoom = c.layer.default_zoom or DEFAULT_ZOOM
                return x, y, zoom
            if c.container:
                c = Object.get_by_id(c.container.id)
                if c:
                    continue
            break
        return None, None, None
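
    # Usage sketch: the method climbs the container chain until it finds an
    # object with both a point and a layer, returning lon, lat and the layer's
    # default zoom (all None when nothing up the chain is geo-positioned):
    #
    #     lon, lat, zoom = obj.get_coordinates_zoom()
    #     if lon is not None and lat is not None:
    #         show_on_map(lon, lat, zoom)   # show_on_map is a hypothetical helper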

    @classmethod
    def get_managed(cls, mo):
        """
        Get inventory Objects managed by a managed object
        :param mo: Managed Object instance or id
        :returns: QuerySet of Objects managed by the managed object (may be empty)
        """
        if hasattr(mo, "id"):
            mo = mo.id
        return cls.objects.filter(data__match={
            "interface": "management",
            "attr": "managed_object",
            "value": mo
        })
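
    # Usage sketch: data__match translates to a MongoDB $elemMatch query over
    # the embedded data items, so this returns every inventory object whose
    # management.managed_object attribute equals the given id (the
    # `managed_object` variable is illustrative):
    #
    #     for obj in Object.get_managed(managed_object):   # instance or id
    #         print(obj.name)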

    def iter_managed_object_id(self) -> Iterator[int]:
        for d in Object._get_collection().aggregate([
            {
                "$match": {
                    "_id": self.id
                }
            },
                # Get all nested objects and put them into the _path field
            {
                "$graphLookup": {
                    "from": "noc.objects",
                    "connectFromField": "_id",
                    "connectToField": "container",
                    "startWith": "$_id",
                    "as": "_path",
                    "maxDepth": 50,
                }
            },
                # Leave only _path field
            {
                "$project": {
                    "_id": 0,
                    "_path": 1
                }
            },
                # Unwind _path array to separate documents
            {
                "$unwind": {
                    "path": "$_path"
                }
            },
                # Move data one level up
            {
                "$project": {
                    "data": "$_path.data"
                }
            },
                # Unwind data
            {
                "$unwind": {
                    "path": "$data"
                }
            },
                # Convert nested data to flat document
            {
                "$project": {
                    "interface": "$data.interface",
                    "attr": "$data.attr",
                    "value": "$data.value",
                }
            },
                # Leave only management objects
            {
                "$match": {
                    "interface": "management",
                    "attr": "managed_object"
                }
            },
                # Leave only value
            {
                "$project": {
                    "value": 1
                }
            },
        ]):
            mo = d.get("value")
            if mo:
                yield mo
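
    # The $graphLookup stage above collects every object nested under this one
    # (following container -> _id links, up to 50 levels deep) into _path; the
    # remaining stages reduce their data items to management.managed_object
    # values. Usage sketch (the `container_obj` variable is illustrative):
    #
    #     nested_mo_ids = set(container_obj.iter_managed_object_id())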

    @classmethod
    def get_by_path(cls, path: List[str], hints=None) -> Optional["Object"]:
        """
        Get object by the given path.
        :param path: List of names forming the path, from root to leaf
        :param hints: Optional {name: object_id} dictionary used to shortcut
            the lookup for a known name along the path
        :returns: Object instance, or None if not found
        """
        current = None
        for p in path:
            current = Object.objects.filter(name=p, container=current).first()
            if not current:
                return None
            if hints:
                h = hints.get(p)
                if h:
                    return Object.get_by_id(h)
        return current
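
    # Usage sketch (container names are hypothetical). The lookup walks the
    # path name by name from the root; a hints dictionary of known object ids
    # can short-circuit the walk:
    #
    #     rack = Object.get_by_path(["Moscow", "DC-1", "Row 2", "Rack 5"])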

    def update_pop_links(self, delay: int = 20):
        call_later("noc.inv.util.pop_links.update_pop_links",
                   delay,
                   pop_id=self.id)

    @classmethod
    def _pre_init(cls, sender, document, values, **kwargs):
        """
        Object pre-initialization
        """
        # Store original container id
        if "container" in values and values["container"]:
            document._cache_container = values["container"]

    def get_address_text(self) -> Optional[str]:
        """
        Return first found address.text value upwards the path
        :return: Address text or None
        """
        current = self
        while current:
            addr = current.get_data("address", "text")
            if addr:
                return addr
            if current.container:
                current = Object.get_by_id(current.container.id)
            else:
                break
        return None

    def get_object_serials(self, chassis_only: bool = True) -> List[str]:
        """
        Get object serial numbers
        :param chassis_only: When False, also include serial numbers
            of nested (inner) objects
        :return: List of serial numbers
        """
        serials = [self.get_data("asset", "serial")]
        if not chassis_only:
            for sn, oo, name in self.iter_inner_connections():
                serials += oo.get_object_serials(chassis_only=False)
        return serials

    def iter_scope(self, scope: str) -> Iterable[Tuple[PathItem, ...]]:
        """
        Yields the full physical path for every connection with the given
        scope behind this object

        :param scope: Scope name
        :return: Tuples of PathItem leading to a matching connection
        """
        connections = {
            name: ro
            for name, ro, _ in self.iter_inner_connections()
        }
        for c in self.model.connections:
            if c.type.is_matched_scope(scope, c.protocols):
                # Yield connection
                yield PathItem(object=self, connection=c),
            elif c.name in connections:
                ro = connections[c.name]
                for part_path in ro.iter_scope(scope):
                    yield (PathItem(object=self, connection=c), ) + part_path
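
    # Usage sketch (the scope name and `chassis` variable are illustrative).
    # Each yielded value is a tuple of PathItem(object, connection) records
    # leading from this object down to a connection matching the scope:
    #
    #     for path in chassis.iter_scope("fiber"):
    #         print(" / ".join("%s:%s" % (p.object, p.connection.name) for p in path))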

    def set_connection_interface(self, name, if_name):
        for cdata in self.connections:
            if cdata.name == name:
                cdata.interface_name = if_name
                return
        # New item
        self.connections += [
            ObjectConnectionData(name=name, interface_name=if_name)
        ]

    def reset_connection_interface(self, name):
        self.connections = [c for c in self.connections if c.name != name]

    def _sync_sensors(self):
        """
        Synchronize sensors
        :return:
        """
        from .sensor import Sensor

        Sensor.sync_object(self)

    @classmethod
    def iter_by_address_id(cls,
                           address: Union[str, List[str]],
                           scope: str = None) -> Iterable["Object"]:
        """
        Get objects bound to the given address id(s)
        :param address: Address id or list of address ids
        :param scope: Address scope (empty scope by default)
        :return: Yields matching Object instances
        """
        q = {
            "interface": "address",
            "scope": scope or "",
            "attr": "id",
        }
        if isinstance(address, list):
            if len(address) == 1:
                q["value"] = address[0]
            else:
                q["value__in"] = address
        else:
            q["value"] = address
        yield from cls.objects.filter(data__match=q)
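
    # Usage sketch (the `address_id` value and scope name are illustrative).
    # Matching is done with a single data__match ($elemMatch) query and accepts
    # either one address id or a list of them:
    #
    #     for obj in Object.iter_by_address_id(address_id, scope="ipam"):
    #         print(obj.name)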

    @classmethod
    def can_set_label(cls, label):
        return Label.get_effective_setting(label, setting="enable_object")