Example 1
class Poll(Document):
    date = DateTimeField(default=datetime.datetime.utcnow)
    author = StringField()
    labels = DictField()
    questions = ListField()
    possible_answers = ListField()
    related_answers = ListField()

    @classmethod
    def create(cls, poll_template: dict) -> dict:
        try:
            author = poll_template.get('author', 'Unknown')
            labels = poll_template.get('labels', {})  # labels is a DictField, so default to a dict
            questions = poll_template.get('questions', [])
            possible_answers = poll_template.get('possible_answers', [])
            if (questions and len(questions) == len(possible_answers)
                    and cls._check_answers_len(possible_answers)):
                poll_instance = cls(author=author,
                                    labels=labels,
                                    questions=questions,
                                    possible_answers=possible_answers,
                                    related_answers=[])
                poll_instance.save()
                status = True
                result = 'Everything is OK!'
            else:
                status = False
                result = "The questions list is either empty or there is a problem with answers length.."
        except Exception as ex:
            status = False
            result = f"An error ocurred while saving a Poll:{ex}"

        finally:
            return {"status": status, "result": result}

    @classmethod
    def get_all(cls):
        try:
            output = []
            status = True
            result = cls.objects().as_pymongo()
            for item in result:
                item['id'] = str(item['_id'])
                del item['_id']
                output.append(item)
            result = output
        except Exception as ex:
            status = False
            result = f"An error ocurred while querying all Polls:{ex}"

        finally:
            return {"status": status, "result": result}

    @classmethod
    def get_by_id(cls, id):
        try:
            status = True
            result = cls.objects(id=id)
        except Exception as ex:
            status = False
            result = f"An error ocurred while querying one Poll:{ex}"

        finally:
            return {"status": status, "result": result}

    @classmethod
    def get_by_user(cls, user_id):
        try:
            output = []
            status = True
            result = cls.objects(author=user_id).as_pymongo()
            for item in result:
                item['id'] = str(item['_id'])
                del item['_id']
                output.append(item)
            result = output
        except Exception as ex:
            status = False
            result = f"An error ocurred while querying user's Poll:{ex}"

        finally:
            return {"status": status, "result": result}

    @classmethod
    def get_by_labels(cls, labels: dict) -> list:
        try:
            output = []
            result = cls.objects(labels=labels).as_pymongo()
            for item in result:
                item['id'] = str(item['_id'])
                del item['_id']
                output.append(item)
            status = True
            result = output
        except Exception as ex:
            status = False
            result = f"An error ocurred while querying one Poll:{ex}"

        finally:
            return {"status": status, "result": result}

    @staticmethod
    def _check_answers_len(answers_list: list) -> bool:
        return all(len(answers) <= 4 for answers in answers_list)
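
A minimal usage sketch for the class above (the database name and template
values are assumptions; it presumes a connection opened via
mongoengine.connect):

from mongoengine import connect

connect("polls_db")  # hypothetical database name

template = {
    "author": "alice",
    "questions": ["Favourite colour?"],
    "possible_answers": [["red", "green", "blue"]],  # at most 4 answers each
}
response = Poll.create(template)
if response["status"]:
    print(Poll.get_all()["result"])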
Example 2
class ObjectCapabilities(Document):
    meta = {
        "collection": "noc.sa.objectcapabilities",
        "strict": False,
        "auto_create_index": False
    }
    object = ForeignKeyField(ManagedObject, primary_key=True)
    caps = ListField(EmbeddedDocumentField(CapsItem))

    def __unicode__(self):
        return "%s caps" % self.object.name

    def on_save(self):
        cache.delete("cred-%s" % self.object.id)

    def iter_changed_datastream(self):
        yield "managedobject", self.object.id

    @classmethod
    def get_capabilities(cls, object):
        """
        Resolve object capabilities
        :param object: ManagedObject instance or id
        :return: dict of capability name -> current value
        """
        if hasattr(object, "id"):
            object = object.id
        caps = {}
        oc = ObjectCapabilities._get_collection().find_one({"_id": object})
        if oc:
            for c in oc["caps"]:
                cc = Capability.get_by_id(c["capability"])
                if cc:
                    caps[cc.name] = c.get("value")
        return caps

    @classmethod
    def update_capabilities(cls, object, caps, source):
        """
        Update stored capabilities
        :param object:
        :param caps:
        :param source:
        :return:
        """
        o_label = object
        if hasattr(object, "id"):
            o_label = object.name
            object = object.id
        o_label += "|%s" % source
        oc = ObjectCapabilities._get_collection().find_one({"_id": object
                                                            }) or {}
        # Update existing capabilities
        new_caps = []
        seen = set()
        changed = False
        for ci in oc.get("caps", []):
            c = Capability.get_by_id(ci["capability"])
            cs = ci.get("source")
            cv = ci.get("value")
            if not c:
                logger.info("[%s] Removing unknown capability id %s", o_label,
                            ci["capability"])
                continue
            cn = c.name
            seen.add(cn)
            if cs == source:
                if cn in caps:
                    if caps[cn] != cv:
                        logger.info("[%s] Changing capability %s: %s -> %s",
                                    o_label, cn, cv, caps[cn])
                        ci["value"] = caps[cn]
                        changed = True
                else:
                    logger.info("[%s] Removing capability %s", o_label, cn)
                    changed = True
                    continue
            elif cn in caps:
                logger.info(
                    "[%s] Not changing capability %s: "
                    "Already set with source '%s'", o_label, cn, cs)
            new_caps += [ci]
        # Add new capabilities
        for cn in set(caps) - seen:
            c = Capability.get_by_name(cn)
            if not c:
                logger.info("[%s] Unknown capability %s, ignoring", o_label,
                            cn)
                continue
            logger.info("[%s] Adding capability %s = %s", o_label, cn,
                        caps[cn])
            new_caps += [{
                "capability": c.id,
                "value": caps[cn],
                "source": source
            }]
            changed = True

        if changed:
            logger.info("[%s] Saving changes", o_label)
            ObjectCapabilities._get_collection().update_one(
                {"_id": object}, {"$set": {"caps": new_caps}}, upsert=True)
            cache.delete("cred-%s" % object)
        caps = {}
        for ci in new_caps:
            cn = Capability.get_by_id(ci["capability"])
            if cn:
                caps[cn.name] = ci.get("value")
        return caps
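
The two classmethods are designed to be used together; a sketch, where the
ManagedObject instance `mo` and the capability names are assumptions:

# mo is an existing ManagedObject; the names must resolve via Capability
current = ObjectCapabilities.get_capabilities(mo)
updated = ObjectCapabilities.update_capabilities(
    mo, {"SNMP": True, "SNMP | v2c": True}, source="caps")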
Example 3
class Session(Document):
    """
    This class represents a running instance of the agent on a
    target system. It is responsible for running actions created
    by the user.

    This class also has an associated class SessionHistory,
    which stores less frequently accessed data that tends to
    grow rapidly over time.
    """

    meta = {
        "collection": COLLECTION_SESSIONS,
        "indexes": [
            {"fields": ["session_id"], "unique": True},
            {"fields": ["target_name", "archived"]},
        ],
    }
    session_id = StringField(required=True, null=False, max_length=MAX_STR_LEN)
    target_name = StringField(required=True,
                              null=False,
                              max_length=MAX_STR_LEN)
    timestamp = FloatField(required=True, null=False)

    servers = ListField(StringField(required=True,
                                    null=False,
                                    max_length=MAX_STR_LEN),
                        required=True,
                        null=False)
    interval = FloatField(required=True, null=False)
    interval_delta = FloatField(required=True, null=False)
    config_dict = DictField(null=False)

    agent_version = StringField(null=True, max_length=MAX_STR_LEN)

    archived = BooleanField(required=False, null=False, default=False)

    @staticmethod
    def get_by_id(session_id):
        """
        This method queries for the session object matching the name provided.
        """
        return Session.objects.get(session_id=session_id)  # pylint: disable=no-member

    @staticmethod
    def list_sessions():
        """
        This method queries for all session objects.
        """
        return Session.objects(archived=False)  # pylint: disable=no-member

    @property
    def config(self):
        """
        This property returns the session configuration,
        overriding all reserved keys with their proper values.

        config_dict should never be used directly.
        """
        self.config_dict["interval"] = self.interval  # pylint: disable=unsupported-assignment-operation
        self.config_dict["interval_delta"] = self.interval_delta  # pylint: disable=unsupported-assignment-operation
        self.config_dict["servers"] = self.servers  # pylint: disable=unsupported-assignment-operation

        return self.config_dict

    @property
    def status(self):
        """
        This property returns the session status,
        which is based on the current interval setting
        and the last seen timestamp.
        """
        max_time = self.interval + self.interval_delta + SESSION_CHECK_THRESHOLD

        if time.time() > self.timestamp + (max_time * SESSION_CHECK_MODIFIER):
            return SESSION_STATUSES.get("inactive", "inactive")
        elif time.time() > self.timestamp + max_time:
            return SESSION_STATUSES.get("missing", "missing")

        return SESSION_STATUSES.get("active", "active")

    @property
    def history(self):
        """
        Performs a query to retrieve history information about this session.
        """
        return SessionHistory.objects.get(session_id=self.session_id)  # pylint: disable=no-member

    @property
    def document(self):
        """
        This property returns a filtered JSON document representation of the session.
        """
        return {
            "session_id": self.session_id,
            "target_name": self.target_name,
            "status": self.status,
            "timestamp": self.timestamp,
            "config": self.config,
            "agent_version": self.agent_version,
        }

    def update_config(self,
                      interval=None,
                      interval_delta=None,
                      servers=None,
                      config=None):
        """
        This function will update a sessions config according to the
        provided dict. It will also validate types of reserved keywords.
        """
        if interval is not None:
            self.interval = interval
        if interval_delta is not None:
            self.interval_delta = interval_delta
        if servers is not None:
            self.servers = servers
        if config is not None and isinstance(config, dict):
            for key, value in config.items():
                self.config_dict[key] = value  # pylint: disable=unsupported-assignment-operation
        self.save()

    def update_timestamp(self, new_timestamp):
        """
        This function will update a session's timestamp, and it's history document.
        It will also ensure that the session is unarchived.
        """
        self.timestamp = new_timestamp
        self.history.add_checkin(new_timestamp)
        self.archived = False
        self.save()

    def archive(self):
        """
        Archive a target. This will prevent it from being discovered by lists
        and from being in target documents.
        """
        self.archived = True
        self.save()
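
A sketch of a typical check-in flow for the class above (the session_id and
config values are hypothetical):

import time

session = Session.get_by_id("abc123")
session.update_config(interval=30.0, config={"debug": True})
session.update_timestamp(time.time())
print(session.status)  # "active", "missing" or "inactive"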
Example 4
class AnalyticConfigurationList(Document):
    analytics = ListField(EmbeddedDocumentField(AnalyticConfiguration))
    name = StringField(unique=True, required=True)
Example 5
class Candidate(TimestampMixin, DynamicDocument):
    """
    State is included because in nearly all cases, a candidate 
    is unique to a state (presidential races involve state-level 
    candidacies). This helps with lookups and prevents duplicates.

    """
    ### Meta fields ###
    source = StringField(
        required=True,
        help_text=
        "Name of data source (preferably from datasource.py). NOTE: this could be a single file among many for a given state, if results are split into different files by reporting level"
    )
    election_id = StringField(
        required=True, help_text="election id, e.g. md-2012-11-06-general")
    state = StringField(required=True, choices=STATE_POSTALS)

    person = ReferenceField(
        Person,
        help_text=
        "Reference to unique Person record to link candidacies over time and/or across states for presidential cands."
    )

    ### Contest fields ####
    contest = ReferenceField(Contest,
                             reverse_delete_rule=CASCADE,
                             required=True)
    contest_slug = StringField(
        required=True,
        help_text="Denormalized contest slug for easier querying and obj repr")

    ### Candidate fields ###
    #TODO: Add validation to require full_name or family_name, assuming we allow full_name (see question above)
    full_name = StringField(max_length=200)
    family_name = StringField(max_length=75)
    given_name = StringField(max_length=50)
    suffix = StringField(max_length=20)
    additional_name = StringField(
        max_length=75,
        help_text="Middle name, nickname, etc., if provided in raw results.")
    #TODO: Add example to help_text for slugified name
    slug = StringField(
        max_length=300,
        required=True,
        help_text="Slugified name for easier querying and obj repr")
    identifiers = DictField(
        help_text=
        "Unique identifiers for candidate in other data sets, such as FEC Cand number. "
        "This should store IDs relevant to just this candidacy, such as FEC Cand number(s) for a particular election "
        "cycle. The Person model will store the full history of all FEC Cand Numbers"
    )
    flags = ListField(
        StringField(choices=CANDIDATE_FLAG_CHOICES,
                    help_text="Flags to unambiguously identify candidate "
                    "records that represent special non-person candidates."))

    meta = {
        'indexes': [
            'election_id',
        ],
    }

    def __unicode__(self):
        name = u'%s - %s' % (self.contest_slug, self.name)
        return name

    @property
    def name(self):
        if self.full_name:
            name = self.full_name
        else:
            name = self.family_name
            if self.given_name:
                name += " %s" % self.given_name
            if self.additional_name:
                name += " %s" % self.additional_name
            if self.suffix:
                name += " %s" % self.suffix
            name = "%s" % self.family_name
        return name

    @property
    def key(self):
        return (self.election_id, self.contest_slug, self.slug)

    @classmethod
    def post_init(cls, sender, document, **kwargs):
        if not document.contest_slug:
            document.contest_slug = document.contest.slug

        if not document.slug:
            document.slug = cls.make_slug(full_name=document.full_name)

    @classmethod
    def make_slug(cls, **kwargs):
        return slugify(kwargs.get('full_name'), '-')
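
post_init here follows mongoengine's signal-handler signature, so it only
runs if it is connected somewhere during app setup; a sketch of that wiring
(the connect call is an assumption about the surrounding application):

from mongoengine import signals

signals.post_init.connect(Candidate.post_init, sender=Candidate)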
Example 6
class Contributions(DynamicDocument):
    project = LazyReferenceField("Projects",
                                 required=True,
                                 passthrough=True,
                                 reverse_delete_rule=CASCADE)
    identifier = StringField(required=True,
                             help_text="material/composition identifier")
    formula = StringField(
        help_text="formula (set dynamically if not provided)")
    is_public = BooleanField(required=True,
                             default=False,
                             help_text="public/private contribution")
    last_modified = DateTimeField(required=True,
                                  default=datetime.utcnow,
                                  help_text="time of last modification")
    data = DictField(default={},
                     validation=valid_dict,
                     help_text="simple free-form data")
    structures = ListField(ReferenceField("Structures"),
                           default=list,
                           max_length=10)
    tables = ListField(ReferenceField("Tables"), default=list, max_length=10)
    notebook = ReferenceField("Notebooks")
    meta = {
        "collection":
        "contributions",
        "indexes": [
            "project",
            "identifier",
            "formula",
            "is_public",
            "last_modified",
            {
                "fields": [(r"data.$**", 1)]
            },
        ],
    }

    @classmethod
    def post_init(cls, sender, document, **kwargs):
        # replace existing structures/tables with corresponding ObjectIds
        for component in ["structures", "tables"]:
            lst = getattr(document, component)
            if lst and lst[0].id is None:  # id is None for incoming POST
                dmodule = import_module(f"mpcontribs.api.{component}.document")
                klass = component.capitalize()
                Docs = getattr(dmodule, klass)
                vmodule = import_module(f"mpcontribs.api.{component}.views")
                Resource = getattr(vmodule, f"{klass}Resource")
                resource = Resource()
                for i, o in enumerate(lst):
                    d = resource.serialize(
                        o, fields=["lattice", "sites", "charge"])
                    s = json.dumps(d, sort_keys=True).encode("utf-8")
                    digest = md5(s).hexdigest()
                    obj = Docs.objects(md5=digest).only("id").first()
                    if obj:
                        obj.reload()
                        lst[i] = obj

    @classmethod
    def pre_save_post_validation(cls, sender, document, **kwargs):
        if kwargs.get("skip"):
            return

        # set formula field
        if hasattr(document, "formula") and not document.formula:
            formulae = current_app.config["FORMULAE"]
            document.formula = formulae.get(document.identifier,
                                            document.identifier)

        # project is LazyReferenceField
        project = document.project.fetch()

        # run data through Pint Quantities and save as dicts
        def make_quantities(path, key, value):
            if key in quantity_keys or not isinstance(value,
                                                      (str, int, float)):
                return key, value

            str_value = str(value)
            if str_value.count(" ") > 1:
                return key, value

            q = get_quantity(str_value)
            if not q:
                return key, value

            # silently ignore "nan"
            if isnan(q.nominal_value):
                return False

            # try compact representation
            qq = q.value.to_compact()
            q = new_error_units(q, qq)

            # reduce dimensionality if possible
            if not q.check(0):
                qq = q.value.to_reduced_units()
                q = new_error_units(q, qq)

            # ensure that the same units are used across contributions
            field = delimiter.join(["data"] + list(path) + [key])
            try:
                column = project.columns.get(path=field)
                if column.unit != str(q.value.units):
                    qq = q.value.to(column.unit)
                    q = new_error_units(q, qq)
            except DoesNotExist:
                pass  # column doesn't exist yet (generated in post_save)
            except DimensionalityError:
                raise ValueError(
                    f"Can't convert [{q.units}] to [{column.unit}]!")

            v = Decimal(str(q.nominal_value))
            vt = v.as_tuple()

            if vt.exponent < 0:
                dgts = min(len(vt.digits), max_dgts)
                s = f"{v:.{dgts}g}"
                if not isnan(q.std_dev):
                    s += f"+/-{q.std_dev:.{dgts}g}"

                s += f" {q.units}"
                q = get_quantity(s)

            # return new value dict
            display = str(q.value) if isnan(q.std_dev) else str(q)
            value = {
                "display": display,
                "value": q.nominal_value,
                "error": q.std_dev,
                "unit": str(q.units),
            }
            return key, value

        document.data = remap(document.data,
                              visit=make_quantities,
                              enter=enter)

    @classmethod
    def post_save(cls, sender, document, **kwargs):
        if kwargs.get("skip"):
            return

        # project is LazyReferenceField
        project = document.project.fetch()

        # set columns field for project
        def update_columns(path, key, value):
            path = delimiter.join(["data"] + list(path) + [key])
            is_quantity = isinstance(value, dict) and quantity_keys.issubset(
                value.keys())
            is_text = bool(not is_quantity and isinstance(value, str)
                           and key not in quantity_keys)
            if is_quantity or is_text:
                project.reload("columns")
                try:
                    column = project.columns.get(path=path)
                    if is_quantity:
                        v = value["value"]
                        if isnan(column.max) or v > column.max:
                            column.max = v
                        if isnan(column.min) or v < column.min:
                            column.min = v

                except DoesNotExist:
                    column = {"path": path}
                    if is_quantity:
                        column["unit"] = value["unit"]
                        column["min"] = column["max"] = value["value"]

                    project.columns.create(**column)

                project.save().reload("columns")
                ncolumns = len(project.columns)
                if ncolumns > 50:
                    raise ValueError("Reached maximum number of columns (50)!")

            return True

        # run update_columns over document data
        remap(document.data, visit=update_columns, enter=enter)

        # add/remove columns for other components
        for path in ["structures", "tables"]:
            try:
                project.columns.get(path=path)
            except DoesNotExist:
                if getattr(document, path):
                    project.columns.create(path=path)
                    project.save().reload("columns")

        # generate notebook for this contribution
        if document.notebook is not None:
            document.notebook.delete()

        cells = [
            nbf.new_code_cell("client = Client(\n"
                              '\theaders={"X-Consumer-Groups": "admin"},\n'
                              f'\thost="{MPCONTRIBS_API_HOST}"\n'
                              ")"),
            nbf.new_code_cell(
                f'client.get_contribution("{document.id}").pretty()'),
        ]

        if document.tables:
            cells.append(nbf.new_markdown_cell("## Tables"))
            for table in document.tables:
                cells.append(
                    nbf.new_code_cell(
                        f'client.get_table("{table.id}").plot()'))

        if document.structures:
            cells.append(nbf.new_markdown_cell("## Structures"))
            for structure in document.structures:
                cells.append(
                    nbf.new_code_cell(
                        f'client.get_structure("{structure.id}")'))

        loop = asyncio.new_event_loop()
        task = loop.create_task(
            execute_cells(str(document.id), cells, loop=loop))
        outputs = loop.run_until_complete(task)

        for task in asyncio.all_tasks(loop=loop):
            print(f"Cancelling {task}")
            task.cancel()
            outputs = loop.run_until_complete(task)

        loop.close()

        for idx, output in outputs.items():
            cells[idx]["outputs"] = output

        cells[0] = nbf.new_code_cell("client = Client()")
        doc = deepcopy(seed_nb)
        doc["cells"] += cells

        # avoid circular imports
        from mpcontribs.api.notebooks.document import Notebooks

        document.notebook = Notebooks(**doc).save()
        document.last_modified = datetime.utcnow()
        document.save(signal_kwargs={"skip": True})

    @classmethod
    def pre_delete(cls, sender, document, **kwargs):
        document.reload("notebook", "structures", "tables")

        # remove reference documents
        if document.notebook is not None:
            document.notebook.delete()

        for component in ["structures", "tables"]:
            # check if other contributions exist before deletion!
            for obj in getattr(document, component):
                q = {component: obj.id}
                if sender.objects(**q).count() < 2:
                    obj.delete()

    @classmethod
    def post_delete(cls, sender, document, **kwargs):
        if kwargs.get("skip"):
            return

        # reset columns field for project
        project = document.project.fetch()

        for column in list(project.columns):
            if not isnan(column.min) and not isnan(column.max):
                column.min, column.max = get_min_max(sender, column.path)
                if isnan(column.min) and isnan(column.max):
                    # just deleted last contribution with this column
                    project.update(pull__columns__path=column.path)
            else:
                # use wildcard index if available -> single field query
                field = column.path.replace(delimiter, "__") + "__type"
                qs = sender.objects(**{field: "string"}).only(column.path)

                if qs.count() < 1 or qs.filter(
                        project__name=project.name).count() < 1:
                    project.update(pull__columns__path=column.path)
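
All of the handlers above follow the same mongoengine signal protocol, and
post_save re-saves the document with signal_kwargs={"skip": True} so the
skip guard at the top of each handler breaks the recursion. A sketch of the
wiring this implies (the connect calls are an assumption about the
surrounding app):

from mongoengine import signals

signals.post_init.connect(Contributions.post_init, sender=Contributions)
signals.pre_save_post_validation.connect(
    Contributions.pre_save_post_validation, sender=Contributions)
signals.post_save.connect(Contributions.post_save, sender=Contributions)
signals.pre_delete.connect(Contributions.pre_delete, sender=Contributions)
signals.post_delete.connect(Contributions.post_delete, sender=Contributions)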
Example 7
class LabeledSentences(EmbeddedDocument):
    id = ObjectIdField(required=True, default=lambda: ObjectId())
    data = ListField(required=True)
Example 8
class PhoneNumber(Document):
    meta = {
        "collection": "noc.phonenumbers",
        "strict": False,
        "auto_create_index": False,
        "indexes": [
            "static_service_groups", "effective_service_groups",
            "static_client_groups", "effective_client_groups"
        ]
    }

    number = StringField()
    profile = PlainReferenceField(PhoneNumberProfile)
    state = PlainReferenceField(State)
    dialplan = PlainReferenceField(DialPlan)
    phone_range = PlainReferenceField(PhoneRange)
    category = PlainReferenceField(NumberCategory)
    description = StringField()
    service = PlainReferenceField(Service)
    project = ForeignKeyField(Project)
    protocol = StringField(default="SIP",
                           choices=[("SIP", "SIP"), ("H323", "H.323"),
                                    ("SS7", "SS7"), ("MGCP", "MGCP"),
                                    ("H247", "H.247"), ("ISDN", "ISDN"),
                                    ("Skinny", "Skinny")])
    # Auto-change status to F after *allocated_till*
    allocated_till = DateTimeField()
    # Last state change
    changed = DateTimeField()
    #
    administrative_domain = ForeignKeyField(AdministrativeDomain)
    # Resource groups
    static_service_groups = ListField(ObjectIdField())
    effective_service_groups = ListField(ObjectIdField())
    static_client_groups = ListField(ObjectIdField())
    effective_client_groups = ListField(ObjectIdField())

    _id_cache = cachetools.TTLCache(100, ttl=60)

    def __unicode__(self):
        return self.number

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return PhoneNumber.objects.filter(id=id).first()

    def clean(self):
        super(PhoneNumber, self).clean()
        # Check number is valid integer
        self.number = clean_number(self.number or "")
        if not self.number:
            raise ValidationError("Empty phone number")
        # Change parent
        self.phone_range = PhoneRange.get_closest_range(
            dialplan=self.dialplan, from_number=self.number)
        # Set profile when necessary
        if not self.profile:
            if not self.phone_range:
                raise ValidationError("Either range or profile must be set")
            self.profile = self.phone_range.profile.default_number_profile

    @property
    def enum(self):
        return ".".join(reversed(self.number)) + ".e164.arpa"
Example 9
class ModelInterface(Document):
    """
    Equipment vendor
    """

    meta = {
        "collection": "noc.modelinterfaces",
        "strict": False,
        "auto_create_index": False,
        "json_collection": "inv.modelinterfaces",
        "json_unique_fields": ["uuid", "name"],
    }

    name = StringField(unique=True)
    description = StringField()
    attrs = ListField(EmbeddedDocumentField(ModelInterfaceAttr))
    uuid = UUIDField(binary=True)

    _id_cache = cachetools.TTLCache(1000, 10)
    _name_cache = cachetools.TTLCache(1000, 10)

    def __str__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id) -> Optional["ModelInterface"]:
        return ModelInterface.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_name_cache"),
                             lock=lambda _: id_lock)
    def get_by_name(cls, name: str) -> Optional["ModelInterface"]:
        return ModelInterface.objects.filter(name=name).first()

    def get_attr(self, name):
        for a in self.attrs:
            if a.name == name:
                return a
        return None

    def to_json(self):
        ar = []
        for a in self.attrs:
            r = ["        {"]
            r += ['            "name": "%s",' % q(a.name)]
            r += ['            "type": "%s",' % q(a.type)]
            r += ['            "description": "%s",' % q(a.description)]
            r += ['            "required": %s,' % q(a.required)]
            r += ['            "is_const": %s' % q(a.is_const)]
            r += ["        }"]
            ar += ["\n".join(r)]
        r = [
            "{",
            '    "name": "%s",' % q(self.name),
            '    "$collection": "%s",' % self._meta["json_collection"],
            '    "uuid": "%s",' % str(self.uuid),
            '    "description": "%s",' % q(self.description),
            '    "attrs": [',
            ",\n".join(ar),
            "    ]",
            "}",
        ]
        return "\n".join(r) + "\n"

    def get_json_path(self):
        p = [n.strip() for n in self.name.split("|")]
        return os.path.join(*p) + ".json"

    @classmethod
    def clean_data(cls, data):
        """
        Convert types accoding to interface
        """
        d = deep_copy(data)
        for i_name in d:
            mi = ModelInterface.objects.filter(name=i_name).first()
            if not mi:
                raise ModelDataError("Unknown interface '%s'" % i_name)
            v = d[i_name]
            for a in mi.attrs:
                if a.name in v:
                    vv = v[a.name]
                    if a.type == "strlist":
                        if isinstance(vv, str):
                            vv = [vv]
                        r = set()
                        for x in vv:
                            r.update(x.split(","))
                        vv = [x.strip() for x in sorted(r) if x.strip()]
                    v[a.name] = T_MAP[a.type].clean(vv)
        return d

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_interface_attr(cls, interface, key):
        mi = ModelInterface.objects.filter(name=interface).first()
        if not mi:
            raise ModelDataError("Invalid interface '%s'" % interface)
        attr = mi.get_attr(key)
        if not attr:
            raise ModelDataError("Invalid attribute '%s.%s'" %
                                 (interface, key))
        return attr
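
For a strlist attribute, clean_data splits comma-separated values, strips
whitespace and sorts them before the final T_MAP conversion. A sketch with
hypothetical interface and attribute names:

# assumes a "dimensions" interface with a "tags" attribute of type strlist;
# the sorted, stripped list is then passed through T_MAP["strlist"].clean
data = {"dimensions": {"tags": "b, a,c"}}
ModelInterface.clean_data(data)
# -> {"dimensions": {"tags": ["a", "b", "c"]}} (assuming clean() is identity-like)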
Example 10
class Subscription(Document, DocumentHelperMixin):

    # BIG DB migration 20141028
    bigmig_migrated = BooleanField(default=False)
    bigmig_reassigned = BooleanField(default=False)
    # END BIG DB migration

    feed = ReferenceField('Feed', reverse_delete_rule=CASCADE)
    user = ReferenceField('User', unique_with='feed',
                          reverse_delete_rule=CASCADE)

    # allow the user to rename the field in its own subscription
    name = StringField(verbose_name=_(u'Name'))

    # TODO: convert to UserTag to use ReferenceField and reverse_delete_rule.
    tags = ListField(GenericReferenceField(),
                     default=list, verbose_name=_(u'Tags'),
                     help_text=_(u'Tags that will be applied to new reads in '
                                 u'this subscription.'))

    folders = ListField(ReferenceField(Folder, reverse_delete_rule=PULL),
                        verbose_name=_(u'Folders'), default=list,
                        help_text=_(u'Folder(s) in which this subscription '
                                    u'appears.'))

    all_articles_count = IntRedisDescriptor(
        attr_name='s.aa_c', default=subscription_all_articles_count_default,
        set_default=True, min_value=0)

    unread_articles_count = IntRedisDescriptor(
        attr_name='s.ua_c', default=subscription_unread_articles_count_default,
        set_default=True, min_value=0)

    starred_articles_count = IntRedisDescriptor(
        attr_name='s.sa_c', default=subscription_starred_articles_count_default,
        set_default=True, min_value=0)

    archived_articles_count = IntRedisDescriptor(
        attr_name='s.ra_c',
        default=subscription_archived_articles_count_default,
        set_default=True, min_value=0)

    bookmarked_articles_count = IntRedisDescriptor(
        attr_name='s.ba_c',
        default=subscription_bookmarked_articles_count_default,
        set_default=True, min_value=0)

    meta = {
        'indexes': [
            'user',
            'feed',
            'folders',
        ]
    }

    def __unicode__(self):
        return _(u'{0}+{1} (#{2})').format(
            self.user.username, self.feed.name, self.id)

    @classmethod
    def signal_post_save_handler(cls, sender, document,
                                 created=False, **kwargs):

        subscription = document

        if created:
            if subscription._db_name != settings.MONGODB_NAME_ARCHIVE:
                # HEADS UP: this task name will be registered later
                # by the register_task_method() call.
                subscription_post_create_task.delay(subscription.id)  # NOQA

        # else:
        #     if subscription.feed.is_mailfeed:
        #         mailfeed = MailFeed.get_from_stream_url(subscription.feed.url)
        #
        #         if subscription.user.django == mailfeed.user:
        #             # HEADS UP: we use save() to forward the
        #             # name change to the Feed instance without
        #             # duplicating the code here.
        #             mailfeed.name = subscription.name
        #             mailfeed.save()

    def post_create_task(self):
        """ Method meant to be run from a celery task. """

        # The content of this method is done in subscribe_user_to_feed()
        # to avoid more-than-needed write operations on the database.
        pass

    @classmethod
    def signal_post_delete_handler(cls, sender, document, **kwargs):

        subscription = document

        if subscription._db_name != settings.MONGODB_NAME_ARCHIVE:

            # HEADS UP: we don't pass an ID, else the .get() fails
            # in the task for a good reason: the subscription doesn't
            # exist anymore.
            subscription_post_delete_task.delay(subscription)

    @classmethod
    def subscribe_user_to_feed(cls, user, feed, name=None,
                               force=False, background=False):

        try:
            subscription = cls(user=user, feed=feed).save()

        except (NotUniqueError, DuplicateKeyError):
            if not force:
                LOGGER.info(u'User %s already subscribed to feed %s.',
                            user, feed)
                return cls.objects.get(user=user, feed=feed)

            # With force=True, re-use the existing subscription so the
            # code below doesn't reference an unbound variable.
            subscription = cls.objects.get(user=user, feed=feed)

        else:
            subscription.name = name or feed.name
            subscription.tags = feed.tags[:]
            subscription.save()

        if background:
            # HEADS UP: this task name will be registered later
            # by the register_task_method() call.
            # 'True' is for the 'force' argument.
            subscription_check_reads_task.delay(subscription.id, True)

        else:
            subscription.check_reads(force=True)

        LOGGER.info(u'Subscribed %s to %s via %s.', user, feed, subscription)

        return subscription

    @property
    def has_unread(self):

        # We need a boolean value for accurate template caching.
        return self.unread_articles_count != 0

    @property
    def is_closed(self):

        return self.feed.closed

    def mark_all_read(self, latest_displayed_read=None):

        if self.unread_articles_count == 0:
            return

        # count = self.unread_articles_count

        # self.unread_articles_count = 0

        # for folder in self.folders:
        #     folder.unread_articles_count -= count

        # self.user.unread_articles_count -= count

        # Marking all read is not a database-friendly operation,
        # thus it's run via a task to be able to return immediately,
        # with cache numbers updated.
        #
        # HEADS UP: this task name will be registered later
        # by the register_task_method() call.
        subscription_mark_all_read_in_database_task.delay(
            self.id, now() if latest_displayed_read is None
            #
            # TRICK: we use self.user.reads for 2 reasons:
            #       - avoid importing `Read`, which would create a loop.
            #       - in case of a folder/global initiated mark_all_read(),
            #         the ID can be one of a read in another subscription
            #         and in this case, self.reads.get() will fail.
            #
            else latest_displayed_read.date_created)

    def mark_all_read_in_database(self, prior_datetime):
        """ To avoid marking read the reads that could have been created
            between the task call and the moment it is effectively run,
            we define what to exactly mark as read with the datetime when
            the operation was done by the user.

            Also available as a task for background execution.

            .. note:: the archived reads stay archived, whatever their
                read status is. No need to test this attribute.
        """

        # We touch only unread. This avoids altering the auto_read attribute
        # on reads that have been manually marked read by the user.
        params = {'is_read__ne': True, 'date_created__lte': prior_datetime}

        if self.user.preferences.read.bookmarked_marks_unread:
            # Let bookmarked reads stay unread.
            params['is_bookmarked__ne'] = True

        impacted_unread = self.reads.filter(**params)
        impacted_count  = impacted_unread.count()

        impacted_unread.update(set__is_read=True,
                               set__is_auto_read=True,
                               set__date_read=prior_datetime,
                               set__date_auto_read=prior_datetime)

        # If our caches are correctly computed, doing
        # one more full query just for this is too much.
        #
        # self.compute_cached_descriptors(unread=True)

        self.unread_articles_count -= impacted_count

        for folder in self.folders:
            folder.unread_articles_count -= impacted_count

        self.user.unread_articles_count -= impacted_count

    def check_reads(self, articles=None, force=False, extended_check=False):
        """ Also available as a task for background execution. """

        if not force:
            LOGGER.info(u'Subscription.check_reads() is very costly and '
                        u'should not be needed in normal conditions. Call it '
                        u'with `force=True` if you are sure you want to run it.')
            return

        yesterday = combine(today() - timedelta(days=1), time(0, 0, 0))
        is_older  = False
        my_now    = now()
        reads     = 0
        unreads   = 0
        failed    = 0
        missing   = 0
        rechecked = 0

        # See generic_check_subscriptions_method() for comment about this.
        if articles is None:
            articles = self.feed.good_articles.order_by('-id')

        for article in articles:
            #
            # NOTE: Checking `article.is_good()` is done at a lower
            #       level in the individual `self.create_read()`.
            #       It has nothing to do with the dates-only checks
            #       that we do here.
            #

            params = {}

            if is_older or article.date_published is None:
                params = {
                    'is_read':        True,
                    'is_auto_read':   True,
                    'date_read':      my_now,
                    'date_auto_read': my_now,
                }

            else:
                # As they are ordered by date, switching is_older to True will
                # avoid more date comparisons. MongoDB already did the job.
                if article.date_published < yesterday:

                    is_older = True

                    params = {
                        'is_read':        True,
                        'is_auto_read':   True,
                        'date_read':      my_now,
                        'date_auto_read': my_now,
                    }

                # implicit: else: pass
                # No params == all by default == is_read is False

            # The `create_read()` method is defined
            # in `nonrel/read.py` to avoid an import loop.
            _, created = self.create_read(article, False, **params)

            if created:
                missing += 1

                if params.get('is_read', False):
                    reads += 1

                else:
                    unreads += 1

            elif created is False:
                rechecked += 1

                if extended_check:
                    try:
                        article.activate_reads()

                    except Exception:
                        LOGGER.exception(u'Problem while activating reads '
                                         u'of Article #%s in Subscription '
                                         u'#%s.check_reads(), continuing '
                                         u'check.', article.id, self.id)

            else:
                failed += 1

        if missing or rechecked:
            #
            # TODO: don't recompute everything, just
            #    add or subtract the changed counts.
            #
            self.compute_cached_descriptors(all=True, unread=True)

            for folder in self.folders:
                folder.compute_cached_descriptors(all=True, unread=True)

        LOGGER.info(u'Checked subscription #%s. '
                    u'%s/%s non-existing/re-checked, '
                    u'%s/%s read/unread and %s not created.',
                    self.id, missing, rechecked,
                    reads, unreads, failed)

        return missing, rechecked, reads, unreads, failed
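
A sketch of the subscription flow (user and feed are assumed to be existing
User and Feed documents):

subscription = Subscription.subscribe_user_to_feed(user, feed,
                                                   name=u'My feed',
                                                   background=True)
if subscription.has_unread:
    subscription.mark_all_read()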
Example 11
class Operation(EmbeddedQueryTerm):
    """ An Operation performs some type of operation (so far only boolean: AND, OR, NOT) on multiple query terms to
    build a compound query. There is no implementation yet for more sophisticated mappings, such as SQL join,
    Splunk transaction, map, etc.

    :param QueryComparators operator: The abstract operation to be performed on the input expression(s)
    :param List[QueryTerm] terms: A list of all of the terms joined by the operator
    """
    string_operator = StringField(db_field='operator')
    terms = ListField(EmbeddedDocumentField(EmbeddedQueryTerm))

    def __init__(self, terms, operator=None, **kwargs):
        """
        :type terms: List[QueryTerm]
        :type operator: QueryComparators
        """
        kwargs["terms"] = list(term.prep_value() for term in terms)

        if operator is not None:
            kwargs['string_operator'] = operator.name
            self.operator = operator

        super(Operation, self).__init__(**kwargs)
        if operator is None:
            self.operator = QueryComparators[self.string_operator]

    def compare(self, event):
        if self.operator == QueryComparators.And:
            return all(term.compare(event) for term in self.terms)
        elif self.operator == QueryComparators.Or:
            return any(term.compare(event) for term in self.terms)
        elif self.operator == QueryComparators.Not:
            return not self.terms[0].compare(event)

    def get_fields(self):
        return {f for term in self.terms for f in term.get_fields()}

    def operation(self, other=None, operator=None):
        # if the other operand is null (None or EmptyQuery), ignore it
        if other is None or other is EmptyQuery:
            if operator is self.operator:
                return self
            # but not if the operation is inversion
            else:
                return Operation(terms=[self], operator=operator)

        # if the operators match, then create a new operation and merge them together if possible
        elif operator is self.operator:
            terms = list(self.terms)
            if isinstance(other, Operation) and other.operator is operator:
                terms.extend(other.terms)
            else:
                terms.append(other)
            return Operation(terms=terms, operator=operator)

        else:
            return Operation(terms=[self, other], operator=operator)

    def __repr__(self):
        return '{}(operator={}, terms={})'.format(
            type(self).__name__, self.string_operator, self.terms)

    __str__ = __repr__
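
Compound queries compose through operation(); a sketch, assuming term_a,
term_b and term_c are EmbeddedQueryTerm leaves with compare() methods and
that QueryComparators exposes And/Or/Not members:

combined = Operation(terms=[term_a, term_b], operator=QueryComparators.And)
combined = combined.operation(term_c, operator=QueryComparators.And)  # merged into one AND
matched = combined.compare({"process_name": "cmd.exe"})  # hypothetical event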
Example 12
class Stem(Document):
    meta = {'collection': 'stems'}
    stem = StringField()
    srclangs = ListField(StringField())
    targetlangs = ListField(StringField())
    dicts = ListField(StringField())
Example 13
class MetricConfig(Document):
    meta = {"collection": "noc.pm.metricconfigs"}

    name = StringField(unique=True)
    is_active = BooleanField(default=True)
    handler = StringField()
    interval = IntField(default=60)
    description = StringField(required=False)
    probe = ReferenceField(Probe, reverse_delete_rule=DENY, required=False)
    metrics = ListField(EmbeddedDocumentField(MetricItem))
    config = DictField(default={})

    def __unicode__(self):
        return self.name

    def get_effective_settings(self, trace=False):
        """
        Returns a list of MetricItems, containing all effective
        metrics and thresholds for group
        """
        def q(s):
            return s.replace(" | ", ".").replace(" ", "_").replace("/", "-").lower()

        def apply_settings(name, mi):
            """
            Apply settings to node and all children
            """
            dst = mt_tree[name][0]
            dst.is_active = mi.is_active
            dst.low_warn = mi.low_warn
            dst.high_warn = mi.high_warn
            dst.low_error = mi.low_error
            dst.high_error = mi.high_error
            for c in mt_tree[name][1]:
                apply_settings(c, mi)

        # Build metric type tree
        mt_tree = {}  # Metric type name -> (metric item, [children])
        for mi in self.metrics:
            mt = mi.metric_type
            if mt.name in mt_tree:
                continue
            # Find all children
            nmt = [mt] + sorted(
                MetricType.objects.filter(name__startswith=mt.name + " | "),
                key=lambda x: len(x.name))
            for m in nmt:
                if m.name in mt_tree:
                    continue
                mt_tree[m.name] = [
                    MetricItem(metric_type=m, is_active=True), []
                ]
                parent = " | ".join(p for p in m.name.split(" | ")[:-1])
                if parent in mt_tree:
                    mt_tree[parent][1] += [m.name]
        # Apply settings
        for mi in self.metrics:
            apply_settings(mi.metric_type.name, mi)
        # Fetch leaf nodes
        r = []
        mt = [mi[0] for mi in mt_tree.itervalues() if not mi[1]]
        for mi in mt:
            if not mi.is_active:
                continue
            if mi.metric:
                metric = mi.metric
            else:
                # Auto-generate metric
                metric = "metric.%s.%s" % (q(self.name), q(
                    mi.metric_type.name))
            es = EffectiveSettings(object=self,
                                   model_id="pm.MetricConfig",
                                   metric=metric,
                                   metric_type=mi.metric_type,
                                   is_active=True,
                                   probe=self.probe,
                                   interval=self.interval,
                                   thresholds=[
                                       mi.low_error, mi.low_warn, mi.high_warn,
                                       mi.high_error
                                   ])
            for h in probe_registry.iter_class_handlers(
                    self.handler, mi.metric_type.name):
                if trace:
                    es.trace("Checking %s" % h.handler_name)
                config = {}
                failed = False
                if h.req:
                    for name in h.req:
                        if name in self.config and self.config[name] not in (
                                "", None):
                            config[name] = self.config[name]
                        else:
                            failed = True
                            if trace:
                                es.trace("Cannot get required variable '%s'" %
                                         name)
                            break
                if failed:
                    if trace:
                        es.trace("Giving up")
                    continue
                # Get optional parameters
                for name in h.opt:
                    if name in self.config and self.config[name] not in ("", None):
                        config[name] = self.config[name]
                # Handler found if the assembled config matches
                if h.match(config):
                    es.handler = h.handler_name
                    es.config = config
                    es.convert = h.convert
                    es.scale = h.scale
                    if trace:
                        es.trace("Matched handler %s(%s)" %
                                 (h.handler_name, config))
                    break
                elif trace:
                    es.trace("Handler mismatch")
            #
            es.is_active = bool(es.handler)
            if trace and not es.handler:
                es.error("No handler found")
            if es.is_active or trace:
                r += [es]
        # Collapse around handlers
        rr = {}
        for es in r:
            probe_id = es.probe.id if es.probe else None
            if es.handler:
                key = (probe_id, es.handler, es.interval)
            else:
                key = (probe_id, es.metric, es.metric_type, es.interval)
            if key in rr:
                e = rr[key]
                e.metrics += [
                    EffectiveSettingsMetric(metric=es.metric,
                                            metric_type=es.metric_type,
                                            thresholds=es.thresholds,
                                            convert=es.convert,
                                            scale=es.scale)
                ]
            else:
                es.metrics = [
                    EffectiveSettingsMetric(metric=es.metric,
                                            metric_type=es.metric_type,
                                            thresholds=es.thresholds,
                                            convert=es.convert,
                                            scale=es.scale)
                ]
                es.metric = None
                es.metric_type = None
                es.thresholds = None
                es.convert = None
                es.scale = None
                rr[key] = es
        return rr.values()
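
Passing trace=True keeps non-matching settings in the result and records why
each handler was accepted or skipped; a debugging sketch (the config name is
hypothetical):

cfg = MetricConfig.objects.filter(name="router-health").first()
for es in cfg.get_effective_settings(trace=True):
    print(es.handler, [m.metric for m in es.metrics])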
Example 14
class Group(Document):
    """
    This class represents a collection of target systems. Some are
    added to the group manually, while some are dynamically added
    based on group membership rules.
    """
    meta = {
        'collection': COLLECTION_GROUPS,
        'indexes': [{
            'fields': ['name'],
            'unique': True
        }]
    }
    name = StringField(required=True, null=False, unique=True)

    whitelist_members = ListField(StringField(required=True, null=False))
    blacklist_members = ListField(StringField(required=True, null=False))

    membership_rules = EmbeddedDocumentListField(GroupAutomemberRule,
                                                 null=False)

    @staticmethod
    def get_target_groups(target_name):
        """
        WARNING: Expensive Method
        This method returns a list of groups that a target is in.
        """
        groups = []
        for group in Group.objects():  #pylint: disable=no-member
            if target_name in group.member_names:
                groups.append(group)

        return list(set(groups))

    @staticmethod
    def get_by_name(name):
        """
        This method queries for the group with the given name.
        """
        return Group.objects.get(name=name)  #pylint: disable=no-member

    @staticmethod
    def list_groups():
        """
        This method queries for all group objects.
        """
        return Group.objects()  #pylint: disable=no-member

    @property
    def members(self):
        """
        This property returns member objects of all group members.
        """
        # TODO: Implement membership rules and blacklist
        return Target.objects(name__in=self.whitelist_members)  #pylint: disable=no-member

    @property
    def member_names(self):
        """
        This property returns member object names for all group members.
        """
        # TODO: Implement membership rules and blacklist
        return self.whitelist_members

    @property
    def document(self):
        """
                This property filters and returns the JSON information for a queried group.
        """
        return {
            'name': self.name,
            'whitelist_members': self.whitelist_members,
            'blacklist_members': self.blacklist_members
        }

    def whitelist_member(self, target_name):
        """
        This function attempts to add a target to the member whitelist.
        The target will not be added if the target is in the blacklist.
        """

        if target_name in self.blacklist_members:  #pylint: disable=unsupported-membership-test
            raise MembershipError(
                'Cannot whitelist a member that is on the blacklist.')

        self.whitelist_members.append(target_name)  #pylint: disable=no-member
        self.save()

    def remove_member(self, target_name):
        """
        This function removes a target from the member whitelist.
        """
        if target_name not in self.whitelist_members:  #pylint: disable=unsupported-membership-test
            raise MembershipError(
                'Cannot remove member, member is not whitelisted.')
        self.whitelist_members.remove(target_name)  #pylint: disable=no-member
        self.save()

    def blacklist_member(self, target_name):
        """
        This function removes a target from the member whitelist (if present)
        and adds them to the blacklist (if not already on it).
        """
        try:
            self.remove_member(target_name)
        except MembershipError:
            # Target was not whitelisted; nothing to remove
            pass

        if target_name in self.blacklist_members:  #pylint: disable=unsupported-membership-test
            raise MembershipError('Member is already blacklisted.')
        self.blacklist_members.append(target_name)  #pylint: disable=no-member
        self.save()

    def remove(self):
        """
        Remove this document from the database, and perform any related cleanup.
        """
        self.delete()
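
A minimal usage sketch for the membership API above (assumes an active MongoEngine connection; the group and target names are placeholders):

group = Group(name="build-servers")
group.save()
group.whitelist_member("target-01")      # plain whitelist add
group.blacklist_member("target-01")      # moves the target from whitelist to blacklist
try:
    group.whitelist_member("target-01")  # rejected: blacklisted members cannot be whitelisted
except MembershipError:
    pass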
Example no. 15
class MetricScope(Document):
    meta = {
        "collection": "noc.metricscopes",
        "strict": False,
        "auto_create_index": False,
        "json_collection": "pm.metricscopes",
        "json_unique_fields": ["name"],
    }

    name = StringField(unique=True)
    uuid = UUIDField(binary=True)
    # Database table name
    table_name = StringField()
    description = StringField(required=False)
    key_fields = ListField(EmbeddedDocumentField(KeyField))
    path = ListField(EmbeddedDocumentField(PathItem))

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __str__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"), lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return MetricScope.objects.filter(id=id).first()

    @property
    def json_data(self):
        r = {
            "name": self.name,
            "$collection": self._meta["json_collection"],
            "uuid": self.uuid,
            "table_name": self.table_name,
            "description": self.description,
            "key_fields": [kf.to_json() for kf in self.key_fields],
            "path": [p.to_json() for p in self.path],
        }
        return r

    def to_json(self):
        return to_json(
            self.json_data,
            order=[
                "name",
                "$collection",
                "uuid",
                "table_name",
                "description",
                "key_fields",
                "path",
            ],
        )

    def get_json_path(self):
        return "%s.json" % self.name

    def iter_fields(self):
        """
        Yield (field_name, field_type) tuples
        :return:
        """
        from .metrictype import MetricType

        yield ("date", "Date")
        yield ("ts", "DateTime")
        for f in self.key_fields:
            yield (f.field_name, f.field_type)
        if self.path:
            yield ("path", "Array(String)")
        for t in MetricType.objects.filter(scope=self.id).order_by("id"):
            yield (t.field_name, t.field_type)

    def get_create_sql(self):
        """
        Get CREATE TABLE SQL statement
        :return:
        """
        pk = [f.field_name for f in self.key_fields]
        if self.path:
            pk += ["path"]
        pk += ["ts"]
        r = [
            "CREATE TABLE IF NOT EXISTS %s (" % self._get_raw_db_table(),
            ",\n".join("  %s %s" % (n, t) for n, t in self.iter_fields()),
            ") ENGINE = MergeTree(date, (%s), 8192)" % ", ".join(pk),
        ]
        return "\n".join(r)

    def get_create_distributed_sql(self):
        """
        Get CREATE TABLE for Distributed engine
        :return:
        """
        return (
            "CREATE TABLE IF NOT EXISTS %s "
            "AS %s "
            "ENGINE = Distributed(%s, %s, %s)"
            % (
                self.table_name,
                self._get_raw_db_table(),
                config.clickhouse.cluster,
                config.clickhouse.db,
                self._get_raw_db_table(),
            )
        )

    def _get_raw_db_table(self):
        if config.clickhouse.cluster:
            return "raw_%s" % self.table_name
        else:
            return self.table_name

    def ensure_table(self, connect=None):
        """
        Ensure the table exists
        :return: True, if table has been changed
        """
        from noc.core.clickhouse.connect import connection

        def ensure_columns(table_name):
            c = False
            # Alter when necessary
            existing = {}
            for name, type in ch.execute(
                """
                SELECT name, type
                FROM system.columns
                WHERE
                  database=%s
                  AND table=%s
                """,
                [config.clickhouse.db, table_name],
            ):
                existing[name] = type
            after = None
            for f, t in self.iter_fields():
                if f not in existing:
                    ch.execute(
                        post="ALTER TABLE %s ADD COLUMN %s %s AFTER %s" % (table_name, f, t, after)
                    )
                    c = True
                after = f
                if f in existing and existing[f] != t:
                    print("Warning! Type mismatch for column %s: %s <> %s" % (f, existing[f], t))
                    print(
                        "Set command manually: ALTER TABLE %s MODIFY COLUMN %s %s"
                        % (table_name, f, t)
                    )
            return c

        changed = False
        ch = connect or connection(read_only=False)
        if not ch.has_table(self._get_raw_db_table()):
            # Create new table
            ch.execute(post=self.get_create_sql())
            changed = True
        else:
            changed |= ensure_columns(self._get_raw_db_table())
        # Check for distributed table
        if config.clickhouse.cluster:
            if not ch.has_table(self.table_name):
                ch.execute(post=self.get_create_distributed_sql())
                changed = True
            else:
                changed |= ensure_columns(self.table_name)
        return changed
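
A sketch of how the DDL helpers above combine; the scope name "interface" and the column names in the comment are hypothetical:

scope = MetricScope.objects.filter(name="interface").first()
if scope:
    print(scope.get_create_sql())
    # Roughly:
    # CREATE TABLE IF NOT EXISTS interface (
    #   date Date,
    #   ts DateTime,
    #   managed_object UInt64,
    #   path Array(String),
    #   load_in UInt64
    # ) ENGINE = MergeTree(date, (managed_object, path, ts), 8192)
    scope.ensure_table()  # creates the ClickHouse table or ALTERs in missing columns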
Example no. 16
class PostModel(DynamicDocument):
    url = StringField()
    abs_rshares = IntField()
    active = DateTimeField()
    allow_curation_rewards = BooleanField()
    allow_replies = BooleanField()
    allow_votes = BooleanField()
    active_votes = DictField()
    author = StringField()
    author_rewards = IntField()
    body = StringField()
    cashout_time = DateTimeField()
    category = StringField()
    children = IntField()  # TODO: expose the total count of all comments
    children_abs_rshares = IntField()
    children_rshares2 = IntField()
    created = DateTimeField()
    curator_payout_symbol = StringField()
    curator_payout_value = DictField()
    depth = IntField()
    json_metadata = DictField()
    last_payout = DateTimeField()
    last_update = DateTimeField()
    max_accepted_payout_symbol = StringField()
    max_accepted_payout_value = FloatField()
    max_cashout_time = DateTimeField()
    mode = StringField()
    net_rshares = IntField()
    net_votes = IntField()
    parent_author = StringField()
    parent_permlink = StringField()
    percent_steem_dollars = IntField()
    permlink = StringField()
    removed = BooleanField()
    reward_weight = IntField()
    root_comment = ObjectIdField()
    title = StringField()
    total_payout_symbol = StringField()
    total_vote_weight = IntField()
    vote_rshares = IntField()
    author_reputation = IntField()
    beneficiaries = ListField(DictField())
    body_length = IntField()
    community = StringField()
    identifier = StringField()
    max_accepted_payout = DictField()
    patched = BooleanField()
    pending_payout_value = DictField()
    promoted = DictField()
    reblogged_by = ListField(StringField())
    replies = ListField(StringField())  # TODO StringField?
    root_author = StringField()
    root_identifier = StringField()
    root_permlink = StringField()
    root_title = StringField()
    tags = ListField(StringField())
    total_payout_value = DictField()
    total_pending_payout_value = DictField()

    meta = {
        'ordering': ['-created'],

        'indexes': [
            'author',
            'permlink',
            'created',
            'category',
            'json_metadata.location',
            'depth',
            'root_comment',
            'parent_permlink',
            'parent_author',
            'mode',
        ],

        'auto_create_index': True,
        'index_background': True
    }
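
Given the declared ordering ('-created') and the indexes on author, category and depth, typical queries against this model look like the sketch below (the author and category values are placeholders):

recent = PostModel.objects(author="some-author").only("title", "created")[:10]
top_level = PostModel.objects(category="travel", depth=0)  # depth 0 = root posts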
Example no. 17
class ActiveAlarm(Document):
    meta = {
        "collection": "noc.alarms.active",
        "strict": False,
        "auto_create_index": False,
        "indexes": [
            "timestamp",
            "root",
            "-severity",
            ("alarm_class", "managed_object"),
            ("discriminator", "managed_object"),
            ("timestamp", "managed_object"),
            "escalation_tt",
            "escalation_ts",
            "adm_path",
            "segment_path",
            "container_path",
            "uplinks",
            ("alarm_class", "rca_neighbors"),
        ],
    }
    status = "A"

    timestamp = DateTimeField(required=True)
    last_update = DateTimeField(required=True)
    managed_object = ForeignKeyField(ManagedObject)
    alarm_class = PlainReferenceField(AlarmClass)
    severity = IntField(required=True)
    vars = DictField()
    # Calculated alarm discriminator
    # Has meaning only for alarms with is_unique flag set
    # Calculated as sha1("value1\x00....\x00valueN").hexdigest()
    discriminator = StringField(required=False)
    log = ListField(EmbeddedDocumentField(AlarmLog))
    # Manual acknowledgement timestamp
    ack_ts = DateTimeField(required=False)
    # Manual acknowledgement user name
    ack_user = StringField(required=False)
    #
    opening_event = ObjectIdField(required=False)
    closing_event = ObjectIdField(required=False)
    # List of subscribers
    subscribers = ListField(ForeignKeyField(User))
    #
    custom_subject = StringField(required=False)
    custom_style = ForeignKeyField(Style, required=False)
    #
    reopens = IntField(required=False)
    # RCA
    # Reference to root cause (Active Alarm or Archived Alarm instance)
    root = ObjectIdField(required=False)
    # Escalated TT ID in form
    # <external system name>:<external tt id>
    escalation_ts = DateTimeField(required=False)
    escalation_tt = StringField(required=False)
    escalation_error = StringField(required=False)
    # span context
    escalation_ctx = LongField(required=False)
    # Close tt when alarm cleared
    close_tt = BooleanField(default=False)
    # Do not clear alarm until *wait_tt* is closed
    wait_tt = StringField()
    wait_ts = DateTimeField()
    # Directly affected services summary, grouped by profiles
    # (connected to the same managed object)
    direct_services = ListField(EmbeddedDocumentField(SummaryItem))
    direct_subscribers = ListField(EmbeddedDocumentField(SummaryItem))
    # Indirectly affected services summary, grouped by profiles
    # (covered by this and all inferred alarms)
    total_objects = ListField(EmbeddedDocumentField(ObjectSummaryItem))
    total_services = ListField(EmbeddedDocumentField(SummaryItem))
    total_subscribers = ListField(EmbeddedDocumentField(SummaryItem))
    # Template and notification group to send close notification
    clear_template = ForeignKeyField(Template, required=False)
    clear_notification_group = ForeignKeyField(NotificationGroup, required=False)
    # Paths
    adm_path = ListField(IntField())
    segment_path = ListField(ObjectIdField())
    container_path = ListField(ObjectIdField())
    # Uplinks, for topology_rca only
    uplinks = ListField(IntField())
    # RCA neighbor cache, for topology_rca only
    rca_neighbors = ListField(IntField())
    dlm_windows = ListField(IntField())
    # RCA_* enums
    rca_type = IntField(default=RCA_NONE)
    # labels
    labels = ListField(StringField())
    effective_labels = ListField(StringField())

    def __str__(self):
        return "%s" % self.id

    def iter_changed_datastream(self, changed_fields=None):
        if config.datastream.enable_alarm:
            yield "alarm", self.id

    def clean(self):
        super().clean()
        if not self.last_update:
            self.last_update = self.timestamp
        data = self.managed_object.data
        self.adm_path = data.adm_path
        self.segment_path = data.segment_path
        self.container_path = data.container_path
        self.uplinks = data.uplinks
        self.rca_neighbors = data.rca_neighbors
        self.dlm_windows = data.dlm_windows
        if not self.id:
            self.effective_labels = [
                label for label in self.iter_effective_labels() if self.can_set_label(label)
            ]

    def safe_save(self, **kwargs):
        """
        Create new alarm or update existing if still exists
        :param kwargs:
        :return:
        """
        if self.id:
            # Update existing only if exists
            if "save_condition" not in kwargs:
                kwargs["save_condition"] = {"id": self.id}
            try:
                self.save(**kwargs)
            except SaveConditionError:
                pass  # Race condition, closed during update
        else:
            self.save()

    def change_severity(self, user="", delta=None, severity=None, to_save=True):
        """
        Change alarm severity
        """
        if isinstance(user, User):
            user = user.username
        if delta:
            self.severity = max(0, self.severity + delta)
            if delta > 0:
                self.log_message("%s has increased alarm severity by %s" % (user, delta))
            else:
                self.log_message("%s has decreased alarm severity by %s" % (user, delta))
        elif severity:
            if isinstance(severity, (int, float)):
                self.severity = int(severity)
                self.log_message("%s has changed severity to %s" % (user, severity))
            else:
                self.severity = severity.severity
                self.log_message("%s has changed severity to %s" % (user, severity.name))
        if to_save:
            self.safe_save()

    def log_message(self, message, to_save=True, bulk=None, source=None):
        if bulk:
            bulk += [
                UpdateOne(
                    {"_id": self.id},
                    {
                        "$push": {
                            "log": {
                                "timestamp": datetime.datetime.now(),
                                "from_status": self.status,
                                "to_status": self.status,
                                "message": message,
                                "source": source,
                            }
                        }
                    },
                )
            ]
        self.log += [
            AlarmLog(
                timestamp=datetime.datetime.now(),
                from_status=self.status,
                to_status=self.status,
                message=message,
                source=source,
            )
        ]
        if to_save and not bulk:
            self.safe_save()

    def clear_alarm(self, message, ts=None, force=False, source=None):
        """
        Clear alarm
        :param message: Log clearing message
        :param ts: Clearing timestamp
        :param force: Clear even if wait_tt is set
        :param source: Source of the clear request
        """
        ts = ts or datetime.datetime.now()
        if self.wait_tt and not force:
            # Wait for escalated tt to close
            if not self.wait_ts:
                self.wait_ts = ts
                self.log_message("Waiting for TT to close")
                call_later(
                    "noc.services.escalator.wait_tt.wait_tt",
                    scheduler="escalator",
                    pool=self.managed_object.escalator_shard,
                    alarm_id=self.id,
                )
            return
        if self.alarm_class.clear_handlers:
            # Process clear handlers
            for h in self.alarm_class.get_clear_handlers():
                try:
                    h(self)
                except Exception:
                    error_report()
        log = self.log + [
            AlarmLog(timestamp=ts, from_status="A", to_status="C", message=message, source=source)
        ]
        a = ArchivedAlarm(
            id=self.id,
            timestamp=self.timestamp,
            clear_timestamp=ts,
            managed_object=self.managed_object,
            alarm_class=self.alarm_class,
            severity=self.severity,
            vars=self.vars,
            log=log,
            ack_ts=self.ack_ts,
            ack_user=self.ack_user,
            root=self.root,
            escalation_ts=self.escalation_ts,
            escalation_tt=self.escalation_tt,
            escalation_error=self.escalation_error,
            escalation_ctx=self.escalation_ctx,
            opening_event=self.opening_event,
            closing_event=self.closing_event,
            discriminator=self.discriminator,
            reopens=self.reopens,
            direct_services=self.direct_services,
            direct_subscribers=self.direct_subscribers,
            total_objects=self.total_objects,
            total_services=self.total_services,
            total_subscribers=self.total_subscribers,
            adm_path=self.adm_path,
            segment_path=self.segment_path,
            container_path=self.container_path,
            uplinks=self.uplinks,
            rca_neighbors=self.rca_neighbors,
            rca_type=self.rca_type,
            labels=self.labels,
            effective_labels=self.effective_labels,
        )
        ct = self.alarm_class.get_control_time(self.reopens)
        if ct:
            a.control_time = datetime.datetime.now() + datetime.timedelta(seconds=ct)
        a.save()
        # Send notifications
        if not a.root and not self.reopens:
            a.managed_object.event(
                a.managed_object.EV_ALARM_CLEARED,
                {
                    "alarm": a,
                    "subject": a.subject,
                    "body": a.body,
                    "symptoms": a.alarm_class.symptoms,
                    "recommended_actions": a.alarm_class.recommended_actions,
                    "probable_causes": a.alarm_class.probable_causes,
                },
            )
        elif ct:
            pass
        # Set checks on all consequences
        for d in self._get_collection().find(
            {"root": self.id}, {"_id": 1, "alarm_class": 1, "managed_object": 1}
        ):
            ac = AlarmClass.get_by_id(d["alarm_class"])
            if not ac:
                continue
            t = ac.recover_time
            if not t:
                continue
            call_later(
                "noc.services.correlator.check.check_close_consequence",
                scheduler="correlator",
                pool=self.managed_object.get_effective_fm_pool().name,
                delay=t,
                shard=d.get("managed_object"),
                alarm_id=d["_id"],
            )
        # Clear alarm
        self.delete()
        # Close TT
        # MUST be after .delete() to prevent race conditions
        if a.escalation_tt or self.clear_template:
            if self.clear_template:
                ctx = {"alarm": a}
                subject = self.clear_template.render_subject(**ctx)
                body = self.clear_template.render_body(**ctx)
            else:
                subject = "Alarm cleared"
                body = "Alarm has been cleared"
            call_later(
                "noc.services.escalator.escalation.notify_close",
                scheduler="escalator",
                pool=self.managed_object.escalator_shard,
                max_runs=config.fm.alarm_close_retries,
                alarm_id=self.id,
                tt_id=self.escalation_tt,
                subject=subject,
                body=body,
                notification_group_id=self.clear_notification_group.id
                if self.clear_notification_group
                else None,
                close_tt=self.close_tt,
                login="******",
                queue=a.managed_object.tt_queue,
            )
        # Gather diagnostics
        AlarmDiagnosticConfig.on_clear(a)
        # Return archived
        return a

    def get_template_vars(self):
        """
        Prepare template variables
        """
        vars = self.vars.copy()
        vars.update({"alarm": self})
        return vars

    @property
    def subject(self):
        if self.custom_subject:
            s = self.custom_subject
        else:
            ctx = Context(self.get_template_vars())
            s = DjangoTemplate(self.alarm_class.subject_template).render(ctx)
        if len(s) >= 255:
            s = s[:125] + " ... " + s[-125:]
        return s

    @property
    def body(self):
        ctx = Context(self.get_template_vars())
        s = DjangoTemplate(self.alarm_class.body_template).render(ctx)
        return s

    def subscribe(self, user):
        """
        Change alarm's subscribers
        """
        if user.id not in self.subscribers:
            self.subscribers += [user.id]
            self.log_message(
                "%s(%s): has been subscribed"
                % ((" ".join([user.first_name, user.last_name]), user.username)),
                to_save=False,
                source=user.username,
            )
            self.save()

    def unsubscribe(self, user):
        if self.is_subscribed(user):
            self.subscribers = [u for u in self.subscribers if u != user.id]
            self.log_message(
                "%s(%s) has been unsubscribed"
                % ((" ".join([user.first_name, user.last_name]), user.username)),
                to_save=False,
                source=user.username,
            )
            self.save()

    def is_subscribed(self, user):
        return user.id in self.subscribers

    def acknowledge(self, user, msg=""):
        self.ack_ts = datetime.datetime.now()
        self.ack_user = user.username
        self.log = self.log + [
            AlarmLog(
                timestamp=self.ack_ts,
                from_status="A",
                to_status="A",
                message="Acknowledged by %s(%s): %s" % (user.get_full_name(), user.username, msg),
                source=user.username,
            )
        ]
        self.save()

    def unacknowledge(self, user, msg=""):
        self.ack_ts = None
        self.ack_user = None
        self.log = self.log + [
            AlarmLog(
                timestamp=datetime.datetime.now(),
                from_status="A",
                to_status="A",
                message="Unacknowledged by %s(%s): %s" % (user.get_full_name(), user.username, msg),
                source=user.username,
            )
        ]
        self.save()

    @property
    def duration(self):
        dt = datetime.datetime.now() - self.timestamp
        return dt.days * 86400 + dt.seconds

    @property
    def display_duration(self):
        duration = datetime.datetime.now() - self.timestamp
        secs = duration.seconds % 60
        mins = (duration.seconds // 60) % 60
        hours = (duration.seconds // 3600) % 24
        days = duration.days
        r = "%02d:%02d:%02d" % (hours, mins, secs)
        if days:
            r = "%dd %s" % (days, r)
        return r

    @property
    def effective_style(self):
        if self.custom_style:
            return self.custom_style
        else:
            return AlarmSeverity.get_severity(self.severity).style

    def get_root(self):
        """
        Get top-level root alarm
        """
        root = self
        while root.root:
            root = get_alarm(root.root)
        return root

    def update_summary(self):
        """
        Recalculate all summaries for given alarm.
        Performs recursive descent
        :return:
        """

        def update_dict(d1, d2):
            for k in d2:
                if k in d1:
                    d1[k] += d2[k]
                else:
                    d1[k] = d2[k]

        services = SummaryItem.items_to_dict(self.direct_services)
        subscribers = SummaryItem.items_to_dict(self.direct_subscribers)
        objects = {self.managed_object.object_profile.id: 1}

        for a in ActiveAlarm.objects.filter(root=self.id):
            a.update_summary()
            update_dict(objects, SummaryItem.items_to_dict(a.total_objects))
            update_dict(services, SummaryItem.items_to_dict(a.total_services))
            update_dict(subscribers, SummaryItem.items_to_dict(a.total_subscribers))
        obj_list = ObjectSummaryItem.dict_to_items(objects)
        svc_list = SummaryItem.dict_to_items(services)
        sub_list = SummaryItem.dict_to_items(subscribers)
        if (
            svc_list != self.total_services
            or sub_list != self.total_subscribers
            or obj_list != self.total_objects
        ):
            ns = ServiceSummary.get_severity(
                {"service": services, "subscriber": subscribers, "objects": objects}
            )
            self.total_objects = obj_list
            self.total_services = svc_list
            self.total_subscribers = sub_list
            if ns != self.severity:
                self.change_severity(severity=ns, to_save=False)
            self.safe_save()

    def _get_path_summary_bulk(self):
        def list_to_dict(summary):
            if not summary:
                return {}
            return {d["profile"]: d["summary"] for d in summary}

        def e_list_to_dict(summary):
            if not summary:
                return {}
            return {d.profile: d.summary for d in summary}

        def dict_to_list(d):
            return [{"profile": k, "summary": d[k]} for k in sorted(d)]

        def get_summary(docs, key, direct=None):
            r = direct.copy() if direct else {}
            for doc in docs:
                dv = doc.get(key)
                if not dv:
                    continue
                for k in dv:
                    nv = dv[k]
                    if nv:
                        r[k] = r.get(k, 0) + nv
            return r

        def get_root_path(alarm_id, path=None):
            path = path or []
            if alarm_id in path:
                raise ValueError("Loop detected: %s" % (str(x) for x in path))
            path = path + [alarm_id]
            root = alarms[alarm_id].get("root")
            if not root:
                return path
            return get_root_path(root, path)

        alarms = {}  # id -> alarm doc
        children = defaultdict(list)  # id -> [alarm doc, ..]
        # Inject current alarm
        alarms[self.id] = {
            "_id": self.id,
            "root": self.root,
            "severity": self.severity,
            "total_objects": e_list_to_dict(self.total_objects),
            "total_services": e_list_to_dict(self.total_services),
            "total_subscribers": e_list_to_dict(self.total_subscribers),
        }
        # Collect relevant neighbors
        for doc in ActiveAlarm._get_collection().aggregate(
            [
                # Starting from given alarm
                {"$match": {"_id": self.root}},
                # Add to 'path' field all alarm upwards
                {
                    "$graphLookup": {
                        "from": "noc.alarms.active",
                        "connectFromField": "root",
                        "connectToField": "_id",
                        "startWith": "$root",
                        "as": "path",
                        "maxDepth": 50,
                    }
                },
                # Append the necessary fields of given alarm to 'path' field
                # and wipe out all other fields
                {
                    "$project": {
                        "_id": 0,
                        "path": {
                            "$concatArrays": [
                                "$path",
                                [
                                    {
                                        "_id": "$_id",
                                        "root": "$root",
                                        "severity": "$severity",
                                        "direct_services": "$direct_services",
                                        "direct_subscribers": "$direct_subscribers",
                                        "total_objects": "$total_objects",
                                        "total_services": "$total_services",
                                        "total_subscribers": "$total_subscribers",
                                    }
                                ],
                            ]
                        },
                    }
                },
                # Convert path field to the list of documents
                {"$unwind": "$path"},
                # Normalize resulting documents
                {
                    "$project": {
                        "_id": "$path._id",
                        "root": "$path.root",
                        "severity": "$path.severity",
                        "direct_services": "$path.direct_services",
                        "direct_subscribers": "$path.direct_subscribers",
                        "total_objects": "$path.total_objects",
                        "total_services": "$path.total_services",
                        "total_subscribers": "$path.total_subscribers",
                    }
                },
                # Add all children alarms to 'children' field
                {
                    "$lookup": {
                        "from": "noc.alarms.active",
                        "localField": "_id",
                        "foreignField": "root",
                        "as": "children",
                    }
                },
                # Append the necessary fields of path alarms to `children` field
                # and wipe out all other fields
                {
                    "$project": {
                        "_id": 0,
                        "children": {
                            "$concatArrays": [
                                "$children",
                                [
                                    {
                                        "_id": "$_id",
                                        "root": "$root",
                                        "severity": "$severity",
                                        "direct_services": "$direct_services",
                                        "direct_subscribers": "$direct_subscribers",
                                        "total_objects": "$total_objects",
                                        "total_services": "$total_services",
                                        "total_subscribers": "$total_subscribers",
                                    }
                                ],
                            ]
                        },
                    }
                },
                # Convert path field to the list of documents
                {"$unwind": "$children"},
                # Normalize resulting documents
                {
                    "$project": {
                        "_id": "$children._id",
                        "root": "$children.root",
                        "severity": "$children.severity",
                        "direct_services": "$children.direct_services",
                        "direct_subscribers": "$children.direct_subscribers",
                        "total_objects": "$children.total_objects",
                        "total_services": "$children.total_services",
                        "total_subscribers": "$children.total_subscribers",
                    }
                },
            ]
        ):
            # May contain duplicates; perform deduplication
            doc["direct_services"] = list_to_dict(doc.get("direct_services"))
            doc["direct_subscribers"] = list_to_dict(doc.get("direct_subscribers"))
            doc["total_objects"] = list_to_dict(doc.get("total_objects"))
            doc["total_services"] = list_to_dict(doc.get("total_services"))
            doc["total_subscribers"] = list_to_dict(doc.get("total_subscribers"))
            if doc["_id"] == self.id:
                doc["root"] = self.root
            alarms[doc["_id"]] = doc

        for doc in alarms.values():
            children[doc.get("root")] += [doc]

        # Get path from the current root upwards to the global root
        # Check for loops; raise ValueError if a loop is detected
        root_path = get_root_path(self.root)
        bulk = []
        now = datetime.datetime.now()
        for root in root_path:
            doc = alarms[root]
            consequences = children[root]
            total_objects = get_summary(
                consequences, "total_objects", {self.managed_object.object_profile.id: 1}
            )
            total_services = get_summary(consequences, "total_services", doc.get("direct_services"))
            total_subscribers = get_summary(
                consequences, "total_subscribers", doc.get("direct_subscribers")
            )
            if (
                doc["total_objects"] != total_objects
                or doc["total_services"] != total_services
                or doc["total_subscribers"] != total_subscribers
            ):
                # Changed
                severity = ServiceSummary.get_severity(
                    {
                        "service": total_services,
                        "subscriber": total_subscribers,
                        "objects": total_objects,
                    }
                )
                op = {
                    "$set": {
                        "severity": severity,
                        "total_objects": dict_to_list(total_objects),
                        "total_services": dict_to_list(total_services),
                        "total_subscribers": dict_to_list(total_subscribers),
                    }
                }
                if severity != doc.get("severity"):
                    op["$push"] = {
                        "log": {
                            "timestamp": now,
                            "from_status": "A",
                            "to_status": "A",
                            "message": "Severity changed to %d" % severity,
                        }
                    }
                bulk += [UpdateOne({"_id": root}, op)]
        return bulk

    def set_root(self, root_alarm, rca_type=RCA_OTHER):
        """
        Set root cause
        """
        if self.root:
            return
        if self.id == root_alarm.id:
            raise Exception("Cannot set self as root cause")
        # Set root
        self.root = root_alarm.id
        self.rca_type = rca_type
        try:
            bulk = self._get_path_summary_bulk()
        except ValueError:
            return  # Loop detected
        bulk += [
            UpdateOne({"_id": self.id}, {"$set": {"root": root_alarm.id, "rca_type": rca_type}})
        ]
        self.log_message("Alarm %s has been marked as root cause" % root_alarm.id, bulk=bulk)
        # self.save()  Saved by log_message
        root_alarm.log_message("Alarm %s has been marked as child" % self.id, bulk=bulk)
        if self.id:
            ActiveAlarm._get_collection().bulk_write(bulk, ordered=True)

    def escalate(self, tt_id, close_tt=False, wait_tt=None):
        self.escalation_tt = tt_id
        self.escalation_ts = datetime.datetime.now()
        self.close_tt = close_tt
        self.wait_tt = wait_tt
        self.log_message("Escalated to %s" % tt_id)
        q = {"_id": self.id}
        op = {
            "$set": {
                "escalation_tt": self.escalation_tt,
                "escalation_ts": self.escalation_ts,
                "close_tt": self.close_tt,
                "wait_tt": self.wait_tt,
                "escalation_error": None,
            }
        }
        r = ActiveAlarm._get_collection().update_one(q, op)
        if r.acknowledged and not r.modified_count:
            # Already closed, update archive
            ArchivedAlarm._get_collection().update_one(q, op)

    def set_escalation_error(self, error):
        self.escalation_error = error
        self._get_collection().update_one({"_id": self.id}, {"$set": {"escalation_error": error}})

    def set_escalation_context(self):
        current_context, current_span = get_current_span()
        if current_context or self.escalation_ctx:
            self.escalation_ctx = current_context
            self._get_collection().update_one(
                {"_id": self.id}, {"$set": {"escalation_ctx": current_context}}
            )

    def set_clear_notification(self, notification_group, template):
        self.clear_notification_group = notification_group
        self.clear_template = template
        self.safe_save(save_condition={"managed_object": {"$exists": True}, "id": self.id})

    def iter_consequences(self):
        """
        Generator yielding all consequence alarms
        """
        for a in ActiveAlarm.objects.filter(root=self.id):
            yield a
            yield from a.iter_consequences()

    def iter_affected(self):
        """
        Generator yielding all affected managed objects
        """
        seen = {self.managed_object}
        yield self.managed_object
        for a in self.iter_consequences():
            if a.managed_object not in seen:
                seen.add(a.managed_object)
                yield a.managed_object

    def iter_escalated(self):
        """
        Generator yielding all escalated consequences
        """
        for a in self.iter_consequences():
            if a.escalation_tt:
                yield a

    def iter_effective_labels(self):
        return set(self.managed_object.labels or []) | set(
            self.managed_object.object_profile.labels or []
        )

    @classmethod
    def can_set_label(cls, label):
        return Label.get_effective_setting(label, "enable_alarm")
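
A hedged lifecycle sketch tying the methods above together; alarm_id and user are assumed to come from the caller, and get_alarm is the lookup helper already used in get_root:

alarm = get_alarm(alarm_id)
alarm.acknowledge(user, "investigating")
alarm.escalate("TTSYS:12345")                   # <external system name>:<external tt id>
archived = alarm.clear_alarm("Link restored")   # deletes the active copy, returns ArchivedAlarm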
Example no. 18
class ArchivedAlarm(Document):
    meta = {
        "collection": "noc.alarms.archived",
        "strict": False,
        "auto_create_index": False,
        "indexes": [
            "root",
            "timestamp",
            "managed_object",
            ("managed_object", "discriminator", "control_time"),
            "escalation_tt",
            "escalation_ts",
        ],
    }
    status = "C"

    timestamp = DateTimeField(required=True)
    clear_timestamp = DateTimeField(required=True)
    managed_object = ForeignKeyField(ManagedObject)
    alarm_class = PlainReferenceField(AlarmClass)
    severity = IntField(required=True)
    vars = DictField()
    log = ListField(EmbeddedDocumentField(AlarmLog))
    #
    opening_event = ObjectIdField(required=False)
    closing_event = ObjectIdField(required=False)
    # Number of reopens
    reopens = IntField(required=False)
    # Copied discriminator
    discriminator = StringField(required=False)
    # Manual acknowledgement timestamp
    ack_ts = DateTimeField(required=False)
    # Manual acknowledgement user name
    ack_user = StringField(required=False)
    # Control time within which the alarm will be reopened
    # instead of creating a new one
    control_time = DateTimeField(required=False)
    # RCA
    # Reference to root cause (Active Alarm or Archived Alarm instance)
    root = ObjectIdField(required=False)
    # Escalated TT ID in form
    # <external system name>:<external tt id>
    escalation_ts = DateTimeField(required=False)
    escalation_tt = StringField(required=False)
    escalation_error = StringField(required=False)
    escalation_ctx = LongField(required=False)
    escalation_close_ts = DateTimeField(required=False)
    escalation_close_error = StringField(required=False)
    escalation_close_ctx = LongField(required=False)
    # Directly affected services summary, grouped by profiles
    # (connected to the same managed object)
    direct_services = ListField(EmbeddedDocumentField(SummaryItem))
    direct_subscribers = ListField(EmbeddedDocumentField(SummaryItem))
    # Indirectly affected services summary, grouped by profiles
    # (covered by this and all inferred alarms)
    total_objects = ListField(EmbeddedDocumentField(ObjectSummaryItem))
    total_services = ListField(EmbeddedDocumentField(SummaryItem))
    total_subscribers = ListField(EmbeddedDocumentField(SummaryItem))
    # Paths
    adm_path = ListField(IntField())
    segment_path = ListField(ObjectIdField())
    container_path = ListField(ObjectIdField())
    # Uplinks, for topology_rca only
    uplinks = ListField(IntField())
    # RCA neighbor cache, for topology_rca only
    rca_neighbors = ListField(IntField())

    def __str__(self):
        return "%s" % self.id

    def iter_changed_datastream(self, changed_fields=None):
        if config.datastream.enable_alarm:
            yield "alarm", self.id

    def log_message(self, message):
        self.log += [
            AlarmLog(
                timestamp=datetime.datetime.now(),
                from_status=self.status,
                to_status=self.status,
                message=message,
            )
        ]
        self.save()

    def get_template_vars(self):
        """
        Prepare template variables
        """
        vars = self.vars.copy()
        vars.update({"alarm": self})
        return vars

    @property
    def subject(self):
        ctx = Context(self.get_template_vars())
        s = Template(self.alarm_class.subject_template).render(ctx)
        if len(s) >= 255:
            s = s[:125] + " ... " + s[-125:]
        return s

    @property
    def body(self):
        ctx = Context(self.get_template_vars())
        s = Template(self.alarm_class.body_template).render(ctx)
        return s

    @property
    def duration(self):
        dt = self.clear_timestamp - self.timestamp
        return dt.days * 86400 + dt.seconds

    @property
    def display_duration(self):
        duration = self.clear_timestamp - self.timestamp
        secs = duration.seconds % 60
        mins = (duration.seconds // 60) % 60
        hours = (duration.seconds // 3600) % 24
        days = duration.days
        if days:
            return "%dd %02d:%02d:%02d" % (days, hours, mins, secs)
        else:
            return "%02d:%02d:%02d" % (hours, mins, secs)

    @property
    def effective_style(self):
        return AlarmSeverity.get_severity(self.severity).style

    def set_root(self, root_alarm):
        pass

    def reopen(self, message):
        """
        Reopen the alarm
        """
        reopens = self.reopens or 0
        ts = datetime.datetime.now()
        log = self.log + [
            AlarmLog(
                timestamp=ts, from_status="C", to_status="A", message=message)
        ]
        a = ActiveAlarm(
            id=self.id,
            timestamp=self.timestamp,
            last_update=ts,
            managed_object=self.managed_object,
            alarm_class=self.alarm_class,
            severity=self.severity,
            vars=self.vars,
            log=log,
            root=self.root,
            escalation_ts=self.escalation_ts,
            escalation_tt=self.escalation_tt,
            escalation_error=self.escalation_error,
            escalation_ctx=self.escalation_ctx,
            opening_event=self.opening_event,
            discriminator=self.discriminator,
            reopens=reopens + 1,
            direct_services=self.direct_services,
            direct_subscribers=self.direct_subscribers,
            total_objects=self.total_objects,
            total_services=self.total_services,
            total_subscribers=self.total_subscribers,
            adm_path=self.adm_path,
            segment_path=self.segment_path,
            container_path=self.container_path,
            uplinks=self.uplinks,
        )
        a.save()
        # @todo: Clear related correlator jobs
        self.delete()
        # Send notifications
        # Do not set notifications for child and for previously reopened
        # alarms
        if not a.root and not reopens:
            a.managed_object.event(
                a.managed_object.EV_ALARM_REOPENED,
                {
                    "alarm": a,
                    "subject": a.subject,
                    "body": a.body,
                    "symptoms": a.alarm_class.symptoms,
                    "recommended_actions": a.alarm_class.recommended_actions,
                    "probable_causes": a.alarm_class.probable_causes,
                },
            )
        return a

    def iter_consequences(self):
        """
        Generator yielding all consequence alarms
        """
        for a in ArchivedAlarm.objects.filter(root=self.id):
            yield a
            yield from a.iter_consequences()

    def iter_affected(self):
        """
        Generator yielding all affected managed objects
        """
        seen = {self.managed_object}
        yield self.managed_object
        for a in self.iter_consequences():
            if a.managed_object not in seen:
                seen.add(a.managed_object)
                yield a.managed_object

    def set_escalation_close_error(self, error):
        self.escalation_close_error = error
        self._get_collection().update_one(
            {"_id": self.id}, {"$set": {"escalation_close_error": error}}
        )

    def close_escalation(self):
        now = datetime.datetime.now()
        self.escalation_close_ts = now
        self._get_collection().update_one(
            {"_id": self.id}, {"$set": {"escalation_close_ts": now}}
        )

    def set_escalation_close_ctx(self):
        current_context, current_span = get_current_span()
        if current_context or self.escalation_close_ctx:
            self.escalation_close_ctx = current_context
            self._get_collection().update_one(
                {"_id": self.id}, {"$set": {"escalation_close_ctx": current_context}}
            )
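
The control_time field lets the correlator reopen a recently closed alarm instead of raising a duplicate. A sketch under that assumption (mo and discriminator are supplied by the caller):

now = datetime.datetime.now()
a = ArchivedAlarm.objects.filter(
    managed_object=mo.id,
    discriminator=discriminator,
    control_time__gte=now,
).first()
if a:
    active = a.reopen("Reopened within control time")  # returns an ActiveAlarm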
Example no. 19
class ActionCommands(Document):
    meta = {
        "collection": "noc.actioncommands",
        "strict": False,
        "auto_create_index": False,
        "json_collection": "sa.actioncommands",
        "json_depends_on": ["sa.actions", "sa.profile"],
        "json_unique_fields": ["name"],
    }
    name = StringField(unique=True)
    uuid = UUIDField(unique=True)
    action = ReferenceField(Action)
    description = StringField()
    profile = PlainReferenceField(Profile)
    config_mode = BooleanField(default=False)
    match = ListField(EmbeddedDocumentField(PlatformMatch))
    commands = StringField()
    preference = IntField(default=1000)
    timeout = IntField(default=60)

    def __str__(self):
        return self.name

    def get_json_path(self):
        p = [quote_safe_path(n.strip()) for n in self.name.split("|")]
        return os.path.join(*p) + ".json"

    @property
    def json_data(self):
        r = {
            "name": self.name,
            "$collection": self._meta["json_collection"],
            "uuid": self.uuid,
            "action__name": self.action.name,
            "description": self.description,
            "profile__name": self.profile.name,
            "config_mode": self.config_mode,
            "match": [c.json_data for c in self.match],
            "commands": self.commands,
            "preference": self.preference,
            "timeout": self.timeout,
        }
        return r

    def to_json(self):
        return to_json(
            self.json_data,
            order=[
                "name",
                "$collection",
                "uuid",
                "action__name",
                "description",
                "profile__name",
                "config_mode",
                "preference",
                "match",
                "commands",
                "timeout",
            ],
        )
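
A short export sketch built on get_json_path and to_json; the "collections" output directory is illustrative:

import os

for ac in ActionCommands.objects():
    path = os.path.join("collections", ac._meta["json_collection"], ac.get_json_path())
    os.makedirs(os.path.dirname(path), exist_ok=True)
    with open(path, "w") as f:
        f.write(ac.to_json())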
Example no. 20
class Cert(Document):
    """Certificate mongo document model."""

    log_id = IntField(primary_key=True)
    # serial is 20 octets, see: https://tools.ietf.org/html/rfc5280#section-4.1.2.2
    serial = StringField(required=True)
    issuer = StringField(required=True)
    not_before = DateTimeField(required=True)
    not_after = DateTimeField(required=True)
    sct_or_not_before = DateTimeField(required=True)
    sct_exists = BooleanField(required=True)
    pem = StringField(required=True)
    _subjects = ListField(required=True,
                          field=StringField(),
                          db_field="subjects")
    _trimmed_subjects = ListField(required=True,
                                  field=StringField(),
                                  db_field="trimmed_subjects")

    meta = {
        "collection": "certs",
        "indexes": [
            "+_subjects",
            "+_trimmed_subjects",
            {"fields": ("+issuer", "+serial"), "unique": True},
        ],
    }

    @property
    def subjects(self):
        """Getter for subjects."""
        return self._subjects

    @subjects.setter
    def subjects(self, values):
        """Subjects setter.

        Normalizes inputs and derives trimmed_subjects.
        """
        self._subjects = list({i.lower() for i in values})
        self._trimmed_subjects = list(trim_domains(self._subjects))

    @property
    def trimmed_subjects(self):
        """Read-only property.  This is derived from the subjects."""
        return self._trimmed_subjects

    def to_x509(self):
        """Return an x509 subject for this certificate."""
        return x509.load_pem_x509_certificate(bytes(self.pem, "utf-8"),
                                              default_backend())

    @classmethod
    def from_pem(cls, pem):
        """Create a Cert model object from a PEM certificate string.

        Arguments:
        pem -- PEM encoded certificate

        Returns (cert, precert):
            cert: a Cert model object
            precert: a boolean, True if this is a precertificate, False otherwise
        """
        xcert = x509.load_pem_x509_certificate(bytes(pem, "utf-8"),
                                               default_backend())
        dns_names = get_sans_set(xcert)

        sct_or_not_before, sct_exists = get_earliest_sct(xcert)

        cert = cls()
        cert.serial = hex(xcert.serial_number)[2:]
        cert.issuer = xcert.issuer.rfc4514_string()
        cert.not_before = xcert.not_valid_before
        cert.not_after = xcert.not_valid_after
        cert.sct_or_not_before = sct_or_not_before
        cert.sct_exists = sct_exists
        cert.pem = pem
        cert.subjects = dns_names
        return cert, is_poisioned(xcert)
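
A usage sketch for from_pem; the file path and log entry id are placeholders, and the second return value flags CT precertificates:

with open("example.pem") as f:
    cert, is_precert = Cert.from_pem(f.read())
cert.log_id = 12345          # CT log entry id, assigned by the caller
if not is_precert:
    cert.save()
print(cert.trimmed_subjects)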
Example no. 21
class Interface(Document):
    """
    Interfaces
    """
    meta = {
        "collection": "noc.interfaces",
        "strict": False,
        "auto_create_index": False,
        "indexes": [
            ("managed_object", "name"),
            "mac",
            ("managed_object", "ifindex"),
            "service",
            "aggregated_interface",
        ],
    }
    managed_object = ForeignKeyField(ManagedObject)
    name = StringField()  # Normalized via Profile.convert_interface_name
    type = StringField(choices=[(x, x) for x in INTERFACE_TYPES])
    description = StringField(required=False)
    ifindex = IntField(required=False)
    mac = StringField(required=False)
    aggregated_interface = PlainReferenceField("self", required=False)
    enabled_protocols = ListField(
        StringField(choices=[(x, x) for x in INTERFACE_PROTOCOLS]), default=[])
    profile = PlainReferenceField(InterfaceProfile,
                                  default=InterfaceProfile.get_default_profile)
    # profile locked on manual user change
    profile_locked = BooleanField(required=False, default=False)
    #
    project = ForeignKeyField(Project)
    state = ForeignKeyField(ResourceState)
    vc_domain = ForeignKeyField(VCDomain)
    # Current status
    admin_status = BooleanField(required=False)
    oper_status = BooleanField(required=False)
    oper_status_change = DateTimeField(required=False,
                                       default=datetime.datetime.now)
    full_duplex = BooleanField(required=False)
    in_speed = IntField(required=False)  # Input speed, kbit/s
    out_speed = IntField(required=False)  # Output speed, kbit/s
    bandwidth = IntField(required=False)  # Configured bandwidth, kbit/s
    # Coverage
    coverage = PlainReferenceField(Coverage)
    technologies = ListField(StringField())
    # External NRI interface name
    nri_name = StringField()
    #
    service = ReferenceField(Service)

    PROFILE_LINK = "profile"

    def __unicode__(self):
        return u"%s: %s" % (self.managed_object.name, self.name)

    def iter_changed_datastream(self):
        if config.datastream.enable_managedobject:
            yield "managedobject", self.managed_object.id

    def save(self, *args, **kwargs):
        if not hasattr(self,
                       "_changed_fields") or "name" in self._changed_fields:
            self.name = self.managed_object.get_profile(
            ).convert_interface_name(self.name)
        if (not hasattr(self, "_changed_fields")
                or "mac" in self._changed_fields) and self.mac:
            self.mac = MACAddressParameter().clean(self.mac)
        try:
            super(Interface, self).save(*args, **kwargs)
        except Exception as e:
            raise ValueError("%s: %s" % (e.__doc__, e.message))
        if not hasattr(self,
                       "_changed_fields") or "service" in self._changed_fields:
            ServiceSummary.refresh_object(self.managed_object)

    def on_delete(self):
        # Remove all subinterfaces
        for si in self.subinterface_set.all():
            si.delete()
        # Unlink
        link = self.link
        if link:
            self.unlink()
        # Flush MACDB
        MACDB.objects.filter(interface=self.id).delete()

    @property
    def link(self):
        """
        Return Link instance or None
        :return:
        """
        if self.type == "aggregated":
            q = {
                "interfaces__in": [self.id] + [i.id for i in self.lag_members]
            }
        else:
            q = {"interfaces": self.id}
        return Link.objects.filter(**q).first()

    @property
    def is_linked(self):
        """
        Check interface is linked
        :returns: True if interface is linked, False otherwise
        """
        if self.type == "aggregated":
            q = {
                "interfaces": {
                    "$in": [self.id] + [i.id for i in self.lag_members]
                }
            }
        else:
            q = {"interfaces": self.id}
        return bool(Link._get_collection().with_options(
            read_preference=ReadPreference.SECONDARY_PREFERRED).find_one(q))

    def unlink(self):
        """
        Remove existing link.
        Raise ValueError if interface is not linked
        """
        link = self.link
        if link is None:
            raise ValueError("Interface is not linked")
        if link.is_ptp or link.is_lag:
            link.delete()
        else:
            raise ValueError("Cannot unlink non p-t-p link")

    def link_ptp(self, other, method=""):
        """
        Create p-t-p link with other interface
        Raise ValueError if either of interface already connected.
        :param other: Other Iface for link
        :param method: Linking method
        :type other: Interface
        :returns: Link instance
        """
        def link_mismatched_lag(agg, phy):
            """
            Try to link LAG to physical interface
            :param agg:
            :param phy:
            :return:
            """
            l_members = [i for i in agg.lag_members if i.oper_status]
            if len(l_members) > 1:
                raise ValueError("More then one active interface in LAG")
            link = Link(interfaces=l_members + [phy], discovery_method=method)
            link.save()
            return link

        # Try to check existing LAG
        el = Link.objects.filter(interfaces=self.id).first()
        if el and other not in el.interfaces:
            el = None
        if (self.is_linked or other.is_linked) and not el:
            raise ValueError("Already linked")
        if self.id == other.id:
            raise ValueError("Cannot link with self")
        if self.type in ("physical", "management"):
            if other.type in ("physical", "management"):
                # Refine LAG
                if el:
                    left_ifaces = [
                        i for i in el.interfaces if i not in (self, other)
                    ]
                    if left_ifaces:
                        el.interfaces = left_ifaces
                        el.save()
                    else:
                        el.delete()
                #
                link = Link(interfaces=[self, other], discovery_method=method)
                link.save()
                return link
            elif other.type == "aggregated" and other.profile.allow_lag_mismatch:
                return link_mismatched_lag(other, self)
            else:
                raise ValueError("Cannot connect %s interface to %s" %
                                 (self.type, other.type))
        elif self.type == "aggregated":
            # LAG
            if other.type == "aggregated":
                # Check LAG size match
                # Skip already linked members
                l_members = [i for i in self.lag_members if not i.is_linked]
                r_members = [i for i in other.lag_members if not i.is_linked]
                if len(l_members) != len(r_members):
                    raise ValueError("LAG size mismatch")
                # Create link
                if l_members:
                    link = Link(interfaces=l_members + r_members,
                                discovery_method=method)
                    link.save()
                    return link
                else:
                    return
            elif self.profile.allow_lag_mismatch:
                return link_mismatched_lag(self, other)
            else:
                raise ValueError("Cannot connect %s interface to %s" %
                                 (self.type, other.type))
        raise ValueError("Cannot link")

    @classmethod
    def get_interface(cls, s):
        """
        Parse <managed object>@<interface> string
        and return interface instance or None
        """
        if "@" not in s:
            raise ValueError("Invalid interface: %s" % s)
        o, i = s.rsplit("@", 1)
        # Get managed object
        try:
            mo = ManagedObject.objects.get(name=o)
        except ManagedObject.DoesNotExist:
            raise ValueError("Invalid manged object: %s" % o)
        # Normalize interface name
        i = mo.get_profile().convert_interface_name(i)
        # Look for interface
        iface = Interface.objects.filter(managed_object=mo.id, name=i).first()
        return iface

    @property
    def subinterface_set(self):
        from .subinterface import SubInterface
        return SubInterface.objects.filter(interface=self.id)

    @property
    def lag_members(self):
        if self.type != "aggregated":
            raise ValueError(
                "Cannot get LAG members for non-aggregated interface")
        return Interface.objects.filter(aggregated_interface=self.id)

    @property
    def effective_vc_domain(self):
        if self.type in ("null", "tunnel", "other", "unknown"):
            return None
        if self.vc_domain:
            return self.vc_domain
        if self.managed_object.vc_domain:
            return self.managed_object.vc_domain
        return VCDomain.get_default()

    @property
    def status(self):
        """
        Returns interface status in form of
        Up/100/Full
        """
        def humanize_speed(speed):
            if not speed:
                return "-"
            for t, n in [(1000000, "G"), (1000, "M"), (1, "k")]:
                if speed >= t:
                    if speed // t * t == speed:
                        return "%d%s" % (speed // t, n)
                    else:
                        return "%.2f%s" % (float(speed) / t, n)
            return str(speed)

        s = [{True: "Up", False: "Down", None: "-"}[self.oper_status]]
        # Speed
        if self.oper_status:
            if self.in_speed and self.in_speed == self.out_speed:
                s += [humanize_speed(self.in_speed)]
            else:
                s += [
                    humanize_speed(self.in_speed),
                    humanize_speed(self.out_speed)
                ]
            s += [{True: "Full", False: "Half", None: "-"}[self.full_duplex]]
        else:
            s += ["-", "-"]
        return "/".join(s)

    def set_oper_status(self, status):
        """
        Set current oper status
        """
        if self.oper_status == status:
            return
        now = datetime.datetime.now()
        if self.oper_status != status and (not self.oper_status_change
                                           or self.oper_status_change < now):
            self.update(oper_status=status, oper_status_change=now)
            if self.profile.status_change_notification:
                logger.debug("Sending status change notification to %s",
                             self.profile.status_change_notification.name)
                self.profile.status_change_notification.notify(
                    subject="[%s] Interface %s(%s) is %s" %
                    (self.managed_object.name, self.name, self.description
                     or "", "up" if status else "down"),
                    body="Interface %s (%s) is %s" %
                    (self.name, self.description
                     or "", "up" if status else "down"))

    @property
    def parent(self):
        """
        Returns aggregated interface for LAG or
        self for non-aggregated interface
        """
        if self.aggregated_interface:
            return self.aggregated_interface
        else:
            return self

    def get_profile(self):
        if self.profile:
            return self.profile
        return InterfaceProfile.get_default_profile()
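
A minimal usage sketch for the Interface model above, assuming a mongoengine connection is already configured; the object names and port names are invented for illustration:

# Hedged sketch: "sw1"/"sw2" and the interface names are hypothetical.
left = Interface.get_interface("sw1@Gi 0/1")
right = Interface.get_interface("sw2@Gi 0/1")
if left and right and not (left.is_linked or right.is_linked):
    link = left.link_ptp(right, method="manual")  # returns the new Link
    print(left.status)  # e.g. "Up/1G/Full" when oper data is present
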
Example 22
class VPN(Document):
    meta = {"collection": "vpns", "strict": False, "auto_create_index": False}

    name = StringField(unique=True)
    profile = PlainReferenceField(VPNProfile)
    description = StringField()
    state = PlainReferenceField(State)
    # Link to parent overlay
    parent = PlainReferenceField("self")
    project = ForeignKeyField(Project)
    route_target = ListField(EmbeddedDocumentField(RouteTargetItem))
    # Labels
    labels = ListField(StringField())
    effective_labels = ListField(StringField())
    # Integration with external NRI and TT systems
    # Reference to remote system object has been imported from
    remote_system = PlainReferenceField(RemoteSystem)
    # Object id in remote system
    remote_id = StringField()
    # Object id in BI
    bi_id = LongField(unique=True)
    # @todo: last_seen
    # @todo: expired

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __str__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return VPN.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        return VPN.objects.filter(bi_id=id).first()

    def clean(self):
        super().clean()
        if self.id and "parent" in self._changed_fields and self.has_loop:
            raise ValidationError("Creating VPN loop")

    @property
    def has_loop(self):
        """
        Check if object creates loop
        """
        if not self.id:
            return False
        p = self.parent
        while p:
            if p.id == self.id:
                return True
            p = p.parent
        return False

    @classmethod
    def can_set_label(cls, label):
        return Label.get_effective_setting(label, "enable_vpn")
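
The clean()/has_loop pair rejects cycles in the parent chain before they reach the database. A hedged sketch of the failure mode, assuming a configured connection (names are illustrative):

from mongoengine import ValidationError

core = VPN(name="core").save()
leaf = VPN(name="leaf", parent=core).save()
core.parent = leaf  # would create core -> leaf -> core
try:
    core.save()  # clean() raises ValidationError("Creating VPN loop")
except ValidationError:
    core.parent = None  # discard the looping change
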
Example 23
class Vendor(Document):
    """
    Equipment vendor
    """
    meta = {
        "collection": "noc.vendors",
        "strict": False,
        "auto_create_index": False,
        "json_collection": "inv.vendors",
        "json_unique_fields": ["name", "code"]
    }
    # Short vendor name, included as first part of platform
    name = StringField(unique=True)
    # Full vendor name
    full_name = StringField()
    # Unique id
    uuid = UUIDField(binary=True)
    # List of vendor codes to be searched via .get_by_code()
    code = ListField(StringField(), unique=True)
    # Vendor's site
    site = URLField(required=False)
    # Object id in BI
    bi_id = LongField(unique=True)

    _id_cache = cachetools.TTLCache(1000, ttl=60)
    _bi_id_cache = cachetools.TTLCache(1000, ttl=60)
    _code_cache = cachetools.TTLCache(1000, ttl=60)
    _ensure_cache = cachetools.TTLCache(1000, ttl=60)

    def __unicode__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"), lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return Vendor.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"), lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        return Vendor.objects.filter(bi_id=id).first()

    @classmethod
    def _get_by_code(cls, code):
        """
        Uncached version of get_by_code
        :param code:
        :return:
        """
        code = code.upper()
        return Vendor.objects.filter(code=code).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_code_cache"),
                             lock=lambda _: id_lock)
    def get_by_code(cls, code):
        return cls._get_by_code(code)

    def clean(self):
        # Convert code to list
        if isinstance(self.code, six.string_types):
            self.code = [self.code]
        # Uppercase code
        self.code = [c.upper() for c in self.code]
        # Fill full name if not set
        if not self.full_name:
            self.full_name = self.name
        #
        super(Vendor, self).clean()

    def on_save(self):
        if not hasattr(self, "_changed_fields") or "name" in self._changed_fields:
            from .platform import Platform

            for p in Platform.objects.filter(vendor=self.id):
                p.save()  # Rebuild full name

    def to_json(self):
        return to_json({
            "name": self.name,
            "$collection": self._meta["json_collection"],
            "full_name": self.full_name,
            "code": self.code,
            "site": self.site,
            "uuid": self.uuid
        },
            order=["name", "uuid", "full_name", "code", "site"]
        )

    def get_json_path(self):
        return "%s.json" % self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_ensure_cache"), lock=lambda _: id_lock)
    def ensure_vendor(cls, code):
        """
        Get or create vendor by code
        :param code:
        :return:
        """
        while True:
            vendor = Vendor._get_by_code(code)
            if vendor:
                return vendor
            try:
                vendor = Vendor(name=code, full_name=code, code=[code], uuid=uuid.uuid4())
                vendor.save()
                return vendor
            except NotUniqueError:
                pass  # Already created by concurrent process, reread
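
Vendor.ensure_vendor is a race-safe get-or-create: read first, insert on miss, and treat NotUniqueError as "a concurrent process won the race", then reread. The same pattern in isolation, as a runnable sketch with a made-up model:

from mongoengine import Document, StringField, NotUniqueError

class Tag(Document):  # hypothetical stand-in for Vendor
    name = StringField(unique=True)

def ensure_tag(name: str) -> Tag:
    while True:
        tag = Tag.objects.filter(name=name).first()
        if tag:
            return tag
        try:
            return Tag(name=name).save()
        except NotUniqueError:
            pass  # lost the race; loop and reread the winner's document
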
Example 24
class TranslationGroup(EmbeddedDocument):
    translationLemmas = ListField(ReferenceField(Lemma))
    restriction = EmbeddedDocumentField(Restriction)
    exampleGroups = ListField(EmbeddedDocumentField(ExampleGroup))
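
TranslationGroup is an EmbeddedDocument, so it is stored inline inside some parent document rather than in its own collection. The source does not show that parent; a plausible, purely hypothetical container looks like:

class DictionaryEntry(Document):  # hypothetical parent, not part of the source
    lemma = ReferenceField(Lemma)
    translationGroups = ListField(EmbeddedDocumentField(TranslationGroup))
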
Example 25
class ConnectionType(Document):
    """
    Equipment connection type
    """

    meta = {
        "collection": "noc.connectiontypes",
        "strict": False,
        "auto_create_index": False,
        "indexes": ["extend", "data", "c_group"],
        "json_collection": "inv.connectiontypes",
        "json_unique_fields": ["name", "uuid"],
    }

    name = StringField(unique=True)
    is_builtin = BooleanField(default=False)
    description = StringField()
    # Type extends another type, if not null
    extend = PlainReferenceField("self", required=False)
    # List of available genders
    genders = StringField(
        choices=[
            "s",  # Genderless connection
            "ss",  # Genderless connection 2 or more objects
            "m",  # Only male type
            "f",  # Only female type
            "mmf",  # female, 1 or more males
            "mf",  # male-female
            "mff",  # male, 2 or more females
        ],
        default="mf",
    )
    # ModelData
    data = DictField(default=dict)
    # Compatible group
    # A connection is compatible with the opposite gender of its own type
    # and with all types sharing any c_group
    c_group = ListField(StringField())
    uuid = UUIDField(binary=True)

    OPPOSITE_GENDER = {"s": "s", "m": "f", "f": "m"}
    category = ObjectIdField()

    def __str__(self):
        return self.name

    @property
    def json_data(self):
        r = {
            "name": self.name,
            "$collection": self._meta["json_collection"],
            "uuid": self.uuid,
            "description": self.description,
            "genders": self.genders,
            "c_group": self.c_group,
        }
        if self.extend:
            r["extend__name"] = self.extend.name
        return r

    def to_json(self):
        return to_json(self.json_data, order=["name", "$collection", "uuid", "description"])

    def get_json_path(self):
        p = [quote_safe_path(n.strip()) for n in self.name.split("|")]
        return os.path.join(*p) + ".json"

    def get_effective_data(self):
        """
        Calculate effective data
        :return:
        """
        raise NotImplementedError

    def get_superclasses(self):
        s = []
        c = self
        while c:
            c = c.extend
            if c:
                s += [c]
        return s

    def get_subclasses(self):
        s = []
        for c in ConnectionType.objects.filter(extend=self.id):
            s += [c] + c.get_subclasses()
        return s

    def get_inheritance_path(self, other):
        s = []
        # Upward direction
        c = self
        while c:
            s.insert(0, c)
            if other.id == c.id:
                return s
            c = c.extend
        # Not found, try downward direction
        s = []
        c = other
        while c:
            s.insert(0, c)
            if self.id == c.id:
                return s
            c = c.extend
        return s

    def get_by_c_group(self):
        c_group = self.c_group
        if not c_group:
            return []
        r = []
        for ct in ConnectionType.objects.filter(c_group__in=c_group):
            if ct.id != self.id:
                r += [ct]
        return r

    def get_compatible_types(self, gender):
        r = []
        og = self.OPPOSITE_GENDER[gender]
        # Add own type if the opposite gender is allowed
        if og in self.genders:
            r += [self.id]
        if gender in ["m", "s"]:
            # Add superclasses
            for c in self.get_superclasses():
                if og in c.genders:
                    r += [c.id]
        if gender in ["f", "s"]:
            # Add subclasses
            for c in self.get_subclasses():
                if og in c.genders:
                    r += [c.id]
        if self.c_group:
            for c in self.get_by_c_group():
                if og in c.genders:
                    r += [c.id]
        return r
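
get_compatible_types() resolves the gender table and the extend hierarchy into a list of matching type ids. A hedged lookup sketch (the type name is an assumption):

ct = ConnectionType.objects.filter(name="Electrical | RJ45").first()  # hypothetical name
if ct:
    for other in ConnectionType.objects.filter(id__in=ct.get_compatible_types("m")):
        print(other.name, other.genders)  # types that offer the opposite ("f") side
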
Example 26
class Contributions(DynamicDocument):
    project = LazyReferenceField(
        "Projects", required=True, passthrough=True, reverse_delete_rule=CASCADE
    )
    identifier = StringField(required=True, help_text="material/composition identifier")
    formula = StringField(help_text="formula (set dynamically if not provided)")
    is_public = BooleanField(
        required=True, default=False, help_text="public/private contribution"
    )
    last_modified = DateTimeField(
        required=True, default=datetime.utcnow, help_text="time of last modification"
    )
    data = DictField(
        default=dict, validation=valid_dict, help_text="simple free-form data"
    )
    structures = ListField(ReferenceField("Structures"), default=list, max_length=10)
    tables = ListField(ReferenceField("Tables"), default=list, max_length=10)
    notebook = LazyReferenceField("Notebooks", passthrough=True)
    meta = {
        "collection": "contributions",
        "indexes": [
            "project",
            "identifier",
            "formula",
            "is_public",
            "last_modified",
            {"fields": [(r"data.$**", 1)]},
            "notebook",
        ]
        + list(COMPONENTS.keys()),
    }

    @classmethod
    def post_init(cls, sender, document, **kwargs):
        # replace existing structures/tables with according ObjectIds
        for component, fields in COMPONENTS.items():
            lst = getattr(document, component)
            if lst and lst[0].id is None:  # id is None for incoming POST
                dmodule = import_module(f"mpcontribs.api.{component}.document")
                klass = component.capitalize()
                Docs = getattr(dmodule, klass)
                vmodule = import_module(f"mpcontribs.api.{component}.views")
                Resource = getattr(vmodule, f"{klass}Resource")
                resource = Resource()
                for i, o in enumerate(lst):
                    d = resource.serialize(o, fields=fields)
                    s = json.dumps(d, sort_keys=True).encode("utf-8")
                    digest = md5(s).hexdigest()
                    obj = Docs.objects(md5=digest).only("id").first()
                    if obj:
                        obj.reload()
                        lst[i] = obj

    @classmethod
    def pre_save_post_validation(cls, sender, document, **kwargs):
        if kwargs.get("skip"):
            return

        # set formula field
        if hasattr(document, "formula") and not document.formula:
            formulae = current_app.config["FORMULAE"]
            document.formula = formulae.get(document.identifier, document.identifier)

        # project is LazyReferenceField
        project = document.project.fetch()

        # run data through Pint Quantities and save as dicts
        def make_quantities(path, key, value):
            if key in quantity_keys or not isinstance(value, (str, int, float)):
                return key, value

            str_value = str(value)
            if str_value.count(" ") > 1:
                return key, value

            q = get_quantity(str_value)
            if not q:
                return key, value

            # silently ignore "nan"
            if isnan(q.nominal_value):
                return False

            # try compact representation
            qq = q.value.to_compact()
            q = new_error_units(q, qq)

            # reduce dimensionality if possible
            if not q.check(0):
                qq = q.value.to_reduced_units()
                q = new_error_units(q, qq)

            # ensure that the same units are used across contributions
            field = delimiter.join(["data"] + list(path) + [key])
            try:
                column = project.columns.get(path=field)
                if column.unit != str(q.value.units):
                    qq = q.value.to(column.unit)
                    q = new_error_units(q, qq)
            except DoesNotExist:
                pass  # column doesn't exist yet (generated in post_save)
            except DimensionalityError:
                raise ValueError(f"Can't convert [{q.units}] to [{column.unit}]!")

            # significant digits
            q = truncate_digits(q)

            # return new value dict
            display = str(q.value) if isnan(q.std_dev) else str(q)
            value = {
                "display": display,
                "value": q.nominal_value,
                "error": q.std_dev,
                "unit": str(q.units),
            }
            return key, value

        document.data = remap(document.data, visit=make_quantities, enter=enter)

    @classmethod
    def post_save(cls, sender, document, **kwargs):
        if kwargs.get("skip"):
            return

        # project is LazyReferenceField
        project = document.project.fetch()

        # set columns field for project
        def update_columns(path, key, value):
            path = delimiter.join(["data"] + list(path) + [key])
            is_quantity = isinstance(value, dict) and quantity_keys.issubset(
                value.keys()
            )
            is_text = bool(
                not is_quantity and isinstance(value, str) and key not in quantity_keys
            )
            if is_quantity or is_text:
                project.reload("columns")
                try:
                    column = project.columns.get(path=path)
                    if is_quantity:
                        v = value["value"]
                        if isnan(column.max) or v > column.max:
                            column.max = v
                        if isnan(column.min) or v < column.min:
                            column.min = v

                except DoesNotExist:
                    column = {"path": path}
                    if is_quantity:
                        column["unit"] = value["unit"]
                        column["min"] = column["max"] = value["value"]

                    project.columns.create(**column)

                project.save().reload("columns")
                ncolumns = len(project.columns)
                if ncolumns > 50:
                    raise ValueError("Reached maximum number of columns (50)!")

            return True

        # run update_columns over document data
        remap(document.data, visit=update_columns, enter=enter)

        # add/remove columns for other components
        for path in COMPONENTS.keys():
            try:
                project.columns.get(path=path)
            except DoesNotExist:
                if getattr(document, path):
                    project.columns.create(path=path)
                    project.save().reload("columns")

        document.last_modified = datetime.utcnow()

    @classmethod
    def pre_delete(cls, sender, document, **kwargs):
        args = ["notebook"] + list(COMPONENTS.keys())
        document.reload(*args)

        # remove reference documents
        if document.notebook is not None:
            from mpcontribs.api.notebooks.document import Notebooks

            Notebooks.objects(id=document.notebook.id).delete()

        for component in COMPONENTS.keys():
            # check if other contributions exist before deletion!
            for obj in getattr(document, component):
                q = {component: obj.id}
                if sender.objects(**q).count() < 2:
                    obj.delete()

    @classmethod
    def post_delete(cls, sender, document, **kwargs):
        if kwargs.get("skip"):
            return

        # reset columns field for project
        project = document.project.fetch()

        for column in list(project.columns):
            if not isnan(column.min) and not isnan(column.max):
                column.min, column.max = get_min_max(sender, column.path)
                if isnan(column.min) and isnan(column.max):
                    # just deleted last contribution with this column
                    project.update(pull__columns__path=column.path)
            else:
                # use wildcard index if available -> single field query
                field = column.path.replace(delimiter, "__") + "__type"
                qs = sender.objects(**{field: "string"}).only(column.path)

                if qs.count() < 1 or qs.filter(project__name=project.name).count() < 1:
                    project.update(pull__columns__path=column.path)
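
The classmethods above follow mongoengine's signal-handler signature (sender, document, **kwargs), so somewhere in the application they are presumably registered via mongoengine.signals. A sketch of that wiring, which is an assumption about code not shown here:

from mongoengine import signals

signals.post_init.connect(Contributions.post_init, sender=Contributions)
signals.pre_save_post_validation.connect(
    Contributions.pre_save_post_validation, sender=Contributions
)
signals.post_save.connect(Contributions.post_save, sender=Contributions)
signals.pre_delete.connect(Contributions.pre_delete, sender=Contributions)
signals.post_delete.connect(Contributions.post_delete, sender=Contributions)
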
Example 27
class MapSettings(Document):
    meta = {
        "collection": "noc.mapsettings",
        "strict": False,
        "auto_create_index": False
    }

    # Segment or selector id
    segment = StringField(unique=True)
    # Change time
    changed = DateTimeField()
    # Changing user
    user = StringField()
    # Paper size
    width = FloatField()
    height = FloatField()
    #
    nodes = ListField(EmbeddedDocumentField(NodeSettings))
    links = ListField(EmbeddedDocumentField(LinkSettings))

    def __str__(self):
        return self.segment

    def get_nodes(self):
        """
        Returns a dict of id -> Node settings
        """
        nodes = {}
        for n in self.nodes:
            nodes[n.node] = n
        return nodes

    @classmethod
    def load_json(cls, data, user=None):
        """
        Load json data of:
        id
        nodes:
            id
            x
            y
        links:
            id
            vertices:
                x
                y
        """
        d = MapSettings.objects.filter(segment=data["id"]).first()
        if d:
            logger.info("Updating settings for %s", data["id"])
        else:
            logger.info("Creating new settings for %s", data["id"])
            d = MapSettings(segment=data["id"], nodes=[], links=[])
        # Update meta
        if user:
            d.user = user
        d.changed = datetime.datetime.now()
        # Update nodes
        new_nodes = {}  # id -> data
        for n in data.get("nodes", []):
            new_nodes[(n["type"], n["id"])] = n
        nn = []
        for n in d.nodes:
            nd = new_nodes.get((n.type, n.id))
            if not nd:
                continue  # Not found
            n.x = nd["x"]
            n.y = nd["y"]
            nn += [n]
            del new_nodes[(n.type, n.id)]
        mx = 0.0
        my = 0.0
        for n in new_nodes:
            nd = new_nodes[n]
            nn += [
                NodeSettings(type=nd["type"],
                             id=nd["id"],
                             x=nd["x"],
                             y=nd["y"])
            ]
            mx = max(mx, nd["x"])
            my = max(my, nd["y"])
        d.width = data.get("width", mx)
        d.height = data.get("height", my)
        d.nodes = sorted(nn, key=lambda x: (x.type, x.id))
        # Update links
        new_links = {}
        for l in data.get("links", []):
            new_links[(l["type"], l["id"])] = l
        nn = []
        for l in d.links:
            nl = new_links.get((l.type, l.id))
            if not nl:
                continue  # Not found
            l.vertices = [
                VertexPosition(x=v["x"], y=v["y"])
                for v in nl.get("vertices", [])
            ]
            l.connector = nl.get("connector", LC_NORMAL)
            nn += [l]
            del new_links[(l.type, l.id)]
        for l in new_links:
            nl = new_links[l]
            nn += [
                LinkSettings(
                    type=nl["type"],
                    id=nl["id"],
                    vertices=[
                        VertexPosition(x=v["x"], y=v["y"])
                        for v in nl.get("vertices", [])
                    ],
                    connector=nl.get("connector", "normal"),
                )
            ]
        d.links = [
            l for l in sorted(nn, key=lambda x: (x.type, x.id))
            if l.vertices or l.connector != LC_NORMAL
        ]
        # Finally save
        d.save()
        return d
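
A payload sketch for load_json() matching the shape described in its docstring; the ids, types and coordinates are invented:

payload = {
    "id": "segment-1",  # hypothetical segment id
    "nodes": [
        {"type": "managedobject", "id": "101", "x": 120.0, "y": 80.0},
    ],
    "links": [
        {
            "type": "link",
            "id": "201",
            "vertices": [{"x": 200.0, "y": 150.0}],
            "connector": "normal",
        },
    ],
}
settings = MapSettings.load_json(payload, user="admin")
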
Example 28
class ServiceSummary(Document):
    meta = {
        "collection": "noc.servicesummary",
        "strict": False,
        "auto_create_index": False,
        "indexes": ["managed_object", "interface"],
    }
    managed_object = IntField()
    interface = ObjectIdField()
    service = ListField(EmbeddedDocumentField(SummaryItem))
    subscriber = ListField(EmbeddedDocumentField(SummaryItem))

    @classmethod
    def build_summary_for_object(cls, managed_object):
        """
        Build active services summary for managed object
        :param managed_object: Managed Object id
        :return: dict of interface id -> {service: ..., subscriber: ....}
            interface None means unbound or box-wise services
        """
        from noc.inv.models.interface import Interface
        from noc.sa.models.service import Service

        def iter_services(sd):
            yield sd
            for cs in Service._get_collection().find(
                {
                    "parent": sd["_id"],
                    "logical_status": "R"
                },
                {
                    "_id": 1,
                    "subscriber": 1,
                    "profile": 1
                },
            ):
                for ns in iter_services(cs):
                    yield ns

        def add_dict(d1, d2):
            """
            Add all d2 values to d1
            :param d1:
            :param d2:
            :return:
            """
            for k in d2:
                d1[k] = d1.get(k, 0) + d2[k]

        # service -> interface bindings
        svc_interface = dict((
            x["service"], x["_id"]
        ) for x in Interface._get_collection().find(
            {
                "managed_object": managed_object,
                "service": {
                    "$exists": True
                }
            },
            {
                "_id": 1,
                "service": 1
            },
            comment=
            "[servicesummary.build_summary_for_object] Getting services for interfaces",
        ))
        # Iterate over object's services
        # And walk underlying tree
        ri = {}
        for svc in Service._get_collection().find(
            {
                "managed_object": managed_object,
                "logical_status": "R"
            },
            {
                "_id": 1,
                "subscriber": 1,
                "profile": 1
            },
                comment=
                "[servicesummary.build_summary_for_object] Getting object services for object",
        ):
            # All subscribers for underlying tree
            subscribers = set()
            # profile_id -> count
            svc_profiles = defaultdict(int)
            for s in iter_services(svc):
                subscribers.add(s["subscriber"])
                svc_profiles[s["profile"]] += 1
            # Get subscriber profiles count
            ra = Subscriber._get_collection().aggregate([
                {
                    "$match": {
                        "_id": {
                            "$in": list(subscribers)
                        }
                    }
                },
                {
                    "$group": {
                        "_id": "$profile",
                        "total": {
                            "$sum": 1
                        }
                    }
                },
            ])
            subscriber_profiles = dict((x["_id"], x["total"]) for x in ra)
            # Bind to interface
            # None for unbound services
            iface = svc_interface.get(svc["_id"])
            if iface in ri:
                add_dict(ri[iface]["service"], svc_profiles)
                add_dict(ri[iface]["subscriber"], subscriber_profiles)
            else:
                ri[iface] = {
                    "service": dict(svc_profiles),  # defaultdict -> dict
                    "subscriber": subscriber_profiles,
                }
        return ri

    @classmethod
    def refresh_object(cls, managed_object):
        if hasattr(managed_object, "id"):
            managed_object = managed_object.id
        call_later("noc.sa.models.servicesummary.refresh_object",
                   delay=20,
                   managed_object=managed_object)

    @classmethod
    def _refresh_object(cls, managed_object):
        from noc.sa.models.managedobject import ManagedObject
        from noc.inv.models.networksegment import NetworkSegment

        def to_dict(v):
            return dict((r["profile"], r["summary"]) for r in v)

        def to_list(v):
            return [{"profile": k, "summary": v[k]} for k in sorted(v)]

        if hasattr(managed_object, "id"):
            managed_object = managed_object.id
        coll = ServiceSummary._get_collection()
        bulk = []
        # Get existing summary
        old_summary = dict((x["interface"], x) for x in coll.find(
            {"managed_object": managed_object},
            {
                "_id": 1,
                "interface": 1,
                "service": 1,
                "subscriber": 1
            },
            comment=
            "[servicesummary._refresh_object] Refresh summary of services for managed object",
        ))
        # Get actual summary
        new_summary = ServiceSummary.build_summary_for_object(managed_object)
        # Merge summaries
        for iface in old_summary:
            if iface not in new_summary:
                # Stale, delete
                bulk += [DeleteOne({"_id": old_summary[iface]["_id"]})]
                continue
            oi = old_summary[iface]
            old_services = to_dict(oi["service"])
            old_subs = to_dict(oi["subscriber"])
            ni = new_summary[iface]
            if old_services != ni["service"] or old_subs != ni["subscriber"]:
                # Changed, update
                bulk += [
                    UpdateOne(
                        {"_id": oi["_id"]},
                        {
                            "$set": {
                                "service": to_list(ni["service"]),
                                "subscriber": to_list(ni["subscriber"]),
                            }
                        },
                    )
                ]
            # Mark as processed
            del new_summary[iface]
        # Process new items
        bulk += [
            InsertOne({
                "managed_object": managed_object,
                "interface": iface,
                "service": to_list(new_summary[iface]["service"]),
                "subscriber": to_list(new_summary[iface]["subscriber"]),
            }) for iface in new_summary
        ]
        if bulk:
            logger.info("Committing changes to database")
            try:
                r = coll.bulk_write(bulk, ordered=False)
                logger.info("Database has been synced")
                logger.info("Modify: %d, Deleted: %d", r.modified_count,
                            r.deleted_count)
            except BulkWriteError as e:
                logger.error("Bulk write error: '%s'", e.details)
                logger.error("Stopping check")
        mo = ManagedObject.get_by_id(managed_object)
        NetworkSegment.update_summary(mo.segment)

    @classmethod
    def get_object_summary(cls, managed_object):
        def to_dict(v):
            return dict((r["profile"], r["summary"]) for r in v)

        if hasattr(managed_object, "id"):
            managed_object = managed_object.id
        r = {"service": {}, "subscriber": {}, "interface": {}}
        for ss in ServiceSummary._get_collection().find(
            {"managed_object": managed_object},
            {
                "interface": 1,
                "service": 1,
                "subscriber": 1
            },
                comment=
                "[servicesummary.get_object_summary] Getting summary of services for object",
        ):
            ds = to_dict(ss["service"])
            if ss.get("interface"):
                r["interface"][ss["interface"]] = {"service": ds}
            for k, v in ds.items():
                if k in r["service"]:
                    r["service"][k] += v
                else:
                    r["service"][k] = v
            ds = to_dict(ss["subscriber"])
            if ss.get("interface"):
                r["interface"][ss["interface"]]["subscriber"] = ds
            for k, v in ds.items():
                if k in r["subscriber"]:
                    r["subscriber"][k] += v
                else:
                    r["subscriber"][k] = v
        return r

    @classmethod
    def get_objects_summary(cls, managed_objects):
        def to_dict(v):
            return dict((r["profile"], r["summary"]) for r in v)

        kk = {}
        for ss in ServiceSummary._get_collection().find(
            {
                "managed_object": {
                    "$in": [getattr(mo, "id", mo) for mo in managed_objects]
                }
            },
            {
                "managed_object": 1,
                "interface": 1,
                "service": 1,
                "subscriber": 1
            },
                comment=
                "[servicesummary.get_objects_summary] Getting summary of services for objects list",
        ):
            r = {"service": {}, "subscriber": {}, "interface": {}}
            ds = to_dict(ss["service"])
            if ss.get("interface"):
                r["interface"][ss["interface"]] = {"service": ds}
            for k, v in ds.items():
                if k in r["service"]:
                    r["service"][k] += v
                else:
                    r["service"][k] = v
            ds = to_dict(ss["subscriber"])
            if ss.get("interface"):
                r["interface"][ss["interface"]]["subscriber"] = ds
            for k, v in ds.items():
                if k in r["subscriber"]:
                    r["subscriber"][k] += v
                else:
                    r["subscriber"][k] = v
            kk[ss["managed_object"]] = r
        return kk

    @classmethod
    def get_weight(cls, summary):
        """
        Convert result of *get_object_summary* to alarm weight
        """
        w = 0
        subscribers = summary.get("subscriber", {})
        for s in subscribers:
            sp = SubscriberProfile.get_by_id(s)
            if sp and sp.weight:
                w += sp.weight * subscribers[s]
        services = summary.get("service", {})
        for s in services:
            sp = ServiceProfile.get_by_id(s)
            if sp and sp.weight:
                w += sp.weight * services[s]
        objects = summary.get("object", {})
        for s in objects:
            sp = ManagedObjectProfile.get_by_id(s)
            if sp and sp.weight:
                w += sp.weight * objects[s]
        return w

    @classmethod
    def get_severity(cls, summary):
        """
        Convert result of *get_object_summary* to alarm severity
        """
        from noc.fm.models.alarmseverity import AlarmSeverity

        return AlarmSeverity.severity_for_weight(cls.get_weight(summary))

    @classmethod
    def get_direct_summary(cls, managed_objects, summary_all=False):
        """
        ! Method works on mongodb version 3.4 and greater
        Calculate direct services and profiles for a list of managed objects
        :param managed_objects: List of managed object instances or ids
        :param summary_all: Return summary for all services
        :return: tuple of service and subscriber dicts
        """
        services = {}
        subscribers = {}
        pipeline = []
        if not summary_all:
            # Filter managed objects
            pipeline += [{
                "$match": {
                    "managed_object": {
                        "$in":
                        [getattr(mo, "id", mo) for mo in managed_objects]
                    }
                }
            }]
        # Mark service and profile with type field
        pipeline += [
            {
                "$project": {
                    "_id": 0,
                    "service": {
                        "$map": {
                            "input": "$service",
                            "as": "svc",
                            "in": {
                                "type": "svc",
                                "profile": "$$svc.profile",
                                "summary": "$$svc.summary",
                            },
                        }
                    },
                    "subscriber": {
                        "$map": {
                            "input": "$subscriber",
                            "as": "sub",
                            "in": {
                                "type": "sub",
                                "profile": "$$sub.profile",
                                "summary": "$$sub.summary",
                            },
                        }
                    },
                }
            },
            # Concatenate services and profiles
            {
                "$project": {
                    "summary": {
                        "$concatArrays": ["$service", "$subscriber"]
                    }
                }
            },
            # Unwind *summary* array to independent records
            {
                "$unwind": "$summary"
            },
            # Group by (type, profile)
            {
                "$group": {
                    "_id": {
                        "type": "$summary.type",
                        "profile": "$summary.profile"
                    },
                    "summary": {
                        "$sum": "$summary.summary"
                    },
                }
            },
        ]  # noqa
        try:
            for doc in ServiceSummary._get_collection().aggregate(pipeline):
                profile = doc["_id"]["profile"]
                if doc["_id"]["type"] == "svc":
                    services[profile] = services.get(profile,
                                                     0) + doc["summary"]
                else:
                    subscribers[profile] = subscribers.get(profile,
                                                           0) + doc["summary"]
        except OperationFailure:
            # for MongoDB older than 3.4
            pass
        return services, subscribers
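
Putting the pieces together: get_direct_summary() aggregates per-profile counters, and get_weight()/get_severity() turn them into an alarm weight. A hedged sketch with placeholder ids:

mo_ids = [101, 102]  # placeholder managed object ids
services, subscribers = ServiceSummary.get_direct_summary(mo_ids)
summary = {"service": services, "subscriber": subscribers}
print(ServiceSummary.get_weight(summary))
print(ServiceSummary.get_severity(summary))
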
Example 29
class Platform(Document):
    meta = {
        "collection": "noc.platforms",
        "strict": False,
        "auto_create_index": False,
        "json_collection": "inv.platforms",
        "json_unique_fields": [("vendor", "name")],
        "indexes": [
            {"fields": ["vendor", "name"], "unique": True},
            ("vendor", "aliases"),
        ],
    }
    vendor = PlainReferenceField(Vendor)
    name = StringField()
    description = StringField(required=False)
    # Full name, combined from vendor and platform names
    full_name = StringField(unique=True)
    # Platform start of sale date
    start_of_sale = DateField()
    # Platform end of sale date
    end_of_sale = DateField()
    # Platform end of support date
    end_of_support = DateField()
    # End of extended support date (installation local)
    end_of_xsupport = DateField()
    # SNMP OID value
    # sysObjectID.0
    snmp_sysobjectid = StringField(regex=r"^1\.3\.6(\.\d+)+$")
    # Global ID
    uuid = UUIDField(binary=True)
    # Platform aliases
    aliases = ListField(StringField())
    # Labels
    labels = ListField(StringField())
    effective_labels = ListField(StringField())
    # Object id in BI
    bi_id = LongField(unique=True)

    _id_cache = cachetools.TTLCache(1000, ttl=60)
    _bi_id_cache = cachetools.TTLCache(1000, ttl=60)
    _ensure_cache = cachetools.TTLCache(1000, ttl=60)

    def __str__(self):
        return self.full_name

    def clean(self):
        self.full_name = "%s %s" % (self.vendor.name, self.name)
        if self.aliases:
            self.aliases = sorted(a for a in self.aliases if a != self.name)
        super().clean()

    def save(self, *args, **kwargs):
        to_merge_aliases = not hasattr(
            self, "_changed_fields") or "aliases" in self._changed_fields
        super().save(*args, **kwargs)
        if to_merge_aliases:
            for a in self.aliases:
                if a == self.name:
                    continue
                self.merge_platform(a)

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        return Platform.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        return Platform.objects.filter(bi_id=id).first()

    def to_json(self):
        r = {
            "$collection": self._meta["json_collection"],
            "vendor__code": self.vendor.code[0],
            "name": self.name,
            "uuid": self.uuid,
        }
        if self.aliases:
            r["aliases"] = [str(x) for x in self.aliases]
        if self.description:
            r["description"] = self.description
        if self.start_of_sale:
            r["start_of_sale"] = self.start_of_sale.strftime("%Y-%m-%d")
        if self.end_of_sale:
            r["end_of_sale"] = self.end_of_sale.strftime("%Y-%m-%d")
        if self.end_of_support:
            r["end_of_support"] = self.end_of_support.strftime("%Y-%m-%d")
        if self.snmp_sysobjectid:
            r["snmp_sysobjectid"] = self.snmp_sysobjectid
        if self.labels:
            r["labels"] = self.labels
        return to_json(
            r,
            order=[
                "vendor__code",
                "name",
                "$collection",
                "uuid",
                "aliases",
                "description",
                "start_of_sale",
                "end_of_sale",
                "end_of_support",
                "snmp_sysobjectid",
                "labels",
            ],
        )

    def get_json_path(self):
        return os.path.join(self.vendor.code[0],
                            "%s.json" % self.name.replace("/", "_"))

    @classmethod
    @cachetools.cachedmethod(
        operator.attrgetter("_ensure_cache"),
        key=lambda v, n, strict=False, labels=None: "%s-%s" % (v.id, n),
        lock=lambda _: id_lock,
    )
    def ensure_platform(cls, vendor, name, strict=False, labels=None):
        """
        Get or create platform by vendor and code
        :param vendor:
        :param name:
        :param strict: Return None if platform is not found
        :param labels: List of platform labels
        :return:
        """
        # Try to find platform
        q = Q(vendor=vendor.id, name=name) | Q(vendor=vendor.id, aliases=name)
        platform = Platform.objects.filter(q).first()
        if platform or strict:
            return platform
        # Try to create
        labels = labels or []
        pu = uuid.uuid4()
        d = Platform._get_collection().find_one_and_update(
            {
                "vendor": vendor.id,
                "name": name
            },
            {
                "$setOnInsert": {
                    "uuid": pu,
                    "full_name": "%s %s" % (vendor.name, name),
                    "bi_id": Int64(new_bi_id()),
                    "aliases": [],
                    "labels": labels,
                }
            },
            upsert=True,
            return_document=ReturnDocument.AFTER,
        )
        d["id"] = d["_id"]
        del d["_id"]
        p = Platform(**d)
        p._changed_fields = []
        return p

    @property
    def is_end_of_sale(self):
        """
        Check if platform reached end-of-sale mark
        :return:
        """
        if not self.end_of_sale:
            return False
        return datetime.date.today() > self.end_of_sale

    @property
    def is_end_of_support(self):
        """
        Check if platform reached end-of-support mark
        :return:
        """
        deadline = []
        if self.end_of_support:
            deadline += [self.end_of_support]
        if self.end_of_xsupport:
            deadline += [self.end_of_xsupport]
        if deadline:
            return datetime.date.today() > max(deadline)
        else:
            return False

    def merge_platform(self, alias):
        """
        Merge *alias* platform
        :param alias: platform name
        :return:
        """
        ap = Platform.objects.filter(vendor=self.vendor.id, name=alias).first()
        if not ap:
            return
        # Replace references to the alias platform
        refs = self._on_delete["check"] + self._on_delete[
            "clean"] + self._on_delete["delete"]
        for model_name, field in refs:
            model = get_model(model_name)
            for obj in model.objects.filter(**{field: ap.id}):
                setattr(obj, field, self)
                obj.save()
        # Finally delete aliases platform
        ap.delete()

    @classmethod
    def can_set_label(cls, label):
        return Label.get_effective_setting(label, setting="enable_platform")

    @classmethod
    def iter_lazy_labels(cls, platform: "Platform"):
        yield f"noc::platform::{platform.name}::="
Example 30
class MeasurementUnits(Document):
    meta = {
        "collection": "measurementunits",
        "strict": False,
        "auto_create_index": False,
        "json_collection": "pm.measurementunits",
        "json_unique_fields": ["name"],
    }

    # Unique units name
    name = StringField(unique=True)
    # Global ID
    uuid = UUIDField(binary=True)
    # Optional description
    description = StringField()
    # Short label
    label = StringField()
    # Label for dashboards
    dashboard_label = StringField()
    # Type of scale (K/M/G prefixes)
    # * d - decimal scale, 1/1_000/1_000_000/...
    # * b - binary scale,  1/2^10/2^20/...
    scale_type = StringField(choices=["d", "b"], default="d")
    # Alternative units
    alt_units = ListField(EmbeddedDocumentField(AltUnit))
    # Enumerations
    enum = ListField(EmbeddedDocumentField(EnumValue))

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _name_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __str__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"), lock=lambda _: id_lock)
    def get_by_id(cls, id) -> Optional["MeasurementUnits"]:
        return MeasurementUnits.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_name_cache"), lock=lambda _: id_lock)
    def get_by_name(cls, name: str) -> Optional["MeasurementUnits"]:
        return MeasurementUnits.objects.filter(name=name).first()

    @property
    def json_data(self):
        r = {
            "name": self.name,
            "$collection": self._meta["json_collection"],
            "uuid": self.uuid,
            "label": self.label,
            "dashboard_label": self.dashboard_label,
            "scale_type": self.scale_type,
        }
        if self.description:
            r["description"] = self.description
        if self.alt_units:
            r["alt_units"] = [x.json_data for x in self.alt_units]
        if self.enum:
            r["enum"] = [x.json_data for x in self.enum]
        return r

    def to_json(self):
        return to_json(
            self.json_data,
            order=[
                "name",
                "$collection",
                "uuid",
                "description",
                "label",
                "dashboard_label",
                "scale_type",
                "alt_units",
                "enum",
            ],
        )

    def get_json_path(self):
        return "%s.json" % quote_safe_path(self.name)