class SimpleDoc(Document):
    """Document declaring one field of each basic type (fixture/sample doc)."""

    # Text-like fields
    f_str = fields.StringField()
    f_url = fields.URLField()
    f_eml = fields.EmailField()
    # Numeric fields
    f_int = fields.IntField()
    f_lng = fields.LongField()
    f_flt = fields.FloatField()
    f_dec = fields.DecimalField()
    # Miscellaneous scalar fields
    f_bool = fields.BooleanField()
    f_dt = fields.DateTimeField()
    f_oid = fields.ObjectIdField()
    f_ref = fields.ReferenceField(RefDoc)
    f_uuid = fields.UUIDField()
    # Pair of ints forming a range (begin/end); no cross-field validation here
    f_rng_beg = fields.IntField()
    f_rng_end = fields.IntField()
class TemporalyDataUser(Document):
    """
    Temporarily stored user record awaiting activation.

    Holds the signup data plus a UUID used to build the activation link.
    """

    name = fields.StringField(required=True)
    email = fields.EmailField(required=True)
    # Callables so the timestamp/UUID are generated per-document, not at import
    created_at = fields.DateTimeField(default=datetime.now)
    hash_for_link_activation = fields.UUIDField(default=uuid4)

    class Meta:
        verbose_name = 'TempUser'
        verbose_name_plural = 'TempUsers'

    def first_name(self):
        """Return the first space-delimited token of the full name."""
        given, _sep, _rest = self.name.partition(" ")
        return given

    def __str__(self):
        return self.name
class CloneClassificationRule(Document):
    """
    Classification rules cloning
    """
    meta = {
        "collection": "noc.cloneclassificationrules",
        "strict": False,
        "auto_create_index": False,
        "json_collection": "fm.cloneclassificationrules",
        "json_depends_on": ["fm.eventclassificationrules"],
    }
    name = fields.StringField(unique=True)
    uuid = fields.UUIDField(binary=True)
    # Regular expressions; the permissive defaults match anything
    re = fields.StringField(default="^.*$")
    key_re = fields.StringField(default="^.*$")
    value_re = fields.StringField(default="^.*$")
    rewrite_from = fields.StringField()
    rewrite_to = fields.StringField()

    def __str__(self):
        return self.name

    def to_json(self):
        """Serialize the rule for the JSON collection, with stable key order."""
        payload = {
            "name": self.name,
            "$collection": self._meta["json_collection"],
            "uuid": self.uuid,
            "re": self.re,
            "key_re": self.key_re,
            "value_re": self.value_re,
            "rewrite_from": self.rewrite_from,
            "rewrite_to": self.rewrite_to,
        }
        key_order = [
            "name", "uuid", "re", "key_re", "value_re",
            "rewrite_from", "rewrite_to"
        ]
        return to_json(payload, order=key_order)

    def get_json_path(self):
        """Build the on-disk JSON path from the '|'-separated rule name."""
        segments = [quote_safe_path(part.strip()) for part in self.name.split("|")]
        return os.path.join(*segments) + ".json"
class Wallet(db.Document):
    """
    A user's wallet.

    The primary key is a client-side generated UUID string; ``password`` is
    stored as binary and never exposed through :meth:`get_sanitized_object`.
    """

    # Whitelist of keys safe to return to API clients
    safe_fields = {"_id", "balance", "currency", "name", "country"}

    wallet_id = fields.UUIDField(required=True,
                                 default=lambda: str(uuid.uuid4()),
                                 primary_key=True)
    balance = fields.FloatField(required=True, default=0.0)
    currency = fields.StringField(required=True)
    name = fields.StringField(required=True)
    city = fields.StringField(required=True)
    country = fields.StringField(required=True)
    password = fields.BinaryField(required=True)

    def get_sanitized_object(self):
        """Return a dict of this wallet restricted to whitelisted fields."""
        raw = self.to_mongo()
        sanitized = {}
        for key, value in raw.items():
            if key in self.safe_fields:
                sanitized[key] = value
        return sanitized
class Foo(Document):
    """Document exercising every supported field type and constraint
    (plain/required/choices/regex/length strings, bounded numerics,
    geo fields, custom validation, embedded documents)."""

    # String variants
    string = fields.StringField()
    required = fields.StringField(required=True)
    choices = fields.StringField(choices=('foo', 'bar', 'baz'))
    regex = fields.StringField(regex=r'^[a-z]*$')
    length = fields.StringField(min_length=1, max_length=3)
    # Lists of strings (SortedListField keeps contents ordered on save)
    strings = fields.ListField(fields.StringField())
    sorted_strings = fields.SortedListField(fields.StringField())
    # Integer variants, with and without bounds
    integer = fields.IntField()
    bounded_int = fields.IntField(min_value=0, max_value=10)
    longeger = fields.LongField()
    bounded_long = fields.LongField(min_value=0, max_value=10)
    # Float variants
    floating = fields.FloatField()
    bounded_float = fields.FloatField(min_value=0.0, max_value=1.0)
    boolean = fields.BooleanField()
    # Datetime variants (ComplexDateTimeField keeps microsecond precision)
    datetime = fields.DateTimeField()
    complex_datetime = fields.ComplexDateTimeField()
    # Binary variants
    binary = fields.BinaryField()
    bounded_binary = fields.BinaryField(max_bytes=8)
    mapping = fields.MapField(fields.StringField())
    uuid = fields.UUIDField()
    # Geo fields: legacy pair plus GeoJSON single and multi geometries
    old_geopoint = fields.GeoPointField()
    point = fields.PointField()
    line = fields.LineStringField()
    polygon = fields.PolygonField()
    points = fields.MultiPointField()
    lines = fields.MultiLineStringField()
    polygons = fields.MultiPolygonField()
    # Custom validation callable: accepts only even-length strings
    even_length_string = fields.StringField(
        validation=lambda s: len(s) % 2 == 0)

    # Embedded documents declared via the decorator form: the field class
    # wraps the inline class definition directly.
    @fields.EmbeddedDocumentField
    class embedded_bar(EmbeddedDocument):
        bar = fields.StringField()

    @fields.EmbeddedDocumentListField
    class embedded_baz(EmbeddedDocument):
        baz = fields.StringField()
class Person(Document):
    """
    A person record imported from external data.

    ``favouriteFood`` mixes fruits and vegetables and is to be split into
    ``favouriteFruits`` / ``favouriteVegetables`` via the helper below.
    """

    _id = fields.StringField()
    index = fields.IntField(unique=True)
    guid = fields.UUIDField(unique=True)
    has_died = fields.BooleanField()
    balance = fields.FloatField()
    picture = fields.URLField()
    age = fields.IntField()
    eyeColor = fields.StringField()
    name = fields.StringField()
    gender = fields.StringField()
    company_id = fields.IntField()
    email = fields.EmailField(unique=True)
    phone = fields.StringField()
    address = fields.MultiLineStringField()
    about = fields.MultiLineStringField()
    registered = fields.DateTimeField()
    tags = fields.ListField(fields.StringField())
    friends = fields.ListField(fields.DictField())
    greeting = fields.MultiLineStringField()
    # this field will require splitting into fruit and vegetables
    favouriteFood = fields.ListField(fields.StringField())
    favouriteFruits = fields.ListField(fields.StringField())
    favouriteVegetables = fields.ListField(fields.StringField())

    # Known classifications used by the splitter below.
    # NOTE(review): cucumber is listed as a fruit here — verify intent.
    KNOWN_FRUITS = ['apple', 'orange', 'banana', 'strawberry', 'cucumber']
    KNOWN_VEGGIES = ['beetroot', 'carrot', 'celery']

    @staticmethod
    def split_foods_into_fruits_and_vegetables(foods):
        """
        Take a list of foods and split into a list of fruits and a list
        of veggies. Unrecognized foods are dropped from both lists.

        :param foods: list of foods
        :return: tuple containing list of fruits first, then veggies
        """
        fruits = []
        veggies = []
        for item in foods:
            if item in Person.KNOWN_FRUITS:
                fruits.append(item)
            if item in Person.KNOWN_VEGGIES:
                veggies.append(item)
        return (fruits, veggies)
class EventClass(Document):
    """
    Event class.

    Describes how FM events of this class are processed: drop/log/archive
    action, variables, disposition into alarms, repeat suppression,
    deduplication window, TTL and plugin configuration.
    """
    meta = {
        "collection": "noc.eventclasses",
        "strict": False,
        "auto_create_index": False,
        "json_collection": "fm.eventclasses",
        "json_depends_on": ["fm.alarmclasses"]
    }
    name = fields.StringField(required=True, unique=True)
    uuid = fields.UUIDField(binary=True)
    description = fields.StringField(required=False)
    # Event processing action:
    # D - Drop
    # L - Log as processed, do not move to archive
    # A - Log as processed, move to archive
    action = fields.StringField(
        required=True,
        choices=[("D", "Drop"), ("L", "Log"), ("A", "Log & Archive")])
    vars = fields.ListField(fields.EmbeddedDocumentField(EventClassVar))
    # Text messages
    subject_template = fields.StringField()
    body_template = fields.StringField()
    symptoms = fields.StringField()
    probable_causes = fields.StringField()
    recommended_actions = fields.StringField()
    disposition = fields.ListField(
        fields.EmbeddedDocumentField(EventDispositionRule))
    repeat_suppression = fields.ListField(
        fields.EmbeddedDocumentField(EventSuppressionRule))
    # Window to suppress duplicated events (in seconds)
    # 0 means no deduplication
    deduplication_window = fields.IntField(default=3)
    # Time to live in active window, unless not belonging to any alarm
    # (in seconds)
    ttl = fields.IntField(default=86400)
    # True if event processing is regulated by
    # Interface Profile.link_events setting
    link_event = fields.BooleanField(default=False)
    # handlers = fields.ListField(fields.StringField())
    # Plugin settings
    plugins = fields.ListField(fields.EmbeddedDocumentField(EventPlugin))
    # category = fields.ObjectIdField()

    # Per-class lookup caches (keyed by id and by name respectively)
    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _name_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _handlers_cache = {}

    def __unicode__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        """Return the EventClass with the given id, cached for 60s."""
        return EventClass.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_name_cache"),
                             lock=lambda _: id_lock)
    def get_by_name(cls, name):
        """Return the EventClass with the given name, cached for 60s.

        Fixed: previously cached into ``_id_cache``, so by-name lookups
        shared (and could collide with) the by-id cache while the declared
        ``_name_cache`` was never used.
        """
        return EventClass.objects.filter(name=name).first()

    def get_handlers(self):
        """Resolve and return the list of importable handler callables."""
        @cachetools.cached(self._handlers_cache, key=lambda x: x.id,
                           lock=handlers_lock)
        def _get_handlers(event_class):
            handlers = []
            for hh in event_class.handlers:
                try:
                    h = get_handler(hh)
                except ImportError:
                    # Unresolvable handlers are skipped silently
                    h = None
                if h:
                    handlers += [h]
            return handlers

        return _get_handlers(self)

    def save(self, *args, **kwargs):
        """Save, auto-(re)creating the parent category from the name prefix."""
        c_name = " | ".join(self.name.split(" | ")[:-1])
        c = EventClassCategory.objects.filter(name=c_name).first()
        if not c:
            c = EventClassCategory(name=c_name)
            c.save()
        self.category = c.id
        super(EventClass, self).save(*args, **kwargs)

    @property
    def display_action(self):
        """Human-readable form of the one-letter action code."""
        return {"D": "Drop", "L": "Log", "A": "Log and Archive"}[self.action]

    def to_json(self):
        """Hand-build the JSON collection representation of this class."""
        c = self
        r = ["{"]
        r += [" \"name\": \"%s\"," % q(c.name)]
        r += [" \"$collection\": \"%s\"," % self._meta["json_collection"]]
        r += [" \"uuid\": \"%s\"," % c.uuid]
        if c.description:
            r += [" \"description\": \"%s\"," % q(c.description)]
        r += [" \"action\": \"%s\"," % q(c.action)]
        # vars
        vars = []
        for v in c.vars:
            vd = [" {"]
            vd += [" \"name\": \"%s\"," % q(v.name)]
            vd += [" \"description\": \"%s\"," % q(v.description)]
            vd += [" \"type\": \"%s\"," % q(v.type)]
            vd += [" \"required\": %s" % q(v.required)]
            vd += [" }"]
            vars += ["\n".join(vd)]
        r += [" \"vars\": ["]
        r += [",\n".join(vars)]
        r += [" ],"]
        if self.link_event:
            r += [" \"link_event\": true,"]
        r += [" \"deduplication_window\": %d," % self.deduplication_window]
        r += [" \"ttl\": %d," % self.ttl]
        # Handlers
        if self.handlers:
            hh = [" \"%s\"" % h for h in self.handlers]
            r += [" \"handlers\": ["]
            r += [",\n\n".join(hh)]
            r += [" ],"]
        # Text
        r += [" \"subject_template\": \"%s\"," % q(c.subject_template)]
        r += [" \"body_template\": \"%s\"," % q(c.body_template)]
        r += [" \"symptoms\": \"%s\"," % q(c.symptoms)]
        r += [" \"probable_causes\": \"%s\"," % q(c.probable_causes)]
        r += [
            " \"recommended_actions\": \"%s\"," % q(c.recommended_actions)
        ]
        # Disposition rules
        if c.disposition:
            r += [" \"disposition\": ["]
            disp = []
            for d in c.disposition:
                ll = [" {"]
                lll = [" \"name\": \"%s\"" % q(d.name)]
                lll += [" \"condition\": \"%s\"" % q(d.condition)]
                lll += [" \"action\": \"%s\"" % q(d.action)]
                if d.alarm_class:
                    lll += [
                        " \"alarm_class__name\": \"%s\"" % q(d.alarm_class.name)
                    ]
                if d.managed_object:
                    lll += [
                        " \"managed_object\": \"%s\"" % q(d.managed_object)
                    ]
                ll += [",\n".join(lll)]
                ll += [" }"]
                disp += ["\n".join(ll)]
            r += [",\n".join(disp)]
            r += [" ]"]
        # Ensure a separating comma before the (always emitted) array
        if not r[-1].endswith(","):
            r[-1] += ","
        r += [" \"repeat_suppression\": ["]
        if c.repeat_suppression:
            rep = []
            for rs in c.repeat_suppression:
                ll = [" {"]
                lll = [" \"name\": \"%s\"," % q(rs.name)]
                lll += [" \"condition\": \"%s\"," % q(rs.condition)]
                lll += [
                    " \"event_class__name\": \"%s\"," % q(rs.event_class.name)
                ]
                lll += [" \"match_condition\": {"]
                llll = []
                for rsc in rs.match_condition:
                    llll += [
                        " \"%s\": \"%s\"" % (q(rsc), q(rs.match_condition[rsc]))
                    ]
                lll += [",\n".join(llll) + "\n },"]
                lll += [" \"window\": %d," % rs.window]
                lll += [
                    " \"suppress\": %s" % ("true" if rs.suppress else "false")
                ]
                ll += ["\n".join(lll)]
                ll += [" }"]
                rep += ["\n".join(ll)]
            r += [",\n".join(rep)]
        r += [" ]"]
        # Plugins
        if self.plugins:
            if not r[-1].endswith(","):
                r[-1] += ","
            plugins = []
            for p in self.plugins:
                pd = [" {"]
                pd += [" \"name\": \"%s\"" % p.name]
                if p.config:
                    pd[-1] += ","
                    pc = []
                    # NOTE(review): iterates p.config but reads
                    # p.config.vars[v] — verify config's iteration contract
                    for v in p.config:
                        pc += [
                            " \"%s\": \"%s\"" % (v, p.config.vars[v])
                        ]
                    pd += [" \"config\": {"]
                    pd += [",\n".join(pc)]
                    pd += [" }"]
                pd += [" }"]
                plugins += ["\n".join(pd)]
            r += [" \"plugins\": ["]
            r += [",\n".join(plugins)]
            r += [" ]"]
        # Close: strip any trailing comma before the final brace
        if r[-1].endswith(","):
            r[-1] = r[-1][:-1]
        r += ["}", ""]
        return "\n".join(r)

    def get_json_path(self):
        """Build the on-disk JSON path from the '|'-separated class name."""
        p = [quote_safe_path(n.strip()) for n in self.name.split("|")]
        return os.path.join(*p) + ".json"
class BaseRevision(EmbeddedDocument):
    """Base class for revisions of invoicing documents.

    A revision snapshots sender/customer/currency/line-item data; either
    ``contact`` or ``organization`` must be set (enforced in ``validate``).
    A new revision may be seeded from an existing one via the ``based_on``
    keyword (see ``__init__``), copying everything except the fields in
    ``NOT_DUPLICABLE_FIELDS``.
    """
    # Re-expose the module-level constant on the class
    TAXES_APPLICATION = TAXES_APPLICATION
    # Fields never copied when deriving one revision from another
    NOT_DUPLICABLE_FIELDS = ('revision', 'issuer', 'issue_date', 'pdf')

    revision = fields.UUIDField(required=True, binary=True)
    issuer = fields.ReferenceField("VosaeUser")
    issue_date = fields.DateTimeField(required=True)
    sender = fields.StringField(max_length=128)
    sender_organization = fields.StringField(max_length=128)
    sender_address = fields.EmbeddedDocumentField("Address")
    # Customer: exactly one of contact/organization is required
    contact = NotPrivateReferenceField("Contact")
    organization = NotPrivateReferenceField("Organization")
    billing_address = fields.EmbeddedDocumentField("Address")
    delivery_address = fields.EmbeddedDocumentField("Address")
    custom_payment_conditions = fields.StringField(max_length=256)
    customer_reference = fields.StringField(max_length=128)
    currency = fields.EmbeddedDocumentField("SnapshotCurrency", required=True)
    taxes_application = fields.StringField(required=True, choices=TAXES_APPLICATION, default="EXCLUSIVE")
    line_items = fields.ListField(fields.EmbeddedDocumentField("InvoiceItem"))
    # Per-locale rendered PDFs
    pdf = LocalizedMapField(fields.ReferenceField("VosaeFile"))

    meta = {
        "allow_inheritance": True,

        # Vosae specific
        "vosae_mandatory_permissions": ("invoicing_access", ),
    }

    def __unicode__(self):
        return unicode(self.revision)

    def __init__(self, *args, **kwargs):
        """Initialize, optionally seeding fields from ``based_on``.

        ``based_on`` is popped before delegating to mongoengine; if given,
        every duplicable field present on the source is copied over. A
        missing revision UUID is generated last.
        """
        based_on = kwargs.pop('based_on', None)
        super(BaseRevision, self).__init__(*args, **kwargs)
        if based_on:
            # Update revision with base values
            for field in list(
                    set(self._fields.keys()).difference(
                        self.NOT_DUPLICABLE_FIELDS)):
                if hasattr(based_on, field):
                    setattr(self, field, getattr(based_on, field))
        if not self.revision:
            self.revision = unicode(uuid.uuid4())

    def validate(self, value, **kwargs):
        """Validate, additionally requiring contact or organization.

        Collects mongoengine's own field errors first, merges in the
        contact/organization requirement, then re-raises everything as a
        single ValidationError.
        """
        errors = {}
        try:
            super(BaseRevision, self).validate(value, **kwargs)
        except ValidationError as e:
            errors = e.errors
        if not self.contact and not self.organization:
            errors['contact'] = ValidationError(
                'Either contact or organization is required',
                field_name='contact')
            errors['organization'] = ValidationError(
                'Either contact or organization is required',
                field_name='organization')
        if errors:
            raise ValidationError('ValidationError', errors=errors)

    def duplicate(self, issuer=None):
        """
        Return the duplicate of the current revision with generated
        revision unique parameters.
        """
        duplicate = copy.deepcopy(self)
        # Fresh identity: new UUID and issue date; issuer only if supplied
        duplicate.revision = unicode(uuid.uuid4())
        duplicate.issue_date = datetime_now()
        if issuer:
            duplicate.issuer = issuer
        return duplicate

    def get_customer_display(self, only_company=False):
        """
        Returns the customer's name according to this scheme:

        - Organization (Contact), *if either organization and contact are supplied*
        - Organization, *if only organization is supplied*
        - Contact, *if only contact is supplied*
        - None, *if neither organization nor contact are supplied*

        :param only_company: forces to only display the company in the first case
        """
        if self.organization and self.organization.corporate_name and self.contact and self.contact.get_full_name():
            if only_company:
                return self.organization.corporate_name
            else:
                return "%s (%s)" % (self.organization.corporate_name,
                                    self.contact.get_full_name())
        if self.organization and self.organization.corporate_name:
            return self.organization.corporate_name
        if self.contact and self.contact.get_full_name():
            return self.contact.get_full_name()
        return None
class EventClassificationRule(Document):
    """
    Classification rules.

    Maps raw event patterns (regexes over key/value pairs) to an
    EventClass; lower ``preference`` wins when several rules match.
    """
    meta = {
        "collection": "noc.eventclassificationrules",
        "strict": False,
        "auto_create_index": False,
        "json_collection": "fm.eventclassificationrules",
        "json_depends_on": ["fm.eventclasses"]
    }
    name = fields.StringField(required=True, unique=True)
    uuid = fields.UUIDField(binary=True)
    description = fields.StringField(required=False)
    event_class = PlainReferenceField(EventClass, required=True)
    # Rule ordering: lower value = tried earlier
    preference = fields.IntField(required=True, default=1000)
    patterns = fields.ListField(
        fields.EmbeddedDocumentField(EventClassificationPattern))
    datasources = fields.ListField(fields.EmbeddedDocumentField(DataSource))
    vars = fields.ListField(
        fields.EmbeddedDocumentField(EventClassificationRuleVar))
    # category = fields.ObjectIdField()

    def __unicode__(self):
        return self.name

    def save(self, *args, **kwargs):
        # Auto-(re)create the category derived from the name prefix
        # (everything before the last " | " segment)
        c_name = " | ".join(self.name.split(" | ")[:-1])
        c = EventClassificationRuleCategory.objects.filter(name=c_name).first()
        if not c:
            c = EventClassificationRuleCategory(name=c_name)
            c.save()
        self.category = c.id
        super(EventClassificationRule, self).save(*args, **kwargs)

    @property
    def short_name(self):
        # Last segment of the " | "-separated name
        return self.name.split(" | ")[-1]

    def to_json(self):
        # Hand-built JSON text for the fm.eventclassificationrules
        # collection; emitted line by line into r and joined at the end
        r = ["{"]
        r += [" \"name\": \"%s\"," % jq(self.name)]
        r += [
            " \"$collection\": \"%s\"," % jq(self._meta["json_collection"])
        ]
        r += [" \"uuid\": \"%s\"," % self.uuid]
        if self.description:
            r += [" \"description\": \"%s\"," % jq(self.description)]
        r += [" \"event_class__name\": \"%s\"," % jq(self.event_class.name)]
        r += [" \"preference\": %d," % self.preference]
        # Dump datasources
        if self.datasources:
            r += [" \"datasources\": ["]
            jds = []
            for ds in self.datasources:
                x = [" \"name\": \"%s\"" % jq(ds.name)]
                x += [" \"datasource\": \"%s\"" % jq(ds.datasource)]
                ss = []
                # sorted() keeps search-key output deterministic
                for k in sorted(ds.search):
                    ss += [
                        " \"%s\": \"%s\"" % (jq(k), jq(ds.search[k]))
                    ]
                x += [" \"search\": {"]
                x += [",\n".join(ss)]
                x += [" }"]
                jds += [" {", ",\n".join(x), " }"]
            r += [",\n\n".join(jds)]
            r += [" ],"]
        # Dump vars
        if self.vars:
            r += [" \"vars\": ["]
            vars = []
            for v in self.vars:
                vd = [" {"]
                vd += [" \"name\": \"%s\"," % jq(v.name)]
                vd += [" \"value\": \"%s\"" % jq(v.value)]
                vd += [" }"]
                vars += ["\n".join(vd)]
            r += [",\n\n".join(vars)]
            r += [" ],"]
        # Dump patterns (always present; last section, so no trailing comma)
        r += [" \"patterns\": ["]
        patterns = []
        for p in self.patterns:
            pt = []
            pt += [" {"]
            pt += [" \"key_re\": \"%s\"," % jq(p.key_re)]
            pt += [" \"value_re\": \"%s\"" % jq(p.value_re)]
            pt += [" }"]
            patterns += ["\n".join(pt)]
        r += [",\n".join(patterns)]
        r += [" ]"]
        r += ["}"]
        return "\n".join(r)

    def get_json_path(self):
        # Path derived from the '|'-separated rule name
        p = [quote_safe_path(n.strip()) for n in self.name.split("|")]
        return os.path.join(*p) + ".json"
class AlarmClass(nosql.Document):
    """
    Alarm class.

    Describes alarm behavior: uniqueness/discriminator, default severity,
    flap detection, root-cause correlation, periodic jobs, plugins and
    reopen control times.
    """
    meta = {
        "collection": "noc.alarmclasses",
        "allow_inheritance": False,
        "json_collection": "fm.alarmclasses",
        "json_depends_on": ["fm.alarmseverities"]
    }
    name = fields.StringField(required=True, unique=True)
    uuid = fields.UUIDField(binary=True)
    description = fields.StringField(required=False)
    # Create or not create separate Alarm
    # if is_unique is True and there is active alarm
    # Do not create separate alarm if is_unique set
    is_unique = fields.BooleanField(default=False)
    # List of var names to be used as discriminator key
    discriminator = fields.ListField(nosql.StringField())
    # Can alarm status be cleared by user
    user_clearable = fields.BooleanField(default=True)
    # Default alarm severity
    default_severity = nosql.PlainReferenceField(AlarmSeverity)
    #
    datasources = fields.ListField(fields.EmbeddedDocumentField(DataSource))
    vars = fields.ListField(fields.EmbeddedDocumentField(AlarmClassVar))
    # Text messages
    subject_template = fields.StringField()
    body_template = fields.StringField()
    symptoms = fields.StringField()
    probable_causes = fields.StringField()
    recommended_actions = fields.StringField()
    # Flap detection
    flap_condition = fields.StringField(
        required=False,
        choices=[("none", "none"), ("count", "count")],
        default=None)
    flap_window = fields.IntField(required=False, default=0)
    flap_threshold = fields.FloatField(required=False, default=0)
    # RCA
    root_cause = fields.ListField(
        fields.EmbeddedDocumentField(AlarmRootCauseCondition))
    # Job descriptions
    jobs = fields.ListField(fields.EmbeddedDocumentField(AlarmClassJob))
    # handlers = fields.ListField(fields.StringField())
    # Plugin settings
    plugins = fields.ListField(fields.EmbeddedDocumentField(AlarmPlugin))
    # Time in seconds to delay alarm risen notification
    notification_delay = fields.IntField(required=False)
    # Control time to reopen alarm instead of creating new
    control_time0 = fields.IntField(required=False)
    # Control time to reopen alarm after 1 reopen
    control_time1 = fields.IntField(required=False)
    # Control time to reopen alarm after >1 reopen
    control_timeN = fields.IntField(required=False)
    # category = nosql.ObjectIdField()

    def __unicode__(self):
        return self.name

    def save(self, *args, **kwargs):
        """Save, auto-(re)creating the parent category from the name prefix."""
        c_name = " | ".join(self.name.split(" | ")[:-1])
        c = AlarmClassCategory.objects.filter(name=c_name).first()
        if not c:
            c = AlarmClassCategory(name=c_name)
            c.save()
        self.category = c.id
        super(AlarmClass, self).save(*args, **kwargs)

    def get_discriminator(self, vars):
        """
        Calculate discriminator hash

        :param vars: Dict of vars
        :returns: Discriminator hash
        """
        if vars:
            ds = sorted(str(vars[n]) for n in self.discriminator)
            return hashlib.sha1("\x00".join(ds)).hexdigest()
        else:
            return hashlib.sha1("").hexdigest()

    def to_json(self):
        """Hand-build the JSON collection representation of this class."""
        c = self
        r = ["{"]
        r += [" \"name\": \"%s\"," % q(c.name)]
        r += [" \"$collection\": \"%s\"," % self._meta["json_collection"]]
        r += [" \"uuid\": \"%s\"," % c.uuid]
        if c.description:
            # Fixed: key was misspelled "desciption"; every other to_json
            # in this module emits "description"
            r += [" \"description\": \"%s\"," % q(c.description)]
        r += [" \"is_unique\": %s," % q(c.is_unique)]
        if c.is_unique and c.discriminator:
            r += [
                " \"discriminator\": [%s]," %
                ", ".join(["\"%s\"" % q(d) for d in c.discriminator])
            ]
        r += [" \"user_clearable\": %s," % q(c.user_clearable)]
        r += [
            " \"default_severity__name\": \"%s\"," % q(c.default_severity.name)
        ]
        # datasources
        if c.datasources:
            r += [" \"datasources\": ["]
            jds = []
            for ds in c.datasources:
                x = []
                x += [" \"name\": \"%s\"" % q(ds.name)]
                x += [" \"datasource\": \"%s\"" % q(ds.datasource)]
                ss = []
                for k in sorted(ds.search):
                    ss += [
                        " \"%s\": \"%s\"" % (q(k), q(ds.search[k]))
                    ]
                x += [
                    " \"search\": {\n%s\n }" % (",\n".join(ss))
                ]
                jds += [" {\n%s\n }" % ",\n".join(x)]
            r += [",\n\n".join(jds)]
            r += [" ],"]
        # vars
        vars = []
        for v in c.vars:
            vd = [" {"]
            vd += [" \"name\": \"%s\"," % q(v.name)]
            vd += [" \"description\": \"%s\"" % q(v.description)]
            if v.default:
                vd[-1] += ","
                vd += [" \"default\": \"%s\"" % q(v.default)]
            vd += [" }"]
            vars += ["\n".join(vd)]
        r += [" \"vars\": ["]
        r += [",\n".join(vars)]
        r += [" ],"]
        # Handlers
        if self.handlers:
            hh = [" \"%s\"" % h for h in self.handlers]
            r += [" \"handlers\": ["]
            r += [",\n\n".join(hh)]
            r += [" ],"]
        # Text
        r += [" \"subject_template\": \"%s\"," % q(c.subject_template)]
        r += [" \"body_template\": \"%s\"," % q(c.body_template)]
        r += [" \"symptoms\": \"%s\"," % q(c.symptoms)]
        r += [" \"probable_causes\": \"%s\"," % q(c.probable_causes)]
        r += [
            " \"recommended_actions\": \"%s\"," % q(c.recommended_actions)
        ]
        # Root cause
        if self.root_cause:
            rc = []
            for rr in self.root_cause:
                rcd = [" {"]
                rcd += [" \"name\": \"%s\"," % rr.name]
                rcd += [" \"root__name\": \"%s\"," % rr.root.name]
                rcd += [" \"window\": %d," % rr.window]
                if rr.condition:
                    rcd += [
                        " \"condition\": \"%s\"," % rr.condition
                    ]
                rcd += [" \"match_condition\": {"]
                mcv = []
                for v in rr.match_condition:
                    mcv += [
                        " \"%s\": \"%s\"" % (v, rr.match_condition[v])
                    ]
                rcd += [",\n".join(mcv)]
                rcd += [" }"]
                rcd += [" }"]
                rc += ["\n".join(rcd)]
            if r[-1][-1] != ",":
                r[-1] += ","
            r += [" \"root_cause\": ["]
            r += [",\n".join(rc)]
            r += [" ]"]
        # Jobs
        if self.jobs:
            jobs = []
            for job in self.jobs:
                jd = [" {"]
                jd += [" \"job\": \"%s\"," % job.job]
                jd += [" \"interval\": %d," % job.interval]
                jd += [" \"vars\": {"]
                jv = []
                for v in job.vars:
                    jv += [" \"%s\": \"%s\"" % (v, job.vars[v])]
                jd += [",\n".join(jv)]
                jd += [" }"]
                jd += [" }"]
                jobs += ["\n".join(jd)]
            if r[-1][-1] != ",":
                r[-1] += ","
            r += [" \"jobs\": ["]
            r += [",\n".join(jobs)]
            r += [" ]"]
        # Plugins
        if self.plugins:
            if r[-1][-1] != ",":
                r[-1] += ","
            plugins = []
            for p in self.plugins:
                pd = [" {"]
                pd += [" \"name\": \"%s\"" % p.name]
                if p.config:
                    pd[-1] += ","
                    pc = []
                    # NOTE(review): iterates p.config but reads
                    # p.config.vars[v] — verify config's iteration contract
                    for v in p.config:
                        pc += [
                            " \"%s\": \"%s\"" % (v, p.config.vars[v])
                        ]
                    pd += [" \"config\": {"]
                    pd += [",\n".join(pc)]
                    pd += [" }"]
                pd += [" }"]
                plugins += ["\n".join(pd)]
            r += [" \"plugins\": ["]
            r += [",\n".join(plugins)]
            r += [" ]"]
        if self.notification_delay:
            if r[-1][-1] != ",":
                r[-1] += ","
            r += [" \"notification_delay\": %d" % self.notification_delay]
        if self.control_time0:
            if r[-1][-1] != ",":
                r[-1] += ","
            r += [" \"control_time0\": %d" % self.control_time0]
        if self.control_time1:
            r[-1] += ","
            r += [" \"control_time1\": %d" % self.control_time1]
        if self.control_timeN:
            r[-1] += ","
            r += [" \"control_timeN\": %d" % self.control_timeN]
        # Close: strip any trailing comma before the final brace
        if r[-1].endswith(","):
            r[-1] = r[-1][:-1]
        r += ["}", ""]
        return "\n".join(r)

    def get_json_path(self):
        """Build the on-disk JSON path from the '|'-separated class name."""
        p = [quote_safe_path(n.strip()) for n in self.name.split("|")]
        return os.path.join(*p) + ".json"

    @property
    def config(self):
        """Lazily fetched per-class AlarmClassConfig override (or None)."""
        if not hasattr(self, "_config"):
            self._config = AlarmClassConfig.objects.filter(
                alarm_class=self.id).first()
        return self._config

    def get_notification_delay(self):
        """Effective notification delay: config override wins over the field."""
        if self.config:
            return self.config.notification_delay or None
        else:
            return self.notification_delay or None

    def get_control_time(self, reopens):
        """Effective control time for the given reopen count (0, 1, >1);
        config override wins over the class field."""
        if reopens == 0:
            if self.config:
                return self.config.control_time0 or None
            else:
                return self.control_time0 or None
        elif reopens == 1:
            if self.config:
                return self.config.control_time1 or None
            else:
                return self.control_time1 or None
        else:
            if self.config:
                return self.config.control_timeN or None
            else:
                return self.control_timeN or None
class Post(Document):
    """A post identified by a client-side generated, immutable UUID."""

    # uuid.uuid4 passed as a callable: a fresh UUID per document
    id = fields.UUIDField(primary_key=True, default=uuid.uuid4,
                          editable=False)
    data = fields.StringField()
class EventClass(Document):
    """
    Event class.

    Describes how FM events of this class are processed: drop/log/archive
    action, variables, disposition into alarms, repeat suppression and
    plugin configuration.
    """
    meta = {
        "collection": "noc.eventclasses",
        "allow_inheritance": False,
        "json_collection": "fm.eventclasses",
        "json_depends_on": ["fm.alarmclasses"]
    }
    name = fields.StringField(required=True, unique=True)
    uuid = fields.UUIDField(binary=True)
    description = fields.StringField(required=False)
    # Event processing action:
    # D - Drop
    # L - Log as processed, do not move to archive
    # A - Log as processed, move to archive
    action = fields.StringField(
        required=True,
        choices=[("D", "Drop"), ("L", "Log"), ("A", "Log & Archive")])
    vars = fields.ListField(fields.EmbeddedDocumentField(EventClassVar))
    # Text messages
    subject_template = fields.StringField()
    body_template = fields.StringField()
    symptoms = fields.StringField()
    probable_causes = fields.StringField()
    recommended_actions = fields.StringField()
    disposition = fields.ListField(
        fields.EmbeddedDocumentField(EventDispositionRule))
    repeat_suppression = fields.ListField(
        fields.EmbeddedDocumentField(EventSuppressionRule))
    # True if event processing is regulated by
    # Interface Profile.link_events setting
    link_event = fields.BooleanField(default=False)
    # handlers = fields.ListField(fields.StringField())
    # Plugin settings
    plugins = fields.ListField(fields.EmbeddedDocumentField(EventPlugin))
    # category = fields.ObjectIdField()

    def __unicode__(self):
        return self.name

    def save(self, *args, **kwargs):
        # Auto-(re)create the category derived from the name prefix
        # (everything before the last " | " segment)
        c_name = " | ".join(self.name.split(" | ")[:-1])
        c = EventClassCategory.objects.filter(name=c_name).first()
        if not c:
            c = EventClassCategory(name=c_name)
            c.save()
        self.category = c.id
        super(EventClass, self).save(*args, **kwargs)

    @property
    def display_action(self):
        # Human-readable form of the one-letter action code
        return {"D": "Drop", "L": "Log", "A": "Log and Archive"}[self.action]

    @property
    def conditional_pyrule_name(self):
        # Lower-case pyRule name derived from the quoted class name
        return ("fm_dc_" + rulename_quote(self.name)).lower()

    def to_json(self):
        # Hand-built JSON text for the fm.eventclasses collection;
        # emitted line by line into r and joined at the end. Trailing
        # commas are patched onto r[-1] as optional sections appear.
        c = self
        r = ["{"]
        r += [" \"name\": \"%s\"," % q(c.name)]
        r += [" \"$collection\": \"%s\"," % self._meta["json_collection"]]
        r += [" \"uuid\": \"%s\"," % c.uuid]
        if c.description:
            r += [" \"description\": \"%s\"," % q(c.description)]
        r += [" \"action\": \"%s\"," % q(c.action)]
        # vars
        vars = []
        for v in c.vars:
            vd = [" {"]
            vd += [" \"name\": \"%s\"," % q(v.name)]
            vd += [" \"description\": \"%s\"," % q(v.description)]
            vd += [" \"type\": \"%s\"," % q(v.type)]
            vd += [" \"required\": %s" % q(v.required)]
            vd += [" }"]
            vars += ["\n".join(vd)]
        r += [" \"vars\": ["]
        r += [",\n".join(vars)]
        r += [" ],"]
        if self.link_event:
            r += [" \"link_event\": true,"]
        # Handlers
        if self.handlers:
            hh = [" \"%s\"" % h for h in self.handlers]
            r += [" \"handlers\": ["]
            r += [",\n\n".join(hh)]
            r += [" ],"]
        # Text
        r += [" \"subject_template\": \"%s\"," % q(c.subject_template)]
        r += [" \"body_template\": \"%s\"," % q(c.body_template)]
        r += [" \"symptoms\": \"%s\"," % q(c.symptoms)]
        r += [" \"probable_causes\": \"%s\"," % q(c.probable_causes)]
        r += [
            " \"recommended_actions\": \"%s\"," % q(c.recommended_actions)
        ]
        # Disposition rules
        if c.disposition:
            r += [" \"disposition\": ["]
            l = []
            for d in c.disposition:
                ll = [" {"]
                lll = [" \"name\": \"%s\"" % q(d.name)]
                lll += [" \"condition\": \"%s\"" % q(d.condition)]
                lll += [" \"action\": \"%s\"" % q(d.action)]
                if d.alarm_class:
                    lll += [
                        " \"alarm_class__name\": \"%s\"" % q(d.alarm_class.name)
                    ]
                if d.managed_object:
                    lll += [
                        " \"managed_object\": \"%s\"" % q(d.managed_object)
                    ]
                ll += [",\n".join(lll)]
                ll += [" }"]
                l += ["\n".join(ll)]
            r += [",\n".join(l)]
            r += [" ]"]
        #
        if c.repeat_suppression:
            if not r[-1].endswith(","):
                r[-1] += ","
            r += [" \"repeat_suppression\": ["]
            l = []
            for rs in c.repeat_suppression:
                ll = [" {"]
                lll = [" \"name\": \"%s\"," % q(rs.name)]
                lll += [" \"condition\": \"%s\"," % q(rs.condition)]
                lll += [
                    " \"event_class__name\": \"%s\"," % q(rs.event_class.name)
                ]
                lll += [" \"match_condition\": {"]
                llll = []
                for rsc in rs.match_condition:
                    llll += [
                        " \"%s\": \"%s\"" % (q(rsc), q(rs.match_condition[rsc]))
                    ]
                lll += [",\n".join(llll) + "\n },"]
                lll += [" \"window\": %d," % rs.window]
                lll += [
                    " \"suppress\": %s" % ("true" if rs.suppress else "false")
                ]
                ll += ["\n".join(lll)]
                ll += [" }"]
                l += ["\n".join(ll)]
            r += [",\n".join(l)]
            r += [" ]"]
        # Plugins
        if self.plugins:
            if not r[-1].endswith(","):
                r[-1] += ","
            plugins = []
            for p in self.plugins:
                pd = [" {"]
                pd += [" \"name\": \"%s\"" % p.name]
                if p.config:
                    pd[-1] += ","
                    pc = []
                    # NOTE(review): iterates p.config but reads
                    # p.config.vars[v] — verify config's iteration contract
                    for v in p.config:
                        pc += [
                            " \"%s\": \"%s\"" % (v, p.config.vars[v])
                        ]
                    pd += [" \"config\": {"]
                    pd += [",\n".join(pc)]
                    pd += [" }"]
                pd += [" }"]
                plugins += ["\n".join(pd)]
            r += [" \"plugins\": ["]
            r += [",\n".join(plugins)]
            r += [" ]"]
        # Close: strip any trailing comma before the final brace
        if r[-1].endswith(","):
            r[-1] = r[-1][:-1]
        r += ["}", ""]
        return "\n".join(r)

    def get_json_path(self):
        # Path derived from the '|'-separated class name
        p = [quote_safe_path(n.strip()) for n in self.name.split("|")]
        return os.path.join(*p) + ".json"
class AlarmClass(nosql.Document):
    """
    Alarm class.

    Describes one class of alarms: identity (name/uuid), uniqueness and
    discriminator settings, severity, message templates, flap-detection
    and RCA parameters, and per-class handlers/plugins.  Records are
    exported to the JSON collection named in ``meta`` via to_json() /
    get_json_path().
    """
    meta = {
        "collection": "noc.alarmclasses",
        "strict": False,
        "auto_create_index": False,
        "json_collection": "fm.alarmclasses",
        "json_depends_on": [
            "fm.alarmseverities"
        ],
    }
    name = fields.StringField(required=True, unique=True)
    uuid = fields.UUIDField(binary=True)
    description = fields.StringField(required=False)
    # Create or not create separate Alarm
    # if is_unique is True and there is active alarm
    # Do not create separate alarm if is_unique set
    is_unique = fields.BooleanField(default=False)
    # List of var names to be used as discriminator key
    discriminator = fields.ListField(nosql.StringField())
    # Can alarm status be cleared by user
    user_clearable = fields.BooleanField(default=True)
    # Default alarm severity
    default_severity = nosql.PlainReferenceField(AlarmSeverity)
    # Datasources
    datasources = fields.ListField(fields.EmbeddedDocumentField(DataSource))
    # Alarm class variables
    vars = fields.ListField(fields.EmbeddedDocumentField(AlarmClassVar))
    # Text messages
    subject_template = fields.StringField()
    body_template = fields.StringField()
    symptoms = fields.StringField()
    probable_causes = fields.StringField()
    recommended_actions = fields.StringField()
    # Flap detection
    flap_condition = fields.StringField(
        required=False,
        choices=[("none", "none"), ("count", "count")],
        default="none")
    flap_window = fields.IntField(required=False, default=0)
    flap_threshold = fields.FloatField(required=False, default=0)
    # RCA
    root_cause = fields.ListField(
        fields.EmbeddedDocumentField(AlarmRootCauseCondition))
    topology_rca = fields.BooleanField(default=False)
    # List of handlers to be called on alarm raising
    handlers = fields.ListField(fields.StringField())
    # List of handlers to be called on alarm clear
    clear_handlers = fields.ListField(fields.StringField())
    # Plugin settings
    plugins = fields.ListField(fields.EmbeddedDocumentField(AlarmPlugin))
    # Time in seconds to delay alarm risen notification
    notification_delay = fields.IntField(required=False)
    # Control time to reopen alarm instead of creating new
    control_time0 = fields.IntField(required=False)
    # Control time to reopen alarm after 1 reopen
    control_time1 = fields.IntField(required=False)
    # Control time to reopen alarm after >1 reopen
    control_timeN = fields.IntField(required=False)
    # Consequence recover time
    # Root cause will be detached if consequence alarm
    # will not clear itself in *recover_time*
    recover_time = fields.IntField(required=False, default=300)
    # Numeric BI-side id (see get_by_bi_id)
    bi_id = fields.LongField(unique=True)
    # Category id; maintained automatically in save()
    category = nosql.ObjectIdField()

    # Class-level lookup caches, shared by all instances
    _id_cache = cachetools.TTLCache(maxsize=1000, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=1000, ttl=60)
    _name_cache = cachetools.TTLCache(maxsize=1000, ttl=60)
    _handlers_cache = {}
    _clear_handlers_cache = {}

    def __unicode__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_id(cls, id):
        """Return the AlarmClass with the given id or None (TTL-cached)."""
        return AlarmClass.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"),
                             lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        """Return the AlarmClass with the given bi_id or None (TTL-cached)."""
        return AlarmClass.objects.filter(bi_id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_name_cache"),
                             lock=lambda _: id_lock)
    def get_by_name(cls, name):
        """Return the AlarmClass with the given name or None (TTL-cached)."""
        return AlarmClass.objects.filter(name=name).first()

    def get_handlers(self):
        """
        Resolve and return the `handlers` entries for this alarm class.

        Entries that fail to import are silently skipped.  Results are
        cached per alarm-class id in the shared _handlers_cache.
        """
        @cachetools.cached(self._handlers_cache, key=lambda x: x.id,
                           lock=handlers_lock)
        def _get_handlers(alarm_class):
            handlers = []
            for hh in alarm_class.handlers:
                try:
                    h = get_handler(hh)
                except ImportError:
                    h = None
                if h:
                    handlers += [h]
            return handlers

        return _get_handlers(self)

    def get_clear_handlers(self):
        """
        Resolve and return the `clear_handlers` entries for this alarm class.

        Same skip-on-ImportError and per-id caching behavior as
        get_handlers(), but backed by _clear_handlers_cache.
        """
        @cachetools.cached(self._clear_handlers_cache, key=lambda x: x.id,
                           lock=handlers_lock)
        def _get_handlers(alarm_class):
            handlers = []
            for hh in alarm_class.clear_handlers:
                try:
                    h = get_handler(hh)
                except ImportError:
                    h = None
                if h:
                    handlers += [h]
            return handlers

        return _get_handlers(self)

    def save(self, *args, **kwargs):
        """Save the document, creating/linking its AlarmClassCategory first."""
        # Category name is everything before the last " | " component
        c_name = " | ".join(self.name.split(" | ")[:-1])
        c = AlarmClassCategory.objects.filter(name=c_name).first()
        if not c:
            c = AlarmClassCategory(name=c_name)
            c.save()
        self.category = c.id
        super(AlarmClass, self).save(*args, **kwargs)

    def get_discriminator(self, vars):
        """
        Calculate discriminator hash.

        Builds a stable SHA-1 over the sorted, NUL-joined string values of
        the fields named in `discriminator`.
        NOTE(review): `vars` shadows the builtin and the class field; also
        sha1() is fed a str, which only works on Python 2 — Python 3 would
        require bytes.  Confirm before porting.

        :param vars: Dict of vars
        :returns: Discriminator hash (hex digest)
        """
        if vars:
            ds = sorted(str(vars[n]) for n in self.discriminator)
            return hashlib.sha1("\x00".join(ds)).hexdigest()
        else:
            return hashlib.sha1("").hexdigest()

    def to_json(self):
        """
        Render this alarm class as hand-built JSON text for the
        fm.alarmclasses JSON collection.

        The output is assembled line-by-line into `r`; optional sections
        append a trailing comma to the previous line on demand, and the
        final comma is stripped before closing the object.
        """
        c = self
        r = ["{"]
        r += [" \"name\": \"%s\"," % q(c.name)]
        r += [" \"$collection\": \"%s\"," % self._meta["json_collection"]]
        r += [" \"uuid\": \"%s\"," % c.uuid]
        if c.description:
            # NOTE(review): key is misspelled "desciption"; fixing it would
            # change the emitted JSON, so confirm consumers before changing.
            r += [" \"desciption\": \"%s\"," % q(c.description)]
        r += [" \"is_unique\": %s," % q(c.is_unique)]
        if c.is_unique and c.discriminator:
            r += [" \"discriminator\": [%s]," % ", ".join(["\"%s\"" % q(d) for d in c.discriminator])]
        r += [" \"user_clearable\": %s," % q(c.user_clearable)]
        r += [" \"default_severity__name\": \"%s\"," % q(c.default_severity.name)]
        # datasources
        if c.datasources:
            r += [" \"datasources\": ["]
            jds = []
            for ds in c.datasources:
                x = []
                x += [" \"name\": \"%s\"" % q(ds.name)]
                x += [" \"datasource\": \"%s\"" % q(ds.datasource)]
                ss = []
                for k in sorted(ds.search):
                    ss += [" \"%s\": \"%s\"" % (q(k), q(ds.search[k]))]
                x += [" \"search\": {\n%s\n }" % (",\n".join(ss))]
                jds += [" {\n%s\n }" % ",\n".join(x)]
            r += [",\n\n".join(jds)]
            r += [" ],"]
        # vars (local list shadows the class field of the same name)
        vars = []
        for v in c.vars:
            vd = [" {"]
            vd += [" \"name\": \"%s\"," % q(v.name)]
            vd += [" \"description\": \"%s\"" % q(v.description)]
            if v.default:
                vd[-1] += ","
                vd += [" \"default\": \"%s\"" % q(v.default)]
            vd += [" }"]
            vars += ["\n".join(vd)]
        r += [" \"vars\": ["]
        r += [",\n".join(vars)]
        r += [" ],"]
        # Handlers
        if self.handlers:
            hh = [" \"%s\"" % h for h in self.handlers]
            r += [" \"handlers\": ["]
            r += [",\n\n".join(hh)]
            r += [" ],"]
        if self.clear_handlers:
            hh = [" \"%s\"" % h for h in self.clear_handlers]
            r += [" \"clear_handlers\": ["]
            r += [",\n\n".join(hh)]
            r += [" ],"]
        # Text
        r += [" \"subject_template\": \"%s\"," % q(c.subject_template)]
        r += [" \"body_template\": \"%s\"," % q(c.body_template)]
        r += [" \"symptoms\": \"%s\"," % q(c.symptoms if c.symptoms else "")]
        r += [" \"probable_causes\": \"%s\"," % q(c.probable_causes if c.probable_causes else "")]
        r += [" \"recommended_actions\": \"%s\"," % q(c.recommended_actions if c.recommended_actions else "")]
        # Root cause
        if self.root_cause:
            rc = []
            for rr in self.root_cause:
                rcd = [" {"]
                rcd += [" \"name\": \"%s\"," % rr.name]
                rcd += [" \"root__name\": \"%s\"," % rr.root.name]
                rcd += [" \"window\": %d," % rr.window]
                if rr.condition:
                    rcd += [" \"condition\": \"%s\"," % rr.condition]
                rcd += [" \"match_condition\": {"]
                mcv = []
                for v in rr.match_condition:
                    mcv += [" \"%s\": \"%s\"" % (v, rr.match_condition[v])]
                rcd += [",\n".join(mcv)]
                rcd += [" }"]
                rcd += [" }"]
                rc += ["\n".join(rcd)]
            # Ensure the previous line is comma-terminated before the section
            if r[-1][-1] != ",":
                r[-1] += ","
            r += [" \"root_cause\": ["]
            r += [",\n".join(rc)]
            r += [" ]"]
        if self.topology_rca:
            if r[-1][-1] != ",":
                r[-1] += ","
            r += [" \"topology_rca\": true"]
        # Plugins
        if self.plugins:
            if r[-1][-1] != ",":
                r[-1] += ","
            plugins = []
            for p in self.plugins:
                pd = [" {"]
                pd += [" \"name\": \"%s\"" % p.name]
                if p.config:
                    pd[-1] += ","
                    pc = []
                    for v in p.config:
                        pc += [" \"%s\": \"%s\"" % (v, p.config.vars[v])]
                    pd += [" \"config\": {"]
                    pd += [",\n".join(pc)]
                    pd += [" }"]
                pd += [" }"]
                plugins += ["\n".join(pd)]
            r += [" \"plugins\": ["]
            r += [",\n".join(plugins)]
            r += [" ]"]
        if self.notification_delay:
            if r[-1][-1] != ",":
                r[-1] += ","
            r += [" \"notification_delay\": %d" % self.notification_delay]
        if self.control_time0:
            if r[-1][-1] != ",":
                r[-1] += ","
            r += [" \"control_time0\": %d" % self.control_time0]
        if self.control_time1:
            # NOTE(review): unlike the other sections, the comma is appended
            # unconditionally here and below — if the previous line already
            # ends with "," this emits ",,". Confirm before changing.
            r[-1] += ","
            r += [" \"control_time1\": %d" % self.control_time1]
        if self.control_timeN:
            r[-1] += ","
            r += [" \"control_timeN\": %d" % self.control_timeN]
        if self.recover_time:
            if r[-1][-1] != ",":
                r[-1] += ","
            r += [" \"recover_time\": %d" % self.recover_time]
        # Close: strip the dangling comma, terminate object, trailing newline
        if r[-1].endswith(","):
            r[-1] = r[-1][:-1]
        r += ["}", ""]
        return "\n".join(r)

    def get_json_path(self):
        """Return the relative .json path derived from the "|"-separated name."""
        p = [quote_safe_path(n.strip()) for n in self.name.split("|")]
        return os.path.join(*p) + ".json"

    @property
    def config(self):
        """AlarmClassConfig override for this class, if any (cached per instance)."""
        if not hasattr(self, "_config"):
            self._config = AlarmClassConfig.objects.filter(
                alarm_class=self.id).first()
        return self._config

    def get_notification_delay(self):
        """Effective notification delay: config override wins, 0 maps to None."""
        if self.config:
            return self.config.notification_delay or None
        else:
            return self.notification_delay or None

    def get_control_time(self, reopens):
        """
        Effective control time for the given reopen count.

        Picks control_time0 / control_time1 / control_timeN for 0, 1 and >1
        reopens respectively; a config override takes precedence, and 0 is
        normalized to None.
        """
        if reopens == 0:
            if self.config:
                return self.config.control_time0 or None
            else:
                return self.control_time0 or None
        elif reopens == 1:
            if self.config:
                return self.config.control_time1 or None
            else:
                return self.control_time1 or None
        else:
            if self.config:
                return self.config.control_timeN or None
            else:
                return self.control_timeN or None
class InvoiceRevision(EmbeddedDocument):
    """
    Snapshot of an invoice's state at a given point in time.

    A new :class:`~invoicing.models.InvoiceRevision` is created
    automatically whenever the invoice is updated.
    """
    TAXES_APPLICATION = TAXES_APPLICATION

    revision = fields.UUIDField(required=True, binary=True)
    issuer = fields.ReferenceField("VosaeUser")
    issue_date = fields.DateTimeField(required=True)
    sender = fields.StringField(max_length=128)
    sender_organization = fields.StringField(max_length=128)
    sender_address = fields.EmbeddedDocumentField("Address")
    contact = NotPrivateReferenceField("Contact")
    organization = NotPrivateReferenceField("Organization")
    billing_address = fields.EmbeddedDocumentField("Address")
    delivery_address = fields.EmbeddedDocumentField("Address")
    quotation_date = DateField()
    quotation_validity = DateField()
    purchase_order_date = DateField()
    invoicing_date = DateField()
    due_date = DateField()
    credit_note_emission_date = DateField()
    custom_payment_conditions = fields.StringField(max_length=256)
    customer_reference = fields.StringField(max_length=128)
    currency = fields.EmbeddedDocumentField("SnapshotCurrency", required=True)
    taxes_application = fields.StringField(required=True, choices=TAXES_APPLICATION, default="EXCLUSIVE")
    line_items = fields.ListField(fields.EmbeddedDocumentField("InvoiceItem"))
    pdf = LocalizedMapField(fields.ReferenceField("VosaeFile"))

    meta = {
        # Vosae specific
        "vosae_mandatory_permissions": ("invoicing_access", ),
    }

    def __unicode__(self):
        return unicode(self.revision)

    def __init__(self, *args, **kwargs):
        super(InvoiceRevision, self).__init__(*args, **kwargs)
        # Every revision must carry its own unique identifier
        if not self.revision:
            self.revision = unicode(uuid.uuid4())

    def validate(self, value, **kwargs):
        """Run base validation, additionally requiring a contact or an organization."""
        collected = {}
        try:
            super(InvoiceRevision, self).validate(value, **kwargs)
        except ValidationError as e:
            collected = e.errors
        if not (self.contact or self.organization):
            # Flag both fields so either one can satisfy the requirement
            for field in ('contact', 'organization'):
                collected[field] = ValidationError(
                    'Either contact or organization is required',
                    field_name=field)
        if collected:
            raise ValidationError('ValidationError', errors=collected)

    def duplicate(self, issuer=None):
        """
        Return a deep copy of this revision with freshly generated unique
        parameters (new revision id, current issue date, optional issuer).
        """
        clone = copy.deepcopy(self)
        clone.revision = unicode(uuid.uuid4())
        clone.issue_date = datetime_now()
        if issuer:
            clone.issuer = issuer
        return clone

    def get_customer_display(self, only_company=False):
        """
        Build the customer display name.

        Preference order:
        - "Organization (Contact)" when both are available
          (collapsed to "Organization" when ``only_company`` is set)
        - "Organization" when only the organization is available
        - "Contact" when only the contact is available
        - None when neither is available

        :param only_company: forces the company-only form in the first case
        """
        org_name = self.organization.corporate_name if self.organization else None
        contact_name = self.contact.get_full_name() if self.contact else None
        if org_name and contact_name:
            if only_company:
                return org_name
            return "%s (%s)" % (org_name, contact_name)
        if org_name:
            return org_name
        if contact_name:
            return contact_name
        return None
class Post(Document):
    """A user-submitted post with spam-moderation bookkeeping, attachments,
    tags and assorted typed fields."""

    meta = {
        "collection": "posts",
    }

    objects = QuerySetManager()

    @classmethod
    def dead_posts(cls) -> "QuerySet[Post]":
        # Forward reference quoted: Post is not bound yet while the class
        # body executes, so a bare annotation would need PEP 563.
        """Return the queryset of hidden posts."""
        return cls.objects().filter(is_hidden=True)

    _id = fields.StringField(name="_id", primary_key=True)
    organization = fields.StringField(help_text="org where the post belongs")
    author = fields.StringField(required=False, help_text="Author of the post")
    title = fields.StringField()
    body = fields.StringField(help_text="contents of post")
    created_at = fields.DateTimeField(default=datetime.datetime.utcnow)
    kind = fields.StringField(choices=["new", "archive"])
    location = fields.StringField(required=False, default="home-page")
    is_hidden = fields.BooleanField(default=False)
    # Spam-moderation bookkeeping
    spam_id = fields.ObjectIdField(required=False, null=True)
    spam_flagged = fields.BooleanField(null=True)
    spam_date = fields.DateTimeField(required=False, null=True)
    spam_user_id = fields.UUIDField(required=False, null=True)
    comment_count = fields.IntField(required=False)
    errors = fields.ListField(
        field=fields.DictField(field=fields.StringField()),
        # Callable default: a literal [] would be shared by every document
        default=list,
        help_text="some sort of errors",
    )
    results = fields.DictField()
    attachments = fields.EmbeddedDocumentListField(
        PostAttachment, required=False, help_text="random attachments")
    main_attachment = fields.EmbeddedDocumentField(
        PostAttachment, required=True, help_text="random attachments")
    tags = fields.MapField(
        required=False,
        field=fields.StringField(required=True),
        help_text=("Map tag names to descriptions"),
    )
    font = fields.EnumField(Font)
    font_required = fields.EnumField(Font, required=True)
    font_default = fields.EnumField(Font, default=Font.Helvetica)
    font_required_default = fields.EnumField(Font, required=True,
                                             default=Font.Helvetica)
    url = fields.URLField()
    url_required = fields.URLField(required=True)
    url_default = fields.URLField(default="https://example.org")
    url_required_default = fields.URLField(required=True,
                                           default="https://example.org")
    url_with_extra_args = fields.URLField(verify_exists=False,
                                          url_regex=None,
                                          schemas=["ftp://"],
                                          regex="bar")
    geo = fields.GeoPointField()
    geo_required = fields.GeoPointField(required=True)
    geo_default = fields.GeoPointField(default=(1, 2))
    geo_required_default = fields.GeoPointField(required=True, default=(1, 2))

    def set_hidden(self, hidden: bool) -> None:
        """Set and persist the hidden flag.

        Bug fix: this previously assigned ``self.hidden`` — an attribute
        that is not a declared field — so ``is_hidden`` was never updated
        and the save was a no-op with respect to visibility.
        """
        self.is_hidden = hidden
        self.save()

    def get_tag_names(self) -> KeysView[str]:
        """Return a view of all tag names attached to this post."""
        return self.tags.keys()