class Job(Document):
    """A fuzzing job: configuration limits, flags, and binary artifacts."""

    # Human-readable identification
    name = StringField()
    description = StringField()
    date = DateTimeField()

    # Execution limits
    maximum_samples = IntField()
    maximum_iteration = IntField()
    timeout = IntField()

    # Lifecycle flags
    enabled = BooleanField()
    archived = BooleanField()

    # Fuzzing configuration
    mutation_engine = StringField()
    fuzzer = StringField()

    # Binary payloads attached to the job
    samples = BinaryField()
    firmware_root = BinaryField()
    fuzzing_target = BinaryField()
class Metric(Document):
    """Node of the metric tree stored in ``noc.ts.metrics``."""

    meta = {
        "collection": "noc.ts.metrics",
        "indexes": ["parent", ("parent", "local")],
    }

    name = StringField(unique=True)
    hash = BinaryField(unique=True)
    parent = BinaryField()
    # Name within parent
    local = StringField()
    has_children = BooleanField()

    def __unicode__(self):
        return self.name
class User(Document):
    """Application user account.

    Field reference:
    http://docs.mongoengine.org/guide/defining-documents.html#fields
    """

    username = StringField(unique=True, required=True)
    email = EmailField(unique=True)
    password = BinaryField(required=True)
    age = IntField()
    bio = StringField(max_length=100)  # Will accept strings up to 100 chars
    # categories = ListField(StringField())
    categories = ListField()
    admin = BooleanField(default=False)
    registered = BooleanField(default=False)
    date_created = DateTimeField(default=datetime.utcnow)

    def json(self):
        """Serialize this user's public fields to a JSON string.

        Fixed: this was declared ``@classmethod`` while still reading
        instance attributes through ``self``, so a class-level call would
        serialize the field descriptors instead of the document's data.
        It is now a plain instance method.
        """
        user_dict = {
            "username": self.username,
            "email": self.email,
            "age": self.age,
            "bio": self.bio,
            "categories": self.categories,
            "admin": self.admin,
            "registered": self.registered,
        }
        return json.dumps(user_dict)

    # Create meta information about our document
    # Check out docs for more
    meta = {
        "indexes": ["username", "email"],
        # ordering in descending fashion
        "ordering": ["-date_created"],
    }
class Contig(Sequence):
    """A contig sequence attached to an organism / sequence collection."""

    meta = {
        'allow_inheritance': True,
        'index_cls': False,
        'collection': "contig_collection",
        'indexes': [
            'organism',
            {"fields": ["organism", "features.locus_tag"]},
            {"fields": ["features.identifier"]},
        ],
    }

    organism = StringField()
    organelle = StringField(required=False)
    seq_collection_id = ReferenceField(SeqCollection)
    bigseq = BinaryField()

    def __init__(self, **kwargs):
        # NOTE(review): ``super(Sequence, self)`` skips Sequence.__init__
        # and dispatches to its base; ``_seq_init()`` appears to perform
        # the sequence-specific setup instead — confirm this is deliberate.
        super(Sequence, self).__init__(**kwargs)
        self._seq_init()
        self.size.unit = "bp"

    def gene(self, name):
        """Return the feature that has *name* as an alias.

        Raises NotFoundException when no feature matches.
        """
        for feature in self.features:
            if feature.has_alias(name):
                return feature
        raise NotFoundException("Gene not found: " + name)
class Crash(Document):
    """A single crash produced by a fuzzing job."""

    job_id = ObjectIdField()
    crash_signal = IntField()
    exploitability = StringField()
    # Fixed: pass the callable itself, not its result. The original
    # ``datetime.datetime.now()`` was evaluated once at import time, so
    # every Crash document shared the same timestamp.
    date = DateTimeField(default=datetime.datetime.now)
    crash_hash = StringField()
    verified = BooleanField()
    additional = StringField()
    test_case = BinaryField()
    iteration = IntField()
class AlarmDiagnostic(Document):
    """Compressed diagnostic dumps attached to alarms."""

    meta = {
        "collection": "noc.alarmdiagnostic",
        "strict": False,
        "auto_create_index": False,
        "indexes": ["alarm"],
    }

    alarm = ObjectIdField()
    timestamp = DateTimeField(default=datetime.datetime.now)
    expires = DateTimeField()
    state = StringField(
        choices=[("R", "On Raise"), ("C", "On Clear"), ("P", "On Periodic")])
    data = BinaryField()

    # Retention period applied once the alarm is cleared
    TTL = datetime.timedelta(days=14)

    @classmethod
    def save_diagnostics(cls, alarm, diag, state):
        """Compress the *diag* items and persist them for *alarm*."""
        # NOTE(review): zlib.compress expects bytes on Python 3 while this
        # join produces str — confirm this runs under Python 2, or encode.
        payload = zlib.compress("\n\n".join(str(d) for d in diag), 9)
        if state == "C":
            expires = datetime.datetime.now() + cls.TTL
        else:
            expires = None
        AlarmDiagnostic(alarm=alarm.id, state=state,
                        data=bson.Binary(payload), expires=expires).save()

    @classmethod
    def get_diagnostics(cls, alarm):
        """Return all stored diagnostics for *alarm*, oldest first."""
        if hasattr(alarm, "id"):
            alarm = alarm.id
        records = AlarmDiagnostic.objects.filter(
            alarm=alarm).order_by("timestamp")
        return [{
            "timestamp": rec.timestamp,
            "state": rec.state,
            "data": zlib.decompress(rec.data),
        } for rec in records]

    @classmethod
    def clear_diagnostics(cls, alarm):
        """Schedule every diagnostic of *alarm* to expire after TTL."""
        if hasattr(alarm, "id"):
            alarm = alarm.id
        AlarmDiagnostic._get_collection().update_many(
            {"alarm": alarm},
            {"$set": {"expires": datetime.datetime.now() + cls.TTL}})
class WebAuthn(db.Document, WebAuthnMixin):
    """A registered WebAuthn credential belonging to a User."""

    credential_id = BinaryField(primary_key=True, max_bytes=1024, required=True)
    public_key = BinaryField(required=True)
    sign_count = IntField(default=0)
    transports = ListField(required=False)

    # a JSON string as returned from registration
    extensions = StringField(max_length=255)
    lastuse_datetime = DateTimeField(required=True)
    # name is provided by user - we make sure it is unique per user
    name = StringField(max_length=64, required=True)
    usage = StringField(max_length=64, required=True)
    # we need to be able to look up a user from a credential_id
    user = ReferenceField("User")
    # user_id = ObjectIdField(required=True)

    meta = {"db_alias": db_name}

    def get_user_mapping(self) -> t.Dict[str, str]:
        """Return the mapping from webauthn back to User"""
        return {"id": self.user.id}
class User(Document):
    """Minimal user document keyed by an explicit string id."""

    id = StringField(primary_key=True)
    username = StringField(unique=True)
    password = BinaryField()

    # Strange init required to make mongoengine and flask-jwt play nice
    def __init__(self, id=None, username=None, password=None, *args, **kwargs):
        super(User, self).__init__(*args, **kwargs)
        self.id = id
        self.username = username
        self.password = password

    def __str__(self):
        return f"User(id='{self.id}')"
class Avatar(Document):
    """A user's avatar image stored in the ``avatars`` collection."""

    meta = {
        "collection": "avatars",
        "strict": False,
        "auto_create_index": False,
    }

    user_id = StringField(primary_key=True)
    content_type = IntField(validation=validate_content_type)
    data = BinaryField(max_bytes=config.ui.max_avatar_size)

    def __str__(self) -> str:
        return self.user_id

    def get_content_type(self) -> str:
        """
        Return content-type string
        :return:
        """
        return ContentType(self.content_type).content_type
class admin(Document):
    """Administrator account."""

    name = StringField(unique=True, required=True)
    username = StringField(unique=True, required=True)
    # Fixed: dropped ``unique=True`` from password. A uniqueness index on
    # the password field rejects two admins whose stored values collide and
    # leaks password-equality information; uniqueness belongs on identity
    # fields only.
    password = BinaryField(required=True)

    def json(self):
        """Serialize this admin to a JSON string.

        NOTE(review): the output includes the raw password bytes — confirm
        callers need this and that the value is JSON-serializable.
        """
        userdict = {
            "name": self.name,
            "username": self.username,
            "password": self.password,
        }
        return json.dumps(userdict)

    meta = {
        "indexes": ["name", "username"],
    }
class User(Document):
    """End-user profile with diet and fitness tracking fields."""

    firstName = StringField(required=True)
    lastName = StringField(required=True, default='')
    email = EmailField(required=True)
    gender = StringField(
        required=True, default='Male')  # choices=['Male', 'Female', 'Other'])
    password = BinaryField()
    resetPasswordToken = StringField()
    resetPasswordExpires = DateTimeField()
    role = StringField(default='User')  # choices=['User', 'Admin'])
    dateOfBirth = StringField(required=True)  # YYYY/MM/DD Format
    age = IntField(required=True, default=0)
    weight = IntField(required=True, default=0)
    weightUnit = StringField(required=True, default='kg')  # choices=['kg', 'lb'])
    height = DecimalField(required=True, default=0, precision=1)
    heightUnit = StringField(required=True, default='cm')  # choices=['cm', 'm', 'ft'])
    foodPreference = StringField(
        required=True, default='Vegetarian'
    )  # choices=['Vegan', 'Vegetarian', 'Non-Vegetarian'])
    timeZone = StringField(default='0')  # Timezone Offset Value
    bmi = IntField(default=0)
    medicalCondition = StringField()
    targetWeight = IntField(default=0)
    targetDate = StringField(default='')  # YYYY/MM/DD format
    targetCalories = IntField(default=0)
    # Fixed: pass the callable, not its result. ``datetime.utcnow()`` was
    # evaluated once at import time, stamping every user with the same
    # creation date.
    accountCreationDate = DateTimeField(default=datetime.utcnow)
    userPhoto = StringField(default='')
    messages = ListField(EmbeddedDocumentField(Messages))
    mealAssigned = ListField(ReferenceField(Meal))
    mealExpiry = DateTimeField()
    unreadCount = IntField(default=0)

    @staticmethod
    def pre_save_func(sender, document):
        """Hash the password and derive age from dateOfBirth before save.

        NOTE(review): this re-hashes ``document['password']`` on every
        save, so saving an already-hashed document double-hashes it —
        confirm this hook fires only on creation.
        """
        document['password'] = bcrypt.generate_password_hash(
            document['password'])
        dob = parser.parse(document['dateOfBirth'])
        today = datetime.today()
        age = relativedelta.relativedelta(today, dob)
        document['age'] = age.years
class DataSourceCache(Document):
    """Chunked, versioned cache for datasource payloads.

    Large payloads are split across several documents linked via
    ``next_name``; expiration is delegated to a MongoDB TTL index on
    ``expires``.
    """

    meta = {
        "collection": "datasource_cache",
        "strict": False,
        "auto_create_index": False,
        "indexes": [{
            "fields": ["expires"],
            "expireAfterSeconds": 0
        }],
    }

    name = StringField(primary_key=True)
    data = BinaryField()
    expires = DateTimeField()
    chunks = IntField(min_value=0, max_value=5)
    version = IntField()
    # Next chunk name
    next_name = StringField()

    @classmethod
    def get_data(cls, name):
        """
        Load cached data

        :param name: Cache key (name of the first chunk)
        :return: Decoded payload, or None on miss, broken chain or stale version
        """
        data = []
        coll = DataSourceCache._get_collection()
        while name:
            d = coll.find_one({"_id": name})
            if not d:
                # Not found or broken chain
                return None
            if d["version"] != CURRENT_VERSION:
                # Version bump, rebuild cache
                return None
            data += [d["data"]]
            # Proceed to next chunk when necessary
            name = d.get("next_name", None)
        # Finally, decode result
        # avoid string catenation whenever possible
        return cls.decode(b"".join(data) if len(data) > 1 else data[0])

    @classmethod
    def set_data(cls, name, data, ttl):
        """
        Write data to cache

        :param name: Cache key (name of the first chunk)
        :param data: Payload to encode and store
        :param ttl: Time-to-live in seconds
        :return:
        """
        data = cls.encode(data)
        coll = DataSourceCache._get_collection()
        n_chunk = 0
        fmt_chunk_name = "%s.%%d" % name
        # Fixed: a trailing comma made this a 1-tuple, so a tuple rather
        # than a datetime was written to the TTL-indexed "expires" field
        # and cache entries never expired.
        expires = datetime.datetime.now() + datetime.timedelta(seconds=ttl)
        while data:
            # Split chunk and rest of data
            chunk, data = data[:MAX_DATA_SIZE], data[MAX_DATA_SIZE:]
            # Generate next chunk name when data left
            if data:
                n_chunk += 1
                next_name = fmt_chunk_name % n_chunk
            else:
                next_name = None
            logger.info("Writing chunk %s", name)
            # Update chunk
            coll.update_one(
                {"_id": name},
                {
                    "$set": {
                        "data": bson.Binary(chunk),
                        "version": CURRENT_VERSION,
                        "expires": expires,
                        "next_name": next_name,
                    },
                    "$setOnInsert": {
                        "name": name
                    },
                },
                upsert=True,
            )
            # Name for next chunk
            name = next_name

    @classmethod
    def encode(cls, data):
        """
        Current encoding: bz2-compress the byte representation of *data*.
        (Docstring fixed: it previously claimed cPickle + zlib.compress.)

        :param data:
        :return: Compressed bytes
        """
        return bz2.compress(smart_bytes(data), 9)

    @classmethod
    def decode(cls, data):
        """
        Inverse of :meth:`encode`: bz2-decompress.

        :param data: Compressed bytes
        :return: Decompressed bytes
        """
        return bz2.decompress(data)
class Dashboard(Document):
    """BI dashboard: gzip'ed config plus a per-user/group access list."""

    meta = {
        "collection": "noc.dashboards",
        "strict": False,
        "auto_create_index": False,
        "indexes": ["owner", "tags"],
        "json_collection": "bi.dashboards",
        "json_unique_fields": ["uuid"],
    }

    title = StringField()
    # Username
    owner = ForeignKeyField(User)
    #
    description = StringField()
    #
    tags = ListField(StringField())
    # Config format version
    format = IntField(default=1)
    # gzip'ed data
    config = BinaryField()
    #
    created = DateTimeField(default=datetime.datetime.now)
    changed = DateTimeField(default=datetime.datetime.now)
    #
    access = ListField(EmbeddedDocumentField(DashboardAccess))
    # Global ID
    uuid = UUIDField(binary=True, unique=True)

    def __str__(self):
        return self.title or str(self.uuid)

    @property
    def name(self):
        # For collection sync
        return "%s: %s" % (
            self.owner.username if self.owner else "noc",
            self.title or str(self.uuid),
        )

    def get_user_access(self, user):
        """Return the access level *user* has on this dashboard."""
        # Direct match as owner
        if user == self.owner or user.is_superuser:
            return DAL_ADMIN
        level = DAL_NONE
        groups = user.groups.all()
        for ar in self.access:
            if ar.user and ar.user == user:
                level = max(level, ar.level)
            if ar.group and ar.group in groups:
                level = max(level, ar.level)
            if level == DAL_ADMIN:
                return level
        return level

    def save(
        self,
        force_insert=False,
        validate=True,
        clean=True,
        write_concern=None,
        cascade=None,
        cascade_kwargs=None,
        _refs=None,
        save_condition=None,
        **kwargs,
    ):
        """Persist the dashboard, deduplicating the access list first."""
        # Split DashBoard Acces to {User, level}, {Group, level}
        # self.update(add_to_set__access=[parent_1, parent_2, parent_1])
        if "access" in getattr(self, "_changed_fields", []):
            # Check unique
            processed = []
            access = []
            for da in sorted(self.access, reverse=True, key=lambda x: x.level):
                # Deduplicate rights
                # @todo changing priority (reverse order)
                if da.user and "u%d" % da.user.id in processed:
                    continue
                elif da.group and "g%d" % da.group.id in processed:
                    continue
                if da.user and da.group:
                    # Split User and Group rights
                    access += [
                        DashboardAccess(user=da.user.id, level=da.level),
                        DashboardAccess(group=da.group.id, level=da.level),
                    ]
                    processed += ["u%d" % da.user.id, "g%d" % da.group.id]
                    continue
                access += [da]
                if da.user:
                    processed += ["u%d" % da.user.id]
                if da.group:
                    processed += ["g%d" % da.group.id]
            self.access = access
        super().save(
            force_insert=force_insert,
            validate=validate,
            clean=clean,
            write_concern=write_concern,
            cascade=cascade,
            cascade_kwargs=cascade_kwargs,
            _refs=_refs,
            save_condition=save_condition,
            **kwargs,
        )

    def clean_access(self, item=None):
        """
        Clean access rights

        :param item: None (all), "user" or "group"
        :return:
        """
        match = {"_id": self.id}
        if item == "user":
            update = {"$pull": {"access": {"user": {"$exists": True}}}}
        elif item == "group":
            update = {"$pull": {"access": {"group": {"$exists": True}}}}
        else:
            # Fixed: {"$pull": "access"} is not a valid update document —
            # clearing all rights needs $set to an empty list.
            update = {"$set": {"access": []}}
        # Fixed: Collection.update() was removed in PyMongo 4; update_one
        # matches the single-document intent here.
        self._get_collection().update_one(match, update)

    def to_json(self):
        """Serialize to the collection-sync JSON representation."""
        return to_json(
            {
                "title": self.title,
                "$collection": self._meta["json_collection"],
                "uuid": str(self.uuid),
                "description": self.description,
                "format": self.format,
                "config": smart_text(b85encode(self.config)),
                "created": self.created.isoformat(),
                "changed": self.changed.isoformat(),
                "access": [],
            },
            order=["title", "uuid", "description", "created"],
        )

    def get_json_path(self):
        return "%s.json" % self.uuid
class Dashboard(Document):
    """Dashboard: gzip'ed config plus a per-user/group access list."""

    meta = {
        "collection": "noc.dashboards",
        "strict": False,
        "auto_create_index": False,
        "indexes": ["owner", "tags"]
    }

    title = StringField()
    # Username
    owner = ForeignKeyField(User)
    #
    description = StringField()
    #
    tags = ListField(StringField())
    # Config format version
    format = IntField(default=1)
    # gzip'ed data
    config = BinaryField()
    #
    created = DateTimeField(default=datetime.datetime.now)
    changed = DateTimeField(default=datetime.datetime.now)
    #
    access = ListField(EmbeddedDocumentField(DashboardAccess))

    def __unicode__(self):
        return self.title

    def get_user_access(self, user):
        """Return the access level *user* has on this dashboard."""
        # Direct match as owner
        if user == self.owner or user.is_superuser:
            return DAL_ADMIN
        level = DAL_NONE
        groups = user.groups.all()
        for ar in self.access:
            if ar.user and ar.user == user:
                level = max(level, ar.level)
            if ar.group and ar.group in groups:
                level = max(level, ar.level)
            if level == DAL_ADMIN:
                return level
        return level

    def save(self, force_insert=False, validate=True, clean=True,
             write_concern=None, cascade=None, cascade_kwargs=None,
             _refs=None, save_condition=None, **kwargs):
        """Persist the dashboard, deduplicating the access list first."""
        # Split DashBoard Acces to {User, level}, {Group, level}
        # self.update(add_to_set__access=[parent_1, parent_2, parent_1])
        if "access" in getattr(self, '_changed_fields', []):
            # Check unique
            processed = []
            access = []
            # Fixed: sorting DashboardAccess objects without a key raises
            # TypeError on Python 3; sort by level (descending), which is
            # what the deduplication priority logic expects.
            for da in sorted(self.access, reverse=True,
                             key=lambda x: x.level):
                # Deduplicate rights
                # @todo changing priority (reverse order)
                if da.user and "u%d" % da.user.id in processed:
                    continue
                elif da.group and "g%d" % da.group.id in processed:
                    continue
                if da.user and da.group:
                    # Split User and Group rights
                    access += [
                        DashboardAccess(user=da.user.id, level=da.level),
                        DashboardAccess(group=da.group.id, level=da.level)
                    ]
                    processed += ["u%d" % da.user.id, "g%d" % da.group.id]
                    continue
                access += [da]
                if da.user:
                    processed += ["u%d" % da.user.id]
                if da.group:
                    processed += ["g%d" % da.group.id]
            self.access = access
        super(Dashboard, self).save(force_insert=force_insert,
                                    validate=validate,
                                    clean=clean,
                                    write_concern=write_concern,
                                    cascade=cascade,
                                    cascade_kwargs=cascade_kwargs,
                                    _refs=_refs,
                                    save_condition=save_condition,
                                    **kwargs)

    def clean_access(self, item=None):
        """
        Clean access rights

        :param item: None (all), "user" or "group"
        :return:
        """
        match = {"_id": self.id}
        if item == "user":
            update = {"$pull": {"access": {"user": {"$exists": True}}}}
        elif item == "group":
            update = {"$pull": {"access": {"group": {"$exists": True}}}}
        else:
            # Fixed: {"$pull": "access"} is not a valid update document —
            # clearing all rights needs $set to an empty list.
            update = {"$set": {"access": []}}
        self._get_collection().update(match, update)
class Persons(Document):
    """A known person together with their serialized face encodings."""

    name = StringField(required=True)
    face_encodings = BinaryField(required=True)