class Recipe(Document):
    """A cooking recipe document (mongoengine, fields aliased as ``f``)."""

    # Recipe title; unique across the collection.
    title = f.StringField(required=True, unique=True)
    # Optional illustration, stored in GridFS.
    image = f.FileField(required=False)
    # Recipe body text.
    content = f.StringField(required=True)
    # User-flagged favorite; defaults to not-favorite.
    favorite = f.BooleanField(default=False)
    # Course type, restricted to the three listed values.
    type_recipe = f.StringField(required=True,
                                choices=['starter', 'main', 'dessert'])
class MessageDocument(Document):
    """Chat message stored in the 'messages' collection, ordered newest first."""

    # Optional message body.
    text = fields.StringField(required=False)
    # Attached images, kept in the 'message_images' GridFS collection.
    images = fields.ListField(
        fields.ImageField(collection_name='message_images'),
        required=False)
    # Single attached file, kept in the 'message_documents' GridFS collection.
    document = fields.FileField(collection_name='message_documents',
                                required=False)
    # Session that authored the message.
    session = fields.ReferenceField(document_type=SessionDocument,
                                    required=True)
    # Room the message was posted to.
    room = fields.ReferenceField(document_type=ChatRoomDocument,
                                 required=True)
    # Sessions that have seen this message.
    seen_by = fields.ListField(
        fields.ReferenceField(document_type=SessionDocument),
        required=False)
    # Stamped by set_timings (see below).
    created_at = fields.DateTimeField(required=False)

    meta = {'collection': 'messages', 'ordering': ['-created_at']}

    @classmethod
    def set_timings(cls, sender, document, **kwargs):
        """
        Stamp ``created_at`` with the current UTC time.

        Shaped like a mongoengine pre-save signal handler — presumably
        connected elsewhere; verify against the signal wiring.
        NOTE(review): despite the original wording ("Created and
        expiration"), only ``created_at`` is set here.

        :param sender: signal sender (the document class).
        :param document: the document instance being saved.
        :param kwargs: extra signal arguments (unused).
        :return: None
        """
        document.created_at = datetime.utcnow()
class Heroe(gj.Document):
    """Hero document ("Heroe" = hero)."""

    # Hero name, at most 50 characters.
    nombre = fields.StringField(max_length=50, required=True)
    # Optional embedded superpower.
    superpoder = fields.EmbeddedDocumentField(Superpoder, required=False)
    # File fields!
    # Backstory ("historia") stored in GridFS.
    historia = fields.FileField(required=False)
class TestGridFS(Document):
    """Document with a single GridFS-backed file, stored in the
    'test_gridfs' collection on the 'test' database alias."""

    # Arbitrary file stored in GridFS.
    template = fields.FileField()

    meta = {
        'db_alias': 'test',
        'collection': 'test_gridfs',
    }
class Propiedades_Horizontales(Document):
    """Horizontal-property (condominium) registry document.

    Field names are Spanish; brief translations given per field.
    """

    NIT = fields.StringField(required=True)           # Tax ID number (NIT).
    RAZON_SOCIAL = fields.StringField(required=True)  # Legal / business name.
    DESCRIPCION = fields.StringField()                # Free-form description.
    DIRECCION = fields.EmbeddedDocumentField(DireccionField)          # Address.
    REPRESENTANTE = fields.EmbeddedDocumentField(RepresentanteField)  # Legal representative.
    UBICACION = fields.EmbeddedDocumentField(UbicacionField)          # Location.
    TIPO = fields.StringField(required=True)          # Property type.
    VALOR_PROMEDIO_ADMINISTRACION = fields.IntField() # Average administration fee.
    ADMINISTRADOR = fields.EmbeddedDocumentField(AdministradorField)  # Administrator.
    CLASE_PROPIEDAD = fields.StringField()            # Property class.
    RESOLUCION = fields.FileField()                   # Resolution document (GridFS).
    ACTA_CONSEJO = fields.FileField()                 # Council minutes (GridFS).
    PARQUEADEROS = fields.EmbeddedDocumentField(ParqueaderoField)     # Parking info.
    GRUPOS = fields.EmbeddedDocumentField(GrupoField)                 # Groups.
    MODULOS = fields.ListField(fields.EmbeddedDocumentField(ModuloField))  # Modules.
    HABILITADO = fields.BooleanField()                # Enabled flag.
class Song(Entity):
    """Individual songs
    """

    title = fields.StringField()    # Track title.
    album = fields.StringField()    # Album name.
    artist = fields.StringField()   # Artist name.
    runtime = fields.IntField()     # Duration; unit not shown here — presumably seconds, confirm at callers.
    bitrate = fields.IntField()     # Encoding bitrate.
    song = fields.FileField()       # Audio payload stored in GridFS.
class Comprobantes(Document):
    """Payment voucher ("comprobante") linked to a horizontal property."""

    # Lazy reference to the owning Propiedades_Horizontales document.
    PROPIEDAD_ID = fields.LazyReferenceField('Propiedades_Horizontales',
                                             passthrough=False, dbref=False)
    UNIDAD_PRIVADA = fields.EmbeddedDocumentField(UnidadPrivadaField)  # Private unit.
    PERIODO = fields.StringField(required=True)  # Billing period.
    VALOR = fields.IntField()                    # Amount.
    DOCUMENTO = fields.FileField()               # Receipt file (GridFS).
    # Payment date; naive UTC timestamp at creation.
    FECHA_PAGO = fields.DateTimeField(default=datetime.datetime.utcnow)
class Noticias(DynamicDocument):
    """News/announcement ("noticia") for a horizontal property."""

    # Lazy reference to the owning property document.
    PROPIEDAD_ID = fields.LazyReferenceField(Propiedades_Horizontales,
                                             passthrough=False, dbref=False)
    TITULO = fields.StringField(required=True)   # Title.
    MENSAJE = fields.StringField(required=True)  # Message body.
    IMAGEN = fields.FileField()                  # Optional image (GridFS).
    TIPO = fields.StringField(required=True)     # Announcement type.
    FECHA_INICIO = fields.DateTimeField(default=datetime.datetime.utcnow)  # Start date (naive UTC).
    # End date; NOTE(review): defaults to "now", same as the start date — confirm intended.
    FECHA_FIN = fields.DateTimeField(default=datetime.datetime.utcnow)
    HABILITADO = fields.BooleanField()           # Enabled flag.
class Employee(Document):
    """Employee record with an optional attached file."""

    name = fields.StringField(required=True, max_length=250)  # Full name.
    username = fields.StringField(max_length=250)             # Login name.
    email = fields.EmailField()                               # Contact e-mail.
    emp_id = fields.IntField()                                # Numeric employee id.
    designation = fields.EmbeddedDocumentField(Designation)   # Embedded job title/role.
    file = fields.FileField()                                 # Attached file (GridFS).

    def __str__(self):
        # Display the employee by name.
        return self.name
class DBImageThumbnail(Document):
    """Thumbnail derived from a DBImage; capped at 200k documents.

    The unique index on ``DBImage`` enforces one thumbnail per source image.
    """

    # ObjectId of the source DBImage document.
    DBImage = fields.ObjectIdField(required=True)
    # Thumbnail bytes, stored in GridFS.
    file = fields.FileField(required=True)
    width = fields.IntField(required=True)        # Thumbnail width in pixels.
    height = fields.IntField(required=True)       # Thumbnail height in pixels.
    mime_type = fields.StringField(required=True) # e.g. image content type of `file`.

    meta = {
        'max_documents': 200000,
        'indexes': [{
            'fields': ['DBImage'],
            'unique': True
        }]
    }
class Archivo(EmbeddedDocument):
    """Embedded file descriptor: metadata plus the GridFS payload."""

    _id = fields.ObjectIdField(default=ObjectId)
    nombre = fields.StringField(required=True)
    tamaño = fields.IntField(required=True)
    extension = fields.StringField(required=True)
    fichero = fields.FileField()

    @property
    def size_to_mb(self):
        """Size in megabytes (``tamaño`` is bytes), rounded to 2 decimals."""
        bytes_per_mb = 1024 * 1024
        megabytes = self.tamaño / bytes_per_mb
        return round(megabytes, 2)

    @property
    def id_to_str(self):
        """String form of the embedded ``_id``."""
        return '{}'.format(self._id)
class Cuenta_Cobro(Document):
    """Billing statement ("cuenta de cobro") for a private unit."""

    UNIDAD_PRIVADA = fields.EmbeddedDocumentField(UnidadPrivadaField)  # Private unit billed.
    PERIODO = fields.StringField(required=True)      # Billing period.
    CONSECUTIVO = fields.StringField(required=True)  # Sequential statement number.
    # Statement date; naive UTC timestamp at creation.
    FECHA_CUENTA_COBRO = fields.DateTimeField(default=datetime.datetime.utcnow)
    PERIODO_COBRO = fields.EmbeddedDocumentField(PeriodoCobroField)  # Collection period detail.
    # Line items on the statement.
    ELEMENTOS = fields.ListField(fields.EmbeddedDocumentField(ElementoField))
    INTERESES_CAUSADOS = fields.FloatField()  # Accrued interest.
    SALDO_INTERESES = fields.FloatField()     # Outstanding interest balance.
    SALDO_CAPITAL = fields.FloatField()       # Outstanding principal balance.
    VALOR_TOTAL = fields.FloatField()         # Statement total.
    PROPIEDAD_HORIZONTAL = fields.EmbeddedDocumentField(PropiedadHorizontalField)  # Property snapshot.
    COMPROBANTE = fields.EmbeddedDocumentField(ComprobanteField)  # Linked payment voucher.
    DOCUMENTO = fields.FileField()            # Rendered statement file (GridFS).
    HABILITADO = fields.BooleanField()        # Enabled flag.
class LargeBinary(DynamicDocument):
    '''
    ORM model for large binaries.

    MongoDB restricts each document to 16MB, so binaries that may exceed
    that limit are stored through GridFS via mongoengine's FileField.

    References:
    - GridFS support in mongoengine: http://docs.mongoengine.org/guide/gridfs.html
    - GridFS: https://docs.mongodb.org/manual/core/gridfs/
    '''

    # Presumably the id of the owning document — TODO confirm at callers.
    parent_id = fields.ObjectIdField()
    variable = fields.StringField()   # Name of the stored variable.
    archiver = fields.StringField()   # Archiver/serializer identifier.
    binary = fields.FileField()       # GridFS-backed payload.
    updated = fields.DateTimeField(default=None)  # Last-update timestamp; None until set.
class compare(Document):
    """Pair of files submitted for comparison.

    NOTE(review): class name is lowercase (non-PEP 8) but is part of the
    public interface, so it is left unchanged.
    """

    # Submission time; naive UTC timestamp at creation.
    time = fields.DateTimeField(default=datetime.utcnow)
    first = fields.FileField()   # First file of the pair (GridFS).
    second = fields.FileField()  # Second file of the pair (GridFS).
class Minidump(mongo.Document):
    """A crash report: a Google Breakpad minidump plus its parsed stacktrace.

    The raw dump is kept both on disk (under DUMPS_DIR, where the Breakpad
    tools read it) and in GridFS via ``minidump``.
    """

    product = fields.StringField()  # Crashed application name
    version = fields.StringField()  # Crashed application version
    platform = fields.StringField()  # OS name
    filename = fields.StringField()           # Sanitized upload filename.
    minidump = fields.FileField()  # Google Breakpad minidump
    file_path = fields.StringField()          # On-disk copy of the dump.
    stacktrace = fields.StringField()         # minidump_stackwalk text output.
    stacktrace_json = fields.DictField()      # Stackwalker JSON output.
    date_created = fields.DateTimeField()
    crash_reason = fields.StringField()       # e.g. exception/signal type.
    crash_address = fields.StringField()
    crash_location = fields.StringField()     # "module + offset" or address.
    process_uptime = fields.IntField()        # Seconds the process ran before crashing.
    crash_thread = fields.IntField()

    meta = {'ordering': ['-date_created'], 'queryset_class': BaseQuerySet}

    @property
    def download_link(self):
        """URL to download the stored minidump from GridFS."""
        return url_for('crash-reports.download_minidump',
                       minidump_id=str(self.minidump.grid_id))

    def save_minidump_file(self, minidump_file):
        """Write the uploaded dump to disk and GridFS, then save the document.

        Best-effort: any failure is logged and swallowed.
        """
        if not self.minidump:
            pass  # (placeholder removed — see below)
        if not os.path.isdir(DUMPS_DIR):
            os.makedirs(DUMPS_DIR)
        try:
            self.filename = secure_filename(minidump_file.filename)
            target_path = self.get_target_minidump_path()
            minidump_file.save(target_path)
            with open(target_path, 'rb') as minidump:
                # replace() when a GridFS file already exists, put() otherwise.
                if self.minidump:
                    self.minidump.replace(minidump)
                else:
                    self.minidump.put(minidump)
            self.file_path = target_path
            self.save()
        except Exception as e:
            current_app.logger.exception(
                'Cannot save minidump file: {}'.format(e))

    def get_target_minidump_path(self):
        """On-disk path for this dump inside DUMPS_DIR."""
        return os.path.join(DUMPS_DIR, self.filename)

    def get_stacktrace(self):
        """Run minidump_stackwalk on the dump and store its text output."""
        minidump_path = self.file_path
        try:
            minidump_stackwalk_output = subprocess.check_output(
                [Config.MINIDUMP_STACKWALK, minidump_path, SYMFILES_DIR],
                stderr=subprocess.DEVNULL)
            self.stacktrace = minidump_stackwalk_output.decode()
            self.save()
        except subprocess.CalledProcessError as e:
            current_app.logger.exception(
                'Cannot get stacktrace: {}'.format(e))

    def parse_process_uptime(self):
        """Extract 'Process uptime:' from ``stacktrace`` into seconds.

        Handles both "N seconds" and "D days H:M:S.f" formats; skips the
        value when the tool reports it as not available.
        """
        line_start = 'Process uptime: '
        stacktrace_lines = self.stacktrace.split('\n')
        try:
            process_uptime_line = list(
                filter(lambda line: str.startswith(line, line_start),
                       stacktrace_lines))[0]
            raw_uptime = process_uptime_line.replace(line_start, '')
            if 'not available' not in raw_uptime.lower():
                if 'seconds' in raw_uptime:
                    uptime_seconds = int(raw_uptime.split()[0])
                else:
                    days, raw_hms = raw_uptime.split(' days ')
                    hms = datetime.strptime(raw_hms, '%H:%M:%S.%f')
                    uptime_seconds = timedelta(
                        days=int(days), hours=hms.hour, minutes=hms.minute,
                        seconds=hms.second).total_seconds()
                self.process_uptime = uptime_seconds
                self.save()
        except Exception as e:
            current_app.logger.exception(
                'Cannot parse process uptime: {}'.format(e))

    def parse_stacktrace(self):
        """Run the JSON stackwalker, derive crash fields, and upsert an Issue."""
        minidump_path = self.file_path
        try:
            stackwalker_output = subprocess.check_output(
                [Config.STACKWALKER, '--pretty', minidump_path, SYMFILES_DIR],
                stderr=subprocess.DEVNULL)
            self.stacktrace_json = json.loads(stackwalker_output.decode())
            crash_info = self.stacktrace_json.get('crash_info')
            if not crash_info:
                current_app.logger.error(
                    'Cannot parse stacktrace: No crash info provided.')
                return
            self.crash_reason = crash_info.get('type').split()[0]
            self.crash_address = crash_info.get('address')
            self.crash_thread = crash_info.get('crashing_thread')
            crashing_thread = self.stacktrace_json.get('crashing_thread')
            # Top frame of the crashing thread locates the crash.
            frame = crashing_thread.get('frames')[0]
            module = frame.get('module')
            module_offset = frame.get('module_offset')
            if module and module_offset:
                self.crash_location = '{} + {}'.format(module, module_offset)
            else:
                self.crash_location = self.crash_address
            self.save()
            self.parse_process_uptime()
            Issue.create_or_update_issue(product=self.product,
                                         version=self.version,
                                         platform=self.platform,
                                         reason=self.crash_reason,
                                         location=self.crash_location)
        except (subprocess.CalledProcessError, IndexError) as e:
            current_app.logger.exception(
                'Cannot parse stacktrace: {}'.format(e))

    def create_stacktrace(self):
        """Queue asynchronous minidump processing (Celery task)."""
        from oopsypad.server.worker import process_minidump
        process_minidump.delay(str(self.id))

    def remove_minidump(self):
        """Delete the on-disk file, the GridFS file, and this document."""
        if self.file_path:
            if os.path.isfile(self.file_path):
                try:
                    os.remove(self.file_path)
                except OSError as e:
                    current_app.logger.exception(
                        'Cannot remove minidump: {}'.format(e))
        if self.minidump:
            self.minidump.delete()
            self.save()
        self.delete()

    def get_time(self):
        """Creation time formatted as 'dd-mm-YYYY HH:MM'."""
        return self.date_created.strftime('%d-%m-%Y %H:%M')

    @classmethod
    def get_by_id(cls, minidump_id):
        """Return the Minidump with the given id, or None."""
        return cls.objects(id=minidump_id).first()

    @classmethod
    def create_minidump(cls, product, version, platform, minidump_file):
        """Create, persist, and queue processing for a new dump."""
        minidump = cls(product=product, version=version, platform=platform,
                       date_created=datetime.now())
        minidump.save_minidump_file(minidump_file)
        minidump.create_stacktrace()
        return minidump

    @classmethod
    def get_last_12_months_minidumps_counts(cls, queryset):
        """Per-month dump counts over the last 12 calendar months."""
        today = datetime.today().replace(day=1)
        counts = []
        for months in last_12_months():
            months_ago = today - relativedelta(months=months)
            one_more_months_ago = today - relativedelta(months=months - 1)
            months_ago_minidumps_count = queryset.filter(
                Q(date_created__lte=one_more_months_ago)
                & Q(date_created__gte=months_ago)).count()
            counts.append(months_ago_minidumps_count)
        return counts

    @classmethod
    def get_versions_per_product(cls, product):
        """Sorted list of distinct versions seen for ``product``."""
        return sorted(list(set([i.version
                                for i in cls.objects(product=product)])))

    @classmethod
    def get_last_n_project_minidumps(cls, n, project_name):
        """Most recent ``n`` dumps for a product (default ordering applies)."""
        project_minidumps = cls.objects(product=project_name)
        return project_minidumps[:n]

    def __str__(self):
        return 'Minidump: {} {} {} {}'.format(self.product, self.version,
                                              self.platform, self.filename)
class Symfile(mongo.Document):
    """Breakpad symbol file uploaded for a product build.

    The symbol file is stored both on disk (under SYMFILES_DIR, in the
    Breakpad ``<product>/<symfile_id>/`` layout that the stackwalk tools
    expect) and in GridFS via ``symfile``.
    """

    product = fields.StringField()                     # Application name.
    version = fields.StringField(required=True)        # Application version.
    platform = fields.StringField(required=True)       # OS name.
    symfile_name = fields.StringField(required=True)   # Sanitized upload filename.
    symfile_id = fields.StringField(required=True)     # Breakpad module id (one doc per id).
    symfile = fields.FileField(required=True)          # GridFS copy of the symbol file.
    date_created = fields.DateTimeField()              # Stamped on first save().

    def save_symfile(self, symfile):
        """Persist an uploaded symbol file to disk and GridFS, then save.

        Best-effort: failures are logged and swallowed.

        :param symfile: uploaded file object (has ``.filename`` and ``.save``).
        """
        try:
            self.symfile_name = secure_filename(symfile.filename)
            target_path = self.get_symfile_path()
            if not os.path.isdir(target_path):
                os.makedirs(target_path)
            symfile_path = os.path.join(target_path, self.symfile_name)
            symfile.save(symfile_path)
            with open(symfile_path, 'rb') as file:
                self.symfile.put(file,
                                 content_type='application/octet-stream',
                                 filename=self.symfile_name)
            self.save()
        except Exception as e:
            current_app.logger.exception(
                'Cannot save symfile: {}'.format(e))

    def get_symfile_path(self):
        """On-disk directory for this symfile (Windows builds use '<product>.pdb')."""
        if str(self.platform).lower() == "windows":
            product_name = "%s.pdb" % self.product
        else:
            product_name = self.product
        return os.path.join(SYMFILES_DIR, product_name, self.symfile_id)

    @classmethod
    def create_symfile(cls, product, version, platform, symfile_id, file):
        """Get-or-create a Symfile keyed by ``symfile_id``; store ``file`` on create."""
        symfile = cls.objects(symfile_id=symfile_id).first()
        if not symfile:
            symfile = cls(product=product, version=version, platform=platform,
                          symfile_id=symfile_id, date_created=datetime.now())
            symfile.save_symfile(file)
        return symfile

    def save(self, *args, **kwargs):
        """Stamp ``date_created`` on first save, then delegate to mongoengine.

        Bug fix: the original called ``super().save(**kwargs)``, silently
        dropping any positional arguments; they are now forwarded.
        """
        if not self.date_created:
            self.date_created = datetime.now()
        return super().save(*args, **kwargs)

    def __str__(self):
        return 'Symfile: {} {} {} {}'.format(self.product, self.version,
                                             self.platform, self.symfile_id)
class Test(Document):
    """Minimal document holding a single GridFS-backed file."""

    # Arbitrary file stored in GridFS.
    file = fields.FileField()
class dataB(Document):
    """Celebrity record, stored in the 'Celebs' collection.

    NOTE(review): class name is non-PEP 8 but is part of the public
    interface, so it is left unchanged.
    """

    meta = {"collection": "Celebs"}

    celeb_name = fields.StringField(required=True)  # Celebrity name.
    description = fields.StringField()              # Free-form description.
    upload_image = fields.FileField()               # Uploaded image (GridFS).
class ModelWithFileField(Document):
    """Document with one GridFS file, using a custom queryset
    (FileFieldHandlingQuerySet) — presumably to clean up GridFS blobs on
    delete; confirm against that queryset's implementation."""

    file_field = fields.FileField()

    meta = {"queryset_class": FileFieldHandlingQuerySet}
class Image(EmbeddedDocument):
    """Embedded named image: GridFS payload plus a display name."""

    image = fields.FileField()                    # Image bytes (GridFS).
    name = fields.StringField(max_length=100)     # Display name.
class MessageStore(BaseDocument):
    """
    A stored mail message with its filtering verdicts and metadata.

    NOTE: this class is Python 2 code (``iteritems``, ``except X, e`` syntax).

    TODO: is_spam, is_virus, ... as numeric fields: 0 or 1, for stats
    TODO: is_ham, is_spam, ... see rs_admin/mail_parser_utils.py
    TODO: ('X-Originating-IP', '[88.175.183.38]')
    """

    # 1 once parsing has finished (see _complete), 0 otherwise.
    completed = fields.IntField(default=0)
    group_name = fields.StringField(required=True, max_length=80,
                                    default=constants.GROUP_DEFAULT,
                                    verbose_name=gettext(u"Group"))
    domain_name = fields.StringField(max_length=63,
                                     verbose_name=gettext(u"Domain"))
    policy_uid = fields.StringField()
    quarantine_id = fields.StringField()
    # 1 = incoming, 0 = outgoing.
    is_in = fields.IntField(default=1,
                            verbose_name=gettext(u"Incoming message"),
                            help_text=gettext(u"Incoming or Outgoing Message"))
    # Unique storage key for the raw message.
    store_key = fields.StringField(required=True, unique=True)
    sent = fields.DateTimeField(verbose_name=gettext(u"Sent Date"))
    #sent_origin = fields.StringField()
    received = fields.DateTimeField(default=utils.timestamp,
                                    verbose_name=gettext(u"Received Date"))
    headers = fields.DictField()
    # Raw (compressed) message body in GridFS; see parse_message().
    message = fields.FileField()
    #FIXME: do not use the name "size" !!!!
    size = fields.LongField(default=0, verbose_name=gettext(u"Size"))
    subject = fields.StringField(verbose_name=gettext(u"Subject"))
    message_id = fields.StringField()
    # Deliberately a plain StringField, not an EmailField.
    sender = fields.StringField(verbose_name=gettext(u"Sender"))
    rcpt = fields.ListField(fields.StringField(), default=[],
                            verbose_name=gettext(u"Recipients(s)"))
    rcpt_count = fields.IntField(default=0, verbose_name=gettext(u"Rcpts"),
                                 help_text=gettext(u"Number of recipients"))
    # Proxy (or other) mode, for recipients refused at delivery —
    # partial refusal only.
    rcpt_refused = fields.DictField(default={})
    # Original client IP, via xforward.
    client_address = fields.StringField(verbose_name=gettext(u"IP Address"))
    country = fields.StringField(
        verbose_name=gettext(u"Country"),
        help_text=gettext(u"Country based on ip address of sender smtp"))
    #receiveds_header = fields.SortedListField(fields.StringField(), default=[])
    is_bounce = fields.IntField(default=0, verbose_name=gettext(u"Bounce ?"))
    is_spam = fields.IntField(default=0, verbose_name=gettext(u"Spam ?"))
    is_virus = fields.IntField(default=0, verbose_name=gettext(u"Infected ?"))
    is_banned = fields.IntField(default=0, verbose_name=gettext(u"Banned ?"))
    is_unchecked = fields.IntField(default=0,
                                   verbose_name=gettext(u"Checked ?"))
    xforward = fields.DictField()
    # IP of the postfix SMTP server.
    server = fields.StringField()
    queue = fields.IntField(choices=constants.MESSAGE_QUEUE_CHOICES,
                            default=constants.MESSAGE_QUEUE_INCOMING)
    files_count = fields.IntField(
        default=0, verbose_name=gettext(u"Files"),
        help_text=gettext(u"Number of attachments in message"))
    files = fields.EmbeddedDocumentListField(MessageAttachment)
    events = fields.EmbeddedDocumentListField(MessageEvent)
    tags = fields.ListField(fields.StringField(), default=[])
    parsing_errors = fields.ListField(fields.StringField(), default=[])
    errors_count = fields.IntField(default=0)
    turing = fields.EmbeddedDocumentField(MessageTuring, required=False)
    metric = fields.IntField(default=0)

    def get_filter_result(self):
        """Single verdict string derived from the is_* flags, in
        priority order: VIRUS > SPAM > BANNED > UNCHECKED > CLEAN."""
        if self.is_virus:
            return "VIRUS"
        elif self.is_spam:
            return "SPAM"
        elif self.is_banned:
            return "BANNED"
        elif self.is_unchecked:
            return "UNCHECKED"
        else:
            return "CLEAN"

    filter_result = property(fget=get_filter_result)

    def _complete(self):
        """
        Complete parsing message: merge fields computed by
        message_complete() and identify() into this document, mark it
        completed, and save.
        """
        values = {'completed': 1}
        other_fields = message_complete(self.store_key,
                                        self.parse_message(pure_string=True),
                                        sender=self.sender)
        values.update(other_fields)
        values.update(
            identify(sender=self.sender,
                     client_address=self.client_address,
                     rcpt=self.rcpt))
        # Only copy keys that are actual document fields.
        fields = MessageStore._fields.keys()
        for key, value in values.iteritems():
            if key in fields:
                setattr(self, key, value)
        #TODO: validate / clean
        return self.save(
            force_insert=False
        )  #, validate, clean, write_concern, cascade, cascade_kwargs, _refs, save_condition)

    def parse_message(self, pure_string=False):
        """Uncompress the stored raw message; return it as a string when
        ``pure_string``, otherwise as a parsed message object (falling
        back to recover() on parse failure)."""
        msg_string = utils.uncompress(self.message.read())
        try:
            if pure_string:
                return msg_string
            return from_string(msg_string)
        except Exception, err:
            logger.error(str(err))
            return recover(msg_string)
class Department(Document):
    """Department record with an optional attached file."""

    name = fields.StringField(required=True, max_length=250)  # Department name.
    file = fields.FileField()                                 # Attached file (GridFS).

    def __str__(self):
        # Display the department by name.
        return self.name
class Image(Document):
    """Named image document with an explicit format tag."""

    name = fields.StringField(required=True)          # Image name.
    image_format = fields.StringField(required=True)  # Format tag (e.g. file type).
    image = fields.FileField(required=True)           # Image bytes (GridFS).
class analyze(Document):
    """Photo submitted for analysis.

    NOTE(review): class name is lowercase (non-PEP 8) but is part of the
    public interface, so it is left unchanged.
    """

    # Submission time; naive UTC timestamp at creation.
    time = fields.DateTimeField(default=datetime.utcnow)
    photo = fields.FileField()  # Photo bytes (GridFS).
class Tenant(Document, AsyncTTLUploadsMixin):
    """
    The :class:`~core.models.Tenant` represents the tenant organization
    in Vosae's SaaS environment.

    NOTE: this class is Python 2 code (``print`` statement, ``iteritems``).
    """
    from contacts.models import Address

    # Referenced file fields whose uploads get a TTL (see AsyncTTLUploadsMixin).
    RELATED_WITH_TTL = ['svg_logo', 'img_logo', 'terms']

    slug = SlugField(required=True, max_length=64, unique=True)
    name = fields.StringField(required=True, max_length=128)
    postal_address = fields.EmbeddedDocumentField("Address", required=True)
    billing_address = fields.EmbeddedDocumentField("Address", required=True)
    email = fields.EmailField(required=True, max_length=256)
    phone = fields.StringField(max_length=16)
    fax = fields.StringField(max_length=16)
    svg_logo = fields.ReferenceField("VosaeFile")
    img_logo = fields.ReferenceField("VosaeFile")
    # Resized copy of img_logo, kept in GridFS (see manage_logos).
    logo_cache = fields.FileField()
    terms = fields.ReferenceField("VosaeFile")
    registration_info = RegistrationInfoField(required=True)
    report_settings = fields.EmbeddedDocumentField(
        "ReportSettings", required=True, default=lambda: ReportSettings())
    tenant_settings = fields.ReferenceField(
        "TenantSettings", required=True, default=lambda: TenantSettings())

    meta = {
        "indexes": ["slug"]
    }

    def __unicode__(self):
        return self.name

    @classmethod
    def post_init(self, sender, document, **kwargs):
        """On init of an unsaved tenant, back-link it from its settings."""
        if not document.id:
            document.tenant_settings.tenant = document

    @classmethod
    def pre_save(self, sender, document, **kwargs):
        """
        If the slug does not exist (eg. on creation), it is generated.
        """
        if not document.id:
            # TenantSettings and Tenant are cross referenced
            # We need an id to reference Tenant from TenantSettings
            document.id = ObjectId()
            document.tenant_settings.save()
        if not document.slug:
            document.slug = generate_unique_slug(
                document, document._fields.get('slug'), document.name)
        # Manage logos
        document.manage_logos()

    @classmethod
    def pre_save_post_validation(self, sender, document, **kwargs):
        """
        The :class:`~core.models.Tenant` is associated to a Django group.
        If the group doesn't exist, it is created.
        """
        Group.objects.get_or_create(name=document.slug)

    @classmethod
    def post_save(self, sender, document, created, **kwargs):
        """
        If created, the :class:`~core.models.Tenant` should be initialized.
        """
        from core.models import VosaeGroup
        # Removed related TTL
        document.remove_related_ttl()
        if created:
            # Ensure that an index with the current search settings is present in ElasticSearch
            # Done synchronously since we can't currently chain all the related tasks from here
            conn = pyes.ES(settings.ES_SERVERS, basic_auth=settings.ES_AUTH)
            conn.ensure_index(document.slug, get_search_settings())
            # Creates an admin group
            admin_group = VosaeGroup(tenant=document,
                                     name=pgettext('group_name', 'Administrators'),
                                     is_admin=True)
            for perm, perm_data in admin_group.permissions.perms.iteritems():
                admin_group.permissions.perms[perm]['authorization'] = True
            admin_group.save()

    def delete(self, force=False, cascade=True, *args, **kwargs):
        """
        Secure hook to delete tenants.

        :param force: security, must explicitely set force to True to confirm deletion
        :param cascade: also deletes all linked documents, default to True
        """
        errors = 0
        if force:
            if cascade:
                from contacts import models as contacts_models
                from core import models as core_models
                from data_liberation import models as data_liberation_models
                from invoicing import models as invoicing_models
                from notification import models as notification_models
                from organizer import models as organizer_models
                from timeline import models as timeline_models
                from vosae_settings import models as vosae_settings_models
                models_to_delete = [
                    contacts_models.Entity,
                    contacts_models.ContactGroup,
                    core_models.VosaeFile,
                    core_models.VosaeGroup,
                    core_models.VosaeUser,
                    data_liberation_models.Export,
                    invoicing_models.InvoiceBase,
                    invoicing_models.Item,
                    invoicing_models.Payment,
                    invoicing_models.Tax,
                    notification_models.Notification,
                    organizer_models.Calendar,
                    organizer_models.CalendarList,
                    organizer_models.VosaeEvent,
                    timeline_models.TimelineEntry,
                    vosae_settings_models.TenantSettings,
                ]
                for model in models_to_delete:
                    try:
                        # These models need per-object forced deletion first.
                        if model in [contacts_models.Entity,
                                     core_models.VosaeGroup,
                                     core_models.VosaeUser,
                                     invoicing_models.Tax]:
                            for obj in model.objects(tenant=self):
                                obj.delete(force=True)
                        model.objects(tenant=self).delete()
                    except:
                        errors += 1
            # Deletes the associated django Group
            try:
                Group.objects.get(name=self.slug).delete()
            except:
                errors += 1
            # Removes ElasticSearch index
            conn = pyes.ES(settings.ES_SERVERS, basic_auth=settings.ES_AUTH)
            try:
                conn.indices.delete_index(self.slug)
            except:
                errors += 1
            if errors:
                print '{0} errors occured'.format(errors)
            super(Tenant, self).delete(*args, **kwargs)

    def manage_logos(self):
        """Fit img_logo into 400x160 and refresh the cached copy; SVG
        logos are currently left untouched."""
        from PIL import Image
        if self.img_logo and self.img_logo.file and 'img_logo' in getattr(self, '_changed_fields', []):
            # Ensure appropriate fit
            image = Image.open(self.img_logo.file)
            if image.size[0] > 400 or image.size[1] > 160:
                image.thumbnail((400, 160), Image.ANTIALIAS)
                self.img_logo.file.seek(0)
                image.save(self.img_logo.file, image.format)
                self.img_logo.file.truncate()
                self.img_logo.file.seek(0)
            # Set the cache
            self.logo_cache.replace(self.img_logo.file)
            self.img_logo.file.seek(0)
            self.img_logo.file.close()
        elif self.svg_logo and self.svg_logo.file and 'svg_logo' in getattr(self, '_changed_fields', []):
            # Vector formats are not supported for now
            pass
class FileDoc(Document):
    """Document pairing a generic file and an image, each in its own
    GridFS collection."""

    # Generic file, stored in the 'files' GridFS collection.
    f = fields.FileField(collection_name='files')
    # Image, stored in the 'images' GridFS collection.
    i = fields.ImageField(collection_name='images')
class EmbeddedFile(EmbeddedDocument):
    """Embedded file: required GridFS payload plus an optional filename."""

    filename = fields.StringField()          # Optional display filename.
    doc = fields.FileField(required=True)    # File bytes (GridFS).