def __init__(self, grid_id=None, key=None, instance=None):
    """Set up the GridFS proxy state, optionally bound to an existing file.

    :param grid_id: GridFS id of an already-stored file, if any.
    :param key: name of the field this proxy backs.
    :param instance: the document instance owning the file.
    """
    # GridFS filesystem handle on the default database
    self.fs = gridfs.GridFS(_get_db())
    # Buffer used while a file is being written in pieces
    self.newfile = None
    # GridFS id for the stored file (None until one is written)
    self.grid_id = grid_id
    self.gridout = None
    self.key = key
    self.instance = instance
def __get__(self, instance, owner):
    """Descriptor to automatically dereference references.

    On first attribute access, replaces raw DBRefs (for ReferenceField
    items) or SON/dict payloads (for GenericReferenceField items) in the
    stored list with the corresponding document instances, caching the
    result back into ``instance._data``.
    """
    if instance is None:
        # Document class being used rather than a document object
        return self

    if isinstance(self.field, ReferenceField):
        referenced_type = self.field.document_type
        # Get value from document instance if available
        value_list = instance._data.get(self.name)
        if value_list:
            deref_list = []
            for value in value_list:
                # Dereference DBRefs
                if isinstance(value, pymongo.dbref.DBRef):
                    dereferenced = _get_db().dereference(value)
                    if dereferenced is not None:
                        deref_list.append(
                            referenced_type._from_son(dereferenced))
                    else:
                        # Bug fix: the referenced document no longer
                        # exists; previously _from_son(None) crashed.
                        # Skip the dangling reference but warn so the
                        # problem is visible (same convention as the
                        # warning-aware ListField variant in this file).
                        import warnings
                        warnings.warn(u"object " + unicode(value) +
                                      u" can not be dereferenced. Maybe it"
                                      u" has been deleted from the database")
                else:
                    deref_list.append(value)
            instance._data[self.name] = deref_list

    if isinstance(self.field, GenericReferenceField):
        value_list = instance._data.get(self.name)
        if value_list:
            deref_list = []
            for value in value_list:
                # Generic references are stored as SON/dict payloads
                if isinstance(value, (dict, pymongo.son.SON)):
                    deref_list.append(self.field.dereference(value))
                else:
                    deref_list.append(value)
            instance._data[self.name] = deref_list

    return super(ListField, self).__get__(instance, owner)
def dereference(self, value):
    """Resolve a generic-reference dict into a document instance.

    ``value`` carries ``_cls`` (document class name) and ``_ref`` (DBRef).
    Returns None when the referenced document no longer exists.
    """
    document_class = get_document(value['_cls'])
    raw = _get_db().dereference(value['_ref'])
    if raw is None:
        return None
    return document_class._from_son(raw)
def drop_collection(cls, is_sure=False):
    """Drops the entire collection associated with this
    :class:`~mongoengine.Document` type from the database.

    Does nothing unless ``is_sure`` is truthy — a safety guard against
    accidental data loss.
    """
    if not is_sure:
        return
    _get_db(cls.dbname).drop_collection(cls._meta['collection'])
def _get_collection(self):
    """Return the pymongo collection for the document, creating and
    caching it on first use.

    Honours capped-collection options from ``_meta`` (``max_size``,
    ``max_documents``).

    :raises InvalidCollectionError: if the collection already exists
        with capped options different from those requested.
    """
    db = _get_db()
    collection_name = self._get_collection_name()
    if not hasattr(self, '_collection') or self._collection is None:
        # Create collection as a capped collection if specified
        if self._meta['max_size'] or self._meta['max_documents']:
            # Get max document limit and max byte size from meta
            max_size = self._meta['max_size'] or 10000000  # 10MB default
            max_documents = self._meta['max_documents']

            if collection_name in db.collection_names():
                self._collection = db[collection_name]
                # The collection already exists, check if its capped
                # options match the specified capped options
                options = self._collection.options()
                if options.get('max') != max_documents or \
                        options.get('size') != max_size:
                    # Bug fix: interpolate the collection *name*; the
                    # original formatted the Collection object itself.
                    msg = ('Cannot create collection "%s" as a capped '
                           'collection as it already exists') % collection_name
                    raise InvalidCollectionError(msg)
            else:
                # Create the collection as a capped collection
                opts = {'capped': True, 'size': max_size}
                if max_documents:
                    opts['max'] = max_documents
                self._collection = db.create_collection(
                    collection_name, **opts
                )
        else:
            self._collection = db[collection_name]
    return self._collection
def _fetch_objects(self, doc_type=None):
    """Fetch all references and convert to their document objects
    """
    object_map = {}
    for col, dbrefs in self.reference_map.iteritems():
        keys = object_map.keys()
        # Only fetch references we have not resolved yet.
        # NOTE(review): object_map is keyed by document ids while this
        # filter compares str(dbref) — confirm these representations
        # actually match, otherwise the dedup never triggers.
        refs = list(set([dbref for dbref in dbrefs if str(dbref) not in keys]))
        if hasattr(col, 'objects'):  # We have a document class for the refs
            references = col.objects.in_bulk(refs)
            for key, doc in references.iteritems():
                object_map[key] = doc
        else:  # Generic reference: use the refs data to convert to document
            # A concrete (non-container) doc_type knows its own database,
            # so query it directly.
            if doc_type and not isinstance(doc_type, (ListField, DictField, MapField,)):
                references = doc_type._get_db()[col].find({'_id': {'$in': refs}})
                for ref in references:
                    doc = doc_type._from_son(ref)
                    object_map[doc.id] = doc
            else:
                # Fall back to the default database; use the stored _cls
                # to pick the right document class when present.
                references = _get_db()[col].find({'_id': {'$in': refs}})
                for ref in references:
                    if '_cls' in ref:
                        doc = get_document(ref["_cls"])._from_son(ref)
                    else:
                        doc = doc_type._from_son(ref)
                    object_map[doc.id] = doc
    return object_map
def exec_js(self, code, *fields, **options):
    """Execute a Javascript function on the server.

    A list of fields may be provided, which will be translated to their
    correct names and supplied as the arguments to the function. A few
    extra variables are added to the function's scope: ``collection``,
    which is the name of the collection in use; ``query``, which is an
    object representing the current query; and ``options``, which is an
    object containing any options specified as keyword arguments.

    :param code: a string of Javascript code to execute
    :param fields: fields that you will be using in your function, which
        will be passed in to your function as arguments
    :param options: options that you want available to the function
        (accessed in Javascript through the ``options`` object)
    """
    # Map MongoEngine field names to their database names before passing
    # them through as function arguments.
    translated = [QuerySet._translate_field_name(self._document, field)
                  for field in fields]
    scope = {
        'collection': self._document._meta['collection'],
        'query': self._query,
        'options': options or {},
    }
    wrapped = pymongo.code.Code(code, scope=scope)
    return _get_db().eval(wrapped, *translated)
def dereference(self, value):
    """Resolve a generic-reference dict into a loaded document.

    ``value`` carries ``_cls`` (document class name) and ``_ref``
    (DBRef). Looks the reference up in the class's configured database.
    Returns None when the referenced document no longer exists.
    """
    doc_cls = get_document(value['_cls'])
    raw = _get_db(doc_cls._meta['db_name']).dereference(value['_ref'])
    if raw is None:
        return None
    document = doc_cls._from_son(raw)
    # Mark the document as fully loaded from the database
    document._default_load_status = FieldStatus.LOADED
    return document
def generate_new_value(self):
    """Atomically increment and return the next counter value.

    The counter document is keyed by ``<collection>.<field name>`` and
    is created on first use (upsert).
    """
    sequence_id = "{0}.{1}".format(
        self.owner_document._get_collection_name(), self.name)
    result = _get_db()[self.collection_name].find_and_modify(
        query={"_id": sequence_id},
        update={"$inc": {"next": 1}},
        new=True,     # return the post-increment document
        upsert=True,  # create the counter if it does not exist yet
    )
    return result["next"]
def __get__(self, instance, owner):
    """Descriptor for instantiating a new QuerySet object when
    Document.objects is accessed.

    Caches pymongo collections per (db, collection) pair, creating a
    capped collection on first access when ``_meta`` requests one.

    :raises InvalidCollectionError: if the collection already exists
        with different capped options than requested.
    """
    if instance is not None:
        # Accessed through a document instance — the manager itself is
        # returned rather than a QuerySet.
        return self

    # we can't do async queries if we're on the root greenlet since we have
    # nothing to yield back to
    allow_async = bool(greenlet.getcurrent().parent)

    db = _get_db(owner._meta['db_name'], allow_async=allow_async)
    collection = owner._meta['collection']
    if (db, collection) not in self._collections:
        # Create collection as a capped collection if specified
        if owner._meta['max_size'] or owner._meta['max_documents']:
            # Get max document limit and max byte size from meta
            max_size = owner._meta['max_size'] or 10000000  # 10MB default
            max_documents = owner._meta['max_documents']

            if collection in db.collection_names():
                self._collections[(db, collection)] = db[collection]
                # The collection already exists, check if its capped
                # options match the specified capped options
                options = self._collections[(db, collection)].options()
                if options.get('max') != max_documents or \
                        options.get('size') != max_size:
                    msg = ('Cannot create collection "%s" as a capped '
                           'collection as it already exists') % collection
                    raise InvalidCollectionError(msg)
            else:
                # Create the collection as a capped collection
                opts = {'capped': True, 'size': max_size}
                if max_documents:
                    opts['max'] = max_documents
                self._collections[(db, collection)] = db.create_collection(
                    collection, **opts
                )
        else:
            self._collections[(db, collection)] = db[collection]

    # owner is the document that contains the QuerySetManager
    queryset_class = owner._meta['queryset_class'] or QuerySet
    queryset = queryset_class(owner, self._collections[(db, collection)])
    if self._manager_func:
        # Manager functions may take either (queryset) or (owner, queryset)
        if self._manager_func.func_code.co_argcount == 1:
            queryset = self._manager_func(queryset)
        else:
            queryset = self._manager_func(owner, queryset)
    return queryset
def __get__(self, instance, owner):
    """Descriptor for instantiating a new QuerySet object when
    Document.objects is accessed.

    Caches the pymongo collection on the manager, creating a capped
    collection on first access when ``_meta`` requests one. Supports a
    Django-style custom manager via ``owner._manager``.

    :raises InvalidCollectionError: if the collection already exists
        with different capped options than requested.
    """
    if instance is not None:
        # Accessed through a document instance — return the manager
        return self

    if self._collection is None:
        db = _get_db()
        collection = owner._meta["collection"]
        # Create collection as a capped collection if specified
        if owner._meta["max_size"] or owner._meta["max_documents"]:
            # Get max document limit and max byte size from meta
            max_size = owner._meta["max_size"] or 10000000  # 10MB default
            max_documents = owner._meta["max_documents"]

            if collection in db.collection_names():
                self._collection = db[collection]
                # The collection already exists, check if its capped
                # options match the specified capped options
                options = self._collection.options()
                if options.get("max") != max_documents or options.get("size") != max_size:
                    msg = (
                        'Cannot create collection "%s" as a capped '
                        "collection as it already exists"
                    ) % collection
                    raise InvalidCollectionError(msg)
            else:
                # Create the collection as a capped collection
                opts = {"capped": True, "size": max_size}
                if max_documents:
                    opts["max"] = max_documents
                # Bug fix: options must be expanded as keyword arguments;
                # passing the dict positionally is not a valid
                # create_collection call (siblings use **opts).
                self._collection = db.create_collection(collection, **opts)
        else:
            self._collection = db[collection]

    # owner is the document that contains the QuerySetManager.
    # Allow the document to supply its own QuerySet subclass, imitating
    # Django's models.Manager.
    if hasattr(owner, "_manager"):
        queryset = owner._manager(owner, self._collection)
    else:
        queryset = QuerySet(owner, self._collection)
    if self._manager_func:
        # Manager functions may take either (queryset) or (owner, queryset)
        if self._manager_func.func_code.co_argcount == 1:
            queryset = self._manager_func(queryset)
        else:
            queryset = self._manager_func(owner, queryset)
    return queryset
def __get__(self, instance, owner):
    """Descriptor to automatically dereference references.

    On first access, replaces the stored dict's values — ObjectIds (for
    StrictReferenceField), DBRefs (for ReferenceField) or SON/dict
    payloads (for GenericReferenceField) — with document instances, and
    caches the result back into ``instance._data``.
    """
    if instance is None:
        # Document class being used rather than a document object
        return self

    if isinstance(self.field, StrictReferenceField):
        referenced_type = self.field.document_type
        # Get value from document instance if available
        value_dict = instance._data.get(self.name)
        if value_dict:
            deref_dict = {}
            for key, value in value_dict.iteritems():
                # Strict references are stored as bare ObjectIds
                if isinstance(value, pymongo.objectid.ObjectId):
                    deref_dict[key] = referenced_type.objects.with_id(value)
                else:
                    deref_dict[key] = value
            # Bug fix: originally assigned the undefined name
            # ``deref_list`` here, raising NameError.
            instance._data[self.name] = deref_dict

    if isinstance(self.field, ReferenceField):
        referenced_type = self.field.document_type
        # Get value from document instance if available
        value_dict = instance._data.get(self.name)
        if value_dict:
            deref_dict = {}
            for key, value in value_dict.iteritems():
                # Dereference DBRefs
                if isinstance(value, pymongo.dbref.DBRef):
                    raw = _get_db().dereference(value)
                    deref_dict[key] = referenced_type._from_son(raw)
                else:
                    deref_dict[key] = value
            # Bug fix: originally assigned the undefined name
            # ``deref_list`` here, raising NameError.
            instance._data[self.name] = deref_dict

    if isinstance(self.field, GenericReferenceField):
        value_dict = instance._data.get(self.name)
        if value_dict:
            deref_dict = {}
            for key, value in value_dict.iteritems():
                # Generic references are stored as SON/dict payloads
                if isinstance(value, (dict, pymongo.son.SON)):
                    deref_dict[key] = self.field.dereference(value)
                else:
                    deref_dict[key] = value
            instance._data[self.name] = deref_dict

    return super(TypedDictField, self).__get__(instance, owner)
def _pymongo(cls, use_async=True):
    """Return the raw pymongo collection for this class, cached per
    async mode.

    :param use_async: request an async-capable connection; forced off
        when running on the root greenlet.
    """
    # we can't do async queries if we're on the root greenlet since we
    # have nothing to yield back to
    use_async &= bool(greenlet.getcurrent().parent)

    if not hasattr(cls, '_pymongo_collection'):
        cls._pymongo_collection = {}
    if use_async not in cls._pymongo_collection:
        db = _get_db(cls._meta['db_name'], allow_async=use_async)
        cls._pymongo_collection[use_async] = db[cls._meta['collection']]
    return cls._pymongo_collection[use_async]
def __get__(self, instance, owner):
    """Descriptor to allow lazy dereferencing.

    Replaces a stored DBRef with the referenced document instance the
    first time the attribute is read, caching it in ``instance._data``.
    """
    if instance is None:
        # Accessed on the class, not an instance
        return self

    stored = instance._data.get(self.name)
    # Dereference a raw DBRef on first access
    if isinstance(stored, pymongo.dbref.DBRef):
        raw = _get_db().dereference(stored)
        if raw is not None:
            instance._data[self.name] = self.document_type._from_son(raw)
    return super(ReferenceField, self).__get__(instance, owner)
def dereference(self, dbref):
    """Resolve *dbref* into a document instance, or None.

    Prefers the document class's own ``by_id`` loader when it exists;
    otherwise dereferences via the class's configured database and marks
    the resulting document fully loaded.
    """
    if not dbref:
        return None
    doc_cls = self.document_type
    if hasattr(doc_cls, "by_id") and callable(doc_cls.by_id):
        # Custom loader returns a ready document instance
        doc = doc_cls.by_id(dbref.id)
    else:
        doc = _get_db(doc_cls._meta['db_name']).dereference(dbref)
        # NOTE(review): the original source was collapsed onto one line;
        # this nesting (post-processing only on the raw-dereference path)
        # is the reading consistent with the sibling dereference helper —
        # confirm against version history.
        if doc is not None:
            doc = doc_cls._from_son(doc)
            # Mark every field as loaded so lazy-loading is skipped
            doc._all_loaded = True
            doc._default_load_status = FieldStatus.LOADED
    return doc
def __get__(self, instance, owner):
    """Descriptor for instantiating a new QuerySet object when
    Document.objects is accessed.

    Caches pymongo collections per (db, collection) pair, creating a
    capped collection on first access when ``_meta`` requests one.

    :raises InvalidCollectionError: if the collection already exists
        with different capped options than requested.
    """
    if instance is not None:
        # Accessed through a document instance — return the manager
        return self

    db = _get_db()
    collection = owner._meta['collection']
    # Bug fix: cache per (db, collection) rather than per db alone — a
    # manager shared by several document classes on the same database
    # would otherwise hand every class the first class's collection.
    if (db, collection) not in self._collections:
        # Create collection as a capped collection if specified
        if owner._meta['max_size'] or owner._meta['max_documents']:
            # Get max document limit and max byte size from meta
            max_size = owner._meta['max_size'] or 10000000  # 10MB default
            max_documents = owner._meta['max_documents']

            if collection in db.collection_names():
                self._collections[(db, collection)] = db[collection]
                # The collection already exists, check if its capped
                # options match the specified capped options
                options = self._collections[(db, collection)].options()
                if options.get('max') != max_documents or \
                        options.get('size') != max_size:
                    msg = ('Cannot create collection "%s" as a capped '
                           'collection as it already exists') % collection
                    raise InvalidCollectionError(msg)
            else:
                # Create the collection as a capped collection
                opts = {'capped': True, 'size': max_size}
                if max_documents:
                    opts['max'] = max_documents
                self._collections[(db, collection)] = db.create_collection(
                    collection, **opts
                )
        else:
            self._collections[(db, collection)] = db[collection]

    # owner is the document that contains the QuerySetManager
    queryset = QuerySet(owner, self._collections[(db, collection)])
    if self._manager_func:
        # Manager functions may take either (queryset) or (owner, queryset)
        if self._manager_func.func_code.co_argcount == 1:
            queryset = self._manager_func(queryset)
        else:
            queryset = self._manager_func(owner, queryset)
    return queryset
def __get__(self, instance, owner):
    """Descriptor for instantiating a new QuerySet object when
    Document.objects is accessed.

    Caches pymongo collections per (db, collection) pair, creating a
    capped collection on first access when ``_meta`` requests one.

    :raises InvalidCollectionError: if the collection already exists
        with different capped options than requested.
    """
    if instance is not None:
        # Accessed through a document instance — the manager itself is
        # returned rather than a QuerySet.
        return self

    db = _get_db()
    collection = owner._meta['collection']
    if (db, collection) not in self._collections:
        # Create collection as a capped collection if specified
        if owner._meta['max_size'] or owner._meta['max_documents']:
            # Get max document limit and max byte size from meta
            max_size = owner._meta['max_size'] or 10000000  # 10MB default
            max_documents = owner._meta['max_documents']

            if collection in db.collection_names():
                self._collections[(db, collection)] = db[collection]
                # The collection already exists, check if its capped
                # options match the specified capped options
                options = self._collections[(db, collection)].options()
                if options.get('max') != max_documents or \
                        options.get('size') != max_size:
                    msg = ('Cannot create collection "%s" as a capped '
                           'collection as it already exists') % collection
                    raise InvalidCollectionError(msg)
            else:
                # Create the collection as a capped collection
                opts = {'capped': True, 'size': max_size}
                if max_documents:
                    opts['max'] = max_documents
                self._collections[(db, collection)] = db.create_collection(
                    collection, **opts)
        else:
            self._collections[(db, collection)] = db[collection]

    # owner is the document that contains the QuerySetManager
    queryset_class = owner._meta['queryset_class'] or QuerySet
    queryset = queryset_class(owner, self._collections[(db, collection)])
    if self._manager_func:
        # Manager functions may take either (queryset) or (owner, queryset)
        if self._manager_func.func_code.co_argcount == 1:
            queryset = self._manager_func(queryset)
        else:
            queryset = self._manager_func(owner, queryset)
    return queryset
def exec_js(self, code, *fields, **options):
    """Execute a Javascript function on the server. A list of fields may be
    provided, which will be translated to their correct names and supplied
    as the arguments to the function. A few extra variables are added to
    the function's scope: ``collection``, which is the name of the
    collection in use; ``query``, which is an object representing the
    current query; and ``options``, which is an object containing any
    options specified as keyword arguments.

    As fields in MongoEngine may use different names in the database (set
    using the :attr:`db_field` keyword argument to a :class:`Field`
    constructor), a mechanism exists for replacing MongoEngine field names
    with the database field names in Javascript code. When accessing a
    field, use square-bracket notation, and prefix the MongoEngine field
    name with a tilde (~).

    :param code: a string of Javascript code to execute
    :param fields: fields that you will be using in your function, which
        will be passed in to your function as arguments
    :param options: options that you want available to the function
        (accessed in Javascript through the ``options`` object)
    """
    # Substitute ~field references in the Javascript with db field names
    code = self._sub_js_fields(code)

    fields = [
        QuerySet._translate_field_name(self._document, f)
        for f in fields
    ]
    collection = self._document._meta['collection']

    scope = {
        'collection': collection,
        'options': options or {},
    }

    query = self._query
    # NOTE(review): if self._query returns a shared dict rather than a
    # fresh copy, adding '$where' here mutates the cached query — confirm.
    if self._where_clause:
        query['$where'] = self._where_clause

    scope['query'] = query
    code = pymongo.code.Code(code, scope=scope)

    db = _get_db()
    return db.eval(code, *fields)
def exec_js(self, code, *fields, **options):
    """Execute a Javascript function on the server.

    A list of fields may be provided, which will be translated to their
    correct names and supplied as the arguments to the function. A few
    extra variables are added to the function's scope: ``collection``,
    which is the name of the collection in use; ``query``, which is an
    object representing the current query; and ``options``, which is an
    object containing any options specified as keyword arguments.

    As fields in MongoEngine may use different names in the database (set
    using the :attr:`db_field` keyword argument to a :class:`Field`
    constructor), a mechanism exists for replacing MongoEngine field names
    with the database field names in Javascript code. When accessing a
    field, use square-bracket notation, and prefix the MongoEngine field
    name with a tilde (~).

    :param code: a string of Javascript code to execute
    :param fields: fields that you will be using in your function, which
        will be passed in to your function as arguments
    :param options: options that you want available to the function
        (accessed in Javascript through the ``options`` object)
    """
    # Substitute ~field references in the Javascript with db field names
    code = self._sub_js_fields(code)

    translated = [QuerySet._translate_field_name(self._document, field)
                  for field in fields]

    scope = {
        'collection': self._document._meta['collection'],
        'options': options or {},
    }

    # Fold any active $where clause into the query handed to the script
    query = self._query
    if self._where_clause:
        query['$where'] = self._where_clause
    scope['query'] = query

    wrapped = pymongo.code.Code(code, scope=scope)
    return _get_db().eval(wrapped, *translated)
def __get__(self, instance, owner):
    """Descriptor to automatically dereference references.

    On first access, replaces raw DBRefs (ReferenceField items) or
    SON/dict payloads (GenericReferenceField items) in the stored list
    with document instances, caching the result in ``instance._data``.
    Dangling DBRefs are dropped from the list with a warning.
    """
    if instance is None:
        # Document class being used rather than a document object
        return self

    if isinstance(self.field, ReferenceField):
        referenced_type = self.field.document_type
        # Get value from document instance if available
        value_list = instance._data.get(self.name)
        if value_list:
            deref_list = []
            for value in value_list:
                # Dereference DBRefs
                if isinstance(value, (pymongo.dbref.DBRef)):
                    deref_value = _get_db().dereference(value)
                    if deref_value:
                        deref_list.append(referenced_type._from_son(deref_value))
                    else:
                        # ignore objects that can't be dereferenced
                        # but warn the user something went wrong
                        warnings.warn(u"object " + unicode(value) + " can not be dereferenced. Maybe it has been deleted from the database")
                else:
                    deref_list.append(value)
            instance._data[self.name] = deref_list

    if isinstance(self.field, GenericReferenceField):
        value_list = instance._data.get(self.name)
        if value_list:
            deref_list = []
            for value in value_list:
                # Generic references are stored as SON/dict payloads
                if isinstance(value, (dict, pymongo.son.SON)):
                    deref_list.append(self.field.dereference(value))
                else:
                    deref_list.append(value)
            instance._data[self.name] = deref_list

    return super(ListField, self).__get__(instance, owner)
def _fetch_objects(self, doc_type=None):
    """Fetch all references and convert to their document objects
    """
    object_map = {}
    for col, dbrefs in self.reference_map.iteritems():
        keys = object_map.keys()
        # Only fetch references we have not resolved yet.
        # NOTE(review): object_map is keyed by document ids while this
        # filter compares str(dbref) — confirm these representations
        # actually match, otherwise the dedup never triggers.
        refs = list(
            set([dbref for dbref in dbrefs if str(dbref) not in keys]))
        if hasattr(col, 'objects'):  # We have a document class for the refs
            references = col.objects.in_bulk(refs)
            for key, doc in references.iteritems():
                object_map[key] = doc
        else:  # Generic reference: use the refs data to convert to document
            references = _get_db()[col].find({'_id': {'$in': refs}})
            for ref in references:
                # Use the stored _cls to pick the right document class
                # when present; otherwise fall back to doc_type.
                if '_cls' in ref:
                    doc = get_document(ref['_cls'])._from_son(ref)
                else:
                    doc = doc_type._from_son(ref)
                object_map[doc.id] = doc
    return object_map
def drop_collection(cls):
    """Drops the entire collection associated with this
    :class:`~mongoengine.Document` type from the database.
    """
    _get_db().drop_collection(cls._meta['collection'])
def __init__(self):
    """Set up an empty GridFS proxy with no file attached yet."""
    # GridFS filesystem handle on the default database
    self.fs = gridfs.GridFS(_get_db())
    # Buffer used while a file is being written in pieces
    self.newfile = None
    # GridFS id for the stored file (None until one is written)
    self.grid_id = None
def __init__(self, grid_id=None):
    """Set up the GridFS proxy, optionally bound to an existing file.

    :param grid_id: GridFS id of an already-stored file, if any.
    """
    # GridFS filesystem handle on the default database
    self.fs = gridfs.GridFS(_get_db())
    # Buffer used while a file is being written in pieces
    self.newfile = None
    # GridFS id for the stored file
    self.grid_id = grid_id