def _validate(self, value, **kwargs):
    """Apply the generic constraints (choices, custom ``validation``
    callable) to *value*, then delegate to the field-specific
    ``validate`` method.
    """
    Document = _import_class('Document')
    EmbeddedDocument = _import_class('EmbeddedDocument')

    # Check the Choices Constraint
    if self.choices:
        if isinstance(self.choices[0], (list, tuple)):
            # (stored, display) pairs - keep only the stored values.
            options = [pair[0] for pair in self.choices]
        else:
            options = self.choices

        if isinstance(value, (Document, EmbeddedDocument)):
            # Choices which are other types of Documents
            matched = any(isinstance(value, option) for option in options)
            if not matched:
                self.error(
                    'Value must be instance of %s' % unicode(options)
                )
        # Choices which are types other than Documents
        elif value not in options:
            self.error('Value must be one of %s' % unicode(options))

    # check validation argument
    if self.validation is not None:
        if not callable(self.validation):
            raise ValueError('validation argument for "%s" must be a '
                             'callable.' % self.name)
        if not self.validation(value):
            self.error('Value does not match custom validation method')

    self.validate(value, **kwargs)
def cascade_save(self, *args, **kwargs):
    """Recursively save any references / generic references on an object.

    Fix: removed the redundant ``if ref and`` re-check (``ref`` is already
    known to be truthy after the earlier ``if not ref: continue``) and
    corrected the docstring grammar.
    """
    _refs = kwargs.get('_refs', []) or []

    ReferenceField = _import_class('ReferenceField')
    GenericReferenceField = _import_class('GenericReferenceField')

    for name, cls in list(self._fields.items()):
        if not isinstance(cls, (ReferenceField, GenericReferenceField)):
            continue

        ref = self._data.get(name)
        # Skip unset values and raw DBRefs (nothing to cascade into).
        if not ref or isinstance(ref, DBRef):
            continue

        # Skip references that report no pending changes.
        if not getattr(ref, '_changed_fields', True):
            continue

        # Identity key used to break cycles between mutually-referencing docs.
        ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data))
        if ref_id not in _refs:
            _refs.append(ref_id)
            kwargs["_refs"] = _refs
            ref.save(**kwargs)
            ref._changed_fields = []
def __setattr__(self, name, value):
    """Intercept attribute writes to support dynamic fields, change
    tracking, and shard-key immutability."""
    # Handle dynamic data only if an initialised dynamic document
    if self._dynamic and not self._dynamic_lock:
        field = None
        if not hasattr(self, name) and not name.startswith('_'):
            # Unknown public attribute on a dynamic document: create a
            # DynamicField for it on the fly.
            DynamicField = _import_class("DynamicField")
            field = DynamicField(db_field=name)
            field.name = name
            self._dynamic_fields[name] = field
        if not name.startswith('_'):
            # Convert raw containers into tracked / typed values.
            value = self.__expand_dynamic_values(name, value)
        # Handle marking data as changed
        if name in self._dynamic_fields:
            self._data[name] = value
            if hasattr(self, '_changed_fields'):
                self._mark_as_changed(name)
    # Shard keys may not be modified once the document has been saved
    # (_created is False after a save).
    if (self._is_document and not self._created and
            name in self._meta.get('shard_key', tuple()) and
            self._data.get(name) != value):
        OperationError = _import_class('OperationError')
        msg = "Shard Keys are immutable. Tried to update %s" % name
        raise OperationError(msg)
    # Check if the user has created a new instance of a class
    # (assigning a new id to a saved document makes it "new" again).
    if (self._is_document and self._initialised and self._created and
            name == self._meta['id_field']):
        super(BaseDocument, self).__setattr__('_created', False)
    super(BaseDocument, self).__setattr__(name, value)
def validate(self, value, clean=True):
    """If field is provided ensure the value is valid.

    Validates every item of a mapping or sequence against ``self.field``,
    collecting per-key errors.

    Fixes: the original tested ``hasattr(value, 'iteritems') or
    hasattr(value, 'items')`` but then unconditionally called
    ``value.iteritems()``, which raises AttributeError for mappings that
    only define ``items()`` (any Python 3 dict).  Exception clauses now
    use the ``as`` form, valid on Python 2.6+ and Python 3.
    """
    errors = {}
    if self.field:
        # Iterate (key, item) pairs for mappings, (index, item) otherwise.
        if hasattr(value, 'iteritems'):
            sequence = value.iteritems()
        elif hasattr(value, 'items'):
            sequence = iter(value.items())
        else:
            sequence = enumerate(value)
        EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
        GenericEmbeddedDocumentField = _import_class(
            "GenericEmbeddedDocumentField")
        for k, v in sequence:
            try:
                if isinstance(self.field, (EmbeddedDocumentField,
                                           GenericEmbeddedDocumentField)):
                    self.field._validate(v, clean=clean)
                else:
                    self.field._validate(v)
            except ValidationError as error:
                errors[k] = error.errors or error
            except (ValueError, AssertionError) as error:
                errors[k] = error
    # NOTE(review): collected errors are unused past this point in the
    # visible code - presumably the caller raises from them; confirm.
def _validate(self, value, **kwargs):
    """Run generic validation (choices + custom ``validation`` callable)
    on *value*, then delegate to the field-specific ``validate``.

    Fix: use the ``callable()`` builtin instead of
    ``collections.Callable`` - that alias was deprecated and removed from
    the top-level ``collections`` module in Python 3.10 (it lives in
    ``collections.abc``); ``callable()`` is equivalent and always available.
    """
    Document = _import_class('Document')
    EmbeddedDocument = _import_class('EmbeddedDocument')

    # check choices
    if self.choices:
        is_cls = isinstance(value, (Document, EmbeddedDocument))
        # Document choices are matched by class, plain choices by value.
        value_to_check = value.__class__ if is_cls else value
        err_msg = 'an instance' if is_cls else 'one'
        if isinstance(self.choices[0], (list, tuple)):
            option_keys = [k for k, v in self.choices]
            if value_to_check not in option_keys:
                msg = ('Value must be %s of %s' %
                       (err_msg, str(option_keys)))
                self.error(msg)
        elif value_to_check not in self.choices:
            msg = ('Value must be %s of %s' %
                   (err_msg, str(self.choices)))
            self.error(msg)

    # check validation argument
    if self.validation is not None:
        if callable(self.validation):
            if not self.validation(value):
                self.error('Value does not match custom validation method')
        else:
            raise ValueError('validation argument for "%s" must be a '
                             'callable.' % self.name)

    self.validate(value, **kwargs)
def __init__(self, list_items, instance, name):
    """Wrap ``list_items`` for change tracking on the owning document field."""
    owner_types = (_import_class("Document"),
                   _import_class("EmbeddedDocument"))
    if isinstance(instance, owner_types):
        # Weak proxy: don't keep the owning document alive via its data.
        self._instance = weakref.proxy(instance)
    self._name = name
    super(BaseList, self).__init__(list_items)
def __init__(self, dict_items, instance, name):
    """Wrap ``dict_items`` for change tracking on the owning document field."""
    owner_types = (_import_class('Document'),
                   _import_class('EmbeddedDocument'))
    if isinstance(instance, owner_types):
        # Weak proxy: avoid a reference cycle with the owner.
        self._instance = weakref.proxy(instance)
    self._name = name
    return super(BaseDict, self).__init__(dict_items)
def __init__(self, list_items, instance, name):
    """Wrap ``list_items``, keeping a strong reference to the owner."""
    owner_types = (_import_class('Document'),
                   _import_class('EmbeddedDocument'))
    if isinstance(instance, owner_types):
        # NOTE: strong reference here (unlike the weakref-based variants).
        self._instance = instance
    self._name = name
    return super(BaseList, self).__init__(list_items)
def __get__(self, instance, owner):
    """Descriptor to automatically dereference references."""
    if instance is None:
        # Document class being used rather than a document object
        return self

    ReferenceField = _import_class('ReferenceField')
    GenericReferenceField = _import_class('GenericReferenceField')
    EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')

    auto_dereference = instance._fields[self.name]._auto_dereference

    # Only dereference untyped containers or containers of references.
    dereference = (auto_dereference and
                   (self.field is None or isinstance(self.field,
                                                     (GenericReferenceField,
                                                      ReferenceField))))

    _dereference = _import_class('DeReference')()

    # First pass: dereference the raw stored data once, before wrapping.
    if (instance._initialised and
            dereference and
            instance._data.get(self.name) and
            not getattr(instance._data[self.name], '_dereferenced', False)):
        instance._data[self.name] = _dereference(
            instance._data.get(self.name), max_depth=1, instance=instance,
            name=self.name
        )
        if hasattr(instance._data[self.name], '_dereferenced'):
            instance._data[self.name]._dereferenced = True

    value = super(ComplexBaseField, self).__get__(instance, owner)

    # Convert lists / values so we can watch for any changes on them
    if isinstance(value, (list, tuple)):
        if (issubclass(type(self), EmbeddedDocumentListField) and
                not isinstance(value, EmbeddedDocumentList)):
            value = EmbeddedDocumentList(value, instance, self.name)
        elif not isinstance(value, BaseList):
            value = BaseList(value, instance, self.name)
        instance._data[self.name] = value
    elif isinstance(value, dict) and not isinstance(value, BaseDict):
        value = BaseDict(value, instance, self.name)
        instance._data[self.name] = value

    # Second pass: wrapped containers that still hold raw references.
    if (auto_dereference and instance._initialised and
            isinstance(value, (BaseList, BaseDict)) and
            not value._dereferenced):
        value = _dereference(
            value, max_depth=1, instance=instance, name=self.name
        )
        value._dereferenced = True
        instance._data[self.name] = value

    return value
def _lookup_field(cls, parts):
    """Lookup a field based on its attribute and return a list containing
    the field's parents and the field.

    :param parts: a split lookup path (or a single field name).
    :raises LookUpError: if a component cannot be resolved or a join
        through a reference field is attempted.

    Fix: removed a dead local assignment (``new_field = field.field``) in
    the ListField-index branch - the value was never used, since numeric
    parts are recorded verbatim and resolution continues against ``field``.
    """
    ListField = _import_class("ListField")

    if not isinstance(parts, (list, tuple)):
        parts = [parts]

    fields = []
    field = None

    for field_name in parts:
        # Handle ListField indexing: a purely numeric part indexes into a
        # list; record it verbatim and keep resolving against the ListField.
        if field_name.isdigit() and isinstance(field, ListField):
            fields.append(field_name)
            continue

        if field is None:
            # Look up first field from the document
            if field_name == 'pk':
                # Deal with "primary key" alias
                field_name = cls._meta['id_field']
            if field_name in cls._fields:
                field = cls._fields[field_name]
            elif cls._dynamic:
                DynamicField = _import_class('DynamicField')
                field = DynamicField(db_field=field_name)
            else:
                raise LookUpError('Cannot resolve field "%s"' % field_name)
        else:
            ReferenceField = _import_class('ReferenceField')
            GenericReferenceField = _import_class('GenericReferenceField')
            # MongoDB cannot join across reference fields.
            if isinstance(field, (ReferenceField, GenericReferenceField)):
                raise LookUpError('Cannot perform join in mongoDB: %s' %
                                  '__'.join(parts))
            if hasattr(getattr(field, 'field', None), 'lookup_member'):
                new_field = field.field.lookup_member(field_name)
            else:
                # Look up subfield on the previous field
                new_field = field.lookup_member(field_name)
            if not new_field and isinstance(field, ComplexBaseField):
                fields.append(field_name)
                continue
            elif not new_field:
                raise LookUpError('Cannot resolve field "%s"' % field_name)
            field = new_field  # update field to the new field type
        fields.append(field)
    return fields
def __get_changed_fields(self, inspected=None):
    """Returns a list of all fields that have explicitly been changed.

    Also mirrors changed embedded-document values into this document's
    ``_original_values`` mapping (mutated in place, not returned).
    """
    EmbeddedDocument = _import_class("EmbeddedDocument")
    DynamicEmbeddedDocument = _import_class("DynamicEmbeddedDocument")
    ReferenceField = _import_class("ReferenceField")
    SortedListField = _import_class("SortedListField")

    changed_fields = getattr(self, '_changed_fields', [])
    original_values = getattr(self, '_original_values', {})

    # `inspected` breaks cycles between mutually-referencing documents.
    inspected = inspected or set()
    if hasattr(self, 'id') and isinstance(self.id, Hashable):
        if self.id in inspected:
            return changed_fields
        inspected.add(self.id)

    for field_name in self._fields_ordered:
        db_field_name = self._db_field_map.get(field_name, field_name)
        key = '%s.' % db_field_name
        data = self._data.get(field_name, None)
        field = self._fields.get(field_name)

        if hasattr(data, 'id'):
            if data.id in inspected:
                continue
        if isinstance(field, ReferenceField):
            # References are saved separately; their changes don't count here.
            continue
        elif (isinstance(data, (EmbeddedDocument, DynamicEmbeddedDocument))
                and db_field_name not in changed_fields):
            # Find all embedded fields that have been changed
            changed = data._get_changed_fields(inspected)
            for k in changed:
                if k:
                    field_name = "%s%s" % (key, k)
                    if field_name not in changed_fields:
                        changed_fields.append(field_name)
                        # Propagate the embedded doc's original value under
                        # the dotted key.
                        if k in getattr(data, '_original_values', {}):
                            original_values[field_name] = getattr(
                                data, '_original_values', {})[k]
        elif (isinstance(data, (list, tuple, dict)) and
                db_field_name not in changed_fields):
            if (hasattr(field, 'field') and
                    isinstance(field.field, ReferenceField)):
                continue
            elif isinstance(field, SortedListField) and field._ordering:
                # if ordering is affected whole list is changed
                if any(map(lambda d: field._ordering in d._changed_fields,
                           data)):
                    changed_fields.append(db_field_name)
                    continue
            self._nestable_types_changed_fields(
                changed_fields, key, data, inspected)
    return changed_fields
def _to_mongo(self, role=None):
    """Serialize this document to SON, filtering fields by *role*.

    ``self._find_role`` presumably returns a (role, filter-callable)
    pair - TODO confirm against its definition.
    """
    data = SON()
    data["_id"] = None
    data['_cls'] = self._class_name

    EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
    GenericEmbeddedDocumentField = _import_class(
        "GenericEmbeddedDocumentField")

    parsed_role, role_filter = self._find_role(role=role)

    for field_name in self:
        value = self._data.get(field_name, None)
        field = self._fields.get(field_name)
        if field is None and self._dynamic:
            field = self._dynamic_fields.get(field_name)

        # Skip fields excluded by the active role filter.
        if parsed_role and role_filter:
            if role_filter(field_name, value):
                continue

        if value is not None:
            if isinstance(field, (EmbeddedDocumentField,
                                  GenericEmbeddedDocumentField)):
                # Propagate the role into embedded documents.
                value = field.to_mongo(value, role=role)
            else:
                value = field.to_mongo(value)

        # Handle self generating fields
        if value is None and field._auto_gen:
            value = field.generate()
            self._data[field_name] = value

        if value is not None:
            data[field.db_field] = value

    # If "_id" has not been set, then try and set it
    Document = _import_class("Document")
    if isinstance(self, Document):
        if data["_id"] is None:
            data["_id"] = self._data.get("id", None)

    # NOTE(review): `value` here is the stale last loop value - looks like
    # a bug; presumably the id value was intended. Confirm before changing.
    if data['_id'] is None or (role_filter and role_filter("id", value)):
        data.pop('_id')

    # Only add _cls if allow_inheritance is True
    if (not hasattr(self, '_meta') or
            not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)):
        data.pop('_cls')

    return data
def _get_changed_fields(self, inspected=None):
    """Returns a list of all fields that have explicitly been changed.

    Recurses into embedded documents and into list/dict containers,
    producing dotted db-field keys (e.g. ``"addr.city"``, ``"items.0.x"``).
    """
    EmbeddedDocument = _import_class("EmbeddedDocument")
    DynamicEmbeddedDocument = _import_class("DynamicEmbeddedDocument")
    ReferenceField = _import_class("ReferenceField")

    _changed_fields = []
    _changed_fields += getattr(self, '_changed_fields', [])

    # `inspected` breaks cycles between mutually-referencing documents.
    inspected = inspected or set()
    if hasattr(self, 'id'):
        if self.id in inspected:
            return _changed_fields
        inspected.add(self.id)

    for field_name in self._fields_ordered:
        db_field_name = self._db_field_map.get(field_name, field_name)
        key = '%s.' % db_field_name
        data = self._data.get(field_name, None)
        field = self._fields.get(field_name)

        if hasattr(data, 'id'):
            if data.id in inspected:
                continue
            inspected.add(data.id)
        if isinstance(field, ReferenceField):
            # References are saved separately.
            continue
        elif (isinstance(data, (EmbeddedDocument, DynamicEmbeddedDocument))
                and db_field_name not in _changed_fields):
            # Find all embedded fields that have been changed
            changed = data._get_changed_fields(inspected)
            _changed_fields += ["%s%s" % (key, k) for k in changed if k]
        elif (isinstance(data, (list, tuple, dict)) and
                db_field_name not in _changed_fields):
            # Loop list / dict fields as they contain documents
            # Determine the iterator to use
            if not hasattr(data, 'items'):
                iterator = enumerate(data)
            else:
                # Python 2 only (`iteritems`) - this snippet predates Py3.
                iterator = data.iteritems()
            for index, value in iterator:
                if not hasattr(value, '_get_changed_fields'):
                    continue
                if (hasattr(field, 'field') and
                        isinstance(field.field, ReferenceField)):
                    continue
                list_key = "%s%s." % (key, index)
                changed = value._get_changed_fields(inspected)
                _changed_fields += ["%s%s" % (list_key, k)
                                    for k in changed if k]
    return _changed_fields
def _get_changed_fields(self, inspected=None):
    """Return a list of all fields that have explicitly been changed.

    Recurses into embedded documents and containers, producing dotted
    db-field keys.  ``inspected`` carries already-visited document ids to
    break reference cycles.

    Fix: ``any([...])`` built a full intermediate list; a generator
    expression lets ``any`` short-circuit on the first match.
    """
    EmbeddedDocument = _import_class('EmbeddedDocument')
    DynamicEmbeddedDocument = _import_class('DynamicEmbeddedDocument')
    ReferenceField = _import_class('ReferenceField')
    SortedListField = _import_class('SortedListField')

    changed_fields = []
    changed_fields += getattr(self, '_changed_fields', [])

    inspected = inspected or set()
    if hasattr(self, 'id') and isinstance(self.id, Hashable):
        if self.id in inspected:
            return changed_fields
        inspected.add(self.id)

    for field_name in self._fields_ordered:
        db_field_name = self._db_field_map.get(field_name, field_name)
        key = '%s.' % db_field_name
        data = self._data.get(field_name, None)
        field = self._fields.get(field_name)

        if hasattr(data, 'id'):
            if data.id in inspected:
                continue
        if isinstance(field, ReferenceField):
            # References are saved separately.
            continue
        elif (
            isinstance(data, (EmbeddedDocument, DynamicEmbeddedDocument))
            and db_field_name not in changed_fields
        ):
            # Find all embedded fields that have been changed
            changed = data._get_changed_fields(inspected)
            changed_fields += ['%s%s' % (key, k) for k in changed if k]
        elif (isinstance(data, (list, tuple, dict)) and
                db_field_name not in changed_fields):
            if (hasattr(field, 'field') and
                    isinstance(field.field, ReferenceField)):
                continue
            elif isinstance(field, SortedListField) and field._ordering:
                # if ordering is affected whole list is changed
                if any(field._ordering in d._changed_fields for d in data):
                    changed_fields.append(db_field_name)
                    continue
            self._nestable_types_changed_fields(
                changed_fields, key, data, inspected)
    return changed_fields
def _validate_choices(self, value):
    """Ensure *value* satisfies the field's ``choices`` constraint."""
    Document = _import_class("Document")
    EmbeddedDocument = _import_class("EmbeddedDocument")

    options = self.choices
    if isinstance(options[0], (list, tuple)):
        # (stored, display) pairs - keep only the stored values.
        options = [stored for stored, _ in options]

    if isinstance(value, (Document, EmbeddedDocument)):
        # Choices which are other types of Documents
        matched = any(isinstance(value, option) for option in options)
        if not matched:
            self.error("Value must be instance of %s" % str(options))
    # Choices which are types other than Documents
    elif value not in options:
        self.error("Value must be one of %s" % str(options))
def __init__(self, cls):
    """Construct the no_dereference context manager.

    :param cls: the class to turn dereferencing off on
    """
    self.cls = cls

    deref_types = tuple(
        _import_class(type_name)
        for type_name in ('ReferenceField', 'GenericReferenceField',
                          'ComplexBaseField')
    )
    # Remember which fields are capable of dereferencing.
    self.deref_fields = [
        field_name
        for field_name, field in self.cls._fields.iteritems()
        if isinstance(field, deref_types)
    ]
def __getitem__(self, *args, **kwargs):
    """Fetch an item, late-binding the owning document on embedded docs."""
    item = super(BaseDict, self).__getitem__(*args, **kwargs)

    EmbeddedDocument = _import_class('EmbeddedDocument')
    needs_owner = (isinstance(item, EmbeddedDocument) and
                   item._instance is None)
    if needs_owner:
        item._instance = self._instance
    return item
def _get_changed_fields(self):
    """Returns a list of all fields that have explicitly been changed,
    including dotted paths into embedded documents and their containers."""
    changed_fields = set(self._changed_fields)
    EmbeddedDocumentField = _import_class("EmbeddedDocumentField")

    for field_name, field in self._fields.iteritems():
        if field_name in changed_fields:
            continue

        holds_embedded_container = (
            isinstance(field, ComplexBaseField) and
            isinstance(field.field, EmbeddedDocumentField))

        if holds_embedded_container:
            container = getattr(self, field_name, None)
            if not container:
                continue
            # Dicts iterate by key, lists by index.
            keys = (container if isinstance(container, dict)
                    else xrange(len(container)))
            for idx in keys:
                sub_changed = container[idx]._get_changed_fields()
                if sub_changed:
                    changed_fields |= set(
                        '.'.join([field_name, str(idx), sub])
                        for sub in sub_changed)
        elif isinstance(field, EmbeddedDocumentField):
            embedded = getattr(self, field_name, None)
            if embedded:
                sub_changed = embedded._get_changed_fields()
                if sub_changed:
                    changed_fields |= set(
                        '.'.join([field_name, sub])
                        for sub in sub_changed)

    return changed_fields
def __expand_dynamic_values(self, name, value):
    """Recursively convert raw container values into their tracked / typed
    equivalents."""
    if not isinstance(value, (dict, list, tuple)):
        return value

    if isinstance(value, dict):
        # A dict carrying '_cls' is a serialized document - rebuild it.
        if '_cls' in value:
            doc_cls = get_document(value['_cls'])
            return doc_cls(**value)
        value = {
            key: self.__expand_dynamic_values(key, item)
            for key, item in value.items()
        }
    else:
        value = [self.__expand_dynamic_values(name, item) for item in value]

    # Wrap containers so any changes on them can be watched.
    EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
    if isinstance(value, (list, tuple)) and not isinstance(value, BaseList):
        wrapper = (EmbeddedDocumentList
                   if issubclass(type(self), EmbeddedDocumentListField)
                   else BaseList)
        value = wrapper(value, self, name)
    elif isinstance(value, dict) and not isinstance(value, BaseDict):
        value = BaseDict(value, self, name)

    return value
def _geo_indices(cls, inspected=None, parent_field=None):
    """Collect geo index specs for this document, recursing into embedded
    documents; ``inspected`` prevents revisiting classes."""
    inspected = inspected or []
    inspected.append(cls)

    geo_field_types = tuple(
        _import_class(type_name)
        for type_name in ("EmbeddedDocumentField", "GeoPointField",
                          "PointField", "LineStringField", "PolygonField"))

    geo_indices = []
    for field in cls._fields.values():
        if not isinstance(field, geo_field_types):
            continue

        if hasattr(field, 'document_type'):
            # Embedded document: recurse, prefixing with this field's name.
            field_cls = field.document_type
            if field_cls in inspected:
                continue
            if hasattr(field_cls, '_geo_indices'):
                geo_indices += field_cls._geo_indices(
                    inspected, parent_field=field.db_field)
        elif field._geo_index:
            field_name = field.db_field
            if parent_field:
                field_name = "%s.%s" % (parent_field, field_name)
            geo_indices.append(
                {'fields': [(field_name, field._geo_index)]})

    return geo_indices
def __set__(self, instance, value):
    """Descriptor for assigning a value to a field in a document.

    Fix: narrowed a bare ``except:`` (which also swallowed
    ``KeyboardInterrupt``/``SystemExit``) to ``except Exception:``; the
    intent - mark changed when values cannot be compared - is preserved.
    """
    # If setting to None and there's a default,
    # then set the value to the default value.
    if value is None:
        if self.null:
            value = None
        elif self.default is not None:
            value = self.default
            if callable(value):
                value = value()

    if instance._initialised:
        try:
            if (self.name not in instance._data or
                    instance._data[self.name] != value):
                instance._mark_as_changed(self.name)
        except Exception:
            # Values can't be compared, e.g. naive and tz-aware datetimes.
            # So mark it as changed.
            instance._mark_as_changed(self.name)

    EmbeddedDocument = _import_class('EmbeddedDocument')
    if isinstance(value, EmbeddedDocument):
        # Weak proxy avoids a reference cycle document <-> embedded value.
        value._instance = weakref.proxy(instance)
    instance._data[self.name] = value
def __init__(self, dict_items, instance, name):
    """Wrap ``dict_items``, weakly referencing the owning document."""
    owner_type = _import_class('BaseDocument')
    if isinstance(instance, owner_type):
        # Weak proxy: don't keep the owner alive through its own data.
        self._instance = weakref.proxy(instance)
    self._name = name
    super(BaseDict, self).__init__(dict_items)
def delete(self, **write_concern):
    """Delete the :class:`~mongoengine.Document` from the database. This
    will only take effect if the document has been previously saved.

    :param write_concern: Extra keyword arguments are passed down which
        will be used as options for the resultant ``getLastError`` command.
        For example, ``save(..., write_concern={w: 2, fsync: True}, ...)``
        will wait until at least two servers have recorded the write and
        will force an fsync on the primary server.
    :raises OperationError: if the underlying delete fails.

    Fix: ``err.message`` does not exist on Python 3 exceptions - format
    the exception object itself instead.
    """
    signals.pre_delete.send(self.__class__, document=self)

    # Delete FileFields separately
    FileField = _import_class('FileField')
    for name, field in self._fields.items():
        if isinstance(field, FileField):
            getattr(self, name).delete()

    try:
        self._qs.filter(**self._object_key).delete(
            write_concern=write_concern, _from_doc_delete=True)
    except pymongo.errors.OperationFailure as err:
        message = 'Could not delete document (%s)' % err
        raise OperationError(message)

    signals.post_delete.send(self.__class__, document=self)
def __get__(self, instance, owner):
    """Descriptor to automatically dereference references.

    Cache-aware variant: when the owning instance exposes a ``_cache``,
    known documents are substituted from it instead of being re-fetched.
    NOTE(review): ``instance._cache`` semantics (mapping-like lookup plus
    an ``add`` method) are assumed from usage here - confirm its type.
    """
    if instance is None:
        # Document class being used rather than a document object
        return self

    # We only care about lists that contain documents/references here.
    # Code is adapted from `ComplexBaseField.__get__`.
    if isinstance(self.field, (GenericReferenceField, ReferenceField)):
        # dereference = self._auto_dereference
        _dereference = _import_class("DeReference")()
        self._auto_dereference = instance._fields[self.name]._auto_dereference

        # If we ever uncomment the piece below, make sure to include
        # something like `if hasattr( instance, '_cache' ) and
        # all( instance._cache[ doc ] for doc in value ): pass`
        # if instance._initialised and dereference:
        #     instance._data[self.name] = _dereference(
        #         instance._data.get(self.name), max_depth=1,
        #         instance=instance, name=self.name
        #     )

        # Skip `ComplexBaseField`, we're modifying that code right here;
        # retrieve document data from `BaseField`
        value = super(ComplexBaseField, self).__get__(instance, owner)

        # Convert lists to BaseList so we can watch for any changes on them
        if isinstance(value, (list, tuple)) and not isinstance(value, BaseList):
            value = BaseList(value, instance, self.name)
            instance._data[self.name] = value

        # If we have raw values, obtain documents; either from cache, or by
        # dereferencing
        if (self._auto_dereference and instance._initialised and
                isinstance(value, BaseList) and not value._dereferenced):
            # If we can find all objects in the cache, use it. Otherwise,
            # retrieve all of them.
            if (hasattr(instance, '_cache') and
                    all(instance._cache[doc] for doc in value)):
                for index, doc in enumerate(value):
                    super(BaseList, value).__setitem__(
                        index, instance._cache[doc])
            else:
                value = _dereference(
                    value, max_depth=1, instance=instance, name=self.name)
                value._dereferenced = True

                # For the list of retrieved documents, replace already known
                # entries with cached documents. Add others to the cache.
                if hasattr(instance, '_cache'):
                    for index, doc in enumerate(value):
                        if doc in instance._cache:
                            doc = instance._cache[doc]
                            # Be careful not to trigger `BaseList`
                            # append/remove again, since this'll get us an
                            # infinite loop
                            super(BaseList, value).__setitem__(index, doc)
                        else:
                            instance._cache.add(doc)

        instance._data[self.name] = value
    else:
        # If we're not dealing with documents/references, just call the super
        value = super(ListField, self).__get__(instance, owner)

    return value
def to_mongo(self, use_db_field=True):
    """Return as SON data ready for use with MongoDB.

    :param use_db_field: when True (default) keys are the fields'
        ``db_field`` names; when False, their attribute names.

    Fix: replaced the non-idiomatic ``use_db_field == False`` comparison
    with ``not use_db_field`` and dropped redundant parentheses in the
    isinstance check.
    """
    data = SON()
    data["_id"] = None
    data['_cls'] = self._class_name

    for field_name in self:
        value = self._data.get(field_name, None)
        field = self._fields.get(field_name)
        if field is None and self._dynamic:
            field = self._dynamic_fields.get(field_name)

        if value is not None:
            EmbeddedDocument = _import_class("EmbeddedDocument")
            if isinstance(value, EmbeddedDocument) and not use_db_field:
                # Propagate so nested keys also use attribute names.
                value = field.to_mongo(value, use_db_field)
            else:
                value = field.to_mongo(value)

        # Handle self generating fields
        if value is None and field._auto_gen:
            value = field.generate()
            self._data[field_name] = value

        if value is not None:
            if use_db_field:
                data[field.db_field] = value
            else:
                data[field.name] = value

    # If "_id" has not been set, then try and set it
    Document = _import_class("Document")
    if isinstance(self, Document):
        if data["_id"] is None:
            data["_id"] = self._data.get("id", None)

    if data['_id'] is None:
        data.pop('_id')

    # Only add _cls if allow_inheritance is True
    if (not hasattr(self, '_meta') or
            not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)):
        data.pop('_cls')

    return data
def to_python(self, value):
    """Convert a MongoDB-compatible type to a Python type."""
    if isinstance(value, six.string_types):
        return value
    if hasattr(value, 'to_python'):
        return value.to_python()

    BaseDocument = _import_class('BaseDocument')
    if isinstance(value, BaseDocument):
        # Something is wrong, return the value as it is
        return value

    # Normalize sequences into an index-keyed dict so both shapes share
    # the conversion code below.
    is_list = False
    if not hasattr(value, 'items'):
        try:
            is_list = True
            value = dict(enumerate(value))
        except TypeError:
            # Not iterable return the value
            return value

    if self.field:
        self.field._auto_dereference = self._auto_dereference
        converted = {key: self.field.to_python(item)
                     for key, item in value.items()}
    else:
        Document = _import_class('Document')
        converted = {}
        for key, item in value.items():
            if isinstance(item, Document):
                # We need the id from the saved object to create the DBRef
                if item.pk is None:
                    self.error('You can only reference documents once they'
                               ' have been saved to the database')
                collection = item._get_collection_name()
                converted[key] = DBRef(collection, item.pk)
            elif hasattr(item, 'to_python'):
                converted[key] = item.to_python()
            else:
                converted[key] = self.to_python(item)

    if is_list:
        # Convert back to a list, restoring original order by index.
        return [converted[idx] for idx in sorted(converted)]
    return converted
def select_related(self, max_depth=1):
    """Handles dereferencing of :class:`~bson.dbref.DBRef` objects to
    a maximum depth in order to cut down the number queries to mongodb.

    .. versionadded:: 0.5
    """
    DeReference = _import_class('DeReference')
    dereferencer = DeReference()
    dereferencer([self], max_depth + 1)
    return self
def validate(self, clean=True):
    """Ensure that all fields' values are valid and that required fields
    are present.
    """
    errors = {}

    if clean:
        try:
            self.clean()
        except ValidationError as error:
            errors[NON_FIELD_ERRORS] = error

    EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
    GenericEmbeddedDocumentField = _import_class(
        "GenericEmbeddedDocumentField")
    embedded_types = (EmbeddedDocumentField, GenericEmbeddedDocumentField)

    # Walk fields in declaration order, validating each current value.
    for name in self._fields_ordered:
        field = self._fields.get(name, self._dynamic_fields.get(name))
        value = self._data.get(name)

        if value is None:
            if field.required and not getattr(field, '_auto_gen', False):
                errors[field.name] = ValidationError(
                    'Field is required', field_name=field.name)
            continue

        try:
            if isinstance(field, embedded_types):
                field._validate(value, clean=clean)
            else:
                field._validate(value)
        except ValidationError as error:
            errors[field.name] = error.errors or error
        except (ValueError, AttributeError, AssertionError) as error:
            errors[field.name] = error

    if errors:
        pk = "None"
        if hasattr(self, 'pk'):
            pk = self.pk
        elif self._instance and hasattr(self._instance, 'pk'):
            pk = self._instance.pk
        message = "ValidationError (%s:%s) " % (self._class_name, pk)
        raise ValidationError(message, errors=errors)
def to_mongo(self, deref=True):
    """Return as SON data ready for use with MongoDB.

    :param deref: forwarded to reference fields' ``to_mongo``.

    Fix: the reference branch tested ``isinstance(value, ReferenceField)``
    - i.e. the stored *data* against the field class, which is never true -
    so the ``deref``-forwarding branch was unreachable.  The check now
    inspects ``field``; debug ``print`` leftovers were removed.
    """
    data = SON()
    data["_id"] = None
    data['_cls'] = self._class_name
    ReferenceField = _import_class('ReferenceField')

    for field_name in self:
        value = self._data.get(field_name, None)
        field = self._fields.get(field_name)
        if field is None and self._dynamic:
            field = self._dynamic_fields.get(field_name)

        if value is not None:
            if isinstance(field, ReferenceField):
                # Forward deref so references can be expanded in place.
                value = field.to_mongo(value, deref)
            else:
                value = field.to_mongo(value)

        # Handle self generating fields
        if value is None and field._auto_gen:
            value = field.generate()
            self._data[field_name] = value

        if value is not None:
            data[field.db_field] = value

    # If "_id" has not been set, then try and set it
    Document = _import_class("Document")
    if isinstance(self, Document):
        if data["_id"] is None:
            data["_id"] = self._data.get("id", None)

    if data['_id'] is None:
        data.pop('_id')

    # Only add _cls if allow_inheritance is True
    if (not hasattr(self, '_meta') or
            not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)):
        data.pop('_cls')

    return data
def _validate_choices(self, value):
    """Validate *value* against ``self.choices`` (works for sequences
    and sets alike)."""
    Document = _import_class('Document')
    EmbeddedDocument = _import_class('EmbeddedDocument')

    options = self.choices
    # next(iter(...)) also works when choices is a set (no indexing).
    first_choice = next(iter(options))
    if isinstance(first_choice, (list, tuple)):
        # (stored, display) pairs - keep only the stored values.
        options = [stored for stored, _ in options]

    if isinstance(value, (Document, EmbeddedDocument)):
        # Choices which are other types of Documents
        matched = any(isinstance(value, option) for option in options)
        if not matched:
            self.error(
                'Value must be an instance of %s' % (
                    six.text_type(options)
                )
            )
    # Choices which are types other than Documents
    elif value not in options:
        self.error('Value must be one of %s' % six.text_type(options))
def _clear_changed_fields(self):
    """Using _get_changed_fields iterate and remove any fields that
    are marked as changed.
    """
    ReferenceField = _import_class("ReferenceField")
    GenericReferenceField = _import_class("GenericReferenceField")

    for changed in self._get_changed_fields():
        parts = changed.split(".")
        data = self
        # Walk the dotted path down to the changed value.
        for part in parts:
            if isinstance(data, list):
                try:
                    data = data[int(part)]
                except IndexError:
                    data = None
            elif isinstance(data, dict):
                data = data.get(part, None)
            else:
                # Translate db field names back to attribute names.
                field_name = data._reverse_db_field_map.get(part, part)
                data = getattr(data, field_name, None)

        # LazyReferences must not be touched (would trigger a fetch).
        if not isinstance(data, LazyReference) and hasattr(
            data, "_changed_fields"
        ):
            if getattr(data, "_is_document", False):
                # Top-level documents are cleared by their own save.
                continue
            data._changed_fields = []
        elif isinstance(data, (list, tuple, dict)):
            # Containers of references are saved separately - skip them.
            if hasattr(data, "field") and isinstance(
                data.field, (ReferenceField, GenericReferenceField)
            ):
                continue
            BaseDocument._nestable_types_clear_changed_fields(data)

    self._changed_fields = []
def _clear_changed_fields(self):
    """Reset change tracking on this document and every nested embedded
    document."""
    self._changed_fields = []
    EmbeddedDocumentField = _import_class("EmbeddedDocumentField")

    for field_name, field in self._fields.iteritems():
        holds_embedded_container = (
            isinstance(field, ComplexBaseField) and
            isinstance(field.field, EmbeddedDocumentField))

        if holds_embedded_container:
            container = getattr(self, field_name, None)
            if not container:
                continue
            # Dicts iterate by key, lists by index.
            keys = (container if isinstance(container, dict)
                    else xrange(len(container)))
            for idx in keys:
                container[idx]._clear_changed_fields()
        elif isinstance(field, EmbeddedDocumentField):
            embedded = getattr(self, field_name, None)
            if embedded:
                embedded._clear_changed_fields()
def __getitem__(self, key):
    """Return ``self[key]``, lazily upgrading plain containers to tracked
    ones and binding the owning document."""
    item = super().__getitem__(key)

    EmbeddedDocument = _import_class("EmbeddedDocument")
    if isinstance(item, EmbeddedDocument):
        if item._instance is None:
            item._instance = self._instance
        return item

    child_name = f"{self._name}.{key}"
    if isinstance(item, dict) and not isinstance(item, BaseDict):
        item = BaseDict(item, None, child_name)
    elif isinstance(item, list) and not isinstance(item, BaseList):
        item = BaseList(item, None, child_name)
    else:
        return item

    # Store the wrapped container back so future reads reuse it.
    super().__setitem__(key, item)
    item._instance = self._instance
    return item
def __getitem__(self, key):
    """Return ``self[key]``, lazily upgrading plain containers to tracked
    ones and binding the owning document."""
    item = super(BaseDict, self).__getitem__(key)

    EmbeddedDocument = _import_class('EmbeddedDocument')
    if isinstance(item, EmbeddedDocument):
        if item._instance is None:
            item._instance = self._instance
        return item

    child_name = '%s.%s' % (self._name, key)
    if isinstance(item, dict) and not isinstance(item, BaseDict):
        item = BaseDict(item, None, child_name)
    elif isinstance(item, list) and not isinstance(item, BaseList):
        item = BaseList(item, None, child_name)
    else:
        return item

    # Store the wrapped container back so future reads reuse it.
    super(BaseDict, self).__setitem__(key, item)
    item._instance = self._instance
    return item
def patch_mongoengine_field(field_name):
    """Patch ``mongoengine.<field_name>`` for comparison support, because
    it's required in ``django.forms.models.fields_for_model``.

    The class is resolved through mongoengine's internal import cache
    (``common._import_class``) rather than a direct import.
    """
    from mongoengine import common
    field = common._import_class(field_name)
    # Copy the Django-compatibility dunders/attributes onto the field
    # class only when it doesn't already define them itself.
    for k in ["__eq__", "__lt__", "__hash__", "attname"]:
        if k not in field.__dict__:
            setattr(field, k, djangoflavor.DjangoField.__dict__[k])
    # set auto_created False for check in django db model when delete
    if field_name == "ObjectIdField":
        setattr(field, "auto_created", False)
def __setattr__(self, name, value):
    """Set an attribute, handling dynamic fields, change tracking,
    shard-key immutability and primary-key reassignment."""
    # Handle dynamic data only if an initialised dynamic document
    if self._dynamic and not self._dynamic_lock:
        if not hasattr(self, name) and not name.startswith("_"):
            # First assignment of an unknown public name on a dynamic
            # document: register it as a DynamicField on the fly.
            DynamicField = _import_class("DynamicField")
            field = DynamicField(db_field=name, null=True)
            field.name = name
            self._dynamic_fields[name] = field
            self._fields_ordered += (name,)

        if not name.startswith("_"):
            value = self.__expand_dynamic_values(name, value)

        # Handle marking data as changed
        if name in self._dynamic_fields:
            self._data[name] = value
            if hasattr(self, "_changed_fields"):
                self._mark_as_changed(name)
    try:
        # ``_created`` may not exist yet during __init__/__new__.
        self__created = self._created
    except AttributeError:
        self__created = True

    if (
        self._is_document
        and not self__created
        and name in self._meta.get("shard_key", tuple())
        and self._data.get(name) != value
    ):
        # Shard key fields of a persisted document must never change.
        msg = "Shard Keys are immutable. Tried to update %s" % name
        raise OperationError(msg)

    try:
        # ``_initialised`` may not exist yet during construction.
        self__initialised = self._initialised
    except AttributeError:
        self__initialised = False

    # Check if the user has created a new instance of a class
    if (
        self._is_document
        and self__initialised
        and self__created
        and name == self._meta.get("id_field")
    ):
        # Assigning a new primary key marks the document as not-yet-saved.
        super().__setattr__("_created", False)

    super().__setattr__(name, value)
def _prepare_query_for_iterable(field, op, value):
    """Validate that *value* is a genuine iterable and run each element
    through ``field.prepare_query_value`` for operator *op*."""
    # A Document is technically iterable, but passing one to an
    # in/nin/all/near operator is always a caller mistake — reject it
    # before the generic iterability check below.
    BaseDocument = _import_class('BaseDocument')

    if isinstance(value, BaseDocument):
        raise TypeError(
            "When using the `in`, `nin`, `all`, or "
            "`near`-operators you can't use a "
            "`Document`, you must wrap your object "
            "in a list (object -> [object])."
        )

    if not hasattr(value, '__iter__'):
        raise TypeError(
            "The `in`, `nin`, `all`, or "
            "`near`-operators must be applied to an "
            "iterable (e.g. a list)."
        )

    return [field.prepare_query_value(op, item) for item in value]
def to_python(self, value):
    """Convert a MongoDB-compatible type to a Python type.

    Handles strings, objects exposing ``to_python``, mappings, and
    generic iterables (converted to an index-keyed dict and back).
    """
    if isinstance(value, six.string_types):
        return value

    if hasattr(value, 'to_python'):
        return value.to_python()

    is_list = False
    if not hasattr(value, 'items'):
        try:
            # Treat any non-mapping iterable as a list: convert it to a
            # {index: item} dict so the logic below is uniform.
            is_list = True
            value = {k: v for k, v in enumerate(value)}
        except TypeError:  # Not iterable return the value
            return value

    if self.field:
        # Homogeneous container: delegate each item to the item field.
        self.field._auto_dereference = self._auto_dereference
        value_dict = {
            key: self.field.to_python(item)
            for key, item in list(value.items())
        }
    else:
        Document = _import_class('Document')
        value_dict = {}
        for k, v in list(value.items()):
            if isinstance(v, Document):
                # We need the id from the saved object to create the DBRef
                if v.pk is None:
                    self.error('You can only reference documents once they'
                               ' have been saved to the database')
                collection = v._get_collection_name()
                value_dict[k] = DBRef(collection, v.pk)
            elif hasattr(v, 'to_python'):
                value_dict[k] = v.to_python()
            else:
                # Recurse for nested plain containers / scalars.
                value_dict[k] = self.to_python(v)

    if is_list:  # Convert back to a list
        return [
            v for _, v in sorted(list(value_dict.items()),
                                 key=operator.itemgetter(0))
        ]
    return value_dict
def __get__(self, instance, owner):
    """Descriptor protocol: return this field's value for *instance*.

    Performs any conversion between Python and MongoDB types; accessed
    on the class itself, the field object is returned unchanged.
    """
    # Class-level access (no instance): expose the descriptor itself.
    if instance is None:
        return self

    value = instance._data.get(self.name)

    if value is None:
        default = self.default
        # Callable defaults are evaluated lazily, on each access.
        value = default() if callable(default) else default

    # Bind an orphan embedded document back to its owner.  A weak proxy
    # is used so the back-reference doesn't create a reference cycle.
    EmbeddedDocument = _import_class('EmbeddedDocument')
    if isinstance(value, EmbeddedDocument) and value._instance is None:
        value._instance = weakref.proxy(instance)

    return value
def _get_changed_fields(self):
    """Return a list of all fields that have explicitly been changed.

    Combines this document's own ``_changed_fields`` with dotted paths
    collected by recursing into embedded documents and nested
    containers (references are never followed).
    """
    EmbeddedDocument = _import_class("EmbeddedDocument")
    LazyReferenceField = _import_class("LazyReferenceField")
    ReferenceField = _import_class("ReferenceField")
    GenericLazyReferenceField = _import_class("GenericLazyReferenceField")
    GenericReferenceField = _import_class("GenericReferenceField")
    SortedListField = _import_class("SortedListField")

    changed_fields = []
    changed_fields += getattr(self, "_changed_fields", [])

    for field_name in self._fields_ordered:
        db_field_name = self._db_field_map.get(field_name, field_name)
        key = "%s." % db_field_name
        data = self._data.get(field_name, None)
        field = self._fields.get(field_name)

        if db_field_name in changed_fields:
            # Whole field already marked as changed, no need to go further
            continue

        if isinstance(field, ReferenceField):
            # Don't follow referenced documents.
            continue

        if isinstance(data, EmbeddedDocument):
            # Find all embedded fields that have been changed
            changed = data._get_changed_fields()
            changed_fields += [f"{key}{k}" for k in changed if k]
        elif isinstance(data, (list, tuple, dict)):
            # Containers of (lazy/generic) references: don't recurse.
            if hasattr(field, "field") and isinstance(
                field.field,
                (
                    LazyReferenceField,
                    ReferenceField,
                    GenericLazyReferenceField,
                    GenericReferenceField,
                ),
            ):
                continue
            elif isinstance(field, SortedListField) and field._ordering:
                # if ordering is affected whole list is changed
                if any(field._ordering in d._changed_fields for d in data):
                    changed_fields.append(db_field_name)
                    continue

            self._nestable_types_changed_fields(changed_fields, key, data)
    return changed_fields
def _nestable_types_clear_changed_fields(data):
    """Recursively reset change tracking inside a nested container.

    :param data: list / tuple / dict whose values may hold embedded
        documents or further nested containers
    """
    Document = _import_class("Document")

    # Mappings are walked via items(), sequences via enumerate() —
    # only the values matter, the index/key is discarded.
    entries = data.items() if hasattr(data, "items") else enumerate(data)

    for _, entry in entries:
        is_tracked = hasattr(entry, "_get_changed_fields")
        if is_tracked and not isinstance(entry, Document):
            # Embedded documents get reset; referenced top-level
            # documents are deliberately left alone.
            entry._clear_changed_fields()
        elif isinstance(entry, (list, tuple, dict)):
            BaseDocument._nestable_types_clear_changed_fields(entry)
def __getitem__(self, key):
    """Return ``self[key]``, lazily converting raw containers so that
    mutations on them are change-tracked."""
    item = super(BaseList, self).__getitem__(key)

    # Slices are returned as-is: the sub-list is neither converted nor
    # bound to the parent instance (known limitation upstream).
    if isinstance(key, slice):
        return item

    EmbeddedDocument = _import_class('EmbeddedDocument')
    if isinstance(item, EmbeddedDocument):
        # Re-attach an orphan embedded document to our owner.
        if item._instance is None:
            item._instance = self._instance
        return item

    if isinstance(item, dict) and not isinstance(item, BaseDict):
        wrapped = BaseDict(item, None, '%s.%s' % (self._name, key))
    elif isinstance(item, list) and not isinstance(item, BaseList):
        wrapped = BaseList(item, None, '%s.%s' % (self._name, key))
    else:
        return item

    # Store the wrapper back so future reads hit the tracked container.
    super(BaseList, self).__setitem__(key, wrapped)
    wrapped._instance = self._instance
    return wrapped
def to_mongo(self):
    """Return as SON data ready for use with MongoDB.

    Serialises every field in declaration order, auto-generates values
    for ``_auto_gen`` fields, and strips ``_id``/``_cls`` when unset or
    inheritance is disabled.
    """
    data = SON()
    data["_id"] = None
    data['_cls'] = self._class_name

    for field_name in self:
        value = self._data.get(field_name, None)
        field = self._fields.get(field_name)

        # Dynamic documents keep runtime fields in _dynamic_fields.
        if field is None and self._dynamic:
            field = self._dynamic_fields.get(field_name)

        if value is not None:
            value = field.to_mongo(value)

        # Handle self generating fields
        if value is None and field._auto_gen:
            value = field.generate()
            self._data[field_name] = value

        if value is not None:
            data[field.db_field] = value

    # If "_id" has not been set, then try and set it
    Document = _import_class("Document")
    if isinstance(self, Document):
        if data["_id"] is None:
            data["_id"] = self._data.get("id", None)

    if data['_id'] is None:
        data.pop('_id')

    # Only add _cls if allow_inheritance is True
    if (not hasattr(self, '_meta') or
            not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)):
        data.pop('_cls')

    return data
def __expand_dynamic_values(self, name, value):
    """expand any dynamic values to their correct types / values

    Recursively converts nested dicts/lists assigned to a dynamic
    document: ``_cls``-tagged dicts become document instances, and
    containers are wrapped in BaseDict/BaseList (or
    EmbeddedDocumentList) so changes on them are tracked.
    """
    if not isinstance(value, (dict, list, tuple)):
        return value

    EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')

    is_list = False
    if not hasattr(value, 'items'):
        # Normalise sequences to a {index: item} dict for uniform handling.
        is_list = True
        value = dict([(k, v) for k, v in enumerate(value)])

    if not is_list and '_cls' in value:
        # A _cls marker means this dict is a serialised document.
        cls = get_document(value['_cls'])
        return cls(**value)

    data = {}
    for k, v in value.items():
        # List items inherit the parent's name as their change-key.
        key = name if is_list else k
        data[k] = self.__expand_dynamic_values(key, v)

    if is_list:  # Convert back to a list
        data_items = sorted(data.items(), key=operator.itemgetter(0))
        value = [v for k, v in data_items]
    else:
        value = data

    # Convert lists / values so we can watch for any changes on them
    if (isinstance(value, (list, tuple)) and
            not isinstance(value, BaseList)):
        if issubclass(type(self), EmbeddedDocumentListField):
            value = EmbeddedDocumentList(value, self, name)
        else:
            value = BaseList(value, self, name)
    elif isinstance(value, dict) and not isinstance(value, BaseDict):
        value = BaseDict(value, self, name)

    return value
def _geo_indices(cls, inspected=None, parent_field=None):
    """Collect geo-index specs for this document class.

    Recurses into embedded documents; ``inspected`` guards against
    revisiting classes (cycles), and ``parent_field`` prefixes nested
    db field names with dot notation.

    :param inspected: classes already visited (mutated in place)
    :param parent_field: db_field prefix for nested lookups
    :return: list of ``{"fields": [(name, index_type)]}`` specs
    """
    inspected = inspected or []
    geo_indices = []
    inspected.append(cls)

    geo_field_type_names = (
        "EmbeddedDocumentField",
        "GeoPointField",
        "PointField",
        "LineStringField",
        "PolygonField",
    )

    # Resolve lazily-imported field classes once, outside the loop.
    # (Generator fed straight to tuple() — no throwaway list.)
    geo_field_types = tuple(
        _import_class(field) for field in geo_field_type_names
    )

    for field in cls._fields.values():
        if not isinstance(field, geo_field_types):
            continue

        if hasattr(field, "document_type"):
            # Embedded document: recurse, unless already visited.
            field_cls = field.document_type
            if field_cls in inspected:
                continue

            if hasattr(field_cls, "_geo_indices"):
                geo_indices += field_cls._geo_indices(
                    inspected, parent_field=field.db_field
                )
        elif field._geo_index:
            field_name = field.db_field
            if parent_field:
                field_name = "{}.{}".format(parent_field, field_name)

            geo_indices.append({"fields": [(field_name, field._geo_index)]})

    return geo_indices
def to_mongo(self, value, use_db_field=True, fields=None):
    """Convert a Python type to a MongoDB-compatible type.

    Handles strings, documents (stored as generic references or
    DBRefs), embedded documents (tagged with ``_cls``), mappings, and
    generic iterables (converted to an index-keyed dict and back).
    """
    Document = _import_class("Document")
    EmbeddedDocument = _import_class("EmbeddedDocument")
    GenericReferenceField = _import_class("GenericReferenceField")

    if isinstance(value, str):
        return value

    if hasattr(value, "to_mongo"):
        if isinstance(value, Document):
            return GenericReferenceField().to_mongo(value)
        cls = value.__class__
        val = value.to_mongo(use_db_field, fields)
        # If it's a document that is not inherited add _cls
        if isinstance(value, EmbeddedDocument):
            val["_cls"] = cls.__name__
        return val

    is_list = False
    if not hasattr(value, "items"):
        try:
            # Normalise iterables to {index: item} for uniform handling.
            is_list = True
            value = {k: v for k, v in enumerate(value)}
        except TypeError:  # Not iterable return the value
            return value

    if self.field:
        # Homogeneous container: delegate each item to the item field.
        value_dict = {
            key: self.field._to_mongo_safe_call(item, use_db_field, fields)
            for key, item in value.items()
        }
    else:
        value_dict = {}
        for k, v in value.items():
            if isinstance(v, Document):
                # We need the id from the saved object to create the DBRef
                if v.pk is None:
                    self.error("You can only reference documents once they"
                               " have been saved to the database")

                # If its a document that is not inheritable it won't have
                # any _cls data so make it a generic reference allows
                # us to dereference
                meta = getattr(v, "_meta", {})
                allow_inheritance = meta.get("allow_inheritance")
                if not allow_inheritance and not self.field:
                    value_dict[k] = GenericReferenceField().to_mongo(v)
                else:
                    collection = v._get_collection_name()
                    value_dict[k] = DBRef(collection, v.pk)
            elif hasattr(v, "to_mongo"):
                cls = v.__class__
                val = v.to_mongo(use_db_field, fields)
                # If it's a document that is not inherited add _cls
                if isinstance(v, (Document, EmbeddedDocument)):
                    val["_cls"] = cls.__name__
                value_dict[k] = val
            else:
                # Recurse for nested plain containers / scalars.
                value_dict[k] = self.to_mongo(v, use_db_field, fields)

    if is_list:  # Convert back to a list
        return [
            v for _, v in sorted(value_dict.items(),
                                 key=operator.itemgetter(0))
        ]
    return value_dict
def __init__(self, *args, **values):
    """
    Initialise a document or embedded document

    :param __auto_convert: Try and will cast python objects to Object types
    :param values: A dictionary of values for the document

    NOTE: uses ``iteritems`` — Python 2 era code.
    """
    if args:
        # Combine positional arguments with named arguments.
        # We only want named arguments.
        field = iter(self._fields_ordered)
        for value in args:
            name = next(field)
            if name in values:
                raise TypeError("Multiple values for keyword argument '" +
                                name + "'")
            values[name] = value
    __auto_convert = values.pop("__auto_convert", True)
    signals.pre_init.send(self.__class__, document=self, values=values)

    self._data = {}

    # Assign default values to instance
    for key, field in self._fields.iteritems():
        if self._db_field_map.get(key, key) in values:
            # Skip fields the caller supplied — set below.
            continue
        value = getattr(self, key, None)
        setattr(self, key, value)

    # Set passed values after initialisation
    if self._dynamic:
        self._dynamic_fields = {}
        dynamic_data = {}
        for key, value in values.iteritems():
            if key in self._fields or key == '_id':
                setattr(self, key, value)
            elif self._dynamic:
                # Unknown keys are held back until _dynamic_lock opens.
                dynamic_data[key] = value
    else:
        FileField = _import_class('FileField')
        for key, value in values.iteritems():
            if key == '__auto_convert':
                continue
            key = self._reverse_db_field_map.get(key, key)
            if key in self._fields or key in ('id', 'pk', '_cls'):
                if __auto_convert and value is not None:
                    field = self._fields.get(key)
                    # FileFields are never auto-converted here.
                    if field and not isinstance(field, FileField):
                        value = field.to_python(value)
                setattr(self, key, value)
            else:
                self._data[key] = value

    # Set any get_fieldname_display methods
    self.__set_field_display()

    if self._dynamic:
        self._dynamic_lock = False
        # Now that the lock is open, dynamic attributes are created.
        for key, value in dynamic_data.iteritems():
            setattr(self, key, value)

    # Flag initialised
    self._initialised = True
    signals.post_init.send(self.__class__, document=self)
def _lookup_field(cls, parts):
    """Given the path to a given field, return a list containing
    the Field object associated with that field and all of its parent
    Field objects.

    Args:
        parts (str, list, or tuple) - path to the field. Should be a
        string for simple fields existing on this document or a list
        of strings for a field that exists deeper in embedded documents.

    Returns:
        A list of Field instances for fields that were found or
        strings for sub-fields that weren't.

    Example:
        >>> user._lookup_field('name')
        [<mongoengine.fields.StringField at 0x1119bff50>]

        >>> user._lookup_field('roles')
        [<mongoengine.fields.EmbeddedDocumentListField at 0x1119ec250>]

        >>> user._lookup_field(['roles', 'role'])
        [<mongoengine.fields.EmbeddedDocumentListField at 0x1119ec250>,
         <mongoengine.fields.StringField at 0x1119ec050>]

        >>> user._lookup_field('doesnt_exist')
        raises LookUpError

        >>> user._lookup_field(['roles', 'doesnt_exist'])
        [<mongoengine.fields.EmbeddedDocumentListField at 0x1119ec250>,
         'doesnt_exist']
    """
    # TODO this method is WAY too complicated. Simplify it.
    # TODO don't think returning a string for embedded non-existent fields is desired
    ListField = _import_class('ListField')
    DynamicField = _import_class('DynamicField')

    if not isinstance(parts, (list, tuple)):
        parts = [parts]

    fields = []
    field = None

    for field_name in parts:
        # Handle ListField indexing:
        if field_name.isdigit() and isinstance(field, ListField):
            fields.append(field_name)
            continue

        # Look up first field from the document
        if field is None:
            if field_name == 'pk':
                # Deal with "primary key" alias
                field_name = cls._meta['id_field']

            if field_name in cls._fields:
                field = cls._fields[field_name]
            elif cls._dynamic:
                field = DynamicField(db_field=field_name)
            elif cls._meta.get('allow_inheritance') or cls._meta.get(
                    'abstract', False):
                # 744: in case the field is defined in a subclass
                for subcls in cls.__subclasses__():
                    try:
                        field = subcls._lookup_field([field_name])[0]
                    except LookUpError:
                        continue

                    if field is not None:
                        break
                else:
                    # for-else: no subclass resolved the field.
                    raise LookUpError('Cannot resolve field "%s"' %
                                      field_name)
            else:
                raise LookUpError('Cannot resolve field "%s"' % field_name)
        else:
            ReferenceField = _import_class('ReferenceField')
            GenericReferenceField = _import_class('GenericReferenceField')

            # If previous field was a reference, throw an error (we
            # cannot look up fields that are on references).
            if isinstance(field, (ReferenceField, GenericReferenceField)):
                raise LookUpError('Cannot perform join in mongoDB: %s' %
                                  '__'.join(parts))

            # If the parent field has a "field" attribute which has a
            # lookup_member method, call it to find the field
            # corresponding to this iteration.
            if hasattr(getattr(field, 'field', None), 'lookup_member'):
                new_field = field.field.lookup_member(field_name)

            # If the parent field is a DynamicField or if it's part of
            # a DynamicDocument, mark current field as a DynamicField
            # with db_name equal to the field name.
            elif cls._dynamic and (isinstance(field, DynamicField) or getattr(
                    getattr(field, 'document_type', None), '_dynamic', None)):
                new_field = DynamicField(db_field=field_name)

            # Else, try to use the parent field's lookup_member method
            # to find the subfield.
            elif hasattr(field, 'lookup_member'):
                new_field = field.lookup_member(field_name)

            # Raise a LookUpError if all the other conditions failed.
            else:
                raise LookUpError('Cannot resolve subfield or operator {} '
                                  'on the field {}'.format(
                                      field_name, field.name))

            # If current field still wasn't found and the parent field
            # is a ComplexBaseField, add the name current field name and
            # move on.
            if not new_field and isinstance(field, ComplexBaseField):
                fields.append(field_name)
                continue
            elif not new_field:
                raise LookUpError('Cannot resolve field "%s"' % field_name)

            field = new_field  # update field to the new field type

        fields.append(field)

    return fields
def query(_doc_cls=None, **kwargs):
    """Transform a query from Django-style format to Mongo format.

    Fix over the previous revision: the ``NotImplementedError`` for
    custom operators was instantiated but never raised, so unsupported
    custom operators silently fell through; it is now raised.

    :param _doc_cls: document class used to resolve field names; when
        omitted, keys are used verbatim (no field lookup/conversion).
    :param kwargs: Django-style ``field__op=value`` pairs (``__raw__``
        passes a raw Mongo query through untouched).
    :return: a MongoDB query dict.
    """
    mongo_query = {}
    merge_query = defaultdict(list)
    for key, value in sorted(kwargs.items()):
        if key == '__raw__':
            mongo_query.update(value)
            continue

        parts = key.rsplit('__')
        # Numeric parts are positional indexes — remember where they
        # were so they can be re-inserted after field resolution.
        indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()]
        parts = [part for part in parts if not part.isdigit()]

        # Check for an operator and transform to mongo-style if there is
        op = None
        if len(parts) > 1 and parts[-1] in MATCH_OPERATORS:
            op = parts.pop()

        # Allow to escape operator-like field name by __
        if len(parts) > 1 and parts[-1] == '':
            parts.pop()

        negate = False
        if len(parts) > 1 and parts[-1] == 'not':
            parts.pop()
            negate = True

        if _doc_cls:
            # Switch field names to proper names [set in Field(name='foo')]
            try:
                fields = _doc_cls._lookup_field(parts)
            except Exception as e:
                raise InvalidQueryError(e)
            parts = []

            CachedReferenceField = _import_class('CachedReferenceField')
            GenericReferenceField = _import_class('GenericReferenceField')

            cleaned_fields = []
            for field in fields:
                append_field = True
                if isinstance(field, six.string_types):
                    parts.append(field)
                    append_field = False
                # is last and CachedReferenceField
                elif isinstance(field, CachedReferenceField) and fields[-1] == field:
                    parts.append('%s._id' % field.db_field)
                else:
                    parts.append(field.db_field)

                if append_field:
                    cleaned_fields.append(field)

            # Convert value to proper value
            field = cleaned_fields[-1]

            singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not']
            singular_ops += STRING_OPERATORS
            if op in singular_ops:
                if isinstance(field, six.string_types):
                    if (op in STRING_OPERATORS and
                            isinstance(value, six.string_types)):
                        StringField = _import_class('StringField')
                        value = StringField.prepare_query_value(op, value)
                    else:
                        value = field
                else:
                    value = field.prepare_query_value(op, value)

                    if isinstance(field, CachedReferenceField) and value:
                        value = value['_id']
            elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict):
                # Raise an error if the in/nin/all/near param is not iterable.
                value = _prepare_query_for_iterable(field, op, value)

        # If we're querying a GenericReferenceField, we need to alter the
        # key depending on the value:
        # * If the value is a DBRef, the key should be "field_name._ref".
        # * If the value is an ObjectId, the key should be "field_name._ref.$id".
        # NOTE(review): ``field``/``GenericReferenceField`` are only bound
        # when ``_doc_cls`` was given — same as the previous revision.
        if isinstance(field, GenericReferenceField):
            if isinstance(value, DBRef):
                parts[-1] += '._ref'
            elif isinstance(value, ObjectId):
                parts[-1] += '._ref.$id'

        # if op and op not in COMPARISON_OPERATORS:
        if op:
            if op in GEO_OPERATORS:
                value = _geo_operator(field, op, value)
            elif op in ('match', 'elemMatch'):
                ListField = _import_class('ListField')
                EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
                if (
                    isinstance(value, dict) and
                    isinstance(field, ListField) and
                    isinstance(field.field, EmbeddedDocumentField)
                ):
                    value = query(field.field.document_type, **value)
                else:
                    value = field.prepare_query_value(op, value)
                value = {'$elemMatch': value}
            elif op in CUSTOM_OPERATORS:
                # BUGFIX: the exception was previously created but never
                # raised, silently ignoring unsupported custom operators.
                raise NotImplementedError('Custom method "%s" has not '
                                          'been implemented' % op)
            elif op not in STRING_OPERATORS:
                value = {'$' + op: value}

        if negate:
            value = {'$not': value}

        # Re-insert the positional indexes stripped earlier.
        for i, part in indices:
            parts.insert(i, part)

        key = '.'.join(parts)

        if op is None or key not in mongo_query:
            mongo_query[key] = value
        elif key in mongo_query:
            if isinstance(mongo_query[key], dict):
                mongo_query[key].update(value)
                # $max/minDistance needs to come last - convert to SON
                value_dict = mongo_query[key]
                if ('$maxDistance' in value_dict or
                        '$minDistance' in value_dict) and \
                        ('$near' in value_dict or '$nearSphere' in value_dict):
                    value_son = SON()
                    for k, v in value_dict.iteritems():
                        if k == '$maxDistance' or k == '$minDistance':
                            continue
                        value_son[k] = v
                    # Required for MongoDB >= 2.6, may fail when combining
                    # PyMongo 3+ and MongoDB < 2.6
                    near_embedded = False
                    for near_op in ('$near', '$nearSphere'):
                        if isinstance(value_dict.get(near_op), dict) and \
                                (IS_PYMONGO_3 or
                                 get_connection().max_wire_version > 1):
                            value_son[near_op] = SON(value_son[near_op])
                            if '$maxDistance' in value_dict:
                                value_son[near_op][
                                    '$maxDistance'] = value_dict['$maxDistance']
                            if '$minDistance' in value_dict:
                                value_son[near_op][
                                    '$minDistance'] = value_dict['$minDistance']
                            near_embedded = True

                    if not near_embedded:
                        if '$maxDistance' in value_dict:
                            value_son['$maxDistance'] = value_dict['$maxDistance']
                        if '$minDistance' in value_dict:
                            value_son['$minDistance'] = value_dict['$minDistance']
                    mongo_query[key] = value_son
            else:
                # Store for manually merging later
                merge_query[key].append(value)

    # The queryset has been filter in such a way we must manually merge
    for k, v in merge_query.items():
        merge_query[k].append(mongo_query[k])
        del mongo_query[k]
        if isinstance(v, list):
            value = [{k: val} for val in v]
            if '$and' in mongo_query.keys():
                mongo_query['$and'].extend(value)
            else:
                mongo_query['$and'] = value

    return mongo_query
def update(_doc_cls=None, **update):
    """Transform an update spec from Django-style format to Mongo
    format.

    Resolves ``op__field__modifier=value`` keys into ``{'$op': {...}}``
    MongoDB update documents, translating Pythonic operator aliases and
    handling pull/push/addToSet positional peculiarities.
    """
    mongo_update = {}
    for key, value in update.items():
        if key == '__raw__':
            mongo_update.update(value)
            continue

        parts = key.split('__')

        # if there is no operator, default to 'set'
        if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS:
            parts.insert(0, 'set')

        # Check for an operator and transform to mongo-style if there is
        op = None
        if parts[0] in UPDATE_OPERATORS:
            op = parts.pop(0)
            # Convert Pythonic names to Mongo equivalents
            if op in ('push_all', 'pull_all'):
                op = op.replace('_all', 'All')
            elif op == 'dec':
                # Support decrement by flipping a positive value's sign
                # and using 'inc'
                op = 'inc'
                value = -value
            elif op == 'add_to_set':
                op = 'addToSet'
            elif op == 'set_on_insert':
                op = 'setOnInsert'

        match = None
        if parts[-1] in COMPARISON_OPERATORS:
            match = parts.pop()

        # Allow to escape operator-like field name by __
        if len(parts) > 1 and parts[-1] == '':
            parts.pop()

        if _doc_cls:
            # Switch field names to proper names [set in Field(name='foo')]
            try:
                fields = _doc_cls._lookup_field(parts)
            except Exception as e:
                raise InvalidQueryError(e)
            parts = []

            cleaned_fields = []
            appended_sub_field = False
            for field in fields:
                append_field = True
                if isinstance(field, six.string_types):
                    # Convert the S operator to $
                    if field == 'S':
                        field = '$'
                    parts.append(field)
                    append_field = False
                else:
                    parts.append(field.db_field)
                if append_field:
                    appended_sub_field = False
                    cleaned_fields.append(field)
                    if hasattr(field, 'field'):
                        # Container field: also track its item field.
                        cleaned_fields.append(field.field)
                        appended_sub_field = True

            # Convert value to proper value
            if appended_sub_field:
                field = cleaned_fields[-2]
            else:
                field = cleaned_fields[-1]

            GeoJsonBaseField = _import_class('GeoJsonBaseField')
            if isinstance(field, GeoJsonBaseField):
                value = field.to_mongo(value)

            if op == 'pull':
                if field.required or value is not None:
                    if match == 'in' and not isinstance(value, dict):
                        value = _prepare_query_for_iterable(field, op, value)
                    else:
                        value = field.prepare_query_value(op, value)
            elif op == 'push' and isinstance(value, (list, tuple, set)):
                value = [field.prepare_query_value(op, v) for v in value]
            elif op in (None, 'set', 'push'):
                if field.required or value is not None:
                    value = field.prepare_query_value(op, value)
            elif op in ('pushAll', 'pullAll'):
                value = [field.prepare_query_value(op, v) for v in value]
            elif op in ('addToSet', 'setOnInsert'):
                if isinstance(value, (list, tuple, set)):
                    value = [field.prepare_query_value(op, v) for v in value]
                elif field.required or value is not None:
                    value = field.prepare_query_value(op, value)
            elif op == 'unset':
                # $unset only cares that the key exists; value is ignored.
                value = 1

        if match:
            match = '$' + match
            value = {match: value}

        key = '.'.join(parts)

        if not op:
            raise InvalidQueryError('Updates must supply an operation '
                                    'eg: set__FIELD=value')

        if 'pull' in op and '.' in key:
            # Dot operators don't work on pull operations
            # unless they point to a list field
            # Otherwise it uses nested dict syntax
            if op == 'pullAll':
                raise InvalidQueryError('pullAll operations only support '
                                        'a single field depth')

            # Look for the last list field and use dot notation until there
            field_classes = [c.__class__ for c in cleaned_fields]
            field_classes.reverse()
            ListField = _import_class('ListField')
            EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
            if ListField in field_classes or EmbeddedDocumentListField in field_classes:
                # Join all fields via dot notation to the last ListField or EmbeddedDocumentListField
                # Then process as normal
                if ListField in field_classes:
                    _check_field = ListField
                else:
                    _check_field = EmbeddedDocumentListField

                last_listField = len(
                    cleaned_fields) - field_classes.index(_check_field)
                key = '.'.join(parts[:last_listField])
                parts = parts[last_listField:]
                parts.insert(0, key)

            # Build the nested-dict form from the innermost part out.
            parts.reverse()
            for key in parts:
                value = {key: value}
        elif op == 'addToSet' and isinstance(value, list):
            value = {key: {'$each': value}}
        elif op == 'push':
            if parts[-1].isdigit():
                key = parts[0]
                position = int(parts[-1])
                # $position expects an iterable. If pushing a single value,
                # wrap it in a list.
                if not isinstance(value, (set, tuple, list)):
                    value = [value]
                value = {key: {'$each': value, '$position': position}}
            else:
                value = {key: value}
        else:
            value = {key: value}

        key = '$' + op

        if key not in mongo_update:
            mongo_update[key] = value
        elif key in mongo_update and isinstance(mongo_update[key], dict):
            mongo_update[key].update(value)

    return mongo_update
def __new__(mcs, name, bases, attrs):
    """Metaclass constructor: assemble fields, meta, class hierarchy,
    registry entry and delete rules for a new document class."""
    flattened_bases = mcs._get_bases(bases)
    super_new = super(DocumentMetaclass, mcs).__new__

    # If a base class just call super
    metaclass = attrs.get('my_metaclass')
    if metaclass and issubclass(metaclass, DocumentMetaclass):
        return super_new(mcs, name, bases, attrs)

    attrs['_is_document'] = attrs.get('_is_document', False)
    attrs['_cached_reference_fields'] = []

    # EmbeddedDocuments could have meta data for inheritance
    if 'meta' in attrs:
        attrs['_meta'] = attrs.pop('meta')

    # EmbeddedDocuments should inherit meta data
    if '_meta' not in attrs:
        meta = MetaDict()
        for base in flattened_bases[::-1]:
            # Add any mixin metadata from plain objects
            if hasattr(base, 'meta'):
                meta.merge(base.meta)
            elif hasattr(base, '_meta'):
                meta.merge(base._meta)
        attrs['_meta'] = meta
        attrs['_meta'][
            'abstract'] = False  # 789: EmbeddedDocument shouldn't inherit abstract

    # If allow_inheritance is True, add a "_cls" string field to the attrs
    if attrs['_meta'].get('allow_inheritance'):
        StringField = _import_class('StringField')
        attrs['_cls'] = StringField()

    # Handle document Fields

    # Merge all fields from subclasses
    doc_fields = {}
    for base in flattened_bases[::-1]:
        if hasattr(base, '_fields'):
            doc_fields.update(base._fields)

        # Standard object mixin - merge in any Fields
        if not hasattr(base, '_meta'):
            base_fields = {}
            for attr_name, attr_value in iteritems(base.__dict__):
                if not isinstance(attr_value, BaseField):
                    continue
                attr_value.name = attr_name
                if not attr_value.db_field:
                    attr_value.db_field = attr_name
                base_fields[attr_name] = attr_value

            doc_fields.update(base_fields)

    # Discover any document fields
    field_names = {}
    for attr_name, attr_value in iteritems(attrs):
        if not isinstance(attr_value, BaseField):
            continue
        attr_value.name = attr_name
        if not attr_value.db_field:
            attr_value.db_field = attr_name
        doc_fields[attr_name] = attr_value

        # Count names to ensure no db_field redefinitions
        field_names[attr_value.db_field] = field_names.get(
            attr_value.db_field, 0) + 1

    # Ensure no duplicate db_fields
    duplicate_db_fields = [
        k for k, v in list(field_names.items()) if v > 1
    ]
    if duplicate_db_fields:
        msg = ('Multiple db_fields defined for: %s ' %
               ', '.join(duplicate_db_fields))
        raise InvalidDocumentError(msg)

    # Set _fields and db_field maps
    attrs['_fields'] = doc_fields
    attrs['_db_field_map'] = {
        k: getattr(v, 'db_field', k)
        for k, v in list(doc_fields.items())
    }
    attrs['_reverse_db_field_map'] = {
        v: k
        for k, v in list(attrs['_db_field_map'].items())
    }

    # Order fields by their declaration order (creation_counter).
    attrs['_fields_ordered'] = tuple(i[1] for i in sorted(
        (v.creation_counter, v.name) for v in itervalues(doc_fields)))

    #
    # Set document hierarchy
    #
    superclasses = ()
    class_name = [name]
    for base in flattened_bases:
        if (not getattr(base, '_is_base_cls', True) and
                not getattr(base, '_meta', {}).get('abstract', True)):
            # Collate hierarchy for _cls and _subclasses
            class_name.append(base.__name__)

        if hasattr(base, '_meta'):
            # Warn if allow_inheritance isn't set and prevent
            # inheritance of classes where inheritance is set to False
            allow_inheritance = base._meta.get('allow_inheritance')

            if not allow_inheritance and not base._meta.get('abstract'):
                raise ValueError(
                    'Document %s may not be subclassed. '
                    'To enable inheritance, use the "allow_inheritance" meta attribute.'
                    % base.__name__)

    # Get superclasses from last base superclass
    document_bases = [
        b for b in flattened_bases if hasattr(b, '_class_name')
    ]
    if document_bases:
        superclasses = document_bases[0]._superclasses
        superclasses += (document_bases[0]._class_name, )

    _cls = '.'.join(reversed(class_name))
    attrs['_class_name'] = _cls
    attrs['_superclasses'] = superclasses
    attrs['_subclasses'] = (_cls, )
    attrs['_types'] = attrs['_subclasses']  # TODO depreciate _types

    # Create the new_class
    new_class = super_new(mcs, name, bases, attrs)

    # Set _subclasses
    for base in document_bases:
        if _cls not in base._subclasses:
            base._subclasses += (_cls, )
        base._types = base._subclasses  # TODO depreciate _types

    (Document, EmbeddedDocument, DictField,
     CachedReferenceField) = mcs._import_classes()

    if issubclass(new_class, Document):
        new_class._collection = None

    # Add class to the _document_registry
    _document_registry[new_class._class_name] = new_class

    # In Python 2, User-defined methods objects have special read-only
    # attributes 'im_func' and 'im_self' which contain the function obj
    # and class instance object respectively.  With Python 3 these special
    # attributes have been replaced by __func__ and __self__.  The Blinker
    # module continues to use im_func and im_self, so the code below
    # copies __func__ into im_func and __self__ into im_self for
    # classmethod objects in Document derived classes.
    if six.PY3:
        for val in list(new_class.__dict__.values()):
            if isinstance(val, classmethod):
                f = val.__get__(new_class)
                if hasattr(f, '__func__') and not hasattr(f, 'im_func'):
                    f.__dict__.update({'im_func': getattr(f, '__func__')})
                if hasattr(f, '__self__') and not hasattr(f, 'im_self'):
                    f.__dict__.update({'im_self': getattr(f, '__self__')})

    # Handle delete rules
    for field in itervalues(new_class._fields):
        f = field
        if f.owner_document is None:
            f.owner_document = new_class
        delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING)
        if isinstance(f, CachedReferenceField):

            if issubclass(new_class, EmbeddedDocument):
                raise InvalidDocumentError('CachedReferenceFields is not '
                                           'allowed in EmbeddedDocuments')
            if not f.document_type:
                raise InvalidDocumentError(
                    'Document is not available to sync')

            if f.auto_sync:
                f.start_listener()

            f.document_type._cached_reference_fields.append(f)

        if isinstance(f, ComplexBaseField) and hasattr(f, 'field'):
            # Container fields delegate the delete rule to the item field.
            delete_rule = getattr(f.field, 'reverse_delete_rule',
                                  DO_NOTHING)
            if isinstance(f, DictField) and delete_rule != DO_NOTHING:
                msg = ('Reverse delete rules are not supported '
                       'for %s (field: %s)' %
                       (field.__class__.__name__, field.name))
                raise InvalidDocumentError(msg)

            f = field.field

        if delete_rule != DO_NOTHING:
            if issubclass(new_class, EmbeddedDocument):
                msg = ('Reverse delete rules are not supported for '
                       'EmbeddedDocuments (field: %s)' % field.name)
                raise InvalidDocumentError(msg)
            f.document_type.register_delete_rule(new_class, field.name,
                                                 delete_rule)

        if (field.name and hasattr(Document, field.name) and
                EmbeddedDocument not in new_class.mro()):
            msg = ('%s is a document method and not a valid '
                   'field name' % field.name)
            raise InvalidDocumentError(msg)

    return new_class
def to_mongo(self, use_db_field=True, fields=None):
    """Serialize this document into a SON object ready for MongoDB.

    :param use_db_field: when True, key each value by the field's
        ``db_field`` name; otherwise by the field's attribute name.
    :param fields: optional list of (possibly dotted) field names that
        restricts which root fields are serialized.
    """
    fields = fields or []

    son = SON()
    son["_id"] = None
    son['_cls'] = self._class_name

    EmbeddedDocumentField = _import_class("EmbeddedDocumentField")

    # Reduce dotted paths to their roots:
    # ['test1.a', 'test2'] => {'test1', 'test2'}
    top_level = set(f.split('.')[0] for f in fields)

    for field_name in self:
        # Skip anything outside the requested projection.
        if top_level and field_name not in top_level:
            continue

        value = self._data.get(field_name, None)
        field = self._fields.get(field_name)
        if field is None and self._dynamic:
            field = self._dynamic_fields.get(field_name)

        if value is not None:
            if isinstance(field, EmbeddedDocumentField):
                # Forward only the sub-paths belonging to this embedded
                # field, with the "<field_name>." prefix stripped.
                embedded_fields = []
                if fields:
                    prefix = '%s.' % field_name
                    embedded_fields = [
                        f.replace(prefix, '') for f in fields
                        if f.startswith(prefix)
                    ]
                value = field.to_mongo(value, use_db_field=use_db_field,
                                       fields=embedded_fields)
            else:
                value = field.to_mongo(value)

        # Fields that generate their own value when missing.
        if value is None and field._auto_gen:
            value = field.generate()
            self._data[field_name] = value

        if value is not None:
            key = field.db_field if use_db_field else field.name
            son[key] = value

    # Top-level documents fall back to the explicit "id" entry for _id.
    Document = _import_class("Document")
    if isinstance(self, Document) and son["_id"] is None:
        son["_id"] = self._data.get("id", None)

    # Drop a still-unset _id entirely rather than storing None.
    if son['_id'] is None:
        son.pop('_id')

    # Only keep _cls when inheritance is allowed for this document.
    if (not hasattr(self, '_meta') or
            not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)):
        son.pop('_cls')

    return son
def _import_classes(mcs):
    """Lazily resolve the classes this metaclass depends on.

    Returns a tuple of (Document, EmbeddedDocument, DictField,
    CachedReferenceField); the lookup is deferred via ``_import_class``
    to sidestep circular imports at module load time.
    """
    return tuple(
        _import_class(name)
        for name in ('Document', 'EmbeddedDocument', 'DictField',
                     'CachedReferenceField')
    )
def to_mongo(self, value):
    """Convert a Python type to a MongoDB-compatible type.

    Handles strings, single (embedded) documents, and arbitrary
    mappings/iterables, recursing into container values as needed.
    """
    Document = _import_class("Document")
    EmbeddedDocument = _import_class("EmbeddedDocument")
    GenericReferenceField = _import_class("GenericReferenceField")

    # Strings are already BSON-compatible; pass them straight through.
    if isinstance(value, str):
        return value

    if hasattr(value, 'to_mongo'):
        # A top-level Document is stored as a generic reference rather
        # than being embedded wholesale.
        if isinstance(value, Document):
            return GenericReferenceField().to_mongo(value)
        cls = value.__class__
        val = value.to_mongo()
        # Tag embedded documents with their class name so they can be
        # reconstructed when read back.
        if (isinstance(value, EmbeddedDocument)):
            val['_cls'] = cls.__name__
        return val

    is_list = False
    if not hasattr(value, 'items'):
        # Normalise any non-mapping iterable into an index-keyed dict so
        # one conversion path below serves both lists and dicts.
        try:
            is_list = True
            value = dict([(k, v) for k, v in enumerate(value)])
        except TypeError:  # Not iterable return the value
            return value

    if self.field:
        # A concrete item field is declared: delegate every item to it.
        value_dict = dict([(key, self.field.to_mongo(item))
                           for key, item in value.items()])
    else:
        value_dict = {}
        for k, v in value.items():
            if isinstance(v, Document):
                # We need the id from the saved object to create the DBRef
                if v.pk is None:
                    self.error('You can only reference documents once they'
                               ' have been saved to the database')

                # A document that is not inheritable carries no _cls
                # data, so store a generic reference instead, which
                # still allows us to dereference it later.
                meta = getattr(v, '_meta', {})
                allow_inheritance = (
                    meta.get('allow_inheritance', ALLOW_INHERITANCE)
                    is True)
                if not allow_inheritance and not self.field:
                    value_dict[k] = GenericReferenceField().to_mongo(v)
                else:
                    collection = v._get_collection_name()
                    value_dict[k] = DBRef(collection, v.pk)
            elif hasattr(v, 'to_mongo'):
                cls = v.__class__
                val = v.to_mongo()
                # Tag (embedded) documents with their class name.
                if (isinstance(v, (Document, EmbeddedDocument))):
                    val['_cls'] = cls.__name__
                value_dict[k] = val
            else:
                # Plain containers / scalars: recurse.
                value_dict[k] = self.to_mongo(v)

    if is_list:  # Convert back to a list, preserving original order
        return [v for k, v in sorted(list(value_dict.items()),
                                     key=operator.itemgetter(0))]
    return value_dict
class BaseDocument(object):
    """Common machinery shared by Document and EmbeddedDocument:
    initialisation, attribute access, pickling, serialization to SON
    and validation.
    """

    # __slots__ keeps per-instance memory down; '__weakref__' is listed
    # so instances remain weak-referenceable despite slots.
    __slots__ = ('_changed_fields', '_initialised', '_created', '_data',
                 '_dynamic_fields', '_auto_id_field', '_db_field_map',
                 '__weakref__')

    _dynamic = False        # overridden by DynamicDocument subclasses
    _dynamic_lock = True    # blocks dynamic-field creation until init is done
    STRICT = False          # when True, _data only accepts declared keys

    def __init__(self, *args, **values):
        """Initialise a document or embedded document.

        :param values: a dictionary of values for the document. May also
            carry the reserved keywords below.
        :param __auto_convert: if True, convert supplied values via each
            field's ``to_python``.
        :param __only_fields: set of fields loaded from the DB; defaults
            are only assigned to fields NOT in this set.
        :param _created: whether this is a brand-new (unsaved) document.
        """
        self._initialised = False
        self._created = True
        if args:
            # Combine positional arguments with named arguments.
            # We only want named arguments.
            field = iter(self._fields_ordered)
            # If its an automatic id field then skip to the first defined field
            if self._auto_id_field:
                next(field)
            for value in args:
                name = next(field)
                if name in values:
                    raise TypeError("Multiple values for keyword argument '" +
                                    name + "'")
                values[name] = value

        __auto_convert = values.pop("__auto_convert", True)

        # 399: set default values only to fields loaded from DB
        # (defaults to all supplied keys when __only_fields is absent)
        __only_fields = set(values.pop("__only_fields", values))

        _created = values.pop("_created", True)

        signals.pre_init.send(self.__class__, document=self, values=values)

        # Check if there are undefined fields supplied to the constructor,
        # if so raise an Exception.
        if not self._dynamic and (self._meta.get('strict', True) or _created):
            # NOTE(review): dict.keys() + list is Python-2-only; this
            # matches the file's other py2 idioms (iteritems, except X, e).
            for var in values.keys():
                if var not in self._fields.keys() + [
                        'id', 'pk', '_cls', '_text_score'
                ]:
                    msg = (
                        "The field '{0}' does not exist on the document '{1}'"
                    ).format(var, self._class_name)
                    raise FieldDoesNotExist(msg)

        if self.STRICT and not self._dynamic:
            self._data = StrictDict.create(allowed_keys=self._fields_ordered)()
        else:
            self._data = SemiStrictDict.create(
                allowed_keys=self._fields_ordered)()

        # NOTE(review): this unconditionally replaces the Strict/SemiStrict
        # dict assigned just above with a plain dict, making that branch
        # dead — looks like a merge artifact; confirm intended behavior.
        self._data = {}
        self._dynamic_fields = SON()

        # Assign default values to instance (skipping DB-loaded fields)
        for key, field in self._fields.iteritems():
            if self._db_field_map.get(key, key) in __only_fields:
                continue
            value = getattr(self, key, None)
            setattr(self, key, value)

        if "_cls" not in values:
            self._cls = self._class_name

        # Set passed values after initialisation
        if self._dynamic:
            dynamic_data = {}
            for key, value in values.iteritems():
                if key in self._fields or key == '_id':
                    setattr(self, key, value)
                elif self._dynamic:
                    dynamic_data[key] = value
        else:
            FileField = _import_class('FileField')
            for key, value in values.iteritems():
                if key == '__auto_convert':
                    continue
                # translate db_field names back to attribute names
                key = self._reverse_db_field_map.get(key, key)
                if key in self._fields or key in ('id', 'pk', '_cls'):
                    if __auto_convert and value is not None:
                        field = self._fields.get(key)
                        if field and not isinstance(field, FileField):
                            value = field.to_python(value)
                    setattr(self, key, value)
                else:
                    self._data[key] = value

        # Set any get_fieldname_display methods
        self.__set_field_display()

        # Dynamic values are applied last, once the lock is released.
        if self._dynamic:
            self._dynamic_lock = False
            for key, value in dynamic_data.iteritems():
                setattr(self, key, value)

        # Flag initialised
        self._initialised = True
        self._created = _created
        signals.post_init.send(self.__class__, document=self)

    def __delattr__(self, *args, **kwargs):
        """Handle deletions of fields: declared fields are reset to their
        default instead of being removed."""
        field_name = args[0]
        if field_name in self._fields:
            default = self._fields[field_name].default
            if callable(default):
                default = default()
            setattr(self, field_name, default)
        else:
            super(BaseDocument, self).__delattr__(*args, **kwargs)

    def __setattr__(self, name, value):
        # Handle dynamic data only if an initialised dynamic document
        if self._dynamic and not self._dynamic_lock:
            if not hasattr(self, name) and not name.startswith('_'):
                DynamicField = _import_class("DynamicField")
                field = DynamicField(db_field=name)
                field.name = name
                self._dynamic_fields[name] = field
                self._fields_ordered += (name, )

            if not name.startswith('_'):
                value = self.__expand_dynamic_values(name, value)

            # Handle marking data as changed
            if name in self._dynamic_fields:
                self._data[name] = value
                if hasattr(self, '_changed_fields'):
                    self._mark_as_changed(name)
        # _created/_initialised may be unset on slotted instances (e.g.
        # during unpickling before __setstate__ runs) — presumably why
        # the guarded reads below default them; confirm against callers.
        try:
            self__created = self._created
        except AttributeError:
            self__created = True

        # Shard keys may never change on an already-persisted document.
        if (self._is_document and not self__created and
                name in self._meta.get('shard_key', tuple()) and
                self._data.get(name) != value):
            OperationError = _import_class('OperationError')
            msg = "Shard Keys are immutable. Tried to update %s" % name
            raise OperationError(msg)

        try:
            self__initialised = self._initialised
        except AttributeError:
            self__initialised = False
        # Check if the user has created a new instance of a class
        if (self._is_document and self__initialised and
                self__created and name == self._meta.get('id_field')):
            super(BaseDocument, self).__setattr__('_created', False)

        super(BaseDocument, self).__setattr__(name, value)

    def __getstate__(self):
        # Pickle support: capture bookkeeping attrs plus the SON form.
        data = {}
        for k in ('_changed_fields', '_initialised', '_created',
                  '_dynamic_fields', '_fields_ordered'):
            if hasattr(self, k):
                data[k] = getattr(self, k)
        data['_data'] = self.to_mongo()
        return data

    def __setstate__(self, data):
        # Pickle support: rebuild _data from SON, restore bookkeeping.
        if isinstance(data["_data"], SON):
            data["_data"] = self.__class__._from_son(data["_data"])._data
        for k in ('_changed_fields', '_initialised', '_created', '_data',
                  '_dynamic_fields'):
            if k in data:
                setattr(self, k, data[k])
        if '_fields_ordered' in data:
            if self._dynamic:
                setattr(self, '_fields_ordered', data['_fields_ordered'])
            else:
                # Non-dynamic docs take the class-level field ordering.
                _super_fields_ordered = type(self)._fields_ordered
                setattr(self, '_fields_ordered', _super_fields_ordered)

        dynamic_fields = data.get('_dynamic_fields') or SON()
        for k in dynamic_fields.keys():
            setattr(self, k, data["_data"].get(k))

    def __iter__(self):
        # Iterates attribute names in declared order.
        return iter(self._fields_ordered)

    def __getitem__(self, name):
        """Dictionary-style field access, return a field's value if present.
        """
        try:
            if name in self._fields_ordered:
                return getattr(self, name)
        except AttributeError:
            pass
        raise KeyError(name)

    def __setitem__(self, name, value):
        """Dictionary-style field access, set a field's value.
        """
        # Ensure that the field exists before settings its value
        if not self._dynamic and name not in self._fields:
            raise KeyError(name)
        return setattr(self, name, value)

    def __contains__(self, name):
        # A field whose value is None is treated as absent.
        try:
            val = getattr(self, name)
            return val is not None
        except AttributeError:
            return False

    def __len__(self):
        return len(self._data)

    def __repr__(self):
        try:
            u = self.__str__()
        except (UnicodeEncodeError, UnicodeDecodeError):
            u = '[Bad Unicode data]'
        # Keep the repr in the same string type __str__ produced.
        repr_type = str if u is None else type(u)
        return repr_type('<%s: %s>' % (self.__class__.__name__, u))

    def __str__(self):
        # Prefer a user-supplied __unicode__ when available (py2/py3 safe).
        if hasattr(self, '__unicode__'):
            if PY3:
                return self.__unicode__()
            else:
                return unicode(self).encode('utf-8')
        return txt_type('%s object' % self.__class__.__name__)

    def __eq__(self, other):
        # Same class with a set id: compare by id.
        if isinstance(other, self.__class__) and hasattr(
                other, 'id') and other.id is not None:
            return self.id == other.id
        # Compare against a raw DBRef by collection + id.
        if isinstance(other, DBRef):
            return self._get_collection_name(
            ) == other.collection and self.id == other.id
        # Unsaved documents are only equal to themselves.
        if self.id is None:
            return self is other
        return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        if getattr(self, 'pk', None) is None:
            # For new object
            return super(BaseDocument, self).__hash__()
        else:
            return hash(self.pk)

    def clean(self):
        """
        Hook for doing document level data cleaning before validation is run.

        Any ValidationError raised by this method will not be associated with
        a particular field; it will have a special-case association with the
        field defined by NON_FIELD_ERRORS.
        """
        pass

    def get_text_score(self):
        """Return the text score attached by a text-search query.

        :raises InvalidDocumentError: if the document was not produced by
            a text query (no '_text_score' entry in ``_data``).
        """
        if '_text_score' not in self._data:
            raise InvalidDocumentError(
                'This document is not originally built from a text query')

        return self._data['_text_score']

    def to_mongo(self, use_db_field=True, fields=None):
        """
        Return as SON data ready for use with MongoDB.
        """
        if not fields:
            fields = []

        data = SON()
        data["_id"] = None
        data['_cls'] = self._class_name

        EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
        # only root fields ['test1.a', 'test2'] => ['test1', 'test2']
        root_fields = set([f.split('.')[0] for f in fields])

        for field_name in self:
            if root_fields and field_name not in root_fields:
                continue

            value = self._data.get(field_name, None)
            field = self._fields.get(field_name)

            if field is None and self._dynamic:
                field = self._dynamic_fields.get(field_name)

            if value is not None:
                if isinstance(field, EmbeddedDocumentField):
                    if fields:
                        # strip the "<field_name>." prefix off sub-paths
                        key = '%s.' % field_name
                        embedded_fields = [
                            i.replace(key, '') for i in fields
                            if i.startswith(key)
                        ]
                    else:
                        embedded_fields = []

                    value = field.to_mongo(value, use_db_field=use_db_field,
                                           fields=embedded_fields)
                else:
                    value = field.to_mongo(value)

            # Handle self generating fields
            if value is None and field._auto_gen:
                value = field.generate()
                self._data[field_name] = value

            if value is not None:
                if use_db_field:
                    data[field.db_field] = value
                else:
                    data[field.name] = value

        # If "_id" has not been set, then try and set it
        Document = _import_class("Document")
        if isinstance(self, Document):
            if data["_id"] is None:
                data["_id"] = self._data.get("id", None)

        if data['_id'] is None:
            data.pop('_id')

        # Only add _cls if allow_inheritance is True
        if (not hasattr(self, '_meta') or
                not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)):
            data.pop('_cls')

        return data

    def validate(self, clean=True):
        """Ensure that all fields' values are valid and that required fields
        are present.
        """
        # Ensure that each field is matched to a valid value
        errors = {}
        if clean:
            try:
                self.clean()
            except ValidationError, error:
                errors[NON_FIELD_ERRORS] = error

        # Get a list of tuples of field names and their current values
        fields = [(self._fields.get(name, self._dynamic_fields.get(name)),
                   self._data.get(name)) for name in self._fields_ordered]

        EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
        GenericEmbeddedDocumentField = _import_class(
            "GenericEmbeddedDocumentField")

        for field, value in fields:
            if value is not None:
                try:
                    # Embedded documents forward the clean flag down.
                    if isinstance(field, (EmbeddedDocumentField,
                                          GenericEmbeddedDocumentField)):
                        field._validate(value, clean=clean)
                    else:
                        field._validate(value)
                except ValidationError, error:
                    errors[field.name] = error.errors or error
                except (ValueError, AttributeError, AssertionError), error:
                    errors[field.name] = error
        # NOTE(review): the usual tail that raises a ValidationError when
        # `errors` is non-empty appears to be missing here — as written,
        # collected errors are silently discarded; verify against the
        # canonical implementation.
def __init__(self, *args, **values):
    """
    Initialise a document or an embedded document.

    :param values: A dictionary of keys and values for the document.
        It may contain additional reserved keywords, e.g. "__auto_convert".
    :param __auto_convert: If True, supplied values will be converted
        to Python-type values via each field's `to_python` method.
    :param __only_fields: A set of fields that have been loaded for
        this document. Empty if all fields have been loaded.
    :param _created: Indicates whether this is a brand new document
        or whether it's already been persisted before. Defaults to true.
    """
    self._initialised = False
    self._created = True

    if args:
        raise TypeError(
            "Instantiating a document with positional arguments is not "
            "supported. Please use `field_name=value` keyword arguments.")

    __auto_convert = values.pop("__auto_convert", True)

    # When __only_fields is absent, this defaults to all supplied keys
    # (set(values) yields the dict's keys).
    __only_fields = set(values.pop("__only_fields", values))

    _created = values.pop("_created", True)

    signals.pre_init.send(self.__class__, document=self, values=values)

    # Check if there are undefined fields supplied to the constructor,
    # if so raise an Exception.
    if not self._dynamic and (self._meta.get("strict", True) or _created):
        _undefined_fields = set(values.keys()) - set(
            list(self._fields.keys()) + ["id", "pk", "_cls", "_text_score"])
        if _undefined_fields:
            msg = ('The fields "{}" do not exist on the document "{}"'
                   ).format(_undefined_fields, self._class_name)
            raise FieldDoesNotExist(msg)

    # STRICT documents reject unknown keys at the _data level.
    if self.STRICT and not self._dynamic:
        self._data = StrictDict.create(allowed_keys=self._fields_ordered)()
    else:
        self._data = {}

    self._dynamic_fields = SON()

    # Assign default values to the instance.
    # We set default values only for fields loaded from DB. See
    # https://github.com/mongoengine/mongoengine/issues/399 for more info.
    for key, field in self._fields.items():
        if self._db_field_map.get(key, key) in __only_fields:
            continue
        value = getattr(self, key, None)
        setattr(self, key, value)

    if "_cls" not in values:
        self._cls = self._class_name

    # Set passed values after initialisation
    if self._dynamic:
        dynamic_data = {}
        for key, value in values.items():
            if key in self._fields or key == "_id":
                setattr(self, key, value)
            else:
                dynamic_data[key] = value
    else:
        FileField = _import_class("FileField")
        for key, value in values.items():
            # Translate db_field names back to attribute names.
            key = self._reverse_db_field_map.get(key, key)
            if key in self._fields or key in ("id", "pk", "_cls"):
                if __auto_convert and value is not None:
                    field = self._fields.get(key)
                    # FileFields are deliberately not auto-converted.
                    if field and not isinstance(field, FileField):
                        value = field.to_python(value)
                setattr(self, key, value)
            else:
                self._data[key] = value

    # Set any get_<field>_display methods
    self.__set_field_display()

    # Dynamic values are applied last, once the lock is released.
    if self._dynamic:
        self._dynamic_lock = False
        for key, value in dynamic_data.items():
            setattr(self, key, value)

    # Flag initialised
    self._initialised = True
    self._created = _created

    signals.post_init.send(self.__class__, document=self)
def __init__(self, *args, **values):
    """Initialise a document or embedded document.

    :param values: a dictionary of values for the document; may carry
        the reserved keywords below.
    :param __auto_convert: if True, convert supplied values via each
        field's ``to_python``.
    :param __only_fields: set of fields loaded from the DB; defaults
        are only assigned to fields NOT in this set.
    :param _created: whether this is a brand-new (unsaved) document.
    """
    self._initialised = False
    self._created = True
    if args:
        # Combine positional arguments with named arguments.
        # We only want named arguments.
        field = iter(self._fields_ordered)
        # If its an automatic id field then skip to the first defined field
        if self._auto_id_field:
            next(field)
        for value in args:
            name = next(field)
            if name in values:
                raise TypeError("Multiple values for keyword argument '" +
                                name + "'")
            values[name] = value

    __auto_convert = values.pop("__auto_convert", True)

    # 399: set default values only to fields loaded from DB
    # (defaults to all supplied keys when __only_fields is absent)
    __only_fields = set(values.pop("__only_fields", values))

    _created = values.pop("_created", True)

    signals.pre_init.send(self.__class__, document=self, values=values)

    # Check if there are undefined fields supplied to the constructor,
    # if so raise an Exception.
    if not self._dynamic and (self._meta.get('strict', True) or _created):
        # NOTE(review): dict.keys() + list is Python-2-only; this matches
        # the file's other py2 idioms (iteritems, except X, e).
        for var in values.keys():
            if var not in self._fields.keys() + [
                    'id', 'pk', '_cls', '_text_score'
            ]:
                msg = (
                    "The field '{0}' does not exist on the document '{1}'"
                ).format(var, self._class_name)
                raise FieldDoesNotExist(msg)

    if self.STRICT and not self._dynamic:
        self._data = StrictDict.create(allowed_keys=self._fields_ordered)()
    else:
        self._data = SemiStrictDict.create(
            allowed_keys=self._fields_ordered)()

    # NOTE(review): this unconditionally replaces the Strict/SemiStrict
    # dict assigned just above with a plain dict, making that branch
    # dead — looks like a merge artifact; confirm intended behavior.
    self._data = {}
    self._dynamic_fields = SON()

    # Assign default values to instance (skipping DB-loaded fields)
    for key, field in self._fields.iteritems():
        if self._db_field_map.get(key, key) in __only_fields:
            continue
        value = getattr(self, key, None)
        setattr(self, key, value)

    if "_cls" not in values:
        self._cls = self._class_name

    # Set passed values after initialisation
    if self._dynamic:
        dynamic_data = {}
        for key, value in values.iteritems():
            if key in self._fields or key == '_id':
                setattr(self, key, value)
            elif self._dynamic:
                dynamic_data[key] = value
    else:
        FileField = _import_class('FileField')
        for key, value in values.iteritems():
            if key == '__auto_convert':
                continue
            # translate db_field names back to attribute names
            key = self._reverse_db_field_map.get(key, key)
            if key in self._fields or key in ('id', 'pk', '_cls'):
                if __auto_convert and value is not None:
                    field = self._fields.get(key)
                    # FileFields are deliberately not auto-converted.
                    if field and not isinstance(field, FileField):
                        value = field.to_python(value)
                setattr(self, key, value)
            else:
                self._data[key] = value

    # Set any get_fieldname_display methods
    self.__set_field_display()

    # Dynamic values are applied last, once the lock is released.
    if self._dynamic:
        self._dynamic_lock = False
        for key, value in dynamic_data.iteritems():
            setattr(self, key, value)

    # Flag initialised
    self._initialised = True
    self._created = _created
    signals.post_init.send(self.__class__, document=self)
def _import_classes(mcs):
    """Look up the document/field classes the metaclass needs.

    The lookups go through ``_import_class`` so they happen lazily,
    avoiding circular imports during module initialisation.
    """
    return (
        _import_class("Document"),
        _import_class("EmbeddedDocument"),
        _import_class("DictField"),
        _import_class("CachedReferenceField"),
    )
def _lookup_field(cls, parts):
    """Lookup a field based on its attribute and return a list containing
    the field's parents and the field.

    :param parts: a field path as a list/tuple of attribute names (a bare
        string is treated as a single-part path); numeric parts index
        into ListFields.
    :raises LookUpError: when a part cannot be resolved to a field.
    """
    ListField = _import_class("ListField")
    DynamicField = _import_class('DynamicField')

    if not isinstance(parts, (list, tuple)):
        parts = [parts]

    fields = []
    field = None

    for field_name in parts:
        # Handle ListField indexing:
        if field_name.isdigit() and isinstance(field, ListField):
            fields.append(field_name)
            continue

        if field is None:
            # Look up first field from the document
            if field_name == 'pk':
                # Deal with "primary key" alias
                field_name = cls._meta['id_field']
            if field_name in cls._fields:
                field = cls._fields[field_name]
            elif cls._dynamic:
                # Dynamic documents resolve unknown names on the fly.
                field = DynamicField(db_field=field_name)
            elif cls._meta.get("allow_inheritance", False) or \
                    cls._meta.get("abstract", False):
                # 744: in case the field is defined in a subclass
                for subcls in cls.__subclasses__():
                    try:
                        field = subcls._lookup_field([field_name])[0]
                    except LookUpError:
                        continue

                    if field is not None:
                        break
                else:
                    # no subclass provided the field either
                    raise LookUpError('Cannot resolve field "%s"' %
                                      field_name)
            else:
                raise LookUpError('Cannot resolve field "%s"' % field_name)
        else:
            ReferenceField = _import_class('ReferenceField')
            GenericReferenceField = _import_class('GenericReferenceField')

            # Traversing across a reference would require a join, which
            # MongoDB does not support.
            if isinstance(field, (ReferenceField, GenericReferenceField)):
                raise LookUpError('Cannot perform join in mongoDB: %s' %
                                  '__'.join(parts))
            if hasattr(getattr(field, 'field', None), 'lookup_member'):
                # Container fields delegate to their item field.
                new_field = field.field.lookup_member(field_name)
            elif cls._dynamic and (isinstance(field, DynamicField) or
                                   getattr(getattr(field, 'document_type'),
                                           '_dynamic')):
                new_field = DynamicField(db_field=field_name)
            else:
                # Look up subfield on the previous field or raise
                try:
                    new_field = field.lookup_member(field_name)
                except AttributeError:
                    raise LookUpError(
                        'Cannot resolve subfield or operator {} '
                        'on the field {}'.format(field_name, field.name))

            # On complex fields, an unresolved trailing part is kept as a
            # raw key (e.g. a dict key) rather than treated as an error.
            if not new_field and isinstance(field, ComplexBaseField):
                fields.append(field_name)
                continue
            elif not new_field:
                raise LookUpError('Cannot resolve field "%s"' % field_name)

            field = new_field  # update field to the new field type

        fields.append(field)

    return fields
class BaseDocument(object): _dynamic = False _created = True _dynamic_lock = True _initialised = False def __init__(self, *args, **values): """ Initialise a document or embedded document :param __auto_convert: Try and will cast python objects to Object types :param values: A dictionary of values for the document """ if args: # Combine positional arguments with named arguments. # We only want named arguments. field = iter(self._fields_ordered) for value in args: name = next(field) if name in values: raise TypeError("Multiple values for keyword argument '" + name + "'") values[name] = value __auto_convert = values.pop("__auto_convert", True) signals.pre_init.send(self.__class__, document=self, values=values) self._data = {} # Assign default values to instance for key, field in self._fields.iteritems(): if self._db_field_map.get(key, key) in values: continue value = getattr(self, key, None) setattr(self, key, value) # Set passed values after initialisation if self._dynamic: self._dynamic_fields = {} dynamic_data = {} for key, value in values.iteritems(): if key in self._fields or key == '_id': setattr(self, key, value) elif self._dynamic: dynamic_data[key] = value else: FileField = _import_class('FileField') for key, value in values.iteritems(): if key == '__auto_convert': continue key = self._reverse_db_field_map.get(key, key) if key in self._fields or key in ('id', 'pk', '_cls'): if __auto_convert and value is not None: field = self._fields.get(key) if field and not isinstance(field, FileField): value = field.to_python(value) setattr(self, key, value) else: self._data[key] = value # Set any get_fieldname_display methods self.__set_field_display() if self._dynamic: self._dynamic_lock = False for key, value in dynamic_data.iteritems(): setattr(self, key, value) # Flag initialised self._initialised = True signals.post_init.send(self.__class__, document=self) def __delattr__(self, *args, **kwargs): """Handle deletions of fields""" field_name = args[0] if field_name in 
self._fields: default = self._fields[field_name].default if callable(default): default = default() setattr(self, field_name, default) else: super(BaseDocument, self).__delattr__(*args, **kwargs) def __setattr__(self, name, value): # Handle dynamic data only if an initialised dynamic document if self._dynamic and not self._dynamic_lock: field = None if not hasattr(self, name) and not name.startswith('_'): DynamicField = _import_class("DynamicField") field = DynamicField(db_field=name) field.name = name self._dynamic_fields[name] = field if not name.startswith('_'): value = self.__expand_dynamic_values(name, value) # Handle marking data as changed if name in self._dynamic_fields: self._data[name] = value if hasattr(self, '_changed_fields'): self._mark_as_changed(name) if (self._is_document and not self._created and name in self._meta.get('shard_key', tuple()) and self._data.get(name) != value): OperationError = _import_class('OperationError') msg = "Shard Keys are immutable. Tried to update %s" % name raise OperationError(msg) # Check if the user has created a new instance of a class if (self._is_document and self._initialised and self._created and name == self._meta['id_field']): super(BaseDocument, self).__setattr__('_created', False) super(BaseDocument, self).__setattr__(name, value) def __getstate__(self): data = {} for k in ('_changed_fields', '_initialised', '_created'): data[k] = getattr(self, k) data['_data'] = self.to_mongo() return data def __setstate__(self, data): if isinstance(data["_data"], SON): data["_data"] = self.__class__._from_son(data["_data"])._data for k in ('_changed_fields', '_initialised', '_created', '_data'): setattr(self, k, data[k]) def __iter__(self): if 'id' in self._fields and 'id' not in self._fields_ordered: return iter(('id', ) + self._fields_ordered) return iter(self._fields_ordered) def __getitem__(self, name): """Dictionary-style field access, return a field's value if present. 
""" try: if name in self._fields: return getattr(self, name) except AttributeError: pass raise KeyError(name) def __setitem__(self, name, value): """Dictionary-style field access, set a field's value. """ # Ensure that the field exists before settings its value if name not in self._fields: raise KeyError(name) return setattr(self, name, value) def __contains__(self, name): try: val = getattr(self, name) return val is not None except AttributeError: return False def __len__(self): return len(self._data) def __repr__(self): try: u = self.__str__() except (UnicodeEncodeError, UnicodeDecodeError): u = '[Bad Unicode data]' repr_type = type(u) return repr_type('<%s: %s>' % (self.__class__.__name__, u)) def __str__(self): if hasattr(self, '__unicode__'): if PY3: return self.__unicode__() else: return unicode(self).encode('utf-8') return txt_type('%s object' % self.__class__.__name__) def __eq__(self, other): if isinstance(other, self.__class__) and hasattr(other, 'id'): if self.id == other.id: return True return False def __ne__(self, other): return not self.__eq__(other) def __hash__(self): if self.pk is None: # For new object return super(BaseDocument, self).__hash__() else: return hash(self.pk) def clean(self): """ Hook for doing document level data cleaning before validation is run. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS. """ pass def to_mongo(self): """Return as SON data ready for use with MongoDB. 
""" data = SON() data["_id"] = None data['_cls'] = self._class_name for field_name in self: value = self._data.get(field_name, None) field = self._fields.get(field_name) if value is not None: value = field.to_mongo(value) # Handle self generating fields if value is None and field._auto_gen: value = field.generate() self._data[field_name] = value if value is not None: data[field.db_field] = value # If "_id" has not been set, then try and set it if data["_id"] is None: data["_id"] = self._data.get("id", None) if data['_id'] is None: data.pop('_id') # Only add _cls if allow_inheritance is True if (not hasattr(self, '_meta') or not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)): data.pop('_cls') if not self._dynamic: return data # Sort dynamic fields by key dynamic_fields = sorted(self._dynamic_fields.iteritems(), key=operator.itemgetter(0)) for name, field in dynamic_fields: data[name] = field.to_mongo(self._data.get(name, None)) return data def validate(self, clean=True): """Ensure that all fields' values are valid and that required fields are present. """ # Ensure that each field is matched to a valid value errors = {} if clean: try: self.clean() except ValidationError, error: errors[NON_FIELD_ERRORS] = error # Get a list of tuples of field names and their current values fields = [(field, self._data.get(name)) for name, field in self._fields.items()] if self._dynamic: fields += [(field, self._data.get(name)) for name, field in self._dynamic_fields.items()] EmbeddedDocumentField = _import_class("EmbeddedDocumentField") GenericEmbeddedDocumentField = _import_class( "GenericEmbeddedDocumentField") for field, value in fields: if value is not None: try: if isinstance( field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)): field._validate(value, clean=clean) else: field._validate(value) except ValidationError, error: errors[field.name] = error.errors or error except (ValueError, AttributeError, AssertionError), error: errors[field.name] = error