Example #1
    def __new__(cls, name, bases, attrs):
        flattened_bases = cls._get_bases(bases)
        super_new = super(DocumentMetaclass, cls).__new__

        # If a base class, just call super
        metaclass = attrs.get('my_metaclass')
        if metaclass and issubclass(metaclass, DocumentMetaclass):
            return super_new(cls, name, bases, attrs)

        attrs['_is_document'] = attrs.get('_is_document', False)

        # EmbeddedDocuments could have meta data for inheritance
        if 'meta' in attrs:
            attrs['_meta'] = attrs.pop('meta')

        # EmbeddedDocuments should inherit meta data
        if '_meta' not in attrs:
            meta = MetaDict()
            for base in flattened_bases[::-1]:
                # Add any mixin metadata from plain objects
                if hasattr(base, 'meta'):
                    meta.merge(base.meta)
                elif hasattr(base, '_meta'):
                    meta.merge(base._meta)
            attrs['_meta'] = meta

        # Handle document Fields

        # Merge all fields from subclasses
        doc_fields = {}
        for base in flattened_bases[::-1]:
            if hasattr(base, '_fields'):
                doc_fields.update(base._fields)

            # Standard object mixin - merge in any Fields
            if not hasattr(base, '_meta'):
                base_fields = {}
                for attr_name, attr_value in base.__dict__.iteritems():
                    if not isinstance(attr_value, BaseField):
                        continue
                    attr_value.name = attr_name
                    if not attr_value.db_field:
                        attr_value.db_field = attr_name
                    base_fields[attr_name] = attr_value

                doc_fields.update(base_fields)

        # Discover any document fields
        field_names = {}
        for attr_name, attr_value in attrs.iteritems():
            if not isinstance(attr_value, BaseField):
                continue
            attr_value.name = attr_name
            if not attr_value.db_field:
                attr_value.db_field = attr_name
            doc_fields[attr_name] = attr_value

            # Count names to ensure no db_field redefinitions
            field_names[attr_value.db_field] = field_names.get(
                attr_value.db_field, 0) + 1

        # Ensure no duplicate db_fields
        duplicate_db_fields = [k for k, v in field_names.items() if v > 1]
        if duplicate_db_fields:
            msg = ("Multiple db_fields defined for: %s " %
                   ", ".join(duplicate_db_fields))
            raise InvalidDocumentError(msg)

        # Set _fields and db_field maps
        attrs['_fields'] = doc_fields
        attrs['_db_field_map'] = dict([(k, getattr(v, 'db_field', k))
                                       for k, v in doc_fields.iteritems()])
        attrs['_reverse_db_field_map'] = dict(
            (v, k) for k, v in attrs['_db_field_map'].iteritems())

        attrs['_fields_ordered'] = tuple(i[1] for i in sorted(
            (v.creation_counter, v.name) for v in doc_fields.itervalues()))

        #
        # Set document hierarchy
        #
        superclasses = ()
        class_name = [name]
        for base in flattened_bases:
            if (not getattr(base, '_is_base_cls', True)
                    and not getattr(base, '_meta', {}).get('abstract', True)):
                # Collate hierarchy for _cls and _subclasses
                class_name.append(base.__name__)

            if hasattr(base, '_meta'):
                # Warn if allow_inheritance isn't set and prevent
                # inheritance of classes where inheritance is set to False
                allow_inheritance = base._meta.get('allow_inheritance',
                                                   ALLOW_INHERITANCE)
                if (allow_inheritance is not True
                        and not base._meta.get('abstract')):
                    raise ValueError('Document %s may not be subclassed' %
                                     base.__name__)

        # Get superclasses from last base superclass
        document_bases = [
            b for b in flattened_bases if hasattr(b, '_class_name')
        ]
        if document_bases:
            superclasses = document_bases[0]._superclasses
            superclasses += (document_bases[0]._class_name, )

        _cls = '.'.join(reversed(class_name))
        attrs['_class_name'] = _cls
        attrs['_superclasses'] = superclasses
        attrs['_subclasses'] = (_cls, )
        attrs['_types'] = attrs['_subclasses']  # TODO deprecate _types

        # Create the new_class
        new_class = super_new(cls, name, bases, attrs)

        # Set _subclasses
        for base in document_bases:
            if _cls not in base._subclasses:
                base._subclasses += (_cls, )
            base._types = base._subclasses  # TODO deprecate _types

        Document, EmbeddedDocument, DictField = cls._import_classes()

        if issubclass(new_class, Document):
            new_class._collection = None

        # Add class to the _document_registry
        _document_registry[new_class._class_name] = new_class

        # In Python 2, user-defined method objects have special read-only
        # attributes 'im_func' and 'im_self' which contain the function obj
        # and class instance object respectively.  With Python 3 these special
        # attributes have been replaced by __func__ and __self__.  The Blinker
        # module continues to use im_func and im_self, so the code below
        # copies __func__ into im_func and __self__ into im_self for
        # classmethod objects in Document derived classes.
        if PY3:
            for key, val in new_class.__dict__.items():
                if isinstance(val, classmethod):
                    f = val.__get__(new_class)
                    if hasattr(f, '__func__') and not hasattr(f, 'im_func'):
                        f.__dict__.update({'im_func': getattr(f, '__func__')})
                    if hasattr(f, '__self__') and not hasattr(f, 'im_self'):
                        f.__dict__.update({'im_self': getattr(f, '__self__')})

        # Handle delete rules
        for field in new_class._fields.itervalues():
            f = field
            f.owner_document = new_class
            delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING)
            if isinstance(f, ComplexBaseField) and hasattr(f, 'field'):
                delete_rule = getattr(f.field, 'reverse_delete_rule',
                                      DO_NOTHING)
                if isinstance(f, DictField) and delete_rule != DO_NOTHING:
                    msg = ("Reverse delete rules are not supported "
                           "for %s (field: %s)" %
                           (field.__class__.__name__, field.name))
                    raise InvalidDocumentError(msg)

                f = field.field

            if delete_rule != DO_NOTHING:
                if issubclass(new_class, EmbeddedDocument):
                    msg = ("Reverse delete rules are not supported for "
                           "EmbeddedDocuments (field: %s)" % field.name)
                    raise InvalidDocumentError(msg)
                f.document_type.register_delete_rule(new_class, field.name,
                                                     delete_rule)

            if (field.name and hasattr(Document, field.name)
                    and EmbeddedDocument not in new_class.mro()):
                msg = ("%s is a document method and not a valid "
                       "field name" % field.name)
                raise InvalidDocumentError(msg)

        return new_class
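
To make the field discovery above concrete, here is a minimal sketch of the class-level attributes the metaclass ends up setting (the imports are the usual public MongoEngine ones; the Person class and its fields are illustrative, not taken from the source):

    from mongoengine import Document, StringField, IntField

    class Person(Document):
        # db_field stores the attribute under a different key in MongoDB
        full_name = StringField(db_field='fn')
        age = IntField()

    Person._class_name                      # 'Person'
    Person._fields['full_name'].db_field    # 'fn' -- name/db_field were filled in above
    Person._db_field_map                    # {'full_name': 'fn', 'age': 'age', ...}
    Person._reverse_db_field_map            # {'fn': 'full_name', 'age': 'age', ...}
    Person._fields_ordered                  # attribute names ordered by creation_counter

    # Declaring two fields with the same db_field raises InvalidDocumentError
    # ("Multiple db_fields defined for: ...").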
Example #2
    def save(self,
             force_insert=False,
             validate=True,
             clean=True,
             write_concern=None,
             cascade=None,
             cascade_kwargs=None,
             _refs=None,
             save_condition=None,
             signal_kwargs=None,
             **kwargs):
        """Save the :class:`~mongoengine.Document` to the database. If the
        document already exists, it will be updated, otherwise it will be
        created.

        :param force_insert: only try to create a new document, don't allow
            updates of existing documents.
        :param validate: validates the document; set to ``False`` to skip.
        :param clean: call the document clean method, requires `validate` to be
            True.
        :param write_concern: Extra keyword arguments are passed down to
            :meth:`~pymongo.collection.Collection.save` OR
            :meth:`~pymongo.collection.Collection.insert`
            which will be used as options for the resultant
            ``getLastError`` command.  For example,
            ``save(..., write_concern={w: 2, fsync: True}, ...)`` will
            wait until at least two servers have recorded the write and
            will force an fsync on the primary server.
        :param cascade: Sets the flag for cascading saves.  You can set a
            default by setting "cascade" in the document ``meta``.
        :param cascade_kwargs: (optional) kwargs dictionary to be passed through
            to cascading saves.  Implies ``cascade=True``.
        :param _refs: A list of processed references used in cascading saves
        :param save_condition: only perform save if matching record in db
            satisfies condition(s) (e.g. version number).
            Raises :class:`OperationError` if the conditions are not satisfied
        :param signal_kwargs: (optional) kwargs dictionary to be passed to
            the signal calls.

        .. versionchanged:: 0.5
            In existing documents it only saves changed fields using
            set / unset.  Saves are cascaded and any
            :class:`~bson.dbref.DBRef` objects that have changes are
            saved as well.
        .. versionchanged:: 0.6
            Added cascading saves
        .. versionchanged:: 0.8
            Cascading saves are optional and default to ``False``.  For
            fine-grained control you can enable them per document with
            ``meta['cascade'] = True``.  You can also pass different kwargs
            to the cascading save using ``cascade_kwargs``, which overrides
            the existing kwargs with custom values.
        .. versionchanged:: 0.8.5
            Optional save_condition that only overwrites existing documents
            if the condition is satisfied in the current db record.
        .. versionchanged:: 0.10
            :class:`OperationError` exception raised if save_condition fails.
        .. versionchanged:: 0.10.1
            ``save_condition`` failure now raises a :class:`SaveConditionError`
        .. versionchanged:: 0.10.7
            Add signal_kwargs argument
        """
        signal_kwargs = signal_kwargs or {}

        if self._meta.get("abstract"):
            raise InvalidDocumentError("Cannot save an abstract document.")

        signals.pre_save.send(self.__class__, document=self, **signal_kwargs)

        if validate:
            self.validate(clean=clean)

        if write_concern is None:
            write_concern = {}

        doc_id = self.to_mongo(fields=[self._meta["id_field"]])
        created = "_id" not in doc_id or self._created or force_insert

        signals.pre_save_post_validation.send(self.__class__,
                                              document=self,
                                              created=created,
                                              **signal_kwargs)
        # it might be refreshed by the pre_save_post_validation hook, e.g., for etag generation
        doc = self.to_mongo()

        if self._meta.get("auto_create_index", True):
            self.ensure_indexes()

        try:
            # Save a new document or update an existing one
            if created:
                object_id = self._save_create(doc, force_insert, write_concern)
            else:
                object_id, created = self._save_update(doc, save_condition,
                                                       write_concern)

            if cascade is None:
                cascade = self._meta.get("cascade",
                                         False) or cascade_kwargs is not None

            if cascade:
                kwargs = {
                    "force_insert": force_insert,
                    "validate": validate,
                    "write_concern": write_concern,
                    "cascade": cascade,
                }
                if cascade_kwargs:  # Allow granular control over cascades
                    kwargs.update(cascade_kwargs)
                kwargs["_refs"] = _refs
                self.cascade_save(**kwargs)

        except pymongo.errors.DuplicateKeyError as err:
            message = "Tried to save duplicate unique keys (%s)"
            raise NotUniqueError(message % six.text_type(err))
        except pymongo.errors.OperationFailure as err:
            message = "Could not save document (%s)"
            if re.match("^E1100[01] duplicate key", six.text_type(err)):
                # E11000 - duplicate key error index
                # E11001 - duplicate key on update
                message = "Tried to save duplicate unique keys (%s)"
                raise NotUniqueError(message % six.text_type(err))
            raise OperationError(message % six.text_type(err))

        # Make sure we store the PK on this document now that it's saved
        id_field = self._meta["id_field"]
        if created or id_field not in self._meta.get("shard_key", []):
            self[id_field] = self._fields[id_field].to_python(object_id)

        signals.post_save.send(self.__class__,
                               document=self,
                               created=created,
                               **signal_kwargs)

        self._clear_changed_fields()
        self._created = False

        return self
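
A short usage sketch of the options documented above (the connection name and the Article class are illustrative, not taken from the source):

    from mongoengine import Document, StringField, IntField, connect, NotUniqueError

    connect('example_db')

    class Article(Document):
        slug = StringField(unique=True)
        version = IntField(default=1)

    a = Article(slug='intro')
    a.save()                                       # new document -> insert path

    a.version = 2
    a.save(save_condition={'version': 1})          # update only while the stored version is still 1

    a.save(write_concern={'w': 2, 'fsync': True})  # wait for two servers and an fsync (see above)

    try:
        Article(slug='intro').save()               # second document with the same unique slug
    except NotUniqueError:
        pass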
Example #3
    @classmethod
    def _from_son(cls,
                  son,
                  _auto_dereference=True,
                  only_fields=None,
                  created=False):
        """Create an instance of a Document (subclass) from a PyMongo
        SON.
        """
        if not only_fields:
            only_fields = []

        if son and not isinstance(son, dict):
            raise ValueError(
                "The source SON object needs to be of type 'dict'")

        # Get the class name from the document, falling back to the given
        # class if unavailable
        class_name = son.get('_cls', cls._class_name)

        # Convert SON to a data dict, making sure each key is a string and
        # corresponds to the right db field.
        data = {}
        for key, value in son.iteritems():
            key = str(key)
            key = cls._db_field_map.get(key, key)
            data[key] = value

        # Return correct subclass for document type
        if class_name != cls._class_name:
            cls = get_document(class_name)

        changed_fields = []
        errors_dict = {}

        fields = cls._fields
        if not _auto_dereference:
            fields = copy.deepcopy(fields)

        for field_name, field in fields.iteritems():
            field._auto_dereference = _auto_dereference
            if field.db_field in data:
                value = data[field.db_field]
                try:
                    data[field_name] = (value if value is None else
                                        field.to_python(value))
                    if field_name != field.db_field:
                        del data[field.db_field]
                except (AttributeError, ValueError) as e:
                    errors_dict[field_name] = e

        if errors_dict:
            errors = '\n'.join(
                ['%s - %s' % (k, v) for k, v in errors_dict.items()])
            msg = ('Invalid data to create a `%s` instance.\n%s' %
                   (cls._class_name, errors))
            raise InvalidDocumentError(msg)

        # In STRICT documents, remove any keys that aren't in cls._fields
        if cls.STRICT:
            data = {k: v for k, v in data.iteritems() if k in cls._fields}

        obj = cls(__auto_convert=False,
                  _created=created,
                  __only_fields=only_fields,
                  **data)
        obj._changed_fields = changed_fields
        if not _auto_dereference:
            obj._fields = fields

        return obj
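
A small sketch of the conversion above, reusing the illustrative Person document from the earlier sketch (``_from_son`` is internal API, normally invoked by MongoEngine itself rather than by user code):

    from bson import ObjectId

    son = {'_id': ObjectId(), 'fn': 'Ada Lovelace'}   # keys are db field names

    p = Person._from_son(son)
    p.full_name   # 'Ada Lovelace' -- 'fn' was mapped back to 'full_name' and
                  # converted with StringField.to_python()

    # If the SON carries a '_cls' value naming a registered subclass
    # (e.g. 'Person.Employee'), get_document() is used and an instance of
    # that subclass is returned instead.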
Example #4
    @classmethod
    def _from_son(cls, son, _auto_dereference=True, created=False):
        """Create an instance of a Document (subclass) from a PyMongo SON (dict)"""
        if son and not isinstance(son, dict):
            raise ValueError(
                "The source SON object needs to be of type 'dict' but a '%s' was found"
                % type(son))

        # Get the class name from the document, falling back to the given
        # class if unavailable
        class_name = son.get("_cls", cls._class_name)

        # Convert SON to a data dict, making sure each key is a string and
        # corresponds to the right db field.
        # This is needed as _from_son is currently called both from BaseDocument.__init__
        # and from EmbeddedDocumentField.to_python
        data = {}
        for key, value in son.items():
            key = str(key)
            key = cls._db_field_map.get(key, key)
            data[key] = value

        # Return correct subclass for document type
        if class_name != cls._class_name:
            cls = get_document(class_name)

        errors_dict = {}

        fields = cls._fields
        if not _auto_dereference:
            fields = copy.deepcopy(fields)

        for field_name, field in fields.items():
            field._auto_dereference = _auto_dereference
            if field.db_field in data:
                value = data[field.db_field]
                try:
                    data[field_name] = (value if value is None else
                                        field.to_python(value))
                    if field_name != field.db_field:
                        del data[field.db_field]
                except (AttributeError, ValueError) as e:
                    errors_dict[field_name] = e

        if errors_dict:
            errors = "\n".join(
                [f"Field '{k}' - {v}" for k, v in errors_dict.items()])
            msg = "Invalid data to create a `{}` instance.\n{}".format(
                cls._class_name,
                errors,
            )
            raise InvalidDocumentError(msg)

        # In STRICT documents, remove any keys that aren't in cls._fields
        if cls.STRICT:
            data = {k: v for k, v in data.items() if k in cls._fields}

        obj = cls(__auto_convert=False, _created=created, **data)
        obj._changed_fields = []
        if not _auto_dereference:
            obj._fields = fields

        return obj
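
The STRICT branch above is easiest to see with a stored document that carries a key the class no longer defines (again using the illustrative Person; ``STRICT`` is on by default and is typically controlled by the ``strict`` meta option):

    from bson import ObjectId

    son = {'_id': ObjectId(), 'fn': 'Ada Lovelace', 'legacy_flag': True}

    p = Person._from_son(son)
    # 'legacy_flag' has no entry in Person._fields, so the STRICT filter above
    # drops it from `data` before the instance is constructed.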
Example #5
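Fragment: the tail of a ``_from_son`` variant -- fields that are absent from the SON but carry a default are recorded as changed so the default is persisted on the next save.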
                    errors_dict[field_name] = e
            elif field.default:
                default = field.default
                if callable(default):
                    default = default()
                if isinstance(default, BaseDocument):
                    changed_fields.append(field_name)
                elif not only_fields or field_name in only_fields:
                    changed_fields.append(field_name)

        if errors_dict:
            errors = "\n".join(
                ["%s - %s" % (k, v) for k, v in errors_dict.items()])
            msg = ("Invalid data to create a `%s` instance.\n%s" %
                   (cls._class_name, errors))
            raise InvalidDocumentError(msg)

        if cls.STRICT:
            data = dict(
                (k, v) for k, v in data.iteritems() if k in cls._fields)
        obj = cls(__auto_convert=False,
                  _created=created,
                  __only_fields=only_fields,
                  **data)
        obj._changed_fields = changed_fields
        if not _auto_dereference:
            obj._fields = fields

        return obj
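
A sketch of that default handling (the ``nickname`` field is illustrative): a stored document that predates the field ends up with it listed in ``_changed_fields``, so the default gets written back on the next ``save()``:

    from bson import ObjectId
    from mongoengine import Document, StringField

    class Person(Document):
        full_name = StringField(db_field='fn')
        nickname = StringField(default='n/a')   # added after old documents were written

    old_son = {'_id': ObjectId(), 'fn': 'Ada Lovelace'}   # no 'nickname' key stored

    p = Person._from_son(old_son)
    'nickname' in p._changed_fields   # True -- the default will be persisted by save()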

Example #6
    def __new__(mcs, name, bases, attrs):
        flattened_bases = mcs._get_bases(bases)
        super_new = super().__new__

        # If a base class, just call super
        metaclass = attrs.get("my_metaclass")
        if metaclass and issubclass(metaclass, DocumentMetaclass):
            return super_new(mcs, name, bases, attrs)

        attrs["_is_document"] = attrs.get("_is_document", False)
        attrs["_cached_reference_fields"] = []

        # EmbeddedDocuments could have meta data for inheritance
        if "meta" in attrs:
            attrs["_meta"] = attrs.pop("meta")

        # EmbeddedDocuments should inherit meta data
        if "_meta" not in attrs:
            meta = MetaDict()
            for base in flattened_bases[::-1]:
                # Add any mixin metadata from plain objects
                if hasattr(base, "meta"):
                    meta.merge(base.meta)
                elif hasattr(base, "_meta"):
                    meta.merge(base._meta)
            attrs["_meta"] = meta
            attrs["_meta"][
                "abstract"] = False  # 789: EmbeddedDocument shouldn't inherit abstract

        # If allow_inheritance is True, add a "_cls" string field to the attrs
        if attrs["_meta"].get("allow_inheritance"):
            StringField = _import_class("StringField")
            attrs["_cls"] = StringField()

        # Handle document Fields

        # Merge all fields from subclasses
        doc_fields = {}
        for base in flattened_bases[::-1]:
            if hasattr(base, "_fields"):
                doc_fields.update(base._fields)

            # Standard object mixin - merge in any Fields
            if not hasattr(base, "_meta"):
                base_fields = {}
                for attr_name, attr_value in base.__dict__.items():
                    if not isinstance(attr_value, BaseField):
                        continue
                    attr_value.name = attr_name
                    if not attr_value.db_field:
                        attr_value.db_field = attr_name
                    base_fields[attr_name] = attr_value

                doc_fields.update(base_fields)

        # Discover any document fields
        field_names = {}
        for attr_name, attr_value in attrs.items():
            if not isinstance(attr_value, BaseField):
                continue
            attr_value.name = attr_name
            if not attr_value.db_field:
                attr_value.db_field = attr_name
            doc_fields[attr_name] = attr_value

            # Count names to ensure no db_field redefinitions
            field_names[attr_value.db_field] = (
                field_names.get(attr_value.db_field, 0) + 1)

        # Ensure no duplicate db_fields
        duplicate_db_fields = [k for k, v in field_names.items() if v > 1]
        if duplicate_db_fields:
            msg = "Multiple db_fields defined for: %s " % ", ".join(
                duplicate_db_fields)
            raise InvalidDocumentError(msg)

        # Set _fields and db_field maps
        attrs["_fields"] = doc_fields
        attrs["_db_field_map"] = {
            k: getattr(v, "db_field", k)
            for k, v in doc_fields.items()
        }
        attrs["_reverse_db_field_map"] = {
            v: k
            for k, v in attrs["_db_field_map"].items()
        }

        attrs["_fields_ordered"] = tuple(i[1] for i in sorted(
            (v.creation_counter, v.name) for v in doc_fields.values()))

        #
        # Set document hierarchy
        #
        superclasses = ()
        class_name = [name]
        for base in flattened_bases:
            if not getattr(base, "_is_base_cls", True) and not getattr(
                    base, "_meta", {}).get("abstract", True):
                # Collate hierarchy for _cls and _subclasses
                class_name.append(base.__name__)

            if hasattr(base, "_meta"):
                # Warn if allow_inheritance isn't set and prevent
                # inheritance of classes where inheritance is set to False
                allow_inheritance = base._meta.get("allow_inheritance")
                if not allow_inheritance and not base._meta.get("abstract"):
                    raise ValueError(
                        "Document %s may not be subclassed. "
                        'To enable inheritance, use the "allow_inheritance" meta attribute.'
                        % base.__name__)

        # Get superclasses from last base superclass
        document_bases = [
            b for b in flattened_bases if hasattr(b, "_class_name")
        ]
        if document_bases:
            superclasses = document_bases[0]._superclasses
            superclasses += (document_bases[0]._class_name, )

        _cls = ".".join(reversed(class_name))
        attrs["_class_name"] = _cls
        attrs["_superclasses"] = superclasses
        attrs["_subclasses"] = (_cls, )
        attrs["_types"] = attrs["_subclasses"]  # TODO depreciate _types

        # Create the new_class
        new_class = super_new(mcs, name, bases, attrs)

        # Set _subclasses
        for base in document_bases:
            if _cls not in base._subclasses:
                base._subclasses += (_cls, )
            base._types = base._subclasses  # TODO deprecate _types

        (
            Document,
            EmbeddedDocument,
            DictField,
            CachedReferenceField,
        ) = mcs._import_classes()

        if issubclass(new_class, Document):
            new_class._collection = None

        # Add class to the _document_registry
        _document_registry[new_class._class_name] = new_class

        # In Python 2, user-defined method objects have special read-only
        # attributes 'im_func' and 'im_self' which contain the function obj
        # and class instance object respectively.  With Python 3 these special
        # attributes have been replaced by __func__ and __self__.  The Blinker
        # module continues to use im_func and im_self, so the code below
        # copies __func__ into im_func and __self__ into im_self for
        # classmethod objects in Document derived classes.
        for val in new_class.__dict__.values():
            if isinstance(val, classmethod):
                f = val.__get__(new_class)
                if hasattr(f, "__func__") and not hasattr(f, "im_func"):
                    f.__dict__.update({"im_func": getattr(f, "__func__")})
                if hasattr(f, "__self__") and not hasattr(f, "im_self"):
                    f.__dict__.update({"im_self": getattr(f, "__self__")})

        # Handle delete rules
        for field in new_class._fields.values():
            f = field
            if f.owner_document is None:
                f.owner_document = new_class
            delete_rule = getattr(f, "reverse_delete_rule", DO_NOTHING)
            if isinstance(f, CachedReferenceField):

                if issubclass(new_class, EmbeddedDocument):
                    raise InvalidDocumentError(
                        "CachedReferenceFields is not allowed in EmbeddedDocuments"
                    )

                if f.auto_sync:
                    f.start_listener()

                f.document_type._cached_reference_fields.append(f)

            if isinstance(f, ComplexBaseField) and hasattr(f, "field"):
                delete_rule = getattr(f.field, "reverse_delete_rule",
                                      DO_NOTHING)
                if isinstance(f, DictField) and delete_rule != DO_NOTHING:
                    msg = ("Reverse delete rules are not supported "
                           "for %s (field: %s)" %
                           (field.__class__.__name__, field.name))
                    raise InvalidDocumentError(msg)

                f = field.field

            if delete_rule != DO_NOTHING:
                if issubclass(new_class, EmbeddedDocument):
                    msg = ("Reverse delete rules are not supported for "
                           "EmbeddedDocuments (field: %s)" % field.name)
                    raise InvalidDocumentError(msg)
                f.document_type.register_delete_rule(new_class, field.name,
                                                     delete_rule)

            if (field.name and hasattr(Document, field.name)
                    and EmbeddedDocument not in new_class.mro()):
                msg = "%s is a document method and not a valid field name" % field.name
                raise InvalidDocumentError(msg)

        return new_class
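
A brief sketch of the hierarchy bookkeeping above (the Animal/Dog classes are illustrative; without ``allow_inheritance`` the subclass definition would raise the ValueError shown earlier):

    from mongoengine import Document, StringField

    class Animal(Document):
        name = StringField()
        meta = {'allow_inheritance': True}   # also triggers the '_cls' StringField above

    class Dog(Animal):
        pass

    Dog._class_name      # 'Animal.Dog' -- '.'.join(reversed(class_name))
    Dog._superclasses    # ('Animal',)
    Animal._subclasses   # ('Animal', 'Animal.Dog') once Dog has been defined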
Example #7
    @classmethod
    def _from_son(cls,
                  son,
                  _auto_dereference=True,
                  only_fields=None,
                  created=False):
        """Create an instance of a Document (subclass) from a PyMongo SON.
        """
        if not only_fields:
            only_fields = []

        # get the class name from the document, falling back to the given
        # class if unavailable
        class_name = son.get('_cls', cls._class_name)
        data = dict(("%s" % key, value) for key, value in son.iteritems())

        # Return correct subclass for document type
        if class_name != cls._class_name:
            cls = get_document(class_name)

        changed_fields = []
        errors_dict = {}

        fields = cls._fields
        if not _auto_dereference:
            fields = copy.copy(fields)

        for field_name, field in fields.iteritems():
            field._auto_dereference = _auto_dereference
            if field.db_field in data:
                value = data[field.db_field]
                try:
                    data[field_name] = (value if value is None else
                                        field.to_python(value))
                    if field_name != field.db_field:
                        del data[field.db_field]
                except (AttributeError, ValueError) as e:
                    errors_dict[field_name] = e
            elif field.default:
                default = field.default
                if callable(default):
                    default = default()
                if isinstance(default, BaseDocument):
                    changed_fields.append(field_name)
                elif not only_fields or field_name in only_fields:
                    changed_fields.append(field_name)

        if errors_dict:
            errors = "\n".join(
                ["%s - %s" % (k, v) for k, v in errors_dict.items()])
            msg = ("Invalid data to create a `%s` instance.\n%s" %
                   (cls._class_name, errors))
            raise InvalidDocumentError(msg)

        if cls.STRICT:
            data = dict(
                (k, v) for k, v in data.iteritems() if k in cls._fields)
        obj = cls(__auto_convert=False,
                  _created=created,
                  __only_fields=only_fields,
                  **data)
        obj._changed_fields = changed_fields
        if not _auto_dereference:
            obj._fields = fields

        return obj
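
Finally, a hedged sketch of the ``_auto_dereference`` switch in this variant (internal API; in practice it is usually driven by MongoEngine's ``no_dereference`` helpers rather than called directly):

    from bson import ObjectId

    son = {'_id': ObjectId(), 'fn': 'Ada Lovelace'}

    p = Person._from_son(son, _auto_dereference=False)
    # The class-level fields dict is shallow-copied (copy.copy above) and kept on
    # the instance, and each field's _auto_dereference flag is switched off, so
    # reference fields on this instance hand back raw references instead of
    # automatically dereferencing them on access.
    p._fields is Person._fields   # False -- the instance carries its own mapping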