Code Example #1
File: document.py  Project: flaper87/django-mongokit
    def one(self, **kwargs):
        doc_cursor = QuerySet(cursor=None, cls=self.model, spec=self._to_mongo(kwargs))
        count = doc_cursor.count()
        if count > 1:
            raise MultipleResultsFound("%s results found" % count)
        elif count == 1:
            return doc_cursor.next()
        return {}
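
A minimal sketch of the lookup contract above, using a plain list in place of the QuerySet cursor; the MultipleResultsFound exception class and the matching itself are assumptions here, not taken from the project:

class MultipleResultsFound(Exception):
    pass

def one(matches):
    # Same branching as the method above: error on >1, document on exactly 1, {} on 0.
    count = len(matches)
    if count > 1:
        raise MultipleResultsFound("%s results found" % count)
    elif count == 1:
        return matches[0]
    return {}

print(one([{"_id": 1}]))  # {'_id': 1}
print(one([]))            # {}
# one([{}, {}]) raises MultipleResultsFound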
Code Example #2
File: base.py  Project: faulkner/mongoengine
    def _unique_with_indexes(cls, new_class, namespace=""):
        unique_indexes = []
        for field_name, field in new_class._fields.items():
            # Generate a list of indexes needed by uniqueness constraints
            if field.unique:
                field.required = True
                unique_fields = [field.db_field]

                # Add any unique_with fields to the back of the index spec
                if field.unique_with:
                    if isinstance(field.unique_with, basestring):
                        field.unique_with = [field.unique_with]

                    # Convert unique_with field names to real field names
                    unique_with = []
                    for other_name in field.unique_with:
                        parts = other_name.split(".")
                        # Lookup real name
                        parts = QuerySet._lookup_field(new_class, parts)
                        name_parts = [part.db_field for part in parts]
                        unique_with.append(".".join(name_parts))
                        # Unique field should be required
                        parts[-1].required = True
                    unique_fields += unique_with

                # Add the new index to the list
                index = [("%s%s" % (namespace, f), pymongo.ASCENDING) for f in unique_fields]
                unique_indexes.append(index)

            # Grab any embedded document field unique indexes
            if field.__class__.__name__ == "EmbeddedDocumentField" and field.document_type != new_class:
                field_namespace = "%s." % field_name
                unique_indexes += cls._unique_with_indexes(field.document_type, field_namespace)

        return unique_indexes
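
To make the return value concrete, here is a standalone sketch (not project code) of the index spec one pass of the loop builds for a unique field and its unique_with companions. ASCENDING stands in for pymongo.ASCENDING, which is the integer 1; the field names and the 'profile.' namespace are made up for illustration:

ASCENDING = 1  # pymongo.ASCENDING

def unique_index_spec(db_field, unique_with_db_fields, namespace=""):
    # Mirrors the list comprehension above: namespace-prefixed db field names,
    # each paired with an ascending sort direction.
    fields = [db_field] + list(unique_with_db_fields)
    return [("%s%s" % (namespace, f), ASCENDING) for f in fields]

# A 'username' field declared unique_with 'site', reached through an embedded
# document traversed with namespace 'profile.':
print(unique_index_spec("username", ["site"], namespace="profile."))
# [('profile.username', 1), ('profile.site', 1)]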
Code Example #3
def test_queryset():

    lut = FunctionTrigger()

    ledger1 = pd.DataFrame({"Key1": ["A1", "B1"], "Key2": ["A2", "B2"]})

    ledger2 = pd.DataFrame({
        "Key1": ["A1", "D1", "E1"],
        "Key2": ["A2", "D2", "E2"]
    })

    cls1 = ConstantLedgerSource(ledger1)
    cls2 = ConstantLedgerSource(ledger2)

    led1 = Ledger(lut, cls1)
    led2 = Ledger(lut, cls2)

    lut.invoke()

    qs = QuerySet([led1, led2],
                  pd.DataFrame({
                      "Source": ["S0", "S0"],
                      "Query": ["Q0", "Q2"]
                  }))

    result_iterator = qs.join(3)

    results = result_iterator.as_list()

    target = pd.DataFrame({
        "Key1": ["A1", "B1", "D1", "A1", "B1", "D1", "E1", "E1"],
        "Key2": ["A2", "B2", "D2", "A2", "B2", "D2", "E2", "E2"],
        "Source": ["S0"] * 8,
        "Query": ["Q0", "Q2"] * 4
    })

    c = 0
    for result in results:
        row = target.loc[[c]]
        query = Query(**row.to_dict(orient="index")[c])
        assert query == result

        c += 1
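
The assertion loop relies on a pandas idiom worth isolating: .loc[[c]] keeps the row as a one-row DataFrame, and to_dict(orient="index") then maps the index label to a {column: value} dict that can be splatted into Query. A standalone illustration (Query itself is not reproduced here, and the columns are reduced):

import pandas as pd

target = pd.DataFrame({"Key1": ["A1", "B1"], "Source": ["S0", "S0"]})
row = target.loc[[0]]                  # one-row DataFrame, index label 0
print(row.to_dict(orient="index")[0])  # {'Key1': 'A1', 'Source': 'S0'}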
Code Example #4
    def find(self, filter={}, fields=None, skip=0, limit=None, sort=None):
        """
        @param filter(dict): specify the WHERE clause, e.g. {"name": "...", "id": ...}
        @param fields: specify which fields are needed
        @param skip, limit: both integers; skip without a limit is meaningless
        @return: a QuerySet object
        """
        if not fields:
            self.__ensure_columns()
            fields = self.columns

        query_obj = Query(source=self.name, filter=filter, fields=fields, skip=skip, limit=limit, sort=sort)
        return QuerySet(cursor=self.cursor, query=query_obj)
Code Example #5
    def find(self, filter={}, fields=None, skip=0, limit=None, sort=None):
        """
        Searches the table using the filters provided.
        
        :Examples:

        >>> users = user_table.find({'id': {'$in': [10, 20]}, 'age': {'$gt': 20}}) # Complex query
        >>> user_count = len(users)
        >>> for user in users:
        >>>     # Do something...
        >>>     print user.id
        >>> 
        >>> users = user_table.find({}, sort=[('age', monsql.ASCENDING)]) # sort by age

        Also support complex operators:

        >>> {a: 1}                                  # a == 1
        >>> {a: {$gt: 1}}                           # a > 1
        >>> {a: {$gte: 1}}                          # a >= 1
        >>> {a: {$lt: 1}}                           # a < 1
        >>> {a: {$lte: 1}}                          # a <= 1
        >>> {a: {$eq: 1}}                           # a == 1
        >>> {a: {$in: [1, 2]}}                      # a == 1 or a == 2
        >>> {a: {$contains: '123'}}                 # a like %123%
        >>> {$not: condition}                       # !(condition)
        >>> {$and: [condition1, condition2, ...]}   # condition1 and condition2
        >>> {$or: [condition1, condition2, ...]}    # condition1 or condition2

        :Parameters: 

        - filter(dict): specify the WHERE clause. One example is {"name": "...", "id": ...}
        - fields: specify which fields are needed
        - skip, limit: both integers; skip without a limit is meaningless
        - sort: A list, each element is a two-item tuple, with the first item be the column name
          and the second item be either monsql.ASCENDING or monsql.DESCENDING

        :Return: a QuerySet object
        """
        if not fields:
            self.__ensure_columns()
            fields = self.columns

        query_obj = Query(source=self.name,
                          filter=filter,
                          fields=fields,
                          skip=skip,
                          limit=limit,
                          sort=sort)
        return QuerySet(cursor=self.cursor, query=query_obj)
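
The operator dictionaries in the docstring map naturally onto SQL WHERE fragments. The following toy translator only illustrates that mapping for a handful of operators; it is not monsql's actual implementation:

def to_where(filter_dict):
    # Translate e.g. {"age": {"$gt": 20}, "name": "bob"} into a WHERE fragment.
    ops = {"$gt": ">", "$gte": ">=", "$lt": "<", "$lte": "<=", "$eq": "="}
    clauses = []
    for column, cond in filter_dict.items():
        if isinstance(cond, dict):
            op, value = list(cond.items())[0]
            clauses.append("%s %s %r" % (column, ops[op], value))
        else:
            clauses.append("%s = %r" % (column, cond))
    return " AND ".join(clauses)

print(to_where({"age": {"$gt": 20}, "name": "bob"}))  # age > 20 AND name = 'bob'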
Code Example #6
    def _unique_with_indexes(cls, new_class, namespace=""):
        unique_indexes = []
        for field_name, field in new_class._fields.items():
            # Generate a list of indexes needed by uniqueness constraints
            if field.unique:
                field.required = True
                unique_fields = [field.db_field]

                # Add any unique_with fields to the back of the index spec
                if field.unique_with:
                    if isinstance(field.unique_with, basestring):
                        field.unique_with = [field.unique_with]

                    # Convert unique_with field names to real field names
                    unique_with = []
                    for other_name in field.unique_with:
                        parts = other_name.split('.')
                        # Lookup real name
                        parts = QuerySet._lookup_field(new_class, parts)
                        name_parts = [part.db_field for part in parts]
                        unique_with.append('.'.join(name_parts))
                        # Unique field should be required
                        parts[-1].required = True
                    unique_fields += unique_with

                # Add the new index to the list
                index = [("%s%s" % (namespace, f), pymongo.ASCENDING)
                         for f in unique_fields]
                unique_indexes.append(index)

            # Grab any embedded document field unique indexes
            if field.__class__.__name__ == "EmbeddedDocumentField":
                field_namespace = "%s." % field_name
                unique_indexes += cls._unique_with_indexes(
                    field.document_type, field_namespace)

        return unique_indexes
Code Example #7
File: base.py  Project: ContextLogic/mongoengine
    def __new__(cls, name, bases, attrs):
        super_new = super(TopLevelDocumentMetaclass, cls).__new__
        # Classes defined in this package are abstract and should not have
        # their own metadata with DB collection, etc.
        # __metaclass__ is only set on the class with the __metaclass__
        # attribute (i.e. it is not set on subclasses). This differentiates
        # 'real' documents from the 'Document' class
        if attrs.get("__metaclass__") == TopLevelDocumentMetaclass:
            return super_new(cls, name, bases, attrs)

        collection = name.lower()

        id_field = None
        base_indexes = []
        base_meta = {}

        # Subclassed documents inherit collection from superclass
        for base in bases:
            if hasattr(base, "_meta") and "collection" in base._meta:
                collection = base._meta["collection"]

                # Propagate index options.
                for key in ("index_background", "index_drop_dups", "index_opts"):
                    if key in base._meta:
                        base_meta[key] = base._meta[key]

                id_field = id_field or base._meta.get("id_field")
                base_indexes += base._meta.get("indexes", [])

        meta = {
            "collection": collection,
            "max_documents": None,
            "max_size": None,
            "ordering": [],  # default ordering applied at runtime
            "indexes": [],  # indexes to be ensured at runtime
            "id_field": id_field,
            "index_background": True,
            "index_drop_dups": False,
            "index_opts": {},
            "queryset_class": QuerySet,
            "db_name": None,
            "force_insert": False,
            "hash_field": None,
            "hash_db_field": "_h",
            "sharded": True,
            "write_concern": 1,
        }
        meta.update(base_meta)

        # Apply document-defined meta options
        meta.update(attrs.get("meta", {}))
        attrs["_meta"] = meta

        # Set up collection manager, needs the class to have fields so use
        # DocumentMetaclass before instantiating CollectionManager object
        new_class = super_new(cls, name, bases, attrs)

        # Provide a default queryset unless one has been manually provided
        if not hasattr(new_class, "objects"):
            new_class.objects = QuerySetManager()

        user_indexes = [QuerySet._build_index_spec(new_class, spec) for spec in meta["indexes"]] + base_indexes
        new_class._meta["indexes"] = user_indexes

        unique_indexes = []
        for field_name, field in new_class._fields.items():
            # Generate a list of indexes needed by uniqueness constraints
            if field.unique:
                field.required = True
                unique_fields = [field.db_field]

                # Add any unique_with fields to the back of the index spec
                if field.unique_with:
                    if isinstance(field.unique_with, basestring):
                        field.unique_with = [field.unique_with]

                    # Convert unique_with field names to real field names
                    unique_with = []
                    for other_name in field.unique_with:
                        parts = other_name.split(".")
                        # Lookup real name
                        parts = QuerySet._lookup_field(new_class, parts)
                        name_parts = [part.db_field for part in parts]
                        unique_with.append(".".join(name_parts))
                        # Unique field should be required
                        parts[-1].required = True
                    unique_fields += unique_with

                # Add the new index to the list
                index = [(f, pymongo.ASCENDING) for f in unique_fields]
                unique_indexes.append(index)

            # Check for custom primary key
            if field.primary_key:
                current_pk = new_class._meta["id_field"]
                if current_pk and current_pk != field_name:
                    raise ValueError("Cannot override primary key field")

                if not current_pk:
                    new_class._meta["id_field"] = field_name
                    # Make 'Document.id' an alias to the real primary key field
                    new_class.id = field

        new_class._meta["unique_indexes"] = unique_indexes

        if not new_class._meta["id_field"]:
            new_class._meta["id_field"] = "id"
            id_field = ObjectIdField(db_field="_id")
            id_field.name = "id"
            id_field.primary_key = True
            id_field.required = False
            new_class._fields["id"] = id_field
            new_class.id = new_class._fields["id"]

        if meta["hash_field"]:
            assert "shard_hash" not in new_class._fields, "You already have a shard hash"

            assert meta["hash_field"] in new_class._fields, "The field you want to hash doesn't exist"

            from fields import IntField

            field = IntField(db_field=meta["hash_db_field"], required=True)
            new_class._fields["shard_hash"] = field
            field.owner_document = new_class
            new_class.shard_hash = field

        return new_class
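
The meta dictionary above is resolved in three layers: hard-coded defaults, options propagated from base classes (base_meta), and finally the document's own 'meta' attribute, each overriding the previous one. A condensed sketch of just that layering, with made-up values:

defaults = {"index_background": True, "sharded": True, "write_concern": 1}
base_meta = {"index_background": False}        # propagated from a superclass
attrs = {"meta": {"write_concern": 0}}         # declared on the document itself

meta = dict(defaults)
meta.update(base_meta)                # superclass options win over defaults
meta.update(attrs.get("meta", {}))    # document-level meta wins over both
print(meta)  # {'index_background': False, 'sharded': True, 'write_concern': 0}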
Code Example #8
File: base.py  Project: seedwithroot/mongoengine
    def __new__(cls, name, bases, attrs):
        super_new = super(TopLevelDocumentMetaclass, cls).__new__
        # Classes defined in this package are abstract and should not have
        # their own metadata with DB collection, etc.
        # __metaclass__ is only set on the class with the __metaclass__
        # attribute (i.e. it is not set on subclasses). This differentiates
        # 'real' documents from the 'Document' class
        #
        # Also assume a class is abstract if it has abstract set to True in
        # its meta dictionary. This allows custom Document superclasses.
        if (attrs.get('__metaclass__') == TopLevelDocumentMetaclass or
            ('meta' in attrs and attrs['meta'].get('abstract', False))):
            # Make sure no base class was non-abstract
            non_abstract_bases = [b for b in bases
                if hasattr(b, '_meta') and not b._meta.get('abstract', False)]
            if non_abstract_bases:
                raise ValueError("Abstract document cannot have non-abstract base")
            return super_new(cls, name, bases, attrs)

        collection = ''.join('_%s' % c if c.isupper() else c for c in name).strip('_').lower()

        id_field = None
        abstract_base_indexes = []
        base_indexes = []
        base_meta = {}

        # Subclassed documents inherit collection from superclass
        for base in bases:
            if hasattr(base, '_meta'):
                if 'collection' in attrs.get('meta', {}) and not base._meta.get('abstract', False):
                    import warnings
                    msg = "Trying to set a collection on a subclass (%s)" % name
                    warnings.warn(msg, SyntaxWarning)
                    del(attrs['meta']['collection'])
                if base._get_collection_name():
                    collection = base._get_collection_name()

                # Propagate inherited values
                keys_to_propogate = (
                    'index_background', 'index_drop_dups', 'index_opts',
                    'allow_inheritance', 'queryset_class', 'db_alias',
                    'shard_key'
                )
                for key in keys_to_propogate:
                    if key in base._meta:
                        base_meta[key] = base._meta[key]

                id_field = id_field or base._meta.get('id_field')
                if base._meta.get('abstract', False):
                    abstract_base_indexes += base._meta.get('indexes', [])
                else:
                    base_indexes += base._meta.get('indexes', [])
            try:
                base_meta['objects'] = base.__getattribute__(base, 'objects')
            except TypeError:
                pass
            except AttributeError:
                pass

        # defaults
        meta = {
            'abstract': False,
            'collection': collection,
            'max_documents': None,
            'max_size': None,
            'ordering': [],  # default ordering applied at runtime
            'indexes': [],  # indexes to be ensured at runtime
            'id_field': id_field,
            'index_background': False,
            'index_drop_dups': False,
            'index_opts': {},
            'queryset_class': QuerySet,
            'delete_rules': {},
            'allow_inheritance': True
        }

        allow_inheritance_defined = ('allow_inheritance' in base_meta or
                                     'allow_inheritance' in attrs.get('meta', {}))
        meta['allow_inheritance_defined'] = allow_inheritance_defined
        meta.update(base_meta)

        # Apply document-defined meta options
        meta.update(attrs.get('meta', {}))
        attrs['_meta'] = meta

        # Set up collection manager, needs the class to have fields so use
        # DocumentMetaclass before instantiating CollectionManager object
        new_class = super_new(cls, name, bases, attrs)

        collection = attrs['_meta'].get('collection', None)
        if callable(collection):
            new_class._meta['collection'] = collection(new_class)

        # Provide a default queryset unless one has been manually provided
        manager = attrs.get('objects', meta.get('objects', QuerySetManager()))
        if hasattr(manager, 'queryset_class'):
            meta['queryset_class'] = manager.queryset_class
        new_class.objects = manager

        indicies = list(meta['indexes']) + abstract_base_indexes
        user_indexes = [QuerySet._build_index_spec(new_class, spec)
                        for spec in indicies] + base_indexes
        new_class._meta['indexes'] = user_indexes

        unique_indexes = cls._unique_with_indexes(new_class)
        new_class._meta['unique_indexes'] = unique_indexes

        for field_name, field in new_class._fields.items():
            # Check for custom primary key
            if field.primary_key:
                current_pk = new_class._meta['id_field']
                if current_pk and current_pk != field_name:
                    raise ValueError('Cannot override primary key field')

                if not current_pk:
                    new_class._meta['id_field'] = field_name
                    # Make 'Document.id' an alias to the real primary key field
                    new_class.id = field

        if not new_class._meta['id_field']:
            new_class._meta['id_field'] = 'id'
            new_class._fields['id'] = ObjectIdField(db_field='_id')
            new_class.id = new_class._fields['id']

        return new_class
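
The collection name derivation at the top of this variant differs from the plain name.lower() used elsewhere: CamelCase class names become snake_case. The expression pulled out on its own, with example inputs chosen for illustration:

def default_collection_name(name):
    # Prefix every uppercase letter with '_', then strip and lowercase,
    # exactly as in the metaclass above.
    return ''.join('_%s' % c if c.isupper() else c for c in name).strip('_').lower()

print(default_collection_name("BlogPost"))  # blog_post
print(default_collection_name("APIKey"))    # a_p_i_key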
Code Example #9
    def __new__(cls, name, bases, attrs):
        super_new = super(TopLevelDocumentMetaclass, cls).__new__
        # Classes defined in this package are abstract and should not have
        # their own metadata with DB collection, etc.
        # __metaclass__ is only set on the class with the __metaclass__
        # attribute (i.e. it is not set on subclasses). This differentiates
        # 'real' documents from the 'Document' class
        if attrs.get('__metaclass__') == TopLevelDocumentMetaclass:
            return super_new(cls, name, bases, attrs)

        collection = name.lower()

        id_field = None
        base_indexes = []
        base_meta = {}

        # Subclassed documents inherit collection from superclass
        for base in bases:
            if hasattr(base, '_meta') and 'collection' in base._meta:
                collection = base._meta['collection']

                # Propagate index options.
                for key in ('index_background', 'index_drop_dups',
                            'index_opts'):
                    if key in base._meta:
                        base_meta[key] = base._meta[key]

                id_field = id_field or base._meta.get('id_field')
                base_indexes += base._meta.get('indexes', [])

        meta = {
            'collection': collection,
            'max_documents': None,
            'max_size': None,
            'ordering': [],  # default ordering applied at runtime
            'indexes': [],  # indexes to be ensured at runtime
            'id_field': id_field,
            'index_background': False,
            'index_drop_dups': False,
            'index_opts': {},
            'queryset_class': QuerySet,
        }
        meta.update(base_meta)

        # Apply document-defined meta options
        meta.update(attrs.get('meta', {}))
        attrs['_meta'] = meta

        # Set up collection manager, needs the class to have fields so use
        # DocumentMetaclass before instantiating CollectionManager object
        new_class = super_new(cls, name, bases, attrs)

        # Provide a default queryset unless one has been manually provided
        if not hasattr(new_class, 'objects'):
            new_class.objects = QuerySetManager()

        user_indexes = [
            QuerySet._build_index_spec(new_class, spec)
            for spec in meta['indexes']
        ] + base_indexes
        new_class._meta['indexes'] = user_indexes

        unique_indexes = []
        for field_name, field in new_class._fields.items():
            # Generate a list of indexes needed by uniqueness constraints
            if field.unique:
                field.required = True
                unique_fields = [field.db_field]

                # Add any unique_with fields to the back of the index spec
                if field.unique_with:
                    if isinstance(field.unique_with, basestring):
                        field.unique_with = [field.unique_with]

                    # Convert unique_with field names to real field names
                    unique_with = []
                    for other_name in field.unique_with:
                        parts = other_name.split('.')
                        # Lookup real name
                        parts = QuerySet._lookup_field(new_class, parts)
                        name_parts = [part.db_field for part in parts]
                        unique_with.append('.'.join(name_parts))
                        # Unique field should be required
                        parts[-1].required = True
                    unique_fields += unique_with

                # Add the new index to the list
                index = [(f, pymongo.ASCENDING) for f in unique_fields]
                unique_indexes.append(index)

            # Check for custom primary key
            if field.primary_key:
                current_pk = new_class._meta['id_field']
                if current_pk and current_pk != field_name:
                    raise ValueError('Cannot override primary key field')

                if not current_pk:
                    new_class._meta['id_field'] = field_name
                    # Make 'Document.id' an alias to the real primary key field
                    new_class.id = field

        new_class._meta['unique_indexes'] = unique_indexes

        if not new_class._meta['id_field']:
            new_class._meta['id_field'] = 'id'
            new_class._fields['id'] = ObjectIdField(db_field='_id')
            new_class.id = new_class._fields['id']

        return new_class
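
The primary-key bookkeeping in the field loop can be summarised separately: the first field flagged primary_key becomes id_field, a conflicting second one raises ValueError, and with no flagged field the metaclass falls back to the implicit 'id' ObjectIdField alias. A reduced sketch of that decision (not project code; the helper name is invented):

def resolve_id_field(primary_key_field_names):
    id_field = None
    for field_name in primary_key_field_names:
        if id_field and id_field != field_name:
            raise ValueError('Cannot override primary key field')
        if not id_field:
            id_field = field_name
    return id_field or 'id'   # fallback: implicit ObjectIdField aliased as 'id'

print(resolve_id_field([]))         # id
print(resolve_id_field(['email']))  # email
# resolve_id_field(['email', 'slug']) raises ValueError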
Code Example #10
File: base.py  Project: donspaulding/mongoengine
    def __new__(cls, name, bases, attrs):
        global _document_registry

        super_new = super(TopLevelDocumentMetaclass, cls).__new__
        # Classes defined in this package are abstract and should not have 
        # their own metadata with DB collection, etc.
        # __metaclass__ is only set on the class with the __metaclass__ 
        # attribute (i.e. it is not set on subclasses). This differentiates
        # 'real' documents from the 'Document' class
        if attrs.get('__metaclass__') == TopLevelDocumentMetaclass:
            return super_new(cls, name, bases, attrs)

        collection = name.lower()
        
        id_field = None
        base_indexes = []

        # Subclassed documents inherit collection from superclass
        for base in bases:
            if hasattr(base, '_meta') and 'collection' in base._meta:
                collection = base._meta['collection']

                id_field = id_field or base._meta.get('id_field')
                base_indexes += base._meta.get('indexes', [])

        meta = {
            'collection': collection,
            'max_documents': None,
            'max_size': None,
            'ordering': [], # default ordering applied at runtime
            'indexes': [], # indexes to be ensured at runtime
            'id_field': id_field,
        }

        # Apply document-defined meta options
        meta.update(attrs.get('meta', {}))
        attrs['_meta'] = meta

        # Set up collection manager, needs the class to have fields so use
        # DocumentMetaclass before instantiating CollectionManager object
        new_class = super_new(cls, name, bases, attrs)
        new_class.objects = QuerySetManager()

        user_indexes = [QuerySet._build_index_spec(new_class, spec)
                        for spec in meta['indexes']] + base_indexes
        new_class._meta['indexes'] = user_indexes
        
        unique_indexes = []
        for field_name, field in new_class._fields.items():
            # Generate a list of indexes needed by uniqueness constraints
            if field.unique:
                field.required = True
                unique_fields = [field_name]

                # Add any unique_with fields to the back of the index spec
                if field.unique_with:
                    if isinstance(field.unique_with, basestring):
                        field.unique_with = [field.unique_with]

                    # Convert unique_with field names to real field names
                    unique_with = []
                    for other_name in field.unique_with:
                        parts = other_name.split('.')
                        # Lookup real name
                        parts = QuerySet._lookup_field(new_class, parts)
                        name_parts = [part.db_field for part in parts]
                        unique_with.append('.'.join(name_parts))
                        # Unique field should be required
                        parts[-1].required = True
                    unique_fields += unique_with

                # Add the new index to the list
                index = [(f, pymongo.ASCENDING) for f in unique_fields]
                unique_indexes.append(index)

            # Check for custom primary key
            if field.primary_key:
                if not new_class._meta['id_field']:
                    new_class._meta['id_field'] = field_name
                    # Make 'Document.id' an alias to the real primary key field
                    new_class.id = field
                    #new_class._fields['id'] = field
                else:
                    raise ValueError('Cannot override primary key field')

        new_class._meta['unique_indexes'] = unique_indexes

        if not new_class._meta['id_field']:
            new_class._meta['id_field'] = 'id'
            new_class._fields['id'] = ObjectIdField(db_field='_id')
            new_class.id = new_class._fields['id']

        _document_registry[name] = new_class

        return new_class
Code Example #11
File: base.py  Project: jooser/archivematica
    def __new__(cls, name, bases, attrs):
        super_new = super(TopLevelDocumentMetaclass, cls).__new__
        # Classes defined in this package are abstract and should not have
        # their own metadata with DB collection, etc.
        # __metaclass__ is only set on the class with the __metaclass__
        # attribute (i.e. it is not set on subclasses). This differentiates
        # 'real' documents from the 'Document' class
        #
        # Also assume a class is abstract if it has abstract set to True in
        # its meta dictionary. This allows custom Document superclasses.
        if (attrs.get('__metaclass__') == TopLevelDocumentMetaclass
                or ('meta' in attrs and attrs['meta'].get('abstract', False))):
            # Make sure no base class was non-abstract
            non_abstract_bases = [
                b for b in bases
                if hasattr(b, '_meta') and not b._meta.get('abstract', False)
            ]
            if non_abstract_bases:
                raise ValueError(
                    "Abstract document cannot have non-abstract base")
            return super_new(cls, name, bases, attrs)

        collection = ''.join('_%s' % c if c.isupper() else c
                             for c in name).strip('_').lower()

        id_field = None
        abstract_base_indexes = []
        base_indexes = []
        base_meta = {}

        # Subclassed documents inherit collection from superclass
        for base in bases:
            if hasattr(base, '_meta'):
                if 'collection' in attrs.get(
                        'meta', {}) and not base._meta.get('abstract', False):
                    import warnings
                    msg = "Trying to set a collection on a subclass (%s)" % name
                    warnings.warn(msg, SyntaxWarning)
                    del (attrs['meta']['collection'])
                if base._get_collection_name():
                    collection = base._get_collection_name()
                # Propagate index options.
                for key in ('index_background', 'index_drop_dups',
                            'index_opts'):
                    if key in base._meta:
                        base_meta[key] = base._meta[key]

                id_field = id_field or base._meta.get('id_field')
                if base._meta.get('abstract', False):
                    abstract_base_indexes += base._meta.get('indexes', [])
                else:
                    base_indexes += base._meta.get('indexes', [])
                # Propagate 'allow_inheritance'
                if 'allow_inheritance' in base._meta:
                    base_meta['allow_inheritance'] = base._meta[
                        'allow_inheritance']
                if 'queryset_class' in base._meta:
                    base_meta['queryset_class'] = base._meta['queryset_class']
            try:
                base_meta['objects'] = base.__getattribute__(base, 'objects')
            except TypeError:
                pass
            except AttributeError:
                pass

        meta = {
            'abstract': False,
            'collection': collection,
            'max_documents': None,
            'max_size': None,
            'ordering': [],  # default ordering applied at runtime
            'indexes': [],  # indexes to be ensured at runtime
            'id_field': id_field,
            'index_background': False,
            'index_drop_dups': False,
            'index_opts': {},
            'queryset_class': QuerySet,
            'delete_rules': {},
            'allow_inheritance': True
        }

        allow_inheritance_defined = ('allow_inheritance' in base_meta
                                     or 'allow_inheritance' in attrs.get(
                                         'meta', {}))
        meta['allow_inheritance_defined'] = allow_inheritance_defined
        meta.update(base_meta)

        # Apply document-defined meta options
        meta.update(attrs.get('meta', {}))
        attrs['_meta'] = meta

        # Set up collection manager, needs the class to have fields so use
        # DocumentMetaclass before instantiating CollectionManager object
        new_class = super_new(cls, name, bases, attrs)

        collection = attrs['_meta'].get('collection', None)
        if callable(collection):
            new_class._meta['collection'] = collection(new_class)

        # Provide a default queryset unless one has been manually provided
        manager = attrs.get('objects', meta.get('objects', QuerySetManager()))
        if hasattr(manager, 'queryset_class'):
            meta['queryset_class'] = manager.queryset_class
        new_class.objects = manager

        indicies = meta['indexes'] + abstract_base_indexes
        user_indexes = [
            QuerySet._build_index_spec(new_class, spec) for spec in indicies
        ] + base_indexes
        new_class._meta['indexes'] = user_indexes

        unique_indexes = cls._unique_with_indexes(new_class)
        new_class._meta['unique_indexes'] = unique_indexes

        for field_name, field in new_class._fields.items():
            # Check for custom primary key
            if field.primary_key:
                current_pk = new_class._meta['id_field']
                if current_pk and current_pk != field_name:
                    raise ValueError('Cannot override primary key field')

                if not current_pk:
                    new_class._meta['id_field'] = field_name
                    # Make 'Document.id' an alias to the real primary key field
                    new_class.id = field

        if not new_class._meta['id_field']:
            new_class._meta['id_field'] = 'id'
            new_class._fields['id'] = ObjectIdField(db_field='_id')
            new_class.id = new_class._fields['id']

        return new_class
Code Example #12
File: base.py  Project: ContextLogic/mongoengine
    def __new__(cls, name, bases, attrs):
        super_new = super(TopLevelDocumentMetaclass, cls).__new__
        # Classes defined in this package are abstract and should not have
        # their own metadata with DB collection, etc.
        # __metaclass__ is only set on the class with the __metaclass__
        # attribute (i.e. it is not set on subclasses). This differentiates
        # 'real' documents from the 'Document' class
        if attrs.get('__metaclass__') == TopLevelDocumentMetaclass:
            return super_new(cls, name, bases, attrs)

        collection = name.lower()

        id_field = None
        base_indexes = []
        base_meta = {}

        # Subclassed documents inherit collection from superclass
        for base in bases:
            if hasattr(base, '_meta') and 'collection' in base._meta:
                collection = base._meta['collection']

                # Propagate index options.
                for key in ('index_background', 'index_drop_dups', 'index_opts'):
                    if key in base._meta:
                        base_meta[key] = base._meta[key]

                id_field = id_field or base._meta.get('id_field')
                base_indexes += base._meta.get('indexes', [])

        meta = {
            'collection': collection,
            'max_documents': None,
            'max_size': None,
            'ordering': [], # default ordering applied at runtime
            'indexes': [], # indexes to be ensured at runtime
            'id_field': id_field,
            'index_background': True,
            'index_drop_dups': False,
            'index_opts': {},
            'queryset_class': QuerySet,
            'db_name': None,

            'force_insert': False,

            'hash_field': None,
            'hash_db_field': '_h',
            'sharded': True,

            'write_concern': 1
        }
        meta.update(base_meta)

        # Apply document-defined meta options
        meta.update(attrs.get('meta', {}))
        attrs['_meta'] = meta

        # Set up collection manager, needs the class to have fields so use
        # DocumentMetaclass before instantiating CollectionManager object
        new_class = super_new(cls, name, bases, attrs)

        # Check if trying to use sweeper db; default to unsharded DB which has
        # unsharded shard as default primary to reduce load
        if hasattr(new_class, 'meta') and 'abstract' not in new_class.meta and\
                'shard_key' not in new_class.meta:
            import mongoengine.connection
            import inspect
            if mongoengine.connection._default_db == 'sweeper' and 'clroot/sweeper' in inspect.getfile(new_class):
                new_class.meta['shard_key'] = False
                new_class._meta['db_name'] = 'sweeper-unsharded'

        # Provide a default queryset unless one has been manually provided
        if not hasattr(new_class, 'objects'):
            new_class.objects = QuerySetManager()

        user_indexes = [QuerySet._build_index_spec(new_class, spec)
                        for spec in meta['indexes']] + base_indexes
        new_class._meta['indexes'] = user_indexes

        unique_indexes = []
        for field_name, field in new_class._fields.items():
            # Generate a list of indexes needed by uniqueness constraints
            if field.unique:
                field.required = True
                unique_fields = [field.db_field]

                # Add any unique_with fields to the back of the index spec
                if field.unique_with:
                    if isinstance(field.unique_with, basestring):
                        field.unique_with = [field.unique_with]

                    # Convert unique_with field names to real field names
                    unique_with = []
                    for other_name in field.unique_with:
                        parts = other_name.split('.')
                        # Lookup real name
                        parts = QuerySet._lookup_field(new_class, parts)
                        name_parts = [part.db_field for part in parts]
                        unique_with.append('.'.join(name_parts))
                        # Unique field should be required
                        parts[-1].required = True
                    unique_fields += unique_with

                # Add the new index to the list
                index = [(f, pymongo.ASCENDING) for f in unique_fields]
                unique_indexes.append(index)

            # Check for custom primary key
            if field.primary_key:
                current_pk = new_class._meta['id_field']
                if current_pk and current_pk != field_name:
                    raise ValueError('Cannot override primary key field')

                if not current_pk:
                    new_class._meta['id_field'] = field_name
                    # Make 'Document.id' an alias to the real primary key field
                    new_class.id = field

        new_class._meta['unique_indexes'] = unique_indexes

        if not new_class._meta['id_field']:
            new_class._meta['id_field'] = 'id'
            id_field = ObjectIdField(db_field='_id')
            id_field.name = 'id'
            id_field.primary_key = True
            id_field.required = False
            new_class._fields['id'] = id_field
            new_class.id = new_class._fields['id']

        if meta['hash_field']:
            assert 'shard_hash' not in new_class._fields, \
                    "You already have a shard hash"

            assert meta['hash_field'] in new_class._fields, \
                    "The field you want to hash doesn't exist"

            from fields import IntField

            field = IntField(db_field=meta['hash_db_field'], required=True)
            new_class._fields['shard_hash'] = field
            field.owner_document = new_class
            new_class.shard_hash = field

        return new_class
Code Example #13
    def get_query_set(self):
        return QuerySet(model=self.model, db=self.db)
Code Example #14
File: base.py  Project: faulkner/mongoengine
    def __new__(cls, name, bases, attrs):
        super_new = super(TopLevelDocumentMetaclass, cls).__new__
        # Classes defined in this package are abstract and should not have
        # their own metadata with DB collection, etc.
        # __metaclass__ is only set on the class with the __metaclass__
        # attribute (i.e. it is not set on subclasses). This differentiates
        # 'real' documents from the 'Document' class
        #
        # Also assume a class is abstract if it has abstract set to True in
        # its meta dictionary. This allows custom Document superclasses.
        if attrs.get("__metaclass__") == TopLevelDocumentMetaclass or (
            "meta" in attrs and attrs["meta"].get("abstract", False)
        ):
            # Make sure no base class was non-abstract
            non_abstract_bases = [b for b in bases if hasattr(b, "_meta") and not b._meta.get("abstract", False)]
            if non_abstract_bases:
                raise ValueError("Abstract document cannot have non-abstract base")
            return super_new(cls, name, bases, attrs)

        collection = "".join("_%s" % c if c.isupper() else c for c in name).strip("_").lower()

        id_field = None
        abstract_base_indexes = []
        base_indexes = []
        base_meta = {}

        # Subclassed documents inherit collection from superclass
        for base in bases:
            if hasattr(base, "_meta"):
                if "collection" in attrs.get("meta", {}) and not base._meta.get("abstract", False):
                    import warnings

                    msg = "Trying to set a collection on a subclass (%s)" % name
                    warnings.warn(msg, SyntaxWarning)
                    del (attrs["meta"]["collection"])
                if base._get_collection_name():
                    collection = base._get_collection_name()

                # Propagate inherited values
                keys_to_propogate = (
                    "index_background",
                    "index_drop_dups",
                    "index_opts",
                    "allow_inheritance",
                    "queryset_class",
                    "db_alias",
                )
                for key in keys_to_propogate:
                    if key in base._meta:
                        base_meta[key] = base._meta[key]

                id_field = id_field or base._meta.get("id_field")
                if base._meta.get("abstract", False):
                    abstract_base_indexes += base._meta.get("indexes", [])
                else:
                    base_indexes += base._meta.get("indexes", [])
            try:
                base_meta["objects"] = base.__getattribute__(base, "objects")
            except TypeError:
                pass
            except AttributeError:
                pass

        # defaults
        meta = {
            "abstract": False,
            "collection": collection,
            "max_documents": None,
            "max_size": None,
            "ordering": [],  # default ordering applied at runtime
            "indexes": [],  # indexes to be ensured at runtime
            "id_field": id_field,
            "index_background": False,
            "index_drop_dups": False,
            "index_opts": {},
            "queryset_class": QuerySet,
            "delete_rules": {},
            "allow_inheritance": True,
        }

        allow_inheritance_defined = "allow_inheritance" in base_meta or "allow_inheritance" in attrs.get("meta", {})
        meta["allow_inheritance_defined"] = allow_inheritance_defined
        meta.update(base_meta)

        # Apply document-defined meta options
        meta.update(attrs.get("meta", {}))
        attrs["_meta"] = meta

        # Set up collection manager, needs the class to have fields so use
        # DocumentMetaclass before instantiating CollectionManager object
        new_class = super_new(cls, name, bases, attrs)

        collection = attrs["_meta"].get("collection", None)
        if callable(collection):
            new_class._meta["collection"] = collection(new_class)

        # Provide a default queryset unless one has been manually provided
        manager = attrs.get("objects", meta.get("objects", QuerySetManager()))
        if hasattr(manager, "queryset_class"):
            meta["queryset_class"] = manager.queryset_class
        new_class.objects = manager

        indicies = list(meta["indexes"]) + abstract_base_indexes
        user_indexes = [QuerySet._build_index_spec(new_class, spec) for spec in indicies] + base_indexes
        new_class._meta["indexes"] = user_indexes

        unique_indexes = cls._unique_with_indexes(new_class)
        new_class._meta["unique_indexes"] = unique_indexes

        for field_name, field in new_class._fields.items():
            # Check for custom primary key
            if field.primary_key:
                current_pk = new_class._meta["id_field"]
                if current_pk and current_pk != field_name:
                    raise ValueError("Cannot override primary key field")

                if not current_pk:
                    new_class._meta["id_field"] = field_name
                    # Make 'Document.id' an alias to the real primary key field
                    new_class.id = field

        if not new_class._meta["id_field"]:
            new_class._meta["id_field"] = "id"
            new_class._fields["id"] = ObjectIdField(db_field="_id")
            new_class.id = new_class._fields["id"]

        return new_class
Code Example #15
File: base.py  Project: pelletier/mongoengine
    def __new__(cls, name, bases, attrs):
        super_new = super(TopLevelDocumentMetaclass, cls).__new__
        # Classes defined in this package are abstract and should not have 
        # their own metadata with DB collection, etc.
        # __metaclass__ is only set on the class with the __metaclass__ 
        # attribute (i.e. it is not set on subclasses). This differentiates
        # 'real' documents from the 'Document' class
        #
        # Also assume a class is abstract if it has abstract set to True in
        # its meta dictionary. This allows custom Document superclasses.
        if attrs.get('__metaclass__') == TopLevelDocumentMetaclass or \
            ('meta' in attrs and attrs['meta'].get('abstract', False)):
            # check to make sure no base class was non-abstract
            for base in bases:
                if hasattr(base,'_meta') and 'abstract' in base._meta \
                    and not base._meta['abstract']:
                    raise ValueError( \
                        'Abstract document cannot have non-abstract base')
            return super_new(cls, name, bases, attrs)

        collection = name.lower()

        id_field = None
        base_indexes = []
        base_meta = {}

        # Subclassed documents inherit collection from superclass
        for base in bases:
            if hasattr(base, '_meta') and 'collection' in base._meta:
                collection = base._meta['collection']

                # Propagate index options.
                for key in ('index_background', 'index_drop_dups', 'index_opts'):
                    if key in base._meta:
                        base_meta[key] = base._meta[key]

                id_field = id_field or base._meta.get('id_field')
                base_indexes += base._meta.get('indexes', [])
        
        meta = {
            'abstract': False,
            'collection': collection,
            'max_documents': None,
            'max_size': None,
            'ordering': [], # default ordering applied at runtime
            'indexes': [], # indexes to be ensured at runtime
            'id_field': id_field,
            'index_background': False,
            'index_drop_dups': False,
            'index_opts': {},
            'queryset_class': QuerySet,
        }
        meta.update(base_meta)

        # Apply document-defined meta options
        meta.update(attrs.get('meta', {}))
        attrs['_meta'] = meta

        # Set up collection manager, needs the class to have fields so use
        # DocumentMetaclass before instantiating CollectionManager object
        new_class = super_new(cls, name, bases, attrs)
        
        # Allow dynamically-generated collection names. Pass the newly
        # created class so the callee has access to __module__, etc.
        collection = new_class._meta['collection']
        if callable(collection):
            new_class._meta['collection'] = collection(new_class)
        
        # Provide a default queryset unless one has been manually provided
        # Note: Check for existence in attrs because hasattr assumes it
        # doesn't exist if any exception is raised, e.g. when not connected.
        if 'objects' not in attrs:
            new_class.objects = QuerySetManager()

        user_indexes = [QuerySet._build_index_spec(new_class, spec)
                        for spec in meta['indexes']] + base_indexes
        new_class._meta['indexes'] = user_indexes

        unique_indexes = []
        for field_name, field in new_class._fields.items():
            # Generate a list of indexes needed by uniqueness constraints
            if field.unique:
                field.required = True
                unique_fields = [field.db_field]

                # Add any unique_with fields to the back of the index spec
                if field.unique_with:
                    if isinstance(field.unique_with, basestring):
                        field.unique_with = [field.unique_with]

                    # Convert unique_with field names to real field names
                    unique_with = []
                    for other_name in field.unique_with:
                        parts = other_name.split('.')
                        # Lookup real name
                        parts = QuerySet._lookup_field(new_class, parts)
                        name_parts = [part.db_field for part in parts]
                        unique_with.append('.'.join(name_parts))
                        # Unique field should be required
                        parts[-1].required = True
                    unique_fields += unique_with

                # Add the new index to the list
                index = [(f, pymongo.ASCENDING) for f in unique_fields]
                unique_indexes.append(index)

            # Check for custom primary key
            if field.primary_key:
                current_pk = new_class._meta['id_field']
                if current_pk and current_pk != field_name:
                    raise ValueError('Cannot override primary key field')

                if not current_pk:
                    new_class._meta['id_field'] = field_name
                    # Make 'Document.id' an alias to the real primary key field
                    new_class.id = field

        new_class._meta['unique_indexes'] = unique_indexes

        if not new_class._meta['id_field']:
            new_class._meta['id_field'] = 'id'
            new_class._fields['id'] = ObjectIdField(db_field='_id')
            new_class.id = new_class._fields['id']

        return new_class