Example #1
    def _unique_with_indexes(cls, new_class, namespace=""):
        unique_indexes = []
        for field_name, field in new_class._fields.items():
            # Generate a list of indexes needed by uniqueness constraints
            if field.unique:
                field.required = True
                unique_fields = [field.db_field]

                # Add any unique_with fields to the back of the index spec
                if field.unique_with:
                    if isinstance(field.unique_with, basestring):
                        field.unique_with = [field.unique_with]

                    # Convert unique_with field names to real field names
                    unique_with = []
                    for other_name in field.unique_with:
                        parts = other_name.split(".")
                        # Lookup real name
                        parts = QuerySet._lookup_field(new_class, parts)
                        name_parts = [part.db_field for part in parts]
                        unique_with.append(".".join(name_parts))
                        # Unique field should be required
                        parts[-1].required = True
                    unique_fields += unique_with

                # Add the new index to the list
                index = [("%s%s" % (namespace, f), pymongo.ASCENDING) for f in unique_fields]
                unique_indexes.append(index)

            # Grab any embedded document field unique indexes
            if field.__class__.__name__ == "EmbeddedDocumentField" and field.document_type != new_class:
                field_namespace = "%s." % field_name
                unique_indexes += cls._unique_with_indexes(field.document_type, field_namespace)

        return unique_indexes
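For orientation, here is a minimal sketch of the kind of document this helper walks and the index specs it would return. The Customer/Address classes and their fields are hypothetical, and Document, EmbeddedDocument, StringField and EmbeddedDocumentField are assumed to be the usual MongoEngine-style declarations.

    # Hypothetical document definitions -- illustration only.
    class Address(EmbeddedDocument):
        # unique_with only takes effect together with unique=True in this code
        street = StringField(unique=True, unique_with='city')
        city = StringField()

    class Customer(Document):
        email = StringField(unique=True)            # single-field unique index
        address = EmbeddedDocumentField(Address)

    # Called by the metaclass during class construction, the helper returns
    # one index spec per uniqueness constraint, roughly:
    #   [[('email', pymongo.ASCENDING)],
    #    [('address.street', pymongo.ASCENDING),
    #     ('address.city', pymongo.ASCENDING)]]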
Example #2
    def _unique_with_indexes(cls, new_class, namespace=""):
        unique_indexes = []
        for field_name, field in new_class._fields.items():
            # Generate a list of indexes needed by uniqueness constraints
            if field.unique:
                field.required = True
                unique_fields = [field.db_field]

                # Add any unique_with fields to the back of the index spec
                if field.unique_with:
                    if isinstance(field.unique_with, basestring):
                        field.unique_with = [field.unique_with]

                    # Convert unique_with field names to real field names
                    unique_with = []
                    for other_name in field.unique_with:
                        parts = other_name.split('.')
                        # Lookup real name
                        parts = QuerySet._lookup_field(new_class, parts)
                        name_parts = [part.db_field for part in parts]
                        unique_with.append('.'.join(name_parts))
                        # Unique field should be required
                        parts[-1].required = True
                    unique_fields += unique_with

                # Add the new index to the list
                index = [("%s%s" % (namespace, f), pymongo.ASCENDING)
                         for f in unique_fields]
                unique_indexes.append(index)

            # Grab any embedded document field unique indexes
            if field.__class__.__name__ == "EmbeddedDocumentField":
                field_namespace = "%s." % field_name
                unique_indexes += cls._unique_with_indexes(
                    field.document_type, field_namespace)

        return unique_indexes
Example #3
    def __new__(cls, name, bases, attrs):
        super_new = super(TopLevelDocumentMetaclass, cls).__new__
        # Classes defined in this package are abstract and should not have
        # their own metadata with DB collection, etc.
        # __metaclass__ is only set on the class with the __metaclass__
        # attribute (i.e. it is not set on subclasses). This differentiates
        # 'real' documents from the 'Document' class
        if attrs.get("__metaclass__") == TopLevelDocumentMetaclass:
            return super_new(cls, name, bases, attrs)

        collection = name.lower()

        id_field = None
        base_indexes = []
        base_meta = {}

        # Subclassed documents inherit collection from superclass
        for base in bases:
            if hasattr(base, "_meta") and "collection" in base._meta:
                collection = base._meta["collection"]

                # Propagate index options.
                for key in ("index_background", "index_drop_dups", "index_opts"):
                    if key in base._meta:
                        base_meta[key] = base._meta[key]

                id_field = id_field or base._meta.get("id_field")
                base_indexes += base._meta.get("indexes", [])

        meta = {
            "collection": collection,
            "max_documents": None,
            "max_size": None,
            "ordering": [],  # default ordering applied at runtime
            "indexes": [],  # indexes to be ensured at runtime
            "id_field": id_field,
            "index_background": True,
            "index_drop_dups": False,
            "index_opts": {},
            "queryset_class": QuerySet,
            "db_name": None,
            "force_insert": False,
            "hash_field": None,
            "hash_db_field": "_h",
            "sharded": True,
            "write_concern": 1,
        }
        meta.update(base_meta)

        # Apply document-defined meta options
        meta.update(attrs.get("meta", {}))
        attrs["_meta"] = meta

        # Set up collection manager, needs the class to have fields so use
        # DocumentMetaclass before instantiating CollectionManager object
        new_class = super_new(cls, name, bases, attrs)

        # Provide a default queryset unless one has been manually provided
        if not hasattr(new_class, "objects"):
            new_class.objects = QuerySetManager()

        user_indexes = [QuerySet._build_index_spec(new_class, spec) for spec in meta["indexes"]] + base_indexes
        new_class._meta["indexes"] = user_indexes

        unique_indexes = []
        for field_name, field in new_class._fields.items():
            # Generate a list of indexes needed by uniqueness constraints
            if field.unique:
                field.required = True
                unique_fields = [field.db_field]

                # Add any unique_with fields to the back of the index spec
                if field.unique_with:
                    if isinstance(field.unique_with, basestring):
                        field.unique_with = [field.unique_with]

                    # Convert unique_with field names to real field names
                    unique_with = []
                    for other_name in field.unique_with:
                        parts = other_name.split(".")
                        # Lookup real name
                        parts = QuerySet._lookup_field(new_class, parts)
                        name_parts = [part.db_field for part in parts]
                        unique_with.append(".".join(name_parts))
                        # Unique field should be required
                        parts[-1].required = True
                    unique_fields += unique_with

                # Add the new index to the list
                index = [(f, pymongo.ASCENDING) for f in unique_fields]
                unique_indexes.append(index)

            # Check for custom primary key
            if field.primary_key:
                current_pk = new_class._meta["id_field"]
                if current_pk and current_pk != field_name:
                    raise ValueError("Cannot override primary key field")

                if not current_pk:
                    new_class._meta["id_field"] = field_name
                    # Make 'Document.id' an alias to the real primary key field
                    new_class.id = field

        new_class._meta["unique_indexes"] = unique_indexes

        if not new_class._meta["id_field"]:
            new_class._meta["id_field"] = "id"
            id_field = ObjectIdField(db_field="_id")
            id_field.name = "id"
            id_field.primary_key = True
            id_field.required = False
            new_class._fields["id"] = id_field
            new_class.id = new_class._fields["id"]

        if meta["hash_field"]:
            assert "shard_hash" not in new_class._fields, "You already have a shard hash"

            assert meta["hash_field"] in new_class._fields, "The field you want to hash doesn't exist"

            from fields import IntField

            field = IntField(db_field=meta["hash_db_field"], required=True)
            new_class._fields["shard_hash"] = field
            field.owner_document = new_class
            new_class.shard_hash = field

        return new_class
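As a rough usage sketch (the User class and its fields are hypothetical, and Document plus the field classes are assumed to come from the surrounding library), these are the kinds of declarations the metaclass above reacts to:

    class User(Document):
        # Custom primary key: the metaclass records _meta['id_field'] = 'username'
        # and aliases User.id to this field.
        username = StringField(primary_key=True)
        # unique=True becomes an entry in _meta['unique_indexes'].
        email = StringField(unique=True)

        meta = {
            'collection': 'users',      # overrides the default name.lower()
            'index_background': False,  # overrides the default set above
            'hash_field': 'username',   # adds a required 'shard_hash' IntField
                                        # stored under db_field '_h'
        }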
Example #4
    def __new__(cls, name, bases, attrs):
        super_new = super(TopLevelDocumentMetaclass, cls).__new__
        # Classes defined in this package are abstract and should not have
        # their own metadata with DB collection, etc.
        # __metaclass__ is only set on the class with the __metaclass__
        # attribute (i.e. it is not set on subclasses). This differentiates
        # 'real' documents from the 'Document' class
        if attrs.get('__metaclass__') == TopLevelDocumentMetaclass:
            return super_new(cls, name, bases, attrs)

        collection = name.lower()

        id_field = None
        base_indexes = []
        base_meta = {}

        # Subclassed documents inherit collection from superclass
        for base in bases:
            if hasattr(base, '_meta') and 'collection' in base._meta:
                collection = base._meta['collection']

                # Propagate index options.
                for key in ('index_background', 'index_drop_dups',
                            'index_opts'):
                    if key in base._meta:
                        base_meta[key] = base._meta[key]

                id_field = id_field or base._meta.get('id_field')
                base_indexes += base._meta.get('indexes', [])

        meta = {
            'collection': collection,
            'max_documents': None,
            'max_size': None,
            'ordering': [],  # default ordering applied at runtime
            'indexes': [],  # indexes to be ensured at runtime
            'id_field': id_field,
            'index_background': False,
            'index_drop_dups': False,
            'index_opts': {},
            'queryset_class': QuerySet,
        }
        meta.update(base_meta)

        # Apply document-defined meta options
        meta.update(attrs.get('meta', {}))
        attrs['_meta'] = meta

        # Set up collection manager, needs the class to have fields so use
        # DocumentMetaclass before instantiating CollectionManager object
        new_class = super_new(cls, name, bases, attrs)

        # Provide a default queryset unless one has been manually provided
        if not hasattr(new_class, 'objects'):
            new_class.objects = QuerySetManager()

        user_indexes = [
            QuerySet._build_index_spec(new_class, spec)
            for spec in meta['indexes']
        ] + base_indexes
        new_class._meta['indexes'] = user_indexes

        unique_indexes = []
        for field_name, field in new_class._fields.items():
            # Generate a list of indexes needed by uniqueness constraints
            if field.unique:
                field.required = True
                unique_fields = [field.db_field]

                # Add any unique_with fields to the back of the index spec
                if field.unique_with:
                    if isinstance(field.unique_with, basestring):
                        field.unique_with = [field.unique_with]

                    # Convert unique_with field names to real field names
                    unique_with = []
                    for other_name in field.unique_with:
                        parts = other_name.split('.')
                        # Lookup real name
                        parts = QuerySet._lookup_field(new_class, parts)
                        name_parts = [part.db_field for part in parts]
                        unique_with.append('.'.join(name_parts))
                        # Unique field should be required
                        parts[-1].required = True
                    unique_fields += unique_with

                # Add the new index to the list
                index = [(f, pymongo.ASCENDING) for f in unique_fields]
                unique_indexes.append(index)

            # Check for custom primary key
            if field.primary_key:
                current_pk = new_class._meta['id_field']
                if current_pk and current_pk != field_name:
                    raise ValueError('Cannot override primary key field')

                if not current_pk:
                    new_class._meta['id_field'] = field_name
                    # Make 'Document.id' an alias to the real primary key field
                    new_class.id = field

        new_class._meta['unique_indexes'] = unique_indexes

        if not new_class._meta['id_field']:
            new_class._meta['id_field'] = 'id'
            new_class._fields['id'] = ObjectIdField(db_field='_id')
            new_class.id = new_class._fields['id']

        return new_class
Example #5
    def __new__(cls, name, bases, attrs):
        global _document_registry

        super_new = super(TopLevelDocumentMetaclass, cls).__new__
        # Classes defined in this package are abstract and should not have 
        # their own metadata with DB collection, etc.
        # __metaclass__ is only set on the class with the __metaclass__ 
        # attribute (i.e. it is not set on subclasses). This differentiates
        # 'real' documents from the 'Document' class
        if attrs.get('__metaclass__') == TopLevelDocumentMetaclass:
            return super_new(cls, name, bases, attrs)

        collection = name.lower()
        
        id_field = None
        base_indexes = []

        # Subclassed documents inherit collection from superclass
        for base in bases:
            if hasattr(base, '_meta') and 'collection' in base._meta:
                collection = base._meta['collection']

                id_field = id_field or base._meta.get('id_field')
                base_indexes += base._meta.get('indexes', [])

        meta = {
            'collection': collection,
            'max_documents': None,
            'max_size': None,
            'ordering': [], # default ordering applied at runtime
            'indexes': [], # indexes to be ensured at runtime
            'id_field': id_field,
        }

        # Apply document-defined meta options
        meta.update(attrs.get('meta', {}))
        attrs['_meta'] = meta

        # Set up collection manager, needs the class to have fields so use
        # DocumentMetaclass before instantiating CollectionManager object
        new_class = super_new(cls, name, bases, attrs)
        new_class.objects = QuerySetManager()

        user_indexes = [QuerySet._build_index_spec(new_class, spec)
                        for spec in meta['indexes']] + base_indexes
        new_class._meta['indexes'] = user_indexes
        
        unique_indexes = []
        for field_name, field in new_class._fields.items():
            # Generate a list of indexes needed by uniqueness constraints
            if field.unique:
                field.required = True
                unique_fields = [field_name]

                # Add any unique_with fields to the back of the index spec
                if field.unique_with:
                    if isinstance(field.unique_with, basestring):
                        field.unique_with = [field.unique_with]

                    # Convert unique_with field names to real field names
                    unique_with = []
                    for other_name in field.unique_with:
                        parts = other_name.split('.')
                        # Lookup real name
                        parts = QuerySet._lookup_field(new_class, parts)
                        name_parts = [part.db_field for part in parts]
                        unique_with.append('.'.join(name_parts))
                        # Unique field should be required
                        parts[-1].required = True
                    unique_fields += unique_with

                # Add the new index to the list
                index = [(f, pymongo.ASCENDING) for f in unique_fields]
                unique_indexes.append(index)

            # Check for custom primary key
            if field.primary_key:
                if not new_class._meta['id_field']:
                    new_class._meta['id_field'] = field_name
                    # Make 'Document.id' an alias to the real primary key field
                    new_class.id = field
                    #new_class._fields['id'] = field
                else:
                    raise ValueError('Cannot override primary key field')

        new_class._meta['unique_indexes'] = unique_indexes

        if not new_class._meta['id_field']:
            new_class._meta['id_field'] = 'id'
            new_class._fields['id'] = ObjectIdField(db_field='_id')
            new_class.id = new_class._fields['id']

        _document_registry[name] = new_class

        return new_class
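Assuming _document_registry is a module-level dict (as the global statement above suggests), the final registration line means a document class can later be recovered by name. A small hypothetical illustration:

    class Comment(Document):
        text = StringField()

    # After class construction the registry maps the class name to the class,
    # so code that only holds a stored class name can get the type back.
    assert _document_registry['Comment'] is Comment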
Example #6
    def __new__(cls, name, bases, attrs):
        super_new = super(TopLevelDocumentMetaclass, cls).__new__
        # Classes defined in this package are abstract and should not have
        # their own metadata with DB collection, etc.
        # __metaclass__ is only set on the class with the __metaclass__
        # attribute (i.e. it is not set on subclasses). This differentiates
        # 'real' documents from the 'Document' class
        if attrs.get('__metaclass__') == TopLevelDocumentMetaclass:
            return super_new(cls, name, bases, attrs)

        collection = name.lower()

        id_field = None
        base_indexes = []
        base_meta = {}

        # Subclassed documents inherit collection from superclass
        for base in bases:
            if hasattr(base, '_meta') and 'collection' in base._meta:
                collection = base._meta['collection']

                # Propagate index options.
                for key in ('index_background', 'index_drop_dups', 'index_opts'):
                    if key in base._meta:
                        base_meta[key] = base._meta[key]

                id_field = id_field or base._meta.get('id_field')
                base_indexes += base._meta.get('indexes', [])

        meta = {
            'collection': collection,
            'max_documents': None,
            'max_size': None,
            'ordering': [], # default ordering applied at runtime
            'indexes': [], # indexes to be ensured at runtime
            'id_field': id_field,
            'index_background': True,
            'index_drop_dups': False,
            'index_opts': {},
            'queryset_class': QuerySet,
            'db_name': None,

            'force_insert': False,

            'hash_field': None,
            'hash_db_field': '_h',
            'sharded': True,

            'write_concern': 1
        }
        meta.update(base_meta)

        # Apply document-defined meta options
        meta.update(attrs.get('meta', {}))
        attrs['_meta'] = meta

        # Set up collection manager, needs the class to have fields so use
        # DocumentMetaclass before instantiating CollectionManager object
        new_class = super_new(cls, name, bases, attrs)

        # Check if trying to use sweeper db; default to unsharded DB which has
        # unsharded shard as default primary to reduce load
        if hasattr(new_class, 'meta') and 'abstract' not in new_class.meta \
                and 'shard_key' not in new_class.meta:
            import mongoengine.connection
            import inspect
            if mongoengine.connection._default_db == 'sweeper' and \
                    'clroot/sweeper' in inspect.getfile(new_class):
                new_class.meta['shard_key'] = False
                new_class._meta['db_name'] = 'sweeper-unsharded'

        # Provide a default queryset unless one has been manually provided
        if not hasattr(new_class, 'objects'):
            new_class.objects = QuerySetManager()

        user_indexes = [QuerySet._build_index_spec(new_class, spec)
                        for spec in meta['indexes']] + base_indexes
        new_class._meta['indexes'] = user_indexes

        unique_indexes = []
        for field_name, field in new_class._fields.items():
            # Generate a list of indexes needed by uniqueness constraints
            if field.unique:
                field.required = True
                unique_fields = [field.db_field]

                # Add any unique_with fields to the back of the index spec
                if field.unique_with:
                    if isinstance(field.unique_with, basestring):
                        field.unique_with = [field.unique_with]

                    # Convert unique_with field names to real field names
                    unique_with = []
                    for other_name in field.unique_with:
                        parts = other_name.split('.')
                        # Lookup real name
                        parts = QuerySet._lookup_field(new_class, parts)
                        name_parts = [part.db_field for part in parts]
                        unique_with.append('.'.join(name_parts))
                        # Unique field should be required
                        parts[-1].required = True
                    unique_fields += unique_with

                # Add the new index to the list
                index = [(f, pymongo.ASCENDING) for f in unique_fields]
                unique_indexes.append(index)

            # Check for custom primary key
            if field.primary_key:
                current_pk = new_class._meta['id_field']
                if current_pk and current_pk != field_name:
                    raise ValueError('Cannot override primary key field')

                if not current_pk:
                    new_class._meta['id_field'] = field_name
                    # Make 'Document.id' an alias to the real primary key field
                    new_class.id = field

        new_class._meta['unique_indexes'] = unique_indexes

        if not new_class._meta['id_field']:
            new_class._meta['id_field'] = 'id'
            id_field = ObjectIdField(db_field='_id')
            id_field.name = 'id'
            id_field.primary_key = True
            id_field.required = False
            new_class._fields['id'] = id_field
            new_class.id = new_class._fields['id']

        if meta['hash_field']:
            assert 'shard_hash' not in new_class._fields, \
                    "You already have a shard hash"

            assert meta['hash_field'] in new_class._fields, \
                    "The field you want to hash doesn't exist"

            from fields import IntField

            field = IntField(db_field=meta['hash_db_field'], required=True)
            new_class._fields['shard_hash'] = field
            field.owner_document = new_class
            new_class.shard_hash = field

        return new_class
Example #7
    def __new__(cls, name, bases, attrs):
        super_new = super(TopLevelDocumentMetaclass, cls).__new__
        # Classes defined in this package are abstract and should not have 
        # their own metadata with DB collection, etc.
        # __metaclass__ is only set on the class with the __metaclass__ 
        # attribute (i.e. it is not set on subclasses). This differentiates
        # 'real' documents from the 'Document' class
        #
        # Also assume a class is abstract if it has abstract set to True in
        # its meta dictionary. This allows custom Document superclasses.
        if attrs.get('__metaclass__') == TopLevelDocumentMetaclass or \
                ('meta' in attrs and attrs['meta'].get('abstract', False)):
            # check to make sure no base class was non-abstract
            for base in bases:
                if hasattr(base, '_meta') and 'abstract' in base._meta \
                        and not base._meta['abstract']:
                    raise ValueError(
                        'Abstract document cannot have non-abstract base')
            return super_new(cls, name, bases, attrs)

        collection = name.lower()

        id_field = None
        base_indexes = []
        base_meta = {}

        # Subclassed documents inherit collection from superclass
        for base in bases:
            if hasattr(base, '_meta') and 'collection' in base._meta:
                collection = base._meta['collection']

                # Propagate index options.
                for key in ('index_background', 'index_drop_dups', 'index_opts'):
                    if key in base._meta:
                        base_meta[key] = base._meta[key]

                id_field = id_field or base._meta.get('id_field')
                base_indexes += base._meta.get('indexes', [])
        
        meta = {
            'abstract': False,
            'collection': collection,
            'max_documents': None,
            'max_size': None,
            'ordering': [], # default ordering applied at runtime
            'indexes': [], # indexes to be ensured at runtime
            'id_field': id_field,
            'index_background': False,
            'index_drop_dups': False,
            'index_opts': {},
            'queryset_class': QuerySet,
        }
        meta.update(base_meta)

        # Apply document-defined meta options
        meta.update(attrs.get('meta', {}))
        attrs['_meta'] = meta

        # Set up collection manager, needs the class to have fields so use
        # DocumentMetaclass before instantiating CollectionManager object
        new_class = super_new(cls, name, bases, attrs)
        
        # Allow dynamically-generated collection names. Pass the newly
        # created class so the callee has access to __module__, etc.
        collection = new_class._meta['collection']
        if callable(collection):
            new_class._meta['collection'] = collection(new_class)
        
        # Provide a default queryset unless one has been manually provided
        # Note: check for existence in attrs because hasattr assumes it
        # doesn't exist if any exception is raised, e.g. when not connected.
        if 'objects' not in attrs:
            new_class.objects = QuerySetManager()

        user_indexes = [QuerySet._build_index_spec(new_class, spec)
                        for spec in meta['indexes']] + base_indexes
        new_class._meta['indexes'] = user_indexes

        unique_indexes = []
        for field_name, field in new_class._fields.items():
            # Generate a list of indexes needed by uniqueness constraints
            if field.unique:
                field.required = True
                unique_fields = [field.db_field]

                # Add any unique_with fields to the back of the index spec
                if field.unique_with:
                    if isinstance(field.unique_with, basestring):
                        field.unique_with = [field.unique_with]

                    # Convert unique_with field names to real field names
                    unique_with = []
                    for other_name in field.unique_with:
                        parts = other_name.split('.')
                        # Lookup real name
                        parts = QuerySet._lookup_field(new_class, parts)
                        name_parts = [part.db_field for part in parts]
                        unique_with.append('.'.join(name_parts))
                        # Unique field should be required
                        parts[-1].required = True
                    unique_fields += unique_with

                # Add the new index to the list
                index = [(f, pymongo.ASCENDING) for f in unique_fields]
                unique_indexes.append(index)

            # Check for custom primary key
            if field.primary_key:
                current_pk = new_class._meta['id_field']
                if current_pk and current_pk != field_name:
                    raise ValueError('Cannot override primary key field')

                if not current_pk:
                    new_class._meta['id_field'] = field_name
                    # Make 'Document.id' an alias to the real primary key field
                    new_class.id = field

        new_class._meta['unique_indexes'] = unique_indexes

        if not new_class._meta['id_field']:
            new_class._meta['id_field'] = 'id'
            new_class._fields['id'] = ObjectIdField(db_field='_id')
            new_class.id = new_class._fields['id']

        return new_class
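A brief sketch of the two features this variant adds, abstract base documents and callable collection names. The class names are hypothetical, and Document plus the field classes are assumed from the library:

    class AuditedDocument(Document):
        created_at = DateTimeField()
        # Handled by the early return above: no collection, indexes or id
        # field are set up for this class.
        meta = {'abstract': True}

    class Invoice(AuditedDocument):
        number = StringField(unique=True)
        # A callable collection is resolved by the metaclass, which passes in
        # the newly created class so __module__ etc. are available.
        meta = {'collection': lambda cls: cls.__name__.lower() + 's'}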