def __new__(cls, name, bases, attrs):
    """Construct a concrete, shard-aware Document class.

    Builds the ``_meta`` options dict (collection name, index options,
    sharding/write-concern settings), inheriting collection, id field,
    index options and indexes from document bases; attaches a default
    queryset manager; derives indexes for ``unique``/``unique_with``
    constraints; synthesizes a default ``id`` ObjectId field when no
    custom primary key is declared; and optionally adds a shard-hash
    field when ``meta['hash_field']`` is set.

    Raises:
        ValueError: if a field declares ``primary_key=True`` while a
            different field is already the primary key.
    """
    super_new = super(TopLevelDocumentMetaclass, cls).__new__
    # Classes defined in this package are abstract and should not have
    # their own metadata with DB collection, etc.
    # __metaclass__ is only set on the class with the __metaclass__
    # attribute (i.e. it is not set on subclasses). This differentiates
    # 'real' documents from the 'Document' class
    if attrs.get("__metaclass__") == TopLevelDocumentMetaclass:
        return super_new(cls, name, bases, attrs)

    # Default collection name is simply the lowercased class name.
    collection = name.lower()

    id_field = None
    base_indexes = []
    base_meta = {}

    # Subclassed documents inherit collection from superclass
    for base in bases:
        if hasattr(base, "_meta") and "collection" in base._meta:
            collection = base._meta["collection"]

            # Propagate index options.
            for key in ("index_background", "index_drop_dups", "index_opts"):
                if key in base._meta:
                    base_meta[key] = base._meta[key]

            id_field = id_field or base._meta.get("id_field")
            base_indexes += base._meta.get("indexes", [])

    # Default meta options; overridden below first by inherited options,
    # then by the document's own ``meta`` dict.
    meta = {
        "collection": collection,
        "max_documents": None,
        "max_size": None,
        "ordering": [],  # default ordering applied at runtime
        "indexes": [],  # indexes to be ensured at runtime
        "id_field": id_field,
        "index_background": True,
        "index_drop_dups": False,
        "index_opts": {},
        "queryset_class": QuerySet,
        "db_name": None,
        "force_insert": False,
        "hash_field": None,
        "hash_db_field": "_h",
        "sharded": True,
        "write_concern": 1,
    }
    meta.update(base_meta)

    # Apply document-defined meta options
    meta.update(attrs.get("meta", {}))
    attrs["_meta"] = meta

    # Set up collection manager, needs the class to have fields so use
    # DocumentMetaclass before instantiating CollectionManager object
    new_class = super_new(cls, name, bases, attrs)

    # Provide a default queryset unless one has been manually provided
    if not hasattr(new_class, "objects"):
        new_class.objects = QuerySetManager()

    user_indexes = [QuerySet._build_index_spec(new_class, spec)
                    for spec in meta["indexes"]] + base_indexes
    new_class._meta["indexes"] = user_indexes

    unique_indexes = []
    for field_name, field in new_class._fields.items():
        # Generate a list of indexes needed by uniqueness constraints
        if field.unique:
            field.required = True
            unique_fields = [field.db_field]

            # Add any unique_with fields to the back of the index spec
            if field.unique_with:
                if isinstance(field.unique_with, basestring):
                    field.unique_with = [field.unique_with]

                # Convert unique_with field names to real field names
                unique_with = []
                for other_name in field.unique_with:
                    parts = other_name.split(".")
                    # Lookup real name
                    parts = QuerySet._lookup_field(new_class, parts)
                    name_parts = [part.db_field for part in parts]
                    unique_with.append(".".join(name_parts))
                    # Unique field should be required
                    parts[-1].required = True
                unique_fields += unique_with

            # Add the new index to the list
            index = [(f, pymongo.ASCENDING) for f in unique_fields]
            unique_indexes.append(index)

        # Check for custom primary key
        if field.primary_key:
            current_pk = new_class._meta["id_field"]
            # A conflicting primary key is an error; re-seeing the same
            # field (e.g. inherited) is tolerated.
            if current_pk and current_pk != field_name:
                raise ValueError("Cannot override primary key field")

            if not current_pk:
                new_class._meta["id_field"] = field_name
                # Make 'Document.id' an alias to the real primary key field
                new_class.id = field

    new_class._meta["unique_indexes"] = unique_indexes

    if not new_class._meta["id_field"]:
        # No custom primary key declared: synthesize a default ObjectId
        # field stored in the database as "_id".
        new_class._meta["id_field"] = "id"
        id_field = ObjectIdField(db_field="_id")
        id_field.name = "id"
        id_field.primary_key = True
        id_field.required = False
        new_class._fields["id"] = id_field
        new_class.id = new_class._fields["id"]

    if meta["hash_field"]:
        # Add an integer shard-hash field derived from meta options.
        assert "shard_hash" not in new_class._fields, "You already have a shard hash"
        assert meta["hash_field"] in new_class._fields, "The field you want to hash doesn't exist"
        from fields import IntField
        field = IntField(db_field=meta["hash_db_field"], required=True)
        new_class._fields["shard_hash"] = field
        field.owner_document = new_class
        new_class.shard_hash = field

    return new_class
def __new__(cls, name, bases, attrs):
    """Construct a concrete Document class.

    Builds the ``_meta`` options dict (collection name, ordering,
    index options, queryset class), inheriting collection, id field,
    index options and indexes from document bases; attaches a default
    queryset manager; derives indexes for ``unique``/``unique_with``
    constraints; and adds a default ``id`` ObjectId field when no
    custom primary key is declared.

    Raises:
        ValueError: if a field declares ``primary_key=True`` while a
            different field is already the primary key.
    """
    super_new = super(TopLevelDocumentMetaclass, cls).__new__
    # Classes defined in this package are abstract and should not have
    # their own metadata with DB collection, etc.
    # __metaclass__ is only set on the class with the __metaclass__
    # attribute (i.e. it is not set on subclasses). This differentiates
    # 'real' documents from the 'Document' class
    if attrs.get('__metaclass__') == TopLevelDocumentMetaclass:
        return super_new(cls, name, bases, attrs)

    # Default collection name is simply the lowercased class name.
    collection = name.lower()

    id_field = None
    base_indexes = []
    base_meta = {}

    # Subclassed documents inherit collection from superclass
    for base in bases:
        if hasattr(base, '_meta') and 'collection' in base._meta:
            collection = base._meta['collection']

            # Propagate index options.
            for key in ('index_background', 'index_drop_dups', 'index_opts'):
                if key in base._meta:
                    base_meta[key] = base._meta[key]

            id_field = id_field or base._meta.get('id_field')
            base_indexes += base._meta.get('indexes', [])

    # Defaults; overridden first by inherited options, then by the
    # document's own ``meta`` dict.
    meta = {
        'collection': collection,
        'max_documents': None,
        'max_size': None,
        'ordering': [],  # default ordering applied at runtime
        'indexes': [],  # indexes to be ensured at runtime
        'id_field': id_field,
        'index_background': False,
        'index_drop_dups': False,
        'index_opts': {},
        'queryset_class': QuerySet,
    }
    meta.update(base_meta)

    # Apply document-defined meta options
    meta.update(attrs.get('meta', {}))
    attrs['_meta'] = meta

    # Set up collection manager, needs the class to have fields so use
    # DocumentMetaclass before instantiating CollectionManager object
    new_class = super_new(cls, name, bases, attrs)

    # Provide a default queryset unless one has been manually provided
    if not hasattr(new_class, 'objects'):
        new_class.objects = QuerySetManager()

    user_indexes = [
        QuerySet._build_index_spec(new_class, spec)
        for spec in meta['indexes']
    ] + base_indexes
    new_class._meta['indexes'] = user_indexes

    unique_indexes = []
    for field_name, field in new_class._fields.items():
        # Generate a list of indexes needed by uniqueness constraints
        if field.unique:
            field.required = True
            unique_fields = [field.db_field]

            # Add any unique_with fields to the back of the index spec
            if field.unique_with:
                if isinstance(field.unique_with, basestring):
                    field.unique_with = [field.unique_with]

                # Convert unique_with field names to real field names
                unique_with = []
                for other_name in field.unique_with:
                    parts = other_name.split('.')
                    # Lookup real name
                    parts = QuerySet._lookup_field(new_class, parts)
                    name_parts = [part.db_field for part in parts]
                    unique_with.append('.'.join(name_parts))
                    # Unique field should be required
                    parts[-1].required = True
                unique_fields += unique_with

            # Add the new index to the list
            index = [(f, pymongo.ASCENDING) for f in unique_fields]
            unique_indexes.append(index)

        # Check for custom primary key
        if field.primary_key:
            current_pk = new_class._meta['id_field']
            # A conflicting primary key is an error; re-seeing the same
            # field (e.g. inherited) is tolerated.
            if current_pk and current_pk != field_name:
                raise ValueError('Cannot override primary key field')

            if not current_pk:
                new_class._meta['id_field'] = field_name
                # Make 'Document.id' an alias to the real primary key field
                new_class.id = field

    new_class._meta['unique_indexes'] = unique_indexes

    if not new_class._meta['id_field']:
        # No custom primary key declared: synthesize a default ObjectId
        # field stored in the database as '_id'.
        new_class._meta['id_field'] = 'id'
        new_class._fields['id'] = ObjectIdField(db_field='_id')
        new_class.id = new_class._fields['id']

    return new_class
def __new__(cls, name, bases, attrs):
    """Construct a concrete Document class with abstract-base support.

    Abstract classes (``__metaclass__`` set here, or ``meta['abstract']``
    true) are passed straight through after validating that all their
    bases are abstract. For concrete classes: derives a snake_case
    collection name from the class name, inherits collection, id field,
    indexes and selected meta keys (including ``shard_key``) from bases,
    resolves a queryset manager, builds user/unique indexes, and adds a
    default ``id`` ObjectId field when no custom primary key exists.

    Raises:
        ValueError: if an abstract document has a non-abstract base, or
            if a field declares ``primary_key=True`` while a different
            field is already the primary key.
    """
    super_new = super(TopLevelDocumentMetaclass, cls).__new__
    # Classes defined in this package are abstract and should not have
    # their own metadata with DB collection, etc.
    # __metaclass__ is only set on the class with the __metaclass__
    # attribute (i.e. it is not set on subclasses). This differentiates
    # 'real' documents from the 'Document' class
    #
    # Also assume a class is abstract if it has abstract set to True in
    # its meta dictionary. This allows custom Document superclasses.
    if (attrs.get('__metaclass__') == TopLevelDocumentMetaclass or
            ('meta' in attrs and attrs['meta'].get('abstract', False))):
        # Make sure no base class was non-abstract
        non_abstract_bases = [b for b in bases
                              if hasattr(b, '_meta') and
                              not b._meta.get('abstract', False)]
        if non_abstract_bases:
            raise ValueError("Abstract document cannot have non-abstract base")
        return super_new(cls, name, bases, attrs)

    # Derive a snake_case collection name from the CamelCase class name.
    collection = ''.join('_%s' % c if c.isupper() else c
                         for c in name).strip('_').lower()

    id_field = None
    abstract_base_indexes = []
    base_indexes = []
    base_meta = {}

    # Subclassed documents inherit collection from superclass
    for base in bases:
        if hasattr(base, '_meta'):
            # Subclasses of a concrete document may not redefine the
            # collection; warn and drop the setting.
            if 'collection' in attrs.get('meta', {}) and not base._meta.get('abstract', False):
                import warnings
                msg = "Trying to set a collection on a subclass (%s)" % name
                warnings.warn(msg, SyntaxWarning)
                del(attrs['meta']['collection'])
            if base._get_collection_name():
                collection = base._get_collection_name()

            # Propagate inherited values
            keys_to_propogate = (
                'index_background', 'index_drop_dups', 'index_opts',
                'allow_inheritance', 'queryset_class', 'db_alias',
                'shard_key'
            )
            for key in keys_to_propogate:
                if key in base._meta:
                    base_meta[key] = base._meta[key]

            id_field = id_field or base._meta.get('id_field')
            # Indexes declared on abstract bases are treated as the
            # document's own (rebuilt below); concrete-base indexes are
            # appended as-is.
            if base._meta.get('abstract', False):
                abstract_base_indexes += base._meta.get('indexes', [])
            else:
                base_indexes += base._meta.get('indexes', [])

            # Inherit a manually-assigned manager if the base exposes
            # one; class-level access may raise for descriptors.
            try:
                base_meta['objects'] = base.__getattribute__(base, 'objects')
            except TypeError:
                pass
            except AttributeError:
                pass

    # defaults
    meta = {
        'abstract': False,
        'collection': collection,
        'max_documents': None,
        'max_size': None,
        'ordering': [],  # default ordering applied at runtime
        'indexes': [],  # indexes to be ensured at runtime
        'id_field': id_field,
        'index_background': False,
        'index_drop_dups': False,
        'index_opts': {},
        'queryset_class': QuerySet,
        'delete_rules': {},
        'allow_inheritance': True
    }

    # Record whether 'allow_inheritance' was explicitly set anywhere.
    allow_inheritance_defined = ('allow_inheritance' in base_meta or
                                 'allow_inheritance' in attrs.get('meta', {}))
    meta['allow_inheritance_defined'] = allow_inheritance_defined

    meta.update(base_meta)

    # Apply document-defined meta options
    meta.update(attrs.get('meta', {}))
    attrs['_meta'] = meta

    # Set up collection manager, needs the class to have fields so use
    # DocumentMetaclass before instantiating CollectionManager object
    new_class = super_new(cls, name, bases, attrs)

    # Allow a callable collection option (dynamically-generated names).
    collection = attrs['_meta'].get('collection', None)
    if callable(collection):
        new_class._meta['collection'] = collection(new_class)

    # Provide a default queryset unless one has been manually provided
    manager = attrs.get('objects', meta.get('objects', QuerySetManager()))
    if hasattr(manager, 'queryset_class'):
        meta['queryset_class'] = manager.queryset_class
    new_class.objects = manager

    indicies = list(meta['indexes']) + abstract_base_indexes
    user_indexes = [QuerySet._build_index_spec(new_class, spec)
                    for spec in indicies] + base_indexes
    new_class._meta['indexes'] = user_indexes

    # Unique-constraint indexes are derived by a shared helper here.
    unique_indexes = cls._unique_with_indexes(new_class)
    new_class._meta['unique_indexes'] = unique_indexes

    for field_name, field in new_class._fields.items():
        # Check for custom primary key
        if field.primary_key:
            current_pk = new_class._meta['id_field']
            # A conflicting primary key is an error; re-seeing the same
            # field (e.g. inherited) is tolerated.
            if current_pk and current_pk != field_name:
                raise ValueError('Cannot override primary key field')

            if not current_pk:
                new_class._meta['id_field'] = field_name
                # Make 'Document.id' an alias to the real primary key field
                new_class.id = field

    if not new_class._meta['id_field']:
        # No custom primary key declared: synthesize a default ObjectId
        # field stored in the database as '_id'.
        new_class._meta['id_field'] = 'id'
        new_class._fields['id'] = ObjectIdField(db_field='_id')
        new_class.id = new_class._fields['id']

    return new_class
def __new__(cls, name, bases, attrs):
    """Create a concrete Document class and register it.

    Builds the ``_meta`` options dict (collection name, ordering,
    indexes, id field), inheriting collection, id field and indexes
    from document bases; attaches a default queryset manager; derives
    the indexes required by ``unique``/``unique_with`` constraints;
    synthesizes an ``id`` ObjectId field when no custom primary key is
    declared; and records the class in ``_document_registry``.

    Raises:
        ValueError: if a field declares ``primary_key=True`` while a
            different field is already the primary key.
    """
    global _document_registry

    super_new = super(TopLevelDocumentMetaclass, cls).__new__
    # Classes defined in this package are abstract and should not have
    # their own metadata with DB collection, etc.
    # __metaclass__ is only set on the class with the __metaclass__
    # attribute (i.e. it is not set on subclasses). This differentiates
    # 'real' documents from the 'Document' class
    if attrs.get('__metaclass__') == TopLevelDocumentMetaclass:
        return super_new(cls, name, bases, attrs)

    collection = name.lower()

    id_field = None
    base_indexes = []

    # Subclassed documents inherit collection from superclass
    for base in bases:
        if hasattr(base, '_meta') and 'collection' in base._meta:
            collection = base._meta['collection']
            id_field = id_field or base._meta.get('id_field')
            base_indexes += base._meta.get('indexes', [])

    meta = {
        'collection': collection,
        'max_documents': None,
        'max_size': None,
        'ordering': [],  # default ordering applied at runtime
        'indexes': [],  # indexes to be ensured at runtime
        'id_field': id_field,
    }

    # Apply document-defined meta options
    meta.update(attrs.get('meta', {}))
    attrs['_meta'] = meta

    # Set up collection manager, needs the class to have fields so use
    # DocumentMetaclass before instantiating CollectionManager object
    new_class = super_new(cls, name, bases, attrs)
    new_class.objects = QuerySetManager()

    user_indexes = [QuerySet._build_index_spec(new_class, spec)
                    for spec in meta['indexes']] + base_indexes
    new_class._meta['indexes'] = user_indexes

    unique_indexes = []
    for field_name, field in new_class._fields.items():
        # Generate a list of indexes needed by uniqueness constraints
        if field.unique:
            field.required = True
            # BUGFIX: index the database field name, not the Python
            # attribute name -- they differ when db_field is customised,
            # and the unique_with branch below already uses db_field.
            unique_fields = [field.db_field]

            # Add any unique_with fields to the back of the index spec
            if field.unique_with:
                if isinstance(field.unique_with, basestring):
                    field.unique_with = [field.unique_with]

                # Convert unique_with field names to real field names
                unique_with = []
                for other_name in field.unique_with:
                    parts = other_name.split('.')
                    # Lookup real name
                    parts = QuerySet._lookup_field(new_class, parts)
                    name_parts = [part.db_field for part in parts]
                    unique_with.append('.'.join(name_parts))
                    # Unique field should be required
                    parts[-1].required = True
                unique_fields += unique_with

            # Add the new index to the list
            index = [(f, pymongo.ASCENDING) for f in unique_fields]
            unique_indexes.append(index)

        # Check for custom primary key
        if field.primary_key:
            # BUGFIX: only reject a *conflicting* primary key. Seeing
            # the current primary-key field again (e.g. inherited from
            # a base document) is not an override and must not raise.
            current_pk = new_class._meta['id_field']
            if current_pk and current_pk != field_name:
                raise ValueError('Cannot override primary key field')

            if not current_pk:
                new_class._meta['id_field'] = field_name
                # Make 'Document.id' an alias to the real primary key field
                new_class.id = field

    new_class._meta['unique_indexes'] = unique_indexes

    if not new_class._meta['id_field']:
        # No custom primary key declared: synthesize a default ObjectId
        # field stored in the database as '_id'.
        new_class._meta['id_field'] = 'id'
        new_class._fields['id'] = ObjectIdField(db_field='_id')
        new_class.id = new_class._fields['id']

    _document_registry[name] = new_class

    return new_class
def __new__(cls, name, bases, attrs):
    """Construct a concrete Document class with abstract-base support.

    Abstract classes (``__metaclass__`` set here, or ``meta['abstract']``
    true) are passed straight through after validating that all their
    bases are abstract. For concrete classes: derives a snake_case
    collection name, inherits collection, id field, indexes, index
    options, 'allow_inheritance' and the queryset class from bases,
    resolves a queryset manager, builds user/unique indexes, and adds a
    default ``id`` ObjectId field when no custom primary key exists.

    Raises:
        ValueError: if an abstract document has a non-abstract base, or
            if a field declares ``primary_key=True`` while a different
            field is already the primary key.
    """
    super_new = super(TopLevelDocumentMetaclass, cls).__new__
    # Classes defined in this package are abstract and should not have
    # their own metadata with DB collection, etc.
    # __metaclass__ is only set on the class with the __metaclass__
    # attribute (i.e. it is not set on subclasses). This differentiates
    # 'real' documents from the 'Document' class
    #
    # Also assume a class is abstract if it has abstract set to True in
    # its meta dictionary. This allows custom Document superclasses.
    if (attrs.get('__metaclass__') == TopLevelDocumentMetaclass or
            ('meta' in attrs and attrs['meta'].get('abstract', False))):
        # Make sure no base class was non-abstract
        non_abstract_bases = [
            b for b in bases
            if hasattr(b, '_meta') and not b._meta.get('abstract', False)
        ]
        if non_abstract_bases:
            raise ValueError(
                "Abstract document cannot have non-abstract base")
        return super_new(cls, name, bases, attrs)

    # Derive a snake_case collection name from the CamelCase class name.
    collection = ''.join('_%s' % c if c.isupper() else c
                         for c in name).strip('_').lower()

    id_field = None
    abstract_base_indexes = []
    base_indexes = []
    base_meta = {}

    # Subclassed documents inherit collection from superclass
    for base in bases:
        if hasattr(base, '_meta'):
            # Subclasses of a concrete document may not redefine the
            # collection; warn and drop the setting.
            if 'collection' in attrs.get(
                    'meta', {}) and not base._meta.get('abstract', False):
                import warnings
                msg = "Trying to set a collection on a subclass (%s)" % name
                warnings.warn(msg, SyntaxWarning)
                del (attrs['meta']['collection'])
            if base._get_collection_name():
                collection = base._get_collection_name()

            # Propagate index options.
            for key in ('index_background', 'index_drop_dups', 'index_opts'):
                if key in base._meta:
                    base_meta[key] = base._meta[key]

            id_field = id_field or base._meta.get('id_field')
            # Indexes declared on abstract bases are treated as the
            # document's own (rebuilt below); concrete-base indexes are
            # appended as-is.
            if base._meta.get('abstract', False):
                abstract_base_indexes += base._meta.get('indexes', [])
            else:
                base_indexes += base._meta.get('indexes', [])

            # Propagate 'allow_inheritance'
            if 'allow_inheritance' in base._meta:
                base_meta['allow_inheritance'] = base._meta[
                    'allow_inheritance']
            if 'queryset_class' in base._meta:
                base_meta['queryset_class'] = base._meta['queryset_class']
            # Inherit a manually-assigned manager if the base exposes
            # one; class-level access may raise for descriptors.
            try:
                base_meta['objects'] = base.__getattribute__(base, 'objects')
            except TypeError:
                pass
            except AttributeError:
                pass

    meta = {
        'abstract': False,
        'collection': collection,
        'max_documents': None,
        'max_size': None,
        'ordering': [],  # default ordering applied at runtime
        'indexes': [],  # indexes to be ensured at runtime
        'id_field': id_field,
        'index_background': False,
        'index_drop_dups': False,
        'index_opts': {},
        'queryset_class': QuerySet,
        'delete_rules': {},
        'allow_inheritance': True
    }

    # Record whether 'allow_inheritance' was explicitly set anywhere.
    allow_inheritance_defined = ('allow_inheritance' in base_meta or
                                 'allow_inheritance' in attrs.get(
                                     'meta', {}))
    meta['allow_inheritance_defined'] = allow_inheritance_defined

    meta.update(base_meta)

    # Apply document-defined meta options
    meta.update(attrs.get('meta', {}))
    attrs['_meta'] = meta

    # Set up collection manager, needs the class to have fields so use
    # DocumentMetaclass before instantiating CollectionManager object
    new_class = super_new(cls, name, bases, attrs)

    # Allow a callable collection option (dynamically-generated names).
    collection = attrs['_meta'].get('collection', None)
    if callable(collection):
        new_class._meta['collection'] = collection(new_class)

    # Provide a default queryset unless one has been manually provided
    manager = attrs.get('objects', meta.get('objects', QuerySetManager()))
    if hasattr(manager, 'queryset_class'):
        meta['queryset_class'] = manager.queryset_class
    new_class.objects = manager

    indicies = meta['indexes'] + abstract_base_indexes
    user_indexes = [
        QuerySet._build_index_spec(new_class, spec)
        for spec in indicies
    ] + base_indexes
    new_class._meta['indexes'] = user_indexes

    # Unique-constraint indexes are derived by a shared helper here.
    unique_indexes = cls._unique_with_indexes(new_class)
    new_class._meta['unique_indexes'] = unique_indexes

    for field_name, field in new_class._fields.items():
        # Check for custom primary key
        if field.primary_key:
            current_pk = new_class._meta['id_field']
            # A conflicting primary key is an error; re-seeing the same
            # field (e.g. inherited) is tolerated.
            if current_pk and current_pk != field_name:
                raise ValueError('Cannot override primary key field')

            if not current_pk:
                new_class._meta['id_field'] = field_name
                # Make 'Document.id' an alias to the real primary key field
                new_class.id = field

    if not new_class._meta['id_field']:
        # No custom primary key declared: synthesize a default ObjectId
        # field stored in the database as '_id'.
        new_class._meta['id_field'] = 'id'
        new_class._fields['id'] = ObjectIdField(db_field='_id')
        new_class.id = new_class._fields['id']

    return new_class
def __new__(cls, name, bases, attrs):
    """Construct a concrete, shard-aware Document class.

    Builds the ``_meta`` options dict (collection name, index options,
    sharding/write-concern settings), inheriting collection, id field,
    index options and indexes from document bases; applies a special
    unsharded-database fallback for 'sweeper' classes; attaches a
    default queryset manager; derives indexes for unique constraints;
    synthesizes a default ``id`` ObjectId field when no custom primary
    key is declared; and optionally adds a shard-hash field when
    ``meta['hash_field']`` is set.

    Raises:
        ValueError: if a field declares ``primary_key=True`` while a
            different field is already the primary key.
    """
    super_new = super(TopLevelDocumentMetaclass, cls).__new__
    # Classes defined in this package are abstract and should not have
    # their own metadata with DB collection, etc.
    # __metaclass__ is only set on the class with the __metaclass__
    # attribute (i.e. it is not set on subclasses). This differentiates
    # 'real' documents from the 'Document' class
    if attrs.get('__metaclass__') == TopLevelDocumentMetaclass:
        return super_new(cls, name, bases, attrs)

    # Default collection name is simply the lowercased class name.
    collection = name.lower()

    id_field = None
    base_indexes = []
    base_meta = {}

    # Subclassed documents inherit collection from superclass
    for base in bases:
        if hasattr(base, '_meta') and 'collection' in base._meta:
            collection = base._meta['collection']

            # Propagate index options.
            for key in ('index_background', 'index_drop_dups', 'index_opts'):
                if key in base._meta:
                    base_meta[key] = base._meta[key]

            id_field = id_field or base._meta.get('id_field')
            base_indexes += base._meta.get('indexes', [])

    # Defaults; overridden first by inherited options, then by the
    # document's own ``meta`` dict.
    meta = {
        'collection': collection,
        'max_documents': None,
        'max_size': None,
        'ordering': [],  # default ordering applied at runtime
        'indexes': [],  # indexes to be ensured at runtime
        'id_field': id_field,
        'index_background': True,
        'index_drop_dups': False,
        'index_opts': {},
        'queryset_class': QuerySet,
        'db_name': None,
        'force_insert': False,
        'hash_field': None,
        'hash_db_field': '_h',
        'sharded': True,
        'write_concern': 1
    }
    meta.update(base_meta)

    # Apply document-defined meta options
    meta.update(attrs.get('meta', {}))
    attrs['_meta'] = meta

    # Set up collection manager, needs the class to have fields so use
    # DocumentMetaclass before instantiating CollectionManager object
    new_class = super_new(cls, name, bases, attrs)

    # Check if trying to use sweeper db; default to unsharded DB which has
    # unsharded shard as default primary to reduce load
    if hasattr(new_class, 'meta') and 'abstract' not in new_class.meta and\
            'shard_key' not in new_class.meta:
        import mongoengine.connection
        import inspect
        # NOTE(review): keys off the module file path of the class --
        # assumes sweeper classes live under 'clroot/sweeper'.
        if mongoengine.connection._default_db == 'sweeper' and \
                'clroot/sweeper' in inspect.getfile(new_class):
            new_class.meta['shard_key'] = False
            new_class._meta['db_name'] = 'sweeper-unsharded'

    # Provide a default queryset unless one has been manually provided
    if not hasattr(new_class, 'objects'):
        new_class.objects = QuerySetManager()

    user_indexes = [QuerySet._build_index_spec(new_class, spec)
                    for spec in meta['indexes']] + base_indexes
    new_class._meta['indexes'] = user_indexes

    unique_indexes = []
    for field_name, field in new_class._fields.items():
        # Generate a list of indexes needed by uniqueness constraints
        if field.unique:
            field.required = True
            unique_fields = [field.db_field]

            # Add any unique_with fields to the back of the index spec
            if field.unique_with:
                if isinstance(field.unique_with, basestring):
                    field.unique_with = [field.unique_with]

                # Convert unique_with field names to real field names
                unique_with = []
                for other_name in field.unique_with:
                    parts = other_name.split('.')
                    # Lookup real name
                    parts = QuerySet._lookup_field(new_class, parts)
                    name_parts = [part.db_field for part in parts]
                    unique_with.append('.'.join(name_parts))
                    # Unique field should be required
                    parts[-1].required = True
                unique_fields += unique_with

            # Add the new index to the list
            index = [(f, pymongo.ASCENDING) for f in unique_fields]
            unique_indexes.append(index)

        # Check for custom primary key
        if field.primary_key:
            current_pk = new_class._meta['id_field']
            # A conflicting primary key is an error; re-seeing the same
            # field (e.g. inherited) is tolerated.
            if current_pk and current_pk != field_name:
                raise ValueError('Cannot override primary key field')

            if not current_pk:
                new_class._meta['id_field'] = field_name
                # Make 'Document.id' an alias to the real primary key field
                new_class.id = field

    new_class._meta['unique_indexes'] = unique_indexes

    if not new_class._meta['id_field']:
        # No custom primary key declared: synthesize a default ObjectId
        # field stored in the database as '_id'.
        new_class._meta['id_field'] = 'id'
        id_field = ObjectIdField(db_field='_id')
        id_field.name = 'id'
        id_field.primary_key = True
        id_field.required = False
        new_class._fields['id'] = id_field
        new_class.id = new_class._fields['id']

    if meta['hash_field']:
        # Add an integer shard-hash field derived from meta options.
        assert 'shard_hash' not in new_class._fields, \
            "You already have a shard hash"
        assert meta['hash_field'] in new_class._fields, \
            "The field you want to hash doesn't exist"
        from fields import IntField
        field = IntField(db_field=meta['hash_db_field'], required=True)
        new_class._fields['shard_hash'] = field
        field.owner_document = new_class
        new_class.shard_hash = field

    return new_class
def __new__(cls, name, bases, attrs):
    """Construct a concrete Document class with abstract-base support.

    Abstract classes (``__metaclass__`` set here, or ``meta['abstract']``
    true) are passed straight through after validating that all their
    bases are abstract. For concrete classes: derives a snake_case
    collection name from the class name, inherits collection, id field,
    indexes and selected meta keys from bases, resolves a queryset
    manager, builds user/unique indexes, and adds a default ``id``
    ObjectId field when no custom primary key exists.

    Raises:
        ValueError: if an abstract document has a non-abstract base, or
            if a field declares ``primary_key=True`` while a different
            field is already the primary key.
    """
    super_new = super(TopLevelDocumentMetaclass, cls).__new__
    # Classes defined in this package are abstract and should not have
    # their own metadata with DB collection, etc.
    # __metaclass__ is only set on the class with the __metaclass__
    # attribute (i.e. it is not set on subclasses). This differentiates
    # 'real' documents from the 'Document' class
    #
    # Also assume a class is abstract if it has abstract set to True in
    # its meta dictionary. This allows custom Document superclasses.
    if attrs.get("__metaclass__") == TopLevelDocumentMetaclass or (
        "meta" in attrs and attrs["meta"].get("abstract", False)
    ):
        # Make sure no base class was non-abstract
        non_abstract_bases = [b for b in bases
                              if hasattr(b, "_meta") and
                              not b._meta.get("abstract", False)]
        if non_abstract_bases:
            raise ValueError("Abstract document cannot have non-abstract base")
        return super_new(cls, name, bases, attrs)

    # Derive a snake_case collection name from the CamelCase class name.
    collection = "".join("_%s" % c if c.isupper() else c
                         for c in name).strip("_").lower()

    id_field = None
    abstract_base_indexes = []
    base_indexes = []
    base_meta = {}

    # Subclassed documents inherit collection from superclass
    for base in bases:
        if hasattr(base, "_meta"):
            # Subclasses of a concrete document may not redefine the
            # collection; warn and drop the setting.
            if "collection" in attrs.get("meta", {}) and not base._meta.get("abstract", False):
                import warnings
                msg = "Trying to set a collection on a subclass (%s)" % name
                warnings.warn(msg, SyntaxWarning)
                del (attrs["meta"]["collection"])
            if base._get_collection_name():
                collection = base._get_collection_name()

            # Propagate inherited values
            keys_to_propogate = (
                "index_background",
                "index_drop_dups",
                "index_opts",
                "allow_inheritance",
                "queryset_class",
                "db_alias",
            )
            for key in keys_to_propogate:
                if key in base._meta:
                    base_meta[key] = base._meta[key]

            id_field = id_field or base._meta.get("id_field")
            # Indexes declared on abstract bases are treated as the
            # document's own (rebuilt below); concrete-base indexes are
            # appended as-is.
            if base._meta.get("abstract", False):
                abstract_base_indexes += base._meta.get("indexes", [])
            else:
                base_indexes += base._meta.get("indexes", [])

            # Inherit a manually-assigned manager if the base exposes
            # one; class-level access may raise for descriptors.
            try:
                base_meta["objects"] = base.__getattribute__(base, "objects")
            except TypeError:
                pass
            except AttributeError:
                pass

    # defaults
    meta = {
        "abstract": False,
        "collection": collection,
        "max_documents": None,
        "max_size": None,
        "ordering": [],  # default ordering applied at runtime
        "indexes": [],  # indexes to be ensured at runtime
        "id_field": id_field,
        "index_background": False,
        "index_drop_dups": False,
        "index_opts": {},
        "queryset_class": QuerySet,
        "delete_rules": {},
        "allow_inheritance": True,
    }

    # Record whether 'allow_inheritance' was explicitly set anywhere.
    allow_inheritance_defined = "allow_inheritance" in base_meta or "allow_inheritance" in attrs.get("meta", {})
    meta["allow_inheritance_defined"] = allow_inheritance_defined

    meta.update(base_meta)

    # Apply document-defined meta options
    meta.update(attrs.get("meta", {}))
    attrs["_meta"] = meta

    # Set up collection manager, needs the class to have fields so use
    # DocumentMetaclass before instantiating CollectionManager object
    new_class = super_new(cls, name, bases, attrs)

    # Allow a callable collection option (dynamically-generated names).
    collection = attrs["_meta"].get("collection", None)
    if callable(collection):
        new_class._meta["collection"] = collection(new_class)

    # Provide a default queryset unless one has been manually provided
    manager = attrs.get("objects", meta.get("objects", QuerySetManager()))
    if hasattr(manager, "queryset_class"):
        meta["queryset_class"] = manager.queryset_class
    new_class.objects = manager

    indicies = list(meta["indexes"]) + abstract_base_indexes
    user_indexes = [QuerySet._build_index_spec(new_class, spec)
                    for spec in indicies] + base_indexes
    new_class._meta["indexes"] = user_indexes

    # Unique-constraint indexes are derived by a shared helper here.
    unique_indexes = cls._unique_with_indexes(new_class)
    new_class._meta["unique_indexes"] = unique_indexes

    for field_name, field in new_class._fields.items():
        # Check for custom primary key
        if field.primary_key:
            current_pk = new_class._meta["id_field"]
            # A conflicting primary key is an error; re-seeing the same
            # field (e.g. inherited) is tolerated.
            if current_pk and current_pk != field_name:
                raise ValueError("Cannot override primary key field")

            if not current_pk:
                new_class._meta["id_field"] = field_name
                # Make 'Document.id' an alias to the real primary key field
                new_class.id = field

    if not new_class._meta["id_field"]:
        # No custom primary key declared: synthesize a default ObjectId
        # field stored in the database as "_id".
        new_class._meta["id_field"] = "id"
        new_class._fields["id"] = ObjectIdField(db_field="_id")
        new_class.id = new_class._fields["id"]

    return new_class
def __new__(cls, name, bases, attrs):
    """Construct a concrete Document class; pass abstract ones through.

    Abstract classes (``__metaclass__`` set here, or ``meta['abstract']``
    true) are created directly after validating that no base is
    explicitly non-abstract. For concrete classes: builds the ``_meta``
    options dict, inheriting collection, id field, index options and
    indexes from document bases; supports a callable collection name;
    attaches a default queryset manager; derives unique-constraint
    indexes; and adds a default ``id`` ObjectId field when no custom
    primary key is declared.

    Raises:
        ValueError: if an abstract document has a non-abstract base, or
            if a field declares ``primary_key=True`` while a different
            field is already the primary key.
    """
    super_new = super(TopLevelDocumentMetaclass, cls).__new__
    # Classes defined in this package are abstract and should not have
    # their own metadata with DB collection, etc.
    # __metaclass__ is only set on the class with the __metaclass__
    # attribute (i.e. it is not set on subclasses). This differentiates
    # 'real' documents from the 'Document' class
    #
    # Also assume a class is abstract if it has abstract set to True in
    # its meta dictionary. This allows custom Document superclasses.
    if attrs.get('__metaclass__') == TopLevelDocumentMetaclass or \
            ('meta' in attrs and attrs['meta'].get('abstract', False)):
        # check to make sure no base class was non-abstract
        for base in bases:
            if hasattr(base, '_meta') and 'abstract' in base._meta \
                    and not base._meta['abstract']:
                raise ValueError( \
                    'Abstract document cannot have non-abstract base')
        return super_new(cls, name, bases, attrs)

    # Default collection name is simply the lowercased class name.
    collection = name.lower()

    id_field = None
    base_indexes = []
    base_meta = {}

    # Subclassed documents inherit collection from superclass
    for base in bases:
        if hasattr(base, '_meta') and 'collection' in base._meta:
            collection = base._meta['collection']

            # Propagate index options.
            for key in ('index_background', 'index_drop_dups', 'index_opts'):
                if key in base._meta:
                    base_meta[key] = base._meta[key]

            id_field = id_field or base._meta.get('id_field')
            base_indexes += base._meta.get('indexes', [])

    # Defaults; overridden first by inherited options, then by the
    # document's own ``meta`` dict.
    meta = {
        'abstract': False,
        'collection': collection,
        'max_documents': None,
        'max_size': None,
        'ordering': [],  # default ordering applied at runtime
        'indexes': [],  # indexes to be ensured at runtime
        'id_field': id_field,
        'index_background': False,
        'index_drop_dups': False,
        'index_opts': {},
        'queryset_class': QuerySet,
    }
    meta.update(base_meta)

    # Apply document-defined meta options
    meta.update(attrs.get('meta', {}))
    attrs['_meta'] = meta

    # Set up collection manager, needs the class to have fields so use
    # DocumentMetaclass before instantiating CollectionManager object
    new_class = super_new(cls, name, bases, attrs)

    # Allow dynamically-generated collection names. Pass the newly
    # created class so the callee has access to __module__, etc.
    collection = new_class._meta['collection']
    if callable(collection):
        new_class._meta['collection'] = collection(new_class)

    # Provide a default queryset unless one has been manually provided
    # Note: Check for existance in attrs because hasattr assumes it
    # doesn't exist if any exception is raised, eg when not connected.
    if 'objects' not in attrs:
        new_class.objects = QuerySetManager()

    user_indexes = [QuerySet._build_index_spec(new_class, spec)
                    for spec in meta['indexes']] + base_indexes
    new_class._meta['indexes'] = user_indexes

    unique_indexes = []
    for field_name, field in new_class._fields.items():
        # Generate a list of indexes needed by uniqueness constraints
        if field.unique:
            field.required = True
            unique_fields = [field.db_field]

            # Add any unique_with fields to the back of the index spec
            if field.unique_with:
                if isinstance(field.unique_with, basestring):
                    field.unique_with = [field.unique_with]

                # Convert unique_with field names to real field names
                unique_with = []
                for other_name in field.unique_with:
                    parts = other_name.split('.')
                    # Lookup real name
                    parts = QuerySet._lookup_field(new_class, parts)
                    name_parts = [part.db_field for part in parts]
                    unique_with.append('.'.join(name_parts))
                    # Unique field should be required
                    parts[-1].required = True
                unique_fields += unique_with

            # Add the new index to the list
            index = [(f, pymongo.ASCENDING) for f in unique_fields]
            unique_indexes.append(index)

        # Check for custom primary key
        if field.primary_key:
            current_pk = new_class._meta['id_field']
            # A conflicting primary key is an error; re-seeing the same
            # field (e.g. inherited) is tolerated.
            if current_pk and current_pk != field_name:
                raise ValueError('Cannot override primary key field')

            if not current_pk:
                new_class._meta['id_field'] = field_name
                # Make 'Document.id' an alias to the real primary key field
                new_class.id = field

    new_class._meta['unique_indexes'] = unique_indexes

    if not new_class._meta['id_field']:
        # No custom primary key declared: synthesize a default ObjectId
        # field stored in the database as '_id'.
        new_class._meta['id_field'] = 'id'
        new_class._fields['id'] = ObjectIdField(db_field='_id')
        new_class.id = new_class._fields['id']

    return new_class