Example no. 1
	def toPython( self, bsonValue ):
		self._getClassInfo( )
		
		if bsonValue is None:
			return None
		
		documentClass = None

		if isinstance(bsonValue, dbref.DBRef):
			# old style (mongoengine)
			dbRef = bsonValue
			documentClass = self.documentClass
			documentName = self.documentName
			initialData = {
				'_id': bsonValue.id,
			}
		else:
			# new style (dict with extra info)
			dbRef = bsonValue['_ref']
			if '_cls' in bsonValue:
				# mongoengine GenericReferenceField compatibility
				documentName = bsonValue['_cls']
			elif '_types' in bsonValue:
				# the first entry of _types names the concrete document class
				documentName = bsonValue['_types'][0]
			else:
				return dbRef

			documentClass = DocumentRegistry.getDocument( documentName )
			
			initialData = {
				'_id': dbRef.id,
			}
			initialData.update( bsonValue.get( '_cache', {} ) )
		
		return documentClass( )._fromMongo( initialData )
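
For reference, a minimal sketch of the two BSON shapes toPython() handles, built with pymongo's bson package (the collection name, class name and cached values below are made up for illustration):

from bson import ObjectId
from bson.dbref import DBRef

oid = ObjectId( )

# old style (mongoengine): the stored value is the DBRef itself,
# so only the target _id is known before dereferencing
oldStyleValue = DBRef( 'user', oid )

# new style: a dict wrapping the DBRef plus the concrete class name
# (_cls, or a _types list) and an optional _cache of prefetched fields
newStyleValue = {
	'_ref': DBRef( 'user', oid ),
	'_cls': 'User',
	'_cache': { 'name': 'Alice' },
}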
Example no. 2
	def _getClassInfo( self ):
		# resolve the target class only once; later calls reuse the cached result
		if hasattr(self, 'documentName'):
			return
		
		documentClass = self.inputDocumentClass
		
		if isinstance(documentClass, basestring):
			if documentClass == 'self':
				self.documentName = self.ownerDocument.__name__
				self.documentClass = self.ownerDocument
			else:
				self.documentName = documentClass
				self.documentClass = DocumentRegistry.getDocument( self.documentName )
		else:
			self.documentClass = documentClass
			self.documentName = documentClass.__name__
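
The same resolution strategy, reduced to a self-contained sketch (a plain dict stands in for DocumentRegistry, and all names here are illustrative rather than the library's): the target may be given as a class, as a registered class name, or as the literal string 'self' meaning the owning document.

documentRegistry = { }

def resolveTarget( spec, ownerDocument ):
	# accept a class, a registered class name, or 'self'
	if isinstance(spec, str):
		if spec == 'self':
			return ownerDocument.__name__, ownerDocument
		return spec, documentRegistry[spec]
	return spec.__name__, spec

class Company( object ):
	pass

class Employee( object ):
	pass

documentRegistry['Company'] = Company

print( resolveTarget( 'self', Employee ) )     # -> ('Employee', <Employee class>)
print( resolveTarget( 'Company', Employee ) )  # -> ('Company', <Company class>)
print( resolveTarget( Company, Employee ) )    # -> ('Company', <Company class>)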
Example no. 3
	def _getNewInstance( self, data ):
		documentName = data.get( '_types', [self.document.__name__] )[0]
		documentClass = DocumentRegistry.getDocument( documentName )
		assert issubclass( documentClass, self.document )
		return documentClass( )._fromMongo( data )
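
For context, a sketch of the raw document shape implied by how _types is read here (class names and field values are illustrative): the first entry of _types names the concrete class, which lets a query declared against a base document hand back instances of its subclasses.

from bson import ObjectId

rawData = {
	'_id': ObjectId( ),
	'_types': ['Dog', 'Animal'],   # concrete class name first; only index 0 is read
	'name': 'Rex',
}
# _getNewInstance would look 'Dog' up in the registry and assert that it
# subclasses the document the query was issued against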
	def __new__( cls, name, bases, attrs ):
		if DocumentRegistry.hasDocument( name ):
			return DocumentRegistry.getDocument( name )
			
		# don't do anything if we're the class that defines the metaclass
		#metaclass = attrs.get( '__metaclass__' )
		superNew = super(DocumentMetaclass, cls).__new__
		#if metaclass and issubclass(metaclass, DocumentMetaclass):
		#	return superNew( cls, name, bases, attrs )
		
		fields = {}
		collection = name.lower()
		needsPrimaryKey = False
		primaryKey = None
		
		# find all inherited fields and record them
		for base in bases:
			if hasattr(base, '_fields'):
				fields.update( base._fields )
			if hasattr(base, '_collection'):
				collection = base._collection
			if hasattr(base, '__needs_primary_key__'):
				needsPrimaryKey = True
		
		if '__internal__' not in attrs:
			attrs['_collection'] = collection
		
		# find all fields and add them to our field list
		for attrName, attrValue in attrs.items( ):
			if hasattr(attrValue, '__class__') and \
				issubclass(attrValue.__class__, BaseField):
				field = attrValue
				field.name = attrName
				if not hasattr(field, 'dbField') or field.dbField is None:
					field.dbField = attrName
				fields[attrName] = field
				del attrs[attrName]

		def indexConverter( fieldName ):
			if fieldName in fields:
				return fields[fieldName].optimalIndex( )
			return fieldName

		for field, value in fields.iteritems( ):
			if value.primaryKey:
				assert primaryKey is None, "Can only have one primary key per document"
				primaryKey = field
			if value.unique:
				keyOpts = { 'unique': True }
				
				if value.dropDups:
					keyOpts['dropDups'] = True
				
				connection.stagedIndexes.append( (collection, indexConverter( field ), keyOpts) )
		
		# add a primary key if none exists and one is required
		if needsPrimaryKey and primaryKey is None:
			fields['id'] = ObjectIdField( primaryKey=True, dbField='_id' )
			primaryKey = 'id'
		
		attrs['_primaryKeyField'] = primaryKey
		
		# make sure we have all indexes that are specified
		if 'meta' in attrs:
			meta = attrs['meta']
			if 'indexes' in meta:
				indexes = meta['indexes']
				
				for index in indexes:
					if not isinstance(index, (list,tuple)):
						index = [index]
					
					pyMongoIndexKeys = sortListToPyMongo( index, indexConverter )
					connection.stagedIndexes.append( (collection, pyMongoIndexKeys, {}) )

		# add a query set manager if none exists already
		if 'objects' not in attrs:
			attrs['objects'] = QuerySetManager( )
		
		# construct the new class
		attrs['_is_lazy'] = False
		attrs['_fields'] = fields
		attrs['_data'] = None
		attrs['_values'] = None
		newClass = superNew( cls, name, bases, attrs )
		
		# record the document in the fields
		for field in newClass._fields.values( ):
			#field.ownerDocument = newClass
			field.setOwnerDocument( newClass )
		
		# add DoesNotExist and MultipleObjectsReturned exceptions
		module = attrs.get('__module__')
		newClass._addException( 'DoesNotExist', bases,
								defaultBase=DoesNotExist,
								module=module )
		newClass._addException( 'MultipleObjectsReturned', bases,
								defaultBase=MultipleObjectsReturned,
								module=module )

		# register the document for name-based reference
		DocumentRegistry.registerDocument( name, newClass )

		if connection.database is not None:
			connection.ensureIndexes()
		
		return newClass
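
As a rough, standalone illustration of the field-collection pattern used above (simplified; Python 2 class syntax to match the source, and none of the library's index, primary-key or registry handling): field descriptors found on the class body are moved into a _fields dict, fields from base documents are merged in, and the collection name defaults to the lowercased class name.

class SimpleField( object ):
	def __init__( self, dbField=None ):
		self.dbField = dbField

class SimpleDocumentMeta( type ):
	def __new__( cls, name, bases, attrs ):
		fields = { }
		# merge fields already collected on parent documents
		for base in bases:
			fields.update( getattr( base, '_fields', { } ) )
		# move field descriptors off the class body and into _fields
		for attrName, attrValue in attrs.items( ):
			if isinstance(attrValue, SimpleField):
				attrValue.name = attrName
				if attrValue.dbField is None:
					attrValue.dbField = attrName
				fields[attrName] = attrValue
				del attrs[attrName]
		attrs['_fields'] = fields
		attrs['_collection'] = name.lower( )
		return super(SimpleDocumentMeta, cls).__new__( cls, name, bases, attrs )

class SimpleDocument( object ):
	__metaclass__ = SimpleDocumentMeta

class User( SimpleDocument ):
	email = SimpleField( )

print( User._fields )       # {'email': <SimpleField instance>}
print( User._collection )   # 'user'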