Example #1
    def _save(self, data, return_id=False):

        db_table = self.query.get_meta().db_table
        indexes = get_indexes()
        indexes_for_model = indexes.get(self.query.model, {})

        pipeline = self._collection.pipeline(transaction=False)

        h_map = {}
        h_map_old = {}

        if '_id' in data:
            # Existing row: keep its pk and load the stored hash so unchanged
            # fields can be skipped below.
            pk = data['_id']
            new = False
            h_map_old = self._collection.hgetall(
                get_hash_key(self.db_name, db_table, pk))
        else:
            # New row: allocate a pk from the per-table counter key.
            pk = self._collection.incr(self.db_name + '_' + db_table + "_id")
            new = True

        for key, value in data.iteritems():

            if new:
                old = None
                h_map[key] = pickle.dumps(value)
            else:
                if key == "_id": continue
                old = pickle.loads(h_map_old[key])

                if old != value:
                    h_map[key] = pickle.dumps(value)

            if key in indexes_for_model or self.connection.exact_all:
                try:
                    indexes_for_field = indexes_for_model[key]
                except KeyError:
                    indexes_for_field = ()
                # With exact_all enabled every field gets at least an 'exact' index.
                if 'exact' not in indexes_for_field and self.connection.exact_all:
                    indexes_for_field += ('exact',)
                create_indexes(
                    key,
                    value,
                    old,
                    indexes_for_field,
                    pipeline,
                    db_table + '_' + str(pk),
                    db_table,
                    pk,
                    self.db_name,
                )

        if '_id' not in data:
            pipeline.sadd(self.db_name + '_' + db_table + "_ids", pk)

        pipeline.hmset(get_hash_key(self.db_name, db_table, pk), h_map)
        pipeline.execute()
        if return_id:
            return unicode(pk)
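
For orientation, here is a minimal sketch (not part of the examples) of inspecting the keys this _save touches with a plain redis-py client. The database and table names are hypothetical, and it assumes get_hash_key() simply joins the database name, table and pk with underscores, which is an assumption about that helper; like the examples, it is written for Python 2.

    import pickle
    import redis

    r = redis.StrictRedis()                        # assumption: default local Redis
    db_name, db_table = 'default', 'myapp_entry'   # hypothetical names

    # Per-table autoincrement counter that _save INCRs for new rows.
    print(r.get(db_name + '_' + db_table + '_id'))

    # Set of primary keys that _save SADDs on insert.
    for pk in r.smembers(db_name + '_' + db_table + '_ids'):
        # Assumption: get_hash_key() builds '<db_name>_<db_table>_<pk>'.
        row = r.hgetall(db_name + '_' + db_table + '_' + pk)
        print(dict((k, pickle.loads(v)) for k, v in row.items()))
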
Example #2
    def _save(self, data, return_id=False):

        db_table = self.query.get_meta().db_table
        indexes = get_indexes()
        indexes_for_model = indexes.get(self.query.model, {})

        pipeline = self._collection.pipeline(transaction=False)

        h_map = {}
        h_map_old = {}

        if '_id' in data:
            pk = data['_id']
            new = False
            h_map_old = self._collection.hgetall(get_hash_key(self.db_name, db_table, pk))
        else:
            pk = self._collection.incr(self.db_name + '_' + db_table + "_id")
            new = True

        for key, value in data.iteritems():

            if new:
                old = None
                h_map[key] = pickle.dumps(value)
            else:
                if key == "_id":
                    continue
                old = pickle.loads(h_map_old[key])

                if old != value:
                    h_map[key] = pickle.dumps(value)

            if key in indexes_for_model or self.connection.exact_all:
                try:
                    indexes_for_field = indexes_for_model[key]
                except KeyError:
                    indexes_for_field = ()
                if 'exact' not in indexes_for_field and self.connection.exact_all:
                    indexes_for_field += ('exact',)
                create_indexes(
                    key,
                    value,
                    old,
                    indexes_for_field,
                    pipeline,
                    db_table + '_' + str(pk),
                    db_table,
                    pk,
                    self.db_name,
                )

        if '_id' not in data:
            pipeline.sadd(self.db_name + '_' + db_table + "_ids", pk)

        pipeline.hmset(get_hash_key(self.db_name, db_table, pk), h_map)
        pipeline.execute()
        if return_id:
            return unicode(pk)
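
Examples #1 and #2 buffer every write in a redis-py pipeline created with transaction=False and flush it with a single execute(); Example #5 below issues each command directly on the connection instead. A minimal sketch of the same batching pattern with plain redis-py, using illustrative key names:

    import redis

    r = redis.StrictRedis()                    # assumption: default local Redis
    pipe = r.pipeline(transaction=False)       # buffer commands without MULTI/EXEC

    pipe.incr('default_myapp_entry_id')        # hypothetical counter key
    pipe.sadd('default_myapp_entry_ids', 1)    # hypothetical pk set
    pipe.hmset('default_myapp_entry_1', {'title': 'hello'})  # hypothetical row hash

    results = pipe.execute()                   # one round trip for all buffered commands
    print(results)
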
Example #3
    def __init__(self, compiler, fields):
        super(DBQuery, self).__init__(compiler, fields)
        #print fields
        #print dir(self.query.get_meta())
        self.db_table = self.query.get_meta().db_table
        self.indexes = get_indexes()
        self.indexes_for_model = self.indexes.get(self.query.model, {})
        self._collection = self.connection.db_connection
        self.db_name = self.connection.db_name
        #self.connection.exact_all
        self._ordering = []
        self.db_query = {}
Example #4
    def __init__(self, compiler, fields):
        super(DBQuery, self).__init__(compiler, fields)
        #print fields
        #print dir(self.query.get_meta())
        self.db_table = self.query.get_meta().db_table
        self.indexes = get_indexes()
        self.indexes_for_model = self.indexes.get(self.query.model, {})
        self._collection = self.connection.db_connection
        self.db_name = self.connection.db_name
        #self.connection.exact_all
        self._ordering = []
        self.db_query = {}
Example #5
    def _save(self, data, return_id=False):
        meta = self.query.get_meta()
        db_table = meta.db_table
        indexes = get_indexes()
        indexes_for_model = indexes.get(self.query.model, {})

        h_map = {}
        h_map_old = {}

        if '_id' in data:
            pk = data['_id']
            h_map_old = self._collection.hgetall(get_hash_key(self.db_name, db_table, pk))
        else:
            pk = self._collection.incr(self.db_name + '_' + db_table + "_id")
            h_map_old = {}

        for key, value in data.iteritems():
            if key == "_id":
                continue
            if key in h_map_old:
                old = unpickle(h_map_old[key])
            else:
                old = None

            # If the field is a RedisAtomicInteger we don't want to save it, since it is being
            # updated atomically elsewhere; but if it has never been set, we do want to save it
            # the first time. We also leave these unpickled so that HINCRBY can work.
            do_pickle = True
            do_set = value is not None and old != value
            if do_set:
                try:
                    if isinstance(meta.get_field(key), RedisAtomicInteger):
                        do_pickle = False
                        do_set = old is None
                except:
                    pass
            
            if do_set:
                if do_pickle:
                    value = enpickle(value)
                h_map[key] = value

            if key in indexes_for_model or self.connection.exact_all:
                try:
                    indexes_for_field = indexes_for_model[key]
                except KeyError:
                    indexes_for_field = []
                if 'exact' not in indexes_for_field and self.connection.exact_all:
                    indexes_for_field += ('exact',)
                create_indexes(
                    key,
                    value,
                    old,
                    indexes_for_field,
                    self._collection,
                    db_table + '_' + str(pk),
                    db_table,
                    pk,
                    self.db_name,
                )

        self._collection.sadd(self.db_name + '_' + db_table + "_ids", pk)
        if len(h_map):
            self._collection.hmset(get_hash_key(self.db_name, db_table, pk), h_map)
        if return_id:
            return unicode(pk)
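
The RedisAtomicInteger branch in Example #5 stores the field unpickled so that Redis can increment it in place. A minimal sketch of that atomic update with plain redis-py; the hash key and field name are hypothetical, assuming the same '<db_name>_<db_table>_<pk>' key layout as above:

    import redis

    r = redis.StrictRedis()                 # assumption: default local Redis
    hash_key = 'default_myapp_entry_1'      # hypothetical row hash for pk=1

    # HINCRBY only works on a plain integer value; a pickled value would not
    # parse as a number, which is why such fields are written unpickled above.
    r.hincrby(hash_key, 'hits', 1)          # 'hits' is a hypothetical RedisAtomicInteger field
    print(r.hget(hash_key, 'hits'))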