# Exemplo n.º 1
    def _save(self, data, return_id=False):
        """Persist ``data`` as a Redis hash, updating secondary indexes.

        New rows get a pk from the per-table ``<db>_<table>_id`` counter and
        are added to the ``<db>_<table>_ids`` set; existing rows (``'_id'``
        present in ``data``) only rewrite fields whose value changed.
        All commands are queued on one non-transactional pipeline and flushed
        with a single ``execute()``.  Returns ``unicode(pk)`` when
        ``return_id`` is true, otherwise ``None``.
        """
        db_table = self.query.get_meta().db_table
        indexes = get_indexes()
        indexes_for_model = indexes.get(self.query.model, {})

        pipeline = self._collection.pipeline(transaction=False)

        h_map = {}
        h_map_old = {}

        if '_id' in data:
            # Update path: fetch the currently stored field map so we can
            # diff against it and skip unchanged fields.
            pk = data['_id']
            new = False
            h_map_old = self._collection.hgetall(
                get_hash_key(self.db_name, db_table, pk))
        else:
            # Insert path: allocate a fresh primary key atomically.
            pk = self._collection.incr(self.db_name + '_' + db_table + "_id")
            new = True

        for key, value in data.iteritems():

            if new:
                old = None
                h_map[key] = pickle.dumps(value)
            else:
                if key == "_id":
                    continue
                # BUGFIX: a field added to the model after this row was first
                # saved is absent from the stored hash; treat it as having no
                # previous value instead of raising KeyError.
                if key in h_map_old:
                    old = pickle.loads(h_map_old[key])
                else:
                    old = None

                if old != value:
                    h_map[key] = pickle.dumps(value)

            if key in indexes_for_model or self.connection.exact_all:
                try:
                    indexes_for_field = indexes_for_model[key]
                except KeyError:
                    indexes_for_field = ()
                # exact_all forces an 'exact' index on every field.
                if 'exact' not in indexes_for_field and self.connection.exact_all:
                    indexes_for_field += 'exact',
                create_indexes(
                    key,
                    value,
                    old,
                    indexes_for_field,
                    pipeline,
                    db_table + '_' + str(pk),
                    db_table,
                    pk,
                    self.db_name,
                )

        if '_id' not in data:
            pipeline.sadd(self.db_name + '_' + db_table + "_ids", pk)

        # BUGFIX: HMSET with an empty mapping is a Redis error; skip the
        # write when no field actually changed.
        if h_map:
            pipeline.hmset(get_hash_key(self.db_name, db_table, pk), h_map)
        pipeline.execute()
        if return_id:
            return unicode(pk)
# Exemplo n.º 2
    def _save(self, data, return_id=False):
        """Persist ``data`` as a Redis hash, updating secondary indexes.

        Same contract as the pipeline-based ``_save`` above: new rows draw a
        pk from the ``<db>_<table>_id`` counter and join the
        ``<db>_<table>_ids`` set; updates only rewrite changed fields.
        Returns ``unicode(pk)`` when ``return_id`` is true.
        """
        # NOTE(review): the original body mixed tab and space indentation
        # (the `sadd`/`return_id` lines used spaces inside a tab-indented
        # body), which breaks under `python -tt` and Python 3; normalized
        # to 4-space indentation throughout.
        db_table = self.query.get_meta().db_table
        indexes = get_indexes()
        indexes_for_model = indexes.get(self.query.model, {})

        pipeline = self._collection.pipeline(transaction=False)

        h_map = {}
        h_map_old = {}

        if '_id' in data:
            # Update path: load the stored field map so unchanged fields
            # can be skipped.
            pk = data['_id']
            new = False
            h_map_old = self._collection.hgetall(
                get_hash_key(self.db_name, db_table, pk))
        else:
            # Insert path: allocate a fresh primary key atomically.
            pk = self._collection.incr(self.db_name + '_' + db_table + "_id")
            new = True

        for key, value in data.iteritems():

            if new:
                old = None
                h_map[key] = pickle.dumps(value)
            else:
                if key == "_id":
                    continue
                # BUGFIX: fields added after the row was first saved are
                # missing from the stored hash -- default to "no old value"
                # instead of raising KeyError.
                if key in h_map_old:
                    old = pickle.loads(h_map_old[key])
                else:
                    old = None

                if old != value:
                    h_map[key] = pickle.dumps(value)

            if key in indexes_for_model or self.connection.exact_all:
                try:
                    indexes_for_field = indexes_for_model[key]
                except KeyError:
                    indexes_for_field = ()
                if 'exact' not in indexes_for_field and self.connection.exact_all:
                    indexes_for_field += 'exact',
                create_indexes(
                    key,
                    value,
                    old,
                    indexes_for_field,
                    pipeline,
                    db_table + '_' + str(pk),
                    db_table,
                    pk,
                    self.db_name,
                )

        if '_id' not in data:
            pipeline.sadd(self.db_name + '_' + db_table + "_ids", pk)

        # BUGFIX: avoid HMSET with an empty mapping (a Redis error) when
        # nothing changed.
        if h_map:
            pipeline.hmset(get_hash_key(self.db_name, db_table, pk), h_map)
        pipeline.execute()
        if return_id:
            return unicode(pk)
# Exemplo n.º 3
    def delete(self):
        """Delete every matched row: its hash, its secondary index entries,
        and its membership in the per-table id set.

        Two pipelined round trips: one to fetch all stored field maps, one
        to queue and flush every deletion.
        """
        table = self.query.get_meta().db_table
        pks = self._get_results()

        # Round trip 1: read the stored hash of each matched row.
        fetch = self._collection.pipeline(transaction=False)
        for pk in pks:
            fetch.hgetall(get_hash_key(self.db_name, table, pk))
        stored_maps = fetch.execute()

        # Round trip 2: queue hash deletes, index cleanup, and id-set
        # removals, then flush once.
        wipe = self._collection.pipeline(transaction=False)
        for pk, fields in zip(pks, stored_maps):
            wipe.delete(get_hash_key(self.db_name, table, pk))
            for field, raw in fields.iteritems():
                val = unpickle(raw)
                if val is None:
                    continue
                # INDEXES
                if field in self.indexes_for_model or self.connection.exact_all:
                    try:
                        per_field = self.indexes_for_model[field]
                    except KeyError:
                        per_field = ()
                    if 'exact' not in per_field and self.connection.exact_all:
                        per_field += 'exact',
                    delete_indexes(
                        field,
                        val,
                        per_field,
                        wipe,
                        get_hash_key(self.db_name, table, pk),
                        table,
                        pk,
                        self.db_name,
                    )
            wipe.srem(self.db_name + '_' + table + '_ids', pk)
        wipe.execute()
# Exemplo n.º 4
    def delete(self):
        """Delete every matched row, issuing commands one at a time (no
        pipeline): the row hash, its index entries, and its entry in the
        per-table id set.
        """
        table = self.query.get_meta().db_table
        pks = self._get_results()

        # Phase 1: read every stored field map up front.
        stored_maps = [
            self._collection.hgetall(get_hash_key(self.db_name, table, pk))
            for pk in pks
        ]

        # Phase 2: remove each hash, its index entries, and its id-set
        # membership.
        for pk, fields in zip(pks, stored_maps):
            self._collection.delete(get_hash_key(self.db_name, table, pk))
            for field, raw in fields.iteritems():
                val = unpickle(raw)
                if val is None:
                    continue
                # INDEXES
                if field in self.indexes_for_model or self.connection.exact_all:
                    try:
                        per_field = self.indexes_for_model[field]
                    except KeyError:
                        per_field = ()
                    if 'exact' not in per_field and self.connection.exact_all:
                        per_field += 'exact',
                    delete_indexes(field, val, per_field, self._collection,
                                   get_hash_key(self.db_name, table, pk),
                                   table, pk, self.db_name)
            self._collection.srem(self.db_name + '_' + table + '_ids', pk)
# Exemplo n.º 5
    def _save(self, data, return_id=False):
        """Persist ``data`` as a Redis hash, with special handling for
        ``RedisAtomicInteger`` fields.

        Writes only fields whose value changed (and is not None), always
        re-adds the pk to the ``<db>_<table>_ids`` set, and returns
        ``unicode(pk)`` when ``return_id`` is true.
        """
        meta = self.query.get_meta()
        db_table = meta.db_table
        indexes = get_indexes()
        indexes_for_model = indexes.get(self.query.model, {})

        h_map = {}
        h_map_old = {}

        if '_id' in data:
            # Update path: load the stored field map to diff against.
            pk = data['_id']
            h_map_old = self._collection.hgetall(
                get_hash_key(self.db_name, db_table, pk))
        else:
            # Insert path: allocate a fresh primary key atomically.
            pk = self._collection.incr(self.db_name + '_' + db_table + "_id")
            h_map_old = {}

        for key, value in data.iteritems():
            if key == "_id":
                continue
            if key in h_map_old:
                old = unpickle(h_map_old[key])
            else:
                old = None

            # RedisAtomicInteger fields are updated atomically elsewhere
            # (e.g. via HINCRBY), so only write them on their first save,
            # and never pickle them so the raw integer stays HINCRBY-able.
            do_pickle = True
            do_set = value is not None and old != value
            if do_set:
                try:
                    if isinstance(meta.get_field(key), RedisAtomicInteger):
                        do_pickle = False
                        do_set = old is None
                except Exception:
                    # BUGFIX: was a bare ``except:``, which also swallowed
                    # KeyboardInterrupt/SystemExit.  get_field failing just
                    # means this key is not a model field.
                    pass

            if do_set:
                # BUGFIX: store the pickled payload without rebinding
                # ``value``, so create_indexes below receives the raw value
                # (as in the pipeline-based _save variants), not pickled
                # bytes.
                h_map[key] = enpickle(value) if do_pickle else value

            if key in indexes_for_model or self.connection.exact_all:
                try:
                    indexes_for_field = indexes_for_model[key]
                except KeyError:
                    indexes_for_field = []
                if 'exact' not in indexes_for_field and self.connection.exact_all:
                    indexes_for_field += 'exact',
                create_indexes(
                    key,
                    value,
                    old,
                    indexes_for_field,
                    self._collection,
                    db_table + '_' + str(pk),
                    db_table,
                    pk,
                    self.db_name,
                )

        self._collection.sadd(self.db_name + '_' + db_table + "_ids", pk)
        # HMSET with an empty mapping is a Redis error; only write when
        # at least one field changed.
        if len(h_map):
            self._collection.hmset(get_hash_key(self.db_name, db_table, pk), h_map)
        if return_id:
            return unicode(pk)