    def delete(self):
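        # Remove every matched object: its stored hash, its index entries and its id-set membership.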
        db_table = self.query.get_meta().db_table
        results = self._get_results()

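        # First pass: queue an HGETALL for every matched pk and fetch them in one pipelined round trip.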
        pipeline = self._collection.pipeline(transaction=False)
        for res in results:
            pipeline.hgetall(get_hash_key(self.db_name, db_table, res))
        hmaps_ret = pipeline.execute()
        hmaps = zip(results, hmaps_ret)

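        # Second pass: in a new pipeline, delete each object's hash, clean up its index
        # entries and remove its pk from the table's id set.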
        pipeline = self._collection.pipeline(transaction=False)
        for res, hmap in hmaps:
            pipeline.delete(get_hash_key(self.db_name, db_table, res))
            for field, val in hmap.iteritems():
                val = unpickle(val)
                if val is not None:
                    #INDEXES
                    if field in self.indexes_for_model or self.connection.exact_all:
                        try:
                            indexes_for_field = self.indexes_for_model[field]
                        except KeyError:
                            indexes_for_field = ()
                        if 'exact' not in indexes_for_field and self.connection.exact_all:
                            indexes_for_field += ('exact',)
                        delete_indexes(
                            field,
                            val,
                            indexes_for_field,
                            pipeline,
                            get_hash_key(self.db_name, db_table, res),
                            db_table,
                            res,
                            self.db_name,
                        )
            pipeline.srem(self.db_name + '_' + db_table + '_ids', res)
        pipeline.execute()
    def _save(self, data, return_id=False):
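        # Write one object as a Redis hash. New objects get a pk from the per-table counter;
        # only non-None fields whose value changed are written, and secondary indexes are updated.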
        meta = self.query.get_meta()
        db_table = meta.db_table
        indexes = get_indexes()
        indexes_for_model = indexes.get(self.query.model, {})

        h_map = {}
        h_map_old = {}

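        # Updates arrive with '_id' already set; load the stored hash so each field can be
        # diffed against its previous value before writing.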
        if '_id' in data:
            pk = data['_id']
            h_map_old = self._collection.hgetall(get_hash_key(self.db_name,db_table,pk))
        else:
            pk = self._collection.incr(self.db_name+'_'+db_table+"_id")
            h_map_old = {}

        for key, value in data.iteritems():
            if key == "_id":
                continue
            if key in h_map_old:
                old = unpickle(h_map_old[key])
            else:
                old = None

            # If the field is a RedisAtomicInteger we don't want to save it here, since it is
            # updated atomically elsewhere; we only write it the first time, when it has no
            # previous value. These values are also left unpickled so that HINCRBY can work on them.
            do_pickle = True
            do_set = value is not None and old != value
            if do_set:
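                # meta.get_field raises for keys that aren't model fields; those fall through
                # and are pickled like any other value.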
                try:
                    if isinstance(meta.get_field(key), RedisAtomicInteger):
                        do_pickle = False
                        do_set = old is None
                except:
                    pass
            
            if do_set:
                if do_pickle:
                    value = enpickle(value)
                h_map[key] = value

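            # Maintain this field's secondary indexes; with exact_all set, every field gets
            # at least an 'exact' index.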
            if key in indexes_for_model or self.connection.exact_all:
                try:
                    indexes_for_field = indexes_for_model[key]
                except KeyError:
                    indexes_for_field = []
                if 'exact' not in indexes_for_field and self.connection.exact_all:
                    indexes_for_field += ('exact',)
                create_indexes(
                    key,
                    value,
                    old,
                    indexes_for_field,
                    self._collection,
                    db_table + '_' + str(pk),
                    db_table,
                    pk,
                    self.db_name,
                )

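        # Register the pk in the table's id set, then write any changed fields in a single HMSET.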
        self._collection.sadd(self.db_name + '_' + db_table + '_ids', pk)
        if len(h_map):
            self._collection.hmset(get_hash_key(self.db_name, db_table, pk), h_map)
        if return_id:
            return unicode(pk)