Example #1
# Assumed imports for this snippet (not shown in the original source):
import json

from django.conf import settings
from rb import Cluster

from sentry.cache.base import BaseCache  # assumed location of BaseCache


class RedisCache(BaseCache):
    key_expire = 60 * 60  # 1 hour

    def __init__(self, version=None, prefix=None, **options):
        if not options:
            # inherit default options from REDIS_OPTIONS
            options = settings.SENTRY_REDIS_OPTIONS

        options.setdefault('hosts', {
            0: {},
        })
        self.cluster = Cluster(options['hosts'])
        self.client = self.cluster.get_routing_client()

        super(RedisCache, self).__init__(version=version, prefix=prefix)

    def set(self, key, value, timeout, version=None):
        key = self.make_key(key, version=version)
        v = json.dumps(value)
        if timeout:
            self.client.setex(key, int(timeout), v)
        else:
            self.client.set(key, v)

    def delete(self, key, version=None):
        key = self.make_key(key, version=version)
        self.client.delete(key)

    def get(self, key, version=None):
        key = self.make_key(key, version=version)
        result = self.client.get(key)
        if result is not None:
            result = json.loads(result)
        return result
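
For a concrete sense of the API, a minimal usage sketch (key and value are hypothetical; assumes the class above plus a reachable Redis on the default host/port):

cache = RedisCache(prefix='c')
cache.set('user:1', {'name': 'alice'}, 300)   # JSON-encoded, 300-second TTL
assert cache.get('user:1') == {'name': 'alice'}
cache.delete('user:1')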
Example #2
# Assumed imports for this snippet (not shown in the original source):
import json

from threading import local

from django.conf import settings
from rb import Cluster


class RedisCache(local):
    key_expire = 60 * 60  # 1 hour

    def __init__(self, **options):
        if not options:
            # inherit default options from REDIS_OPTIONS
            options = settings.SENTRY_REDIS_OPTIONS

        options.setdefault('hosts', {
            0: {},
        })
        self.cluster = Cluster(options['hosts'])
        self.client = self.cluster.get_routing_client()

    def set(self, key, value, timeout):
        v = json.dumps(value)
        if timeout:
            self.client.setex(key, int(timeout), v)
        else:
            self.client.set(key, v)

    def delete(self, key):
        self.client.delete(key)

    def get(self, key):
        result = self.client.get(key)
        if result is not None:
            result = json.loads(result)
        return result
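
Unlike Example #1, this variant subclasses threading.local, and Python re-runs a local subclass's __init__ in every thread that touches the instance, so each thread ends up with its own cluster and routing client. A minimal sketch of the effect (hypothetical usage):

import threading

cache = RedisCache()          # __init__ runs again in each new thread

def worker():
    cache.set('k', '1', 60)   # uses this thread's own self.client

threading.Thread(target=worker).start()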
Example #3
def test_redis_blaster_operation_disable_instance():
    from rb import Cluster

    # DB_SETTINGS, exercise_fanout and exercise_redis are fixtures from
    # the surrounding test suite (not shown in this snippet).
    cluster = Cluster(
        hosts={0: {'port': DB_SETTINGS['port']}},
        host_defaults={'host': DB_SETTINGS['host']},
    )
    exercise_fanout(cluster)

    client = cluster.get_routing_client()
    exercise_redis(client)
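
exercise_fanout and exercise_redis are helpers from the surrounding test suite: one drives the fanout (all-hosts) client, the other the key-routing client. A plausible stand-in, purely for illustration (not the suite's actual code):

def exercise_fanout(cluster):
    # broadcast a command to every host in the cluster
    with cluster.fanout(hosts='all') as client:
        client.set('fanout-key', 'fanout-value')

def exercise_redis(client):
    # routing client: each command goes to the host that owns the key
    client.set('routed-key', 'routed-value')
    client.get('routed-key')
    client.delete('routed-key')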
Example #4
# Assumed imports for this snippet (not shown in the original source).
# Note this is Python 2-era code (iteritems, iterkeys, unicode, old-style super):
import pickle
from hashlib import md5
from time import time

from django.conf import settings
from django.db import models
from django.utils.encoding import smart_str

from rb import Cluster

# Buffer, InvalidConfiguration, import_string and the process_incr task
# come from elsewhere in the host project.


class RedisBuffer(Buffer):
    key_expire = 60 * 60  # 1 hour
    pending_key = 'b:p'

    def __init__(self, **options):
        if not options:
            # inherit default options from REDIS_OPTIONS
            options = settings.SENTRY_REDIS_OPTIONS

        options.setdefault('hosts', {
            0: {},
        })
        self.cluster = Cluster(options['hosts'])

    def validate(self):
        try:
            with self.cluster.all() as client:
                client.ping()
        except Exception as e:
            raise InvalidConfiguration(unicode(e))

    def _coerce_val(self, value):
        if isinstance(value, models.Model):
            value = value.pk
        return smart_str(value)

    def _make_key(self, model, filters):
        """
        Returns a Redis-compatible key for the given model and filters.
        """
        return 'b:k:%s:%s' % (
            model._meta,
            md5(smart_str('&'.join('%s=%s' % (k, self._coerce_val(v))
                for k, v in sorted(filters.iteritems())))).hexdigest(),
        )
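
A hypothetical call makes the key shape concrete (assuming a Django model Group in an app labelled sentry, so str(model._meta) is 'sentry.group'):

# _make_key(Group, {'id': 42})
#   -> 'b:k:sentry.group:' + md5('id=42').hexdigest()
#   i.e. 'b:k:sentry.group:<32-char hex digest>'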

    def _make_lock_key(self, key):
        return 'l:%s' % (key,)

    def incr(self, model, columns, filters, extra=None):
        """
        Increment the key by doing the following:

        - Insert/update a hashmap based on (model, columns)
            - Perform an incrby on counters
            - Perform a set (last write wins) on extra
        - Add hashmap key to pending flushes
        """
        # TODO(dcramer): longer term we'd rather not have to serialize values
        # here (unless it's to JSON)
        key = self._make_key(model, filters)
        # We can't use cluster.map() here because we want to support multiple
        # pending keys (one per Redis shard)
        conn = self.cluster.get_local_client_for_key(key)

        pipe = conn.pipeline()
        pipe.hsetnx(key, 'm', '%s.%s' % (model.__module__, model.__name__))
        pipe.hsetnx(key, 'f', pickle.dumps(filters))
        for column, amount in columns.iteritems():
            pipe.hincrby(key, 'i+' + column, amount)

        if extra:
            for column, value in extra.iteritems():
                pipe.hset(key, 'e+' + column, pickle.dumps(value))
        pipe.expire(key, self.key_expire)
        pipe.zadd(self.pending_key, time(), key)
        pipe.execute()
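
After a call such as buffer.incr(Group, {'times_seen': 1}, {'id': 42}, extra={'last_seen': now}) (model and columns hypothetical), the hash behind the key holds roughly:

# HGETALL b:k:sentry.group:<md5>
#   'm'            -> 'sentry.models.Group'   (set once via HSETNX)
#   'f'            -> <pickled {'id': 42}>    (set once via HSETNX)
#   'i+times_seen' -> '1', then '2', ...      (HINCRBY on every call)
#   'e+last_seen'  -> <pickled datetime>      (HSET, last write wins)
# and the key itself is ZADDed into the 'b:p' pending set, scored by time.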

    def process_pending(self):
        client = self.cluster.get_routing_client()
        lock_key = self._make_lock_key(self.pending_key)
        # prevent a stampede due to celerybeat + periodic task
        if not client.set(lock_key, '1', nx=True, ex=60):
            return

        try:
            for host_id in self.cluster.hosts.iterkeys():
                conn = self.cluster.get_local_client(host_id)
                keys = conn.zrange(self.pending_key, 0, -1)
                if not keys:
                    continue
                for key in keys:
                    process_incr.apply_async(kwargs={
                        'key': key,
                    })
                pipe = conn.pipeline()
                pipe.zrem(self.pending_key, *keys)
                pipe.execute()
        finally:
            client.delete(lock_key)
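
process_incr is a Celery task defined elsewhere in the project; a minimal sketch of what it presumably does (task wiring assumed, not taken from the source):

from celery import shared_task

@shared_task
def process_incr(key):
    # hand one pending hash key back to the buffer
    buffer.process(key)   # `buffer` being the RedisBuffer instance in use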

    def process(self, key):
        client = self.cluster.get_routing_client()
        lock_key = self._make_lock_key(key)
        # prevent a stampede due to the way we use celery etas + duplicate
        # tasks
        if not client.set(lock_key, '1', nx=True, ex=10):
            return

        with self.cluster.map() as conn:
            values = conn.hgetall(key)
            conn.delete(key)

        if not values.value:
            return

        model = import_string(values.value['m'])
        filters = pickle.loads(values.value['f'])
        incr_values = {}
        extra_values = {}
        for k, v in values.value.iteritems():
            if k.startswith('i+'):
                incr_values[k[2:]] = int(v)
            elif k.startswith('e+'):
                extra_values[k[2:]] = pickle.loads(v)

        super(RedisBuffer, self).process(model, incr_values, filters, extra_values)
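
One subtlety: inside cluster.map(), rb hands back promise objects rather than immediate results, which is why the code above reads values.value; the promises resolve once the with block exits and the batched commands have executed. A minimal sketch of the pattern:

with cluster.map() as conn:
    promise = conn.get('some-key')   # a promise, not the value yet
# leaving the block flushes the batch; now the result is available
print(promise.value)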