def get_dynamic_cluster_from_options(setting, config):
    """Resolve the redis cluster named in *config*, honoring redis-cluster mode.

    Looks up the cluster (default: "default") in the ``redis.clusters``
    option registry.  If that entry is flagged ``is_redis_cluster``, return
    ``(True, cluster_client, config)``; otherwise fall back to the legacy
    resolver and return ``(False, ...)`` prepended to its result.
    """
    name = config.get("cluster", "default")
    registry = options.default_manager.get("redis.clusters")
    entry = registry.get(name)
    if entry is not None and entry.get("is_redis_cluster"):
        return True, redis_clusters.get(name), config
    return (False,) + get_cluster_from_options(setting, config)
def __init__(self, uuid=None):
    """Create the task state holder.

    Uses *uuid* when given, otherwise generates a fresh one; binds a redis
    client from the cluster named by ``SENTRY_RULE_TASK_REDIS_CLUSTER``
    (falling back to "default") and seeds the initial value in redis.
    """
    self._uuid = uuid if uuid else self._generate_uuid()
    cluster_name = getattr(settings, "SENTRY_RULE_TASK_REDIS_CLUSTER", "default")
    self.client = redis_clusters.get(cluster_name)
    # NOTE(review): "_set_inital_value" spelling matches the method defined
    # elsewhere on this class; renaming it here would break the call.
    self._set_inital_value()
def check_event_already_post_processed(event):
    """Report whether *event* has already gone through post-processing.

    Uses a redis SET-if-not-exists marker keyed by project and event id with
    a one-hour expiry.  Returns True when the marker already existed (i.e.
    the event was seen before), False when this call created it, and None
    (no decision) when no lock cluster is configured.
    """
    cluster_key = getattr(settings, 'SENTRY_POST_PROCESSING_LOCK_REDIS_CLUSTER', None)
    if cluster_key is None:
        return
    marker = u'pp:{}/{}'.format(event.project_id, event.event_id)
    # nx=True makes this an atomic "first writer wins" probe; set() returns
    # a falsy value when the key was already present.
    created = redis_clusters.get(cluster_key).set(
        marker,
        u'{:.0f}'.format(time.time()),
        ex=60 * 60,
        nx=True,
    )
    return not created
def check_correct_event_sequencing(event):
    """Sampled check that post-processing observes events in stream order.

    Returns None ("no decision") when sampling or the redis cluster is not
    configured, or when this event falls outside the sample.  For sampled
    events, returns True if the sequencing marker key was present (things
    ran in the right order) and False if we got ahead of the stream.
    """
    sample_rate = getattr(settings, 'SENTRY_POST_PROCESSING_SEQUENCING_SAMPLE_RATE', None)
    if sample_rate is None:
        return  # no decision
    cluster_key = getattr(settings, 'SENTRY_POST_PROCESSING_SEQUENCING_REDIS_CLUSTER', None)
    if cluster_key is None:
        return  # no decision
    event_id = event.event_id.encode('utf-8')
    if int(md5(event_id).hexdigest(), 16) % sample_rate != 0:
        return  # no decision
    # If the key exists, things are executing in the right order; otherwise
    # we're ahead of the stream.
    removed = redis_clusters.get(cluster_key).delete(
        "_hack_post_process_sampled_events:%s" % event_id)
    return removed > 0
def __init__(self, cluster_id, **options):
    """Bind this cache backend to the redis cluster named *cluster_id*.

    All remaining keyword arguments are forwarded unchanged to
    ``CommonRedisCache``.
    """
    CommonRedisCache.__init__(
        self,
        client=redis_clusters.get(cluster_id),
        **options
    )
def _get_sync_redis_client():
    """Return the redis client for the reprocessing-sync cluster."""
    cluster_id = settings.SENTRY_REPROCESSING_SYNC_REDIS_CLUSTER
    return redis_clusters.get(cluster_id)
def get_client() -> Any:
    """Return the redis client for the metrics-indexer cluster."""
    cluster_id = settings.SENTRY_METRICS_INDEXER_REDIS_CLUSTER
    return redis_clusters.get(cluster_id)