class GroupStatsMixin(object):
    """Mixin that fetches tsdb stats for groups over a fixed or custom window."""

    STATS_PERIOD_CHOICES = {
        "14d": StatsPeriod(14, timedelta(hours=24)),
        "24h": StatsPeriod(24, timedelta(hours=1)),
    }

    # For 30 segments use 1/29th intervals.
    CUSTOM_PERIOD_SEGMENTS = 29

    def query_tsdb(self, group_ids, query_params):
        # Subclasses supply the actual tsdb query.
        raise NotImplementedError

    def get_stats(self, item_list, user):
        # We need to compute stats at 1d (1h resolution), and 14d or a
        # custom given period.
        if not self.stats_period:
            return None

        ids = [group.id for group in item_list]

        if self.stats_period == "auto":
            span = self.stats_period_end - self.stats_period_start
            params = {
                "start": self.stats_period_start,
                "end": self.stats_period_end,
                "rollup": int(span.total_seconds() / self.CUSTOM_PERIOD_SEGMENTS),
            }
        else:
            segments, interval = self.STATS_PERIOD_CHOICES[self.stats_period]
            current = timezone.now()
            params = {
                "start": current - (segments - 1) * interval,
                "end": current,
                "rollup": int(interval.total_seconds()),
            }

        return self.query_tsdb(ids, params)
class StreamGroupSerializer(GroupSerializer):
    """Group serializer for stream views: optionally attaches time-series
    stats and a matching event id to each serialized group."""

    STATS_PERIOD_CHOICES = {
        '14d': StatsPeriod(14, timedelta(hours=24)),
        '24h': StatsPeriod(24, timedelta(hours=1)),
    }

    def __init__(self, stats_period=None, matching_event_id=None):
        # NOTE(review): the parent initializer is not invoked here (the
        # sibling variant of this class calls super().__init__) — confirm
        # GroupSerializer requires no setup of its own.
        if stats_period is not None:
            assert stats_period in self.STATS_PERIOD_CHOICES
        self.stats_period = stats_period
        self.matching_event_id = matching_event_id

    def get_attrs(self, item_list, user):
        attrs = super(StreamGroupSerializer, self).get_attrs(item_list, user)

        if self.stats_period:
            # We need to compute stats at 1d (1h resolution), and 14d.
            keys = [group.id for group in item_list]
            segments, interval = self.STATS_PERIOD_CHOICES[self.stats_period]
            current = timezone.now()
            stats = tsdb.get_range(
                model=tsdb.models.group,
                keys=keys,
                end=current,
                start=current - (segments - 1) * interval,
                rollup=int(interval.total_seconds()),
            )
            for group in item_list:
                attrs[group].update({'stats': stats[group.id]})

        return attrs

    def serialize(self, obj, attrs, user):
        result = super(StreamGroupSerializer, self).serialize(obj, attrs, user)
        if self.stats_period:
            result['stats'] = {self.stats_period: attrs['stats']}
        if self.matching_event_id:
            result['matchingEventId'] = self.matching_event_id
        return result
class GroupStatsMixin(object):
    """Mixin that fetches tsdb stats, snapping custom rollups to 6h increments."""

    STATS_PERIOD_CHOICES = {
        "14d": StatsPeriod(14, timedelta(hours=24)),
        "24h": StatsPeriod(24, timedelta(hours=1)),
    }

    # For 30 segments use 1/29th intervals.
    CUSTOM_SEGMENTS = 29
    # Rollups should be increments of 6hs.
    CUSTOM_ROLLUP_6H = timedelta(hours=6).total_seconds()

    def query_tsdb(self, group_ids, query_params):
        # Subclasses supply the actual tsdb query.
        raise NotImplementedError

    def get_stats(self, item_list, user):
        # We need to compute stats at 1d (1h resolution), and 14d or a
        # custom given period.
        if not self.stats_period:
            return None

        ids = [group.id for group in item_list]

        if self.stats_period == "auto":
            six_hours = self.CUSTOM_ROLLUP_6H
            total_period = (
                self.stats_period_end - self.stats_period_start
            ).total_seconds()
            rollup = total_period / self.CUSTOM_SEGMENTS
            if rollup > six_hours:
                # Snap to the nearest multiple of 6h.
                rollup = round(rollup / six_hours) * six_hours
            elif rollup * 2 > six_hours:
                rollup = six_hours / 2  # 3hrs
            elif rollup * 3 > six_hours:
                rollup = six_hours / 3  # 2hr
            elif total_period > timedelta(hours=24).total_seconds():
                rollup = six_hours / 6  # 1hr
            params = {
                "start": self.stats_period_start,
                "end": self.stats_period_end,
                "rollup": int(rollup),
            }
        else:
            segments, interval = self.STATS_PERIOD_CHOICES[self.stats_period]
            current = timezone.now()
            params = {
                "start": current - (segments - 1) * interval,
                "end": current,
                "rollup": int(interval.total_seconds()),
            }

        return self.query_tsdb(ids, params)
class GroupStatsMixin(object):
    """Mixin that fetches tsdb stats for groups over a fixed period."""

    STATS_PERIOD_CHOICES = {
        "14d": StatsPeriod(14, timedelta(hours=24)),
        "24h": StatsPeriod(24, timedelta(hours=1)),
    }

    def query_tsdb(self, group_ids, query_params):
        # Subclasses supply the actual tsdb query.
        raise NotImplementedError

    def get_stats(self, item_list, user):
        # We need to compute stats at 1d (1h resolution), and 14d.
        if not self.stats_period:
            return None

        ids = [group.id for group in item_list]
        segments, interval = self.STATS_PERIOD_CHOICES[self.stats_period]
        current = timezone.now()
        params = {
            "start": current - (segments - 1) * interval,
            "end": current,
            "rollup": int(interval.total_seconds()),
        }
        return self.query_tsdb(ids, params)
def seen_today(self, group, event):
    """Return the count from the single 24h-rollup tsdb bucket for ``group``
    (presumably "events seen today" — scoped to the event's environment when
    one is set)."""
    current = timezone.now()
    fetch_range = retry_triple(tsdb.get_range)
    segments, interval = StatsPeriod(1, timedelta(hours=24))
    environment = event.get_environment()
    stats = fetch_range(
        model=tsdb.models.group,
        keys=[group.id],
        # None (i.e. unscoped) when there is no environment.
        environment_ids=environment and [environment.id],
        start=current - (segments - 1) * interval,
        end=current,
        rollup=int(interval.total_seconds()),
    )
    # First (and only) series point's value.
    return stats[group.id][0][1]
class GroupStatsMixin(object):
    """Mixin that fetches tsdb stats, choosing a custom rollup from a fixed
    ladder of interval sizes."""

    STATS_PERIOD_CHOICES = {
        "14d": StatsPeriod(14, timedelta(hours=24)),
        "24h": StatsPeriod(24, timedelta(hours=1)),
    }

    # Candidate rollup sizes, in seconds.
    CUSTOM_ROLLUP_CHOICES = {
        "1h": timedelta(hours=1).total_seconds(),
        "2h": timedelta(hours=2).total_seconds(),
        "3h": timedelta(hours=3).total_seconds(),
        "6h": timedelta(hours=6).total_seconds(),
        "12h": timedelta(hours=12).total_seconds(),
        "24h": timedelta(hours=24).total_seconds(),
    }

    # For 30 segments use 1/29th intervals.
    CUSTOM_SEGMENTS = 29
    # For 12h 36 segments, otherwise 15-16-17 bars is too few.
    CUSTOM_SEGMENTS_12H = 35
    # Rollups should be increments of 6hs.
    CUSTOM_ROLLUP_6H = timedelta(hours=6).total_seconds()

    def query_tsdb(self, group_ids, query_params):
        # Subclasses supply the actual tsdb query.
        raise NotImplementedError

    def get_stats(self, item_list, user, **kwargs):
        # We need to compute stats at 1d (1h resolution), and 14d or a
        # custom given period.
        if not self.stats_period:
            return None

        ids = [group.id for group in item_list]

        if self.stats_period == "auto":
            total = (self.stats_period_end - self.stats_period_start).total_seconds()
            choices = self.CUSTOM_ROLLUP_CHOICES
            day = choices["24h"]
            # Walk the ladder: pick the smallest rollup that keeps the
            # segment count under the limit.
            if total < timedelta(hours=24).total_seconds():
                rollup = total / self.CUSTOM_SEGMENTS
            elif total < self.CUSTOM_SEGMENTS * choices["1h"]:
                rollup = choices["1h"]
            elif total < self.CUSTOM_SEGMENTS * choices["2h"]:
                rollup = choices["2h"]
            elif total < self.CUSTOM_SEGMENTS * choices["3h"]:
                rollup = choices["3h"]
            elif total < self.CUSTOM_SEGMENTS * choices["6h"]:
                rollup = choices["6h"]
            elif total < self.CUSTOM_SEGMENTS_12H * choices["12h"]:
                # 36 segments is ok.
                rollup = choices["12h"]
            elif total < self.CUSTOM_SEGMENTS * day:
                rollup = day
            else:
                # Beyond the ladder: round to a whole number of days.
                rollup = round(total / (self.CUSTOM_SEGMENTS * day)) * day
            params = {
                "start": self.stats_period_start,
                "end": self.stats_period_end,
                "rollup": int(rollup),
            }
        else:
            segments, interval = self.STATS_PERIOD_CHOICES[self.stats_period]
            current = timezone.now()
            params = {
                "start": current - (segments - 1) * interval,
                "end": current,
                "rollup": int(interval.total_seconds()),
            }

        return self.query_tsdb(ids, params, **kwargs)
UserOption,
    UserReport,
    DEFAULT_SUBJECT_TEMPLATE,
)
from sentry.utils.data_filters import FilterTypes
from sentry.utils.db import is_postgres

# Human-readable labels for project status values; every deletion-related
# state collapses to "deleted".
STATUS_LABELS = {
    ProjectStatus.VISIBLE: "active",
    ProjectStatus.HIDDEN: "deleted",
    ProjectStatus.PENDING_DELETION: "deleted",
    ProjectStatus.DELETION_IN_PROGRESS: "deleted",
}

# Supported stats windows; sibling code unpacks each entry as
# (segment count, segment interval).
STATS_PERIOD_CHOICES = {
    "30d": StatsPeriod(30, timedelta(hours=24)),
    "14d": StatsPeriod(14, timedelta(hours=24)),
    "24h": StatsPeriod(24, timedelta(hours=1)),
}


# NOTE(review): this chunk is truncated — the class body continues beyond
# what is visible here.
@register(Project)
class ProjectSerializer(Serializer):
    """
    This is primarily used to summarize projects. We utilize it when doing
    bulk loads for things such as "show all projects for this organization",
    and its attributes be kept to a minimum.
    """

    def __init__(self, environment_id=None, stats_period=None):
        # stats_period, when given, must be one of the known windows.
        if stats_period is not None:
            assert stats_period in STATS_PERIOD_CHOICES
from sentry.models.integration import ExternalProviders
from sentry.models.notificationsetting import NotificationSetting
from sentry.notifications.helpers import transform_to_notification_settings_by_parent_id
from sentry.notifications.types import NotificationSettingOptionValues, NotificationSettingTypes
from sentry.snuba import discover
from sentry.utils.compat import zip

# Human-readable labels for project status values; every deletion-related
# state collapses to "deleted".
STATUS_LABELS = {
    ProjectStatus.VISIBLE: "active",
    ProjectStatus.HIDDEN: "deleted",
    ProjectStatus.PENDING_DELETION: "deleted",
    ProjectStatus.DELETION_IN_PROGRESS: "deleted",
}

# Supported stats windows; sibling code unpacks each entry as
# (segment count, segment interval).
STATS_PERIOD_CHOICES = {
    "30d": StatsPeriod(30, timedelta(hours=24)),
    "14d": StatsPeriod(14, timedelta(hours=24)),
    "7d": StatsPeriod(7, timedelta(hours=24)),
    "24h": StatsPeriod(24, timedelta(hours=1)),
    "1h": StatsPeriod(60, timedelta(minutes=1)),
}

# Prefix used for project-scoped permission strings.
_PROJECT_SCOPE_PREFIX = "projects:"
LATEST_DEPLOYS_KEY = "latestDeploys"


# NOTE(review): this chunk is truncated mid-docstring — the class body (and
# the docstring's closing quotes) continue beyond what is visible here.
@register(Project)
class ProjectSerializer(Serializer):
    """ This is primarily used to summarize projects. We utilize it when doing bulk loads for things
from sentry.digests import backend as digests
from sentry.models import (
    Project,
    ProjectAvatar,
    ProjectBookmark,
    ProjectOption,
    ProjectPlatform,
    ProjectStatus,
    ProjectTeam,
    Release,
    ReleaseProjectEnvironment,
    Deploy,
    UserOption,
    DEFAULT_SUBJECT_TEMPLATE
)
from sentry.utils.data_filters import FilterTypes

# Human-readable labels for project status values; every deletion-related
# state collapses to 'deleted'.
STATUS_LABELS = {
    ProjectStatus.VISIBLE: 'active',
    ProjectStatus.HIDDEN: 'deleted',
    ProjectStatus.PENDING_DELETION: 'deleted',
    ProjectStatus.DELETION_IN_PROGRESS: 'deleted',
}

# Supported stats windows; sibling code unpacks each entry as
# (segment count, segment interval).
STATS_PERIOD_CHOICES = {
    '30d': StatsPeriod(30, timedelta(hours=24)),
    '14d': StatsPeriod(14, timedelta(hours=24)),
    '24h': StatsPeriod(24, timedelta(hours=1)),
}


# NOTE(review): this chunk is truncated — the class body continues beyond
# what is visible here.
@register(Project)
class ProjectSerializer(Serializer):
    """
    This is primarily used to summarize projects. We utilize it when doing
    bulk loads for things such as "show all projects for this organization",
    and its attributes be kept to a minimum.
    """

    def __init__(self, environment_id=None, stats_period=None):
        # stats_period, when given, must be one of the known windows.
        if stats_period is not None:
            assert stats_period in STATS_PERIOD_CHOICES
class StreamGroupSerializer(GroupSerializer):
    """Group serializer for stream views: optionally attaches time-series
    stats (scoped by an environment resolver) and a matching event id."""

    STATS_PERIOD_CHOICES = {
        '14d': StatsPeriod(14, timedelta(hours=24)),
        '24h': StatsPeriod(24, timedelta(hours=1)),
    }

    def __init__(self, environment_func=None, stats_period=None, matching_event_id=None):
        super(StreamGroupSerializer, self).__init__(environment_func)
        if stats_period is not None:
            assert stats_period in self.STATS_PERIOD_CHOICES
        self.stats_period = stats_period
        self.matching_event_id = matching_event_id

    def get_attrs(self, item_list, user):
        attrs = super(StreamGroupSerializer, self).get_attrs(item_list, user)

        if self.stats_period:
            # We need to compute stats at 1d (1h resolution), and 14d.
            keys = [group.id for group in item_list]
            segments, interval = self.STATS_PERIOD_CHOICES[self.stats_period]
            current = timezone.now()
            query_params = {
                'start': current - (segments - 1) * interval,
                'end': current,
                'rollup': int(interval.total_seconds()),
            }
            try:
                environment = self.environment_func()
            except Environment.DoesNotExist:
                # Unknown environment: fall back to all-zero series.
                stats = {key: tsdb.make_series(0, **query_params) for key in keys}
            else:
                stats = tsdb.get_range(
                    model=tsdb.models.group,
                    keys=keys,
                    # None (i.e. unscoped) when there is no environment.
                    environment_id=environment and environment.id,
                    **query_params
                )
            for group in item_list:
                attrs[group].update({'stats': stats[group.id]})

        return attrs

    def serialize(self, obj, attrs, user):
        result = super(StreamGroupSerializer, self).serialize(obj, attrs, user)
        if self.stats_period:
            result['stats'] = {self.stats_period: attrs['stats']}
        if self.matching_event_id:
            result['matchingEventId'] = self.matching_event_id
        return result