def make_group_generator(random, project):
    """Yield an endless stream of synthetic ``Group`` rows for *project*.

    ``random`` is a seeded ``random.Random`` instance so output is
    reproducible; ids are assigned sequentially from 1.
    """
    # All timestamps are spread over a 30-day window starting 2016-06-01.
    epoch = to_timestamp(datetime(2016, 6, 1, 0, 0, 0, tzinfo=timezone.utc))
    for id in itertools.count(1):
        first_seen = epoch + random.randint(0, 60 * 60 * 24 * 30)
        last_seen = random.randint(first_seen, first_seen + (60 * 60 * 24 * 30))
        culprit = make_culprit(random)
        # list(...) is required: random.choice() indexes its argument, and a
        # dict_keys view is not indexable on Python 3 (raises TypeError).
        level = random.choice(list(LOG_LEVELS.keys()))
        message = make_message(random)
        group = Group(
            id=id,
            project=project,
            culprit=culprit,
            level=level,
            message=message,
            first_seen=to_datetime(first_seen),
            last_seen=to_datetime(last_seen),
            status=random.choice((GroupStatus.UNRESOLVED, GroupStatus.RESOLVED)),
            data={"type": "default", "metadata": {"title": message}},
        )
        # 80% of groups get richer, event-type-specific metadata.
        if random.random() < 0.8:
            group.data = make_group_metadata(random, group)
        yield group
def serialize(self, obj, attrs, user):
    """Build the REST payload for a group, deriving its effective status,
    status details, permalink and event metadata."""
    status = obj.status
    status_details = {}

    snooze = attrs['snooze']
    if snooze:
        # An expired snooze on a muted group flips it back to unresolved;
        # an active snooze is surfaced to the client instead.
        if snooze < timezone.now() and status == GroupStatus.MUTED:
            status = GroupStatus.UNRESOLVED
        else:
            status_details['snoozeUntil'] = snooze
    elif status == GroupStatus.UNRESOLVED and obj.is_over_resolve_age():
        status = GroupStatus.RESOLVED
        status_details['autoResolved'] = True

    if status == GroupStatus.RESOLVED:
        status_label = 'resolved'
        if attrs['pending_resolution']:
            status_details['inNextRelease'] = True
    elif status == GroupStatus.MUTED:
        status_label = 'muted'
    elif status in (GroupStatus.PENDING_DELETION, GroupStatus.DELETION_IN_PROGRESS):
        status_label = 'pending_deletion'
    elif status == GroupStatus.PENDING_MERGE:
        status_label = 'pending_merge'
    else:
        status_label = 'unresolved'

    permalink = absolute_uri(
        reverse('sentry-group',
                args=[obj.organization.slug, obj.project.slug, obj.id]))

    event_type = obj.data.get('type', 'default')
    # Fall back to a synthesized title when no metadata was stored.
    metadata = obj.data.get('metadata') or {'title': obj.message_short}

    payload = {
        'id': str(obj.id),
        'shareId': obj.get_share_id(),
        'shortId': obj.qualified_short_id,
        'count': str(obj.times_seen),
        'userCount': attrs['user_count'],
        'title': obj.message_short,
        'culprit': obj.culprit,
        'permalink': permalink,
        'firstSeen': obj.first_seen,
        'lastSeen': obj.last_seen,
        'logger': obj.logger or None,
        'level': LOG_LEVELS.get(obj.level, 'unknown'),
        'status': status_label,
        'statusDetails': status_details,
        'isPublic': obj.is_public,
        'project': {
            'name': obj.project.name,
            'slug': obj.project.slug,
        },
        'type': event_type,
        'metadata': metadata,
        'numComments': obj.num_comments,
        'assignedTo': attrs['assigned_to'],
        'isBookmarked': attrs['is_bookmarked'],
        'hasSeen': attrs['has_seen'],
        'annotations': attrs['annotations'],
    }
    return payload
class GroupTombstone(Model):
    # Snapshot of a removed group's identifying fields (level, message,
    # culprit, data) keyed by previous_group_id — presumably used to
    # recognize/discard future matching events; confirm against callers.
    __core__ = False

    previous_group_id = BoundedPositiveIntegerField(unique=True)
    project = FlexibleForeignKey("sentry.Project")
    level = BoundedPositiveIntegerField(
        choices=LOG_LEVELS.items(), default=logging.ERROR, blank=True
    )
    message = models.TextField()
    culprit = models.CharField(max_length=MAX_CULPRIT_LENGTH, blank=True, null=True)
    data = GzippedDictField(blank=True, null=True)
    # id of the user who performed the action, stored as a plain integer
    # rather than a foreign key.
    actor_id = BoundedPositiveIntegerField(null=True)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_grouptombstone"

    def get_event_type(self):
        """
        Return the type of this issue.

        See ``sentry.eventtypes``.
        """
        return self.data.get("type", "default")

    def get_event_metadata(self):
        """
        Return the metadata of this issue.

        See ``sentry.eventtypes``.
        """
        # NOTE(review): raises KeyError when 'metadata' is absent — assumes
        # every tombstone row was written with metadata; verify.
        return self.data["metadata"]
class TurboSMSOptionsForm(forms.Form):
    """Configuration form for the TurboSMS notification plugin: recipient
    numbers, credentials, sender alphaname and minimum event level."""

    # One phone number per line, e.g. +380xxyyyyyyy.
    numbers = forms.CharField(
        label=_('Phone numbers'),
        widget=forms.Textarea(attrs={
            'class': 'span6',
            'placeholder': '+380xxyyyyyyy'}),
        help_text=_('Enter phone numbers to send new events to (one per line).')
    )
    login = forms.CharField(
        label=_('Login'),
        help_text=_('TurboSMS login'),
        # Must contain at least one word character.
        validators=[RegexValidator(r'\w+')]
    )
    password = forms.CharField(
        label=_('Password'),
        widget=forms.PasswordInput(),
        help_text=_('TurboSMS password')
    )
    alphaname = forms.CharField(
        label=_('Alphaname'),
        help_text=_('TurboSMS alphaname'),
        validators=[RegexValidator(r'\w+')]
    )
    # Minimum log level that triggers an SMS.
    level = forms.ChoiceField(
        label=_('Level'),
        choices=LOG_LEVELS.items()
    )
def serialize(self, obj, attrs, user):
    """Build the REST payload for a group, folding ignore expiry and
    auto-resolution into the reported status."""
    status = obj.status
    status_details = {}

    ignore_until = attrs['ignore_duration']
    if ignore_until:
        # An expired ignore window on an ignored group reverts it to
        # unresolved; an active one is reported to the client.
        if ignore_until < timezone.now() and status == GroupStatus.IGNORED:
            status = GroupStatus.UNRESOLVED
        else:
            status_details['ignoreUntil'] = ignore_until
    elif status == GroupStatus.UNRESOLVED and obj.is_over_resolve_age():
        status = GroupStatus.RESOLVED
        status_details['autoResolved'] = True

    if status == GroupStatus.RESOLVED:
        status_label = 'resolved'
        if attrs['pending_resolution']:
            status_details['inNextRelease'] = True
    elif status == GroupStatus.IGNORED:
        status_label = 'ignored'
    elif status in (GroupStatus.PENDING_DELETION, GroupStatus.DELETION_IN_PROGRESS):
        status_label = 'pending_deletion'
    elif status == GroupStatus.PENDING_MERGE:
        status_label = 'pending_merge'
    else:
        status_label = 'unresolved'

    permalink = absolute_uri(
        reverse('sentry-group',
                args=[obj.organization.slug, obj.project.slug, obj.id]))

    payload = {
        'id': six.text_type(obj.id),
        'shareId': obj.get_share_id(),
        'shortId': obj.qualified_short_id,
        'count': six.text_type(obj.times_seen),
        'userCount': attrs['user_count'],
        'title': obj.title,
        'culprit': obj.culprit,
        'permalink': permalink,
        'firstSeen': obj.first_seen,
        'lastSeen': obj.last_seen,
        'logger': obj.logger or None,
        'level': LOG_LEVELS.get(obj.level, 'unknown'),
        'status': status_label,
        'statusDetails': status_details,
        'isPublic': obj.is_public,
        'project': {
            'name': obj.project.name,
            'slug': obj.project.slug,
        },
        'type': obj.get_event_type(),
        'metadata': obj.get_event_metadata(),
        'numComments': obj.num_comments,
        'assignedTo': attrs['assigned_to'],
        'isBookmarked': attrs['is_bookmarked'],
        'isSubscribed': attrs['is_subscribed'],
        'hasSeen': attrs['has_seen'],
        'annotations': attrs['annotations'],
    }
    return payload
def make_group_generator(random, project):
    """Yield an endless stream of synthetic ``Group`` rows for *project*.

    ``random`` is a seeded ``random.Random`` instance so output is
    reproducible; ids are assigned sequentially from 1.
    """
    # All timestamps fall in a 30-day window starting 2016-06-01 UTC.
    epoch = to_timestamp(datetime(2016, 6, 1, 0, 0, 0, tzinfo=timezone.utc))
    for id in itertools.count(1):
        first_seen = epoch + random.randint(0, 60 * 60 * 24 * 30)
        last_seen = random.randint(first_seen, first_seen + (60 * 60 * 24 * 30))
        culprit = make_culprit(random)
        # list(...) is required: random.choice() indexes its argument, and a
        # dict_keys view is not indexable on Python 3 (raises TypeError).
        # On Python 2 dict.keys() is already a list, so this is a no-op.
        level = random.choice(list(LOG_LEVELS.keys()))
        message = make_message(random)
        group = Group(
            id=id,
            project=project,
            culprit=culprit,
            level=level,
            message=message,
            first_seen=to_datetime(first_seen),
            last_seen=to_datetime(last_seen),
            status=random.choice((GroupStatus.UNRESOLVED, GroupStatus.RESOLVED, )),
            data={
                'type': 'default',
                'metadata': {
                    'title': message,
                }
            }
        )
        # 80% of groups get richer, event-type-specific metadata.
        if random.random() < 0.8:
            group.data = make_group_metadata(random, group)
        yield group
def serialize(self, obj, attrs, user):
    """Return the minimal API representation of *obj* (no metadata key)."""
    result = {'id': six.text_type(obj.id)}
    result['level'] = LOG_LEVELS.get(obj.level, 'unknown')
    result['message'] = obj.message
    result['culprit'] = obj.culprit
    result['type'] = obj.get_event_type()
    # The acting user was precomputed into attrs by the serializer framework.
    result['actor'] = attrs.get('user')
    return result
def serialize(self, obj, attrs, user):
    """Return the API representation of *obj*, including event metadata."""
    level_label = LOG_LEVELS.get(obj.level, 'unknown')
    # dict(...) keyword form: every key is a plain identifier here.
    return dict(
        id=six.text_type(obj.id),
        level=level_label,
        message=obj.message,
        culprit=obj.culprit,
        type=obj.get_event_type(),
        metadata=obj.get_event_metadata(),
        actor=attrs.get('user'),
    )
def serialize(self, obj, attrs, user):
    """Return the API representation of *obj*, including event metadata."""
    payload = {
        "id": six.text_type(obj.id),
        "level": LOG_LEVELS.get(obj.level, "unknown"),
    }
    payload.update(
        {
            "message": obj.message,
            "culprit": obj.culprit,
            "type": obj.get_event_type(),
            "metadata": obj.get_event_metadata(),
            # The acting user was precomputed into attrs.
            "actor": attrs.get("user"),
        }
    )
    return payload
def serialize(self, obj, attrs, user):
    """Build the REST payload for a group, folding snooze expiry and
    auto-resolution into the reported status."""
    status = obj.status
    snooze = attrs['snooze']
    if snooze:
        # An expired snooze on a muted group reverts it to unresolved.
        if snooze < timezone.now() and status == GroupStatus.MUTED:
            status = GroupStatus.UNRESOLVED
    elif status == GroupStatus.UNRESOLVED and obj.is_over_resolve_age():
        status = GroupStatus.RESOLVED

    if status == GroupStatus.RESOLVED:
        status_label = 'resolved'
    elif status == GroupStatus.MUTED:
        status_label = 'muted'
    elif status in (GroupStatus.PENDING_DELETION, GroupStatus.DELETION_IN_PROGRESS):
        status_label = 'pending_deletion'
    elif status == GroupStatus.PENDING_MERGE:
        status_label = 'pending_merge'
    else:
        status_label = 'unresolved'

    # Only groups attached to a team have a canonical web URL.
    permalink = None
    if obj.team:
        permalink = absolute_uri(
            reverse('sentry-group',
                    args=[obj.organization.slug, obj.project.slug, obj.id]))

    return {
        'id': str(obj.id),
        'shareId': obj.get_share_id(),
        'count': str(obj.times_seen),
        'userCount': attrs['user_count'],
        'title': obj.message_short,
        'culprit': obj.culprit,
        'permalink': permalink,
        'firstSeen': obj.first_seen,
        'lastSeen': obj.last_seen,
        'timeSpent': obj.avg_time_spent,
        'logger': obj.logger or None,
        'level': LOG_LEVELS.get(obj.level, 'unknown'),
        'status': status_label,
        'snoozeUntil': snooze,
        'isPublic': obj.is_public,
        'project': {
            'name': obj.project.name,
            'slug': obj.project.slug,
        },
        'numComments': obj.num_comments,
        'assignedTo': attrs['assigned_to'],
        'isBookmarked': attrs['is_bookmarked'],
        'hasSeen': attrs['has_seen'],
        'annotations': attrs['annotations'],
    }
def serialize(self, obj, attrs, user):
    """Build the REST payload for a group using its computed status."""
    status = obj.get_status()
    # Table lookup replaces the if/elif chain; both deletion states share
    # the same user-facing label.
    label_by_status = {
        GroupStatus.RESOLVED: 'resolved',
        GroupStatus.MUTED: 'muted',
        GroupStatus.PENDING_DELETION: 'pending_deletion',
        GroupStatus.DELETION_IN_PROGRESS: 'pending_deletion',
        GroupStatus.PENDING_MERGE: 'pending_merge',
    }
    status_label = label_by_status.get(status, 'unresolved')

    # Only groups attached to a team have a canonical web URL.
    permalink = None
    if obj.team:
        permalink = absolute_uri(
            reverse('sentry-group',
                    args=[obj.organization.slug, obj.project.slug, obj.id]))

    return {
        'id': str(obj.id),
        'shareId': obj.get_share_id(),
        'count': str(obj.times_seen),
        'userCount': attrs['user_count'],
        'title': obj.message_short,
        'culprit': obj.culprit,
        'permalink': permalink,
        'firstSeen': obj.first_seen,
        'lastSeen': obj.last_seen,
        'timeSpent': obj.avg_time_spent,
        'logger': obj.logger or None,
        'level': LOG_LEVELS.get(obj.level, 'unknown'),
        'status': status_label,
        'isPublic': obj.is_public,
        'project': {
            'name': obj.project.name,
            'slug': obj.project.slug,
        },
        'numComments': obj.num_comments,
        'assignedTo': attrs['assigned_to'],
        'isBookmarked': attrs['is_bookmarked'],
        'hasSeen': attrs['has_seen'],
        'annotations': attrs['annotations'],
    }
def serialize(self, obj, attrs, user):
    """Assemble the API payload for a group; optional keys (unhandled flag,
    seen-stats) are attached only when present in *attrs*."""
    status_details, status_label = self._get_status(attrs, obj)
    permalink = self._get_permalink(obj, user)
    is_subscribed, subscription_details = get_subscription_from_attributes(attrs)
    share_id = attrs["share_id"]

    project_info = {
        "id": str(obj.project.id),
        "name": obj.project.name,
        "slug": obj.project.slug,
        "platform": obj.project.platform,
    }

    group_dict = {
        "id": str(obj.id),
        "shareId": share_id,
        "shortId": obj.qualified_short_id,
        "title": obj.title,
        "culprit": obj.culprit,
        "permalink": permalink,
        "logger": obj.logger or None,
        "level": LOG_LEVELS.get(obj.level, "unknown"),
        "status": status_label,
        "statusDetails": status_details,
        # A group is public exactly when a share link exists for it.
        "isPublic": share_id is not None,
        "platform": obj.platform,
        "project": project_info,
        "type": obj.get_event_type(),
        "metadata": obj.get_event_metadata(),
        "numComments": obj.num_comments,
        "assignedTo": serialize(attrs["assigned_to"], user, ActorSerializer()),
        "isBookmarked": attrs["is_bookmarked"],
        "isSubscribed": is_subscribed,
        "subscriptionDetails": subscription_details,
        "hasSeen": attrs["has_seen"],
        "annotations": attrs["annotations"],
    }

    # This attribute is currently feature gated
    if "is_unhandled" in attrs:
        group_dict["isUnhandled"] = attrs["is_unhandled"]
    if "times_seen" in attrs:
        group_dict.update(self._convert_seen_stats(attrs))
    return group_dict
class GroupTombstone(Model):
    # Snapshot of a removed group's identifying fields (level, message,
    # culprit, data) keyed by previous_group_id — presumably used to
    # recognize/discard future matching events; confirm against callers.
    __core__ = False

    previous_group_id = BoundedPositiveIntegerField(unique=True)
    project = FlexibleForeignKey('sentry.Project')
    level = BoundedPositiveIntegerField(
        choices=LOG_LEVELS.items(), default=logging.ERROR, blank=True
    )
    message = models.TextField()
    culprit = models.CharField(
        max_length=MAX_CULPRIT_LENGTH,
        blank=True,
        null=True,
    )
    data = GzippedDictField(blank=True, null=True)
    # id of the user who performed the action, stored as a plain integer
    # rather than a foreign key.
    actor_id = BoundedPositiveIntegerField(null=True)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_grouptombstone'

    def get_event_type(self):
        """
        Return the type of this issue.

        See ``sentry.eventtypes``.
        """
        return self.data.get('type', 'default')

    def get_event_metadata(self):
        """
        Return the metadata of this issue.

        See ``sentry.eventtypes``.
        """
        etype = self.data.get('type')
        if etype is None:
            etype = 'default'
        # Rows without stored metadata have it synthesized from the raw
        # message via the event-type machinery.
        if 'metadata' not in self.data:
            data = self.data.copy() if self.data else {}
            data['message'] = self.message
            return eventtypes.get(etype)(data).get_metadata()
        return self.data['metadata']
def make_group_generator(random, project):
    """Yield an endless stream of synthetic ``Group`` rows for *project*.

    ``random`` is a seeded ``random.Random`` instance so output is
    reproducible; ids are assigned sequentially from 1.
    """
    # All timestamps fall in a 30-day window starting 2016-06-01 UTC.
    epoch = to_timestamp(datetime(2016, 6, 1, 0, 0, 0, tzinfo=timezone.utc))
    for id in itertools.count(1):
        first_seen = epoch + random.randint(0, 60 * 60 * 24 * 30)
        last_seen = random.randint(first_seen, first_seen + (60 * 60 * 24 * 30))
        group = Group(
            id=id,
            project=project,
            culprit=make_culprit(random),
            # list(...) is required: random.choice() indexes its argument,
            # and a dict_keys view is not indexable on Python 3.
            level=random.choice(list(LOG_LEVELS.keys())),
            message=make_message(random),
            first_seen=to_datetime(first_seen),
            last_seen=to_datetime(last_seen),
        )
        # 80% of groups get richer, event-type-specific metadata.
        if random.random() < 0.8:
            group.data = make_group_metadata(random, group)
        yield group
def serialize(self, obj, attrs, user):
    """Build the REST payload for a group, including its tag keys."""
    status = obj.get_status()
    if status == GroupStatus.RESOLVED:
        status_label = 'resolved'
    elif status == GroupStatus.MUTED:
        status_label = 'muted'
    else:
        status_label = 'unresolved'

    # Only groups attached to a team have a canonical web URL.
    permalink = (
        absolute_uri(
            reverse('sentry-group',
                    args=[obj.organization.slug, obj.project.slug, obj.id]))
        if obj.team else None
    )

    payload = {
        'id': str(obj.id),
        'shareId': obj.get_share_id(),
        'count': str(obj.times_seen),
        'title': obj.message_short,
        'culprit': obj.culprit,
        'permalink': permalink,
        'firstSeen': obj.first_seen,
        'lastSeen': obj.last_seen,
        'timeSpent': obj.avg_time_spent,
        'logger': obj.logger or None,
        'level': LOG_LEVELS.get(obj.level, 'unknown'),
        'status': status_label,
        'isPublic': obj.is_public,
        'project': {
            'name': obj.project.name,
            'slug': obj.project.slug,
        },
        'numComments': obj.num_comments,
        'assignedTo': attrs['assigned_to'],
        'isBookmarked': attrs['is_bookmarked'],
        'hasSeen': attrs['has_seen'],
        'tags': attrs['tags'],
        'annotations': attrs['annotations'],
    }
    return payload
def serialize(self, obj, attrs, user):
    """Assemble the API payload for a group; status, permalink and
    subscription details come from the serializer's private helpers."""
    status_details, status_label = self._get_status(attrs, obj)
    permalink = self._get_permalink(obj, user)
    is_subscribed, subscription_details = self._get_subscription(attrs)
    share_id = attrs["share_id"]

    project_info = {
        "id": six.text_type(obj.project.id),
        "name": obj.project.name,
        "slug": obj.project.slug,
        "platform": obj.project.platform,
    }

    payload = {
        "id": six.text_type(obj.id),
        "shareId": share_id,
        "shortId": obj.qualified_short_id,
        "count": six.text_type(attrs["times_seen"]),
        "userCount": attrs["user_count"],
        "title": obj.title,
        "culprit": obj.culprit,
        "permalink": permalink,
        "firstSeen": attrs["first_seen"],
        "lastSeen": attrs["last_seen"],
        "logger": obj.logger or None,
        "level": LOG_LEVELS.get(obj.level, "unknown"),
        "status": status_label,
        "statusDetails": status_details,
        # A group is public exactly when a share link exists for it.
        "isPublic": share_id is not None,
        "platform": obj.platform,
        "project": project_info,
        "type": obj.get_event_type(),
        "metadata": obj.get_event_metadata(),
        "numComments": obj.num_comments,
        "assignedTo": serialize(attrs["assigned_to"], user, ActorSerializer()),
        "isBookmarked": attrs["is_bookmarked"],
        "isSubscribed": is_subscribed,
        "subscriptionDetails": subscription_details,
        "hasSeen": attrs["has_seen"],
        "annotations": attrs["annotations"],
    }
    return payload
class Group(Model):
    """
    Aggregated message which summarizes a set of Events.
    """
    project = models.ForeignKey('sentry.Project', null=True)
    logger = models.CharField(max_length=64, blank=True, default='root', db_index=True)
    level = BoundedPositiveIntegerField(
        choices=LOG_LEVELS.items(), default=logging.ERROR, blank=True, db_index=True)
    message = models.TextField()
    # Stored in legacy column 'view'.
    culprit = models.CharField(
        max_length=MAX_CULPRIT_LENGTH, blank=True, null=True, db_column='view')
    # Grouping hash; unique together with project (see Meta).
    checksum = models.CharField(max_length=32, db_index=True)
    num_comments = BoundedPositiveIntegerField(default=0, null=True)
    platform = models.CharField(max_length=64, null=True)
    status = BoundedPositiveIntegerField(default=0, choices=STATUS_LEVELS, db_index=True)
    times_seen = BoundedPositiveIntegerField(default=1, db_index=True)
    last_seen = models.DateTimeField(default=timezone.now, db_index=True)
    first_seen = models.DateTimeField(default=timezone.now, db_index=True)
    resolved_at = models.DateTimeField(null=True, db_index=True)
    # active_at should be the same as first_seen by default
    active_at = models.DateTimeField(null=True, db_index=True)
    time_spent_total = BoundedIntegerField(default=0)
    time_spent_count = BoundedIntegerField(default=0)
    score = BoundedIntegerField(default=0)
    is_public = models.NullBooleanField(default=False, null=True)
    data = GzippedDictField(blank=True, null=True)

    objects = GroupManager()

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_groupedmessage'
        unique_together = (('project', 'checksum'), )
        verbose_name_plural = _('grouped messages')
        verbose_name = _('grouped message')
        permissions = (("can_view", "Can view"), )

    __repr__ = sane_repr('project_id', 'checksum')

    def __unicode__(self):
        return "(%s) %s" % (self.times_seen, self.error())

    def save(self, *args, **kwargs):
        # Backfill timestamps so last_seen >= first_seen == active_at holds
        # even when callers omit them.
        if not self.last_seen:
            self.last_seen = timezone.now()
        if not self.first_seen:
            self.first_seen = self.last_seen
        if not self.active_at:
            self.active_at = self.first_seen
        if self.message:
            # We limit what we store for the message body: first line,
            # truncated to 255 characters.
            self.message = self.message.splitlines()[0][:255]
        super(Group, self).save(*args, **kwargs)

    def get_absolute_url(self):
        return absolute_uri(
            reverse('sentry-group',
                    args=[self.organization.slug, self.project.slug, self.id]))

    @property
    def avg_time_spent(self):
        # Returns None when no samples were recorded.
        if not self.time_spent_count:
            return
        return float(self.time_spent_total) / self.time_spent_count

    def natural_key(self):
        return (self.project, self.checksum)

    def is_over_resolve_age(self):
        # True when the project's auto-resolve window (hours) has elapsed
        # since the group was last seen; False when the option is unset.
        resolve_age = self.project.get_option('sentry:resolve_age', None)
        if not resolve_age:
            return False
        return self.last_seen < timezone.now() - timedelta(
            hours=int(resolve_age))

    def is_muted(self):
        return self.get_status() == STATUS_MUTED

    def is_resolved(self):
        return self.get_status() == STATUS_RESOLVED

    def get_status(self):
        # Effective status: an unresolved group past its resolve age is
        # reported as resolved without mutating the row.
        if self.status == STATUS_UNRESOLVED and self.is_over_resolve_age():
            return STATUS_RESOLVED
        return self.status

    def get_score(self):
        # Blends frequency (log-scaled) with recency (epoch seconds).
        return int(
            math.log(self.times_seen) * 600 +
            float(time.mktime(self.last_seen.timetuple())))

    def get_latest_event(self):
        from sentry.models import Event

        # Cached on the instance after the first lookup.
        if not hasattr(self, '_latest_event'):
            try:
                self._latest_event = Event.objects.filter(
                    group=self,
                ).order_by('-datetime')[0]
            except IndexError:
                self._latest_event = None
        return self._latest_event

    def get_unique_tags(self, tag, since=None, order_by='-times_seen'):
        # TODO(dcramer): this has zero test coverage and is a critical path
        from sentry.models import GroupTagValue

        queryset = GroupTagValue.objects.filter(
            group=self,
            key=tag,
        )
        if since:
            queryset = queryset.filter(last_seen__gte=since)
        return queryset.values_list(
            'value',
            'times_seen',
            'first_seen',
            'last_seen',
        ).order_by(order_by)

    def get_tags(self, with_internal=True):
        from sentry.models import GroupTagKey

        # Cached on the instance; optionally filters out internal
        # 'sentry:'-prefixed keys.
        if not hasattr(self, '_tag_cache'):
            self._tag_cache = sorted([
                t for t in GroupTagKey.objects.filter(
                    group=self,
                    project=self.project,
                ).values_list('key', flat=True)
                if with_internal or not t.startswith('sentry:')
            ])
        return self._tag_cache

    def error(self):
        return self.message
    error.short_description = _('error')

    def has_two_part_message(self):
        message = strip(self.message)
        return '\n' in message or len(message) > 100

    @property
    def title(self):
        # Prefer the culprit; fall back to the raw message.
        culprit = strip(self.culprit)
        if culprit:
            return culprit
        return self.message

    @property
    def message_short(self):
        # First line of the message, truncated to 100 chars, with a
        # placeholder for empty messages.
        message = strip(self.message)
        if not message:
            message = '<unlabeled message>'
        else:
            message = truncatechars(message.splitlines()[0], 100)
        return message

    @property
    def organization(self):
        return self.project.organization

    @property
    def team(self):
        return self.project.team

    def get_email_subject(self):
        return '[%s %s] %s: %s' % (
            self.team.name.encode('utf-8'), self.project.name.encode('utf-8'),
            six.text_type(self.get_level_display()).upper().encode('utf-8'),
            self.message_short.encode('utf-8'))
class Group(Model):
    """
    Aggregated message which summarizes a set of Events.
    """
    __core__ = False

    project = FlexibleForeignKey('sentry.Project', null=True)
    logger = models.CharField(
        max_length=64, blank=True, default=DEFAULT_LOGGER_NAME, db_index=True)
    level = BoundedPositiveIntegerField(
        choices=LOG_LEVELS.items(), default=logging.ERROR, blank=True, db_index=True)
    message = models.TextField()
    # Stored in legacy column 'view'.
    culprit = models.CharField(
        max_length=MAX_CULPRIT_LENGTH, blank=True, null=True, db_column='view')
    num_comments = BoundedPositiveIntegerField(default=0, null=True)
    platform = models.CharField(max_length=64, null=True)
    status = BoundedPositiveIntegerField(
        default=0,
        choices=(
            (GroupStatus.UNRESOLVED, _('Unresolved')),
            (GroupStatus.RESOLVED, _('Resolved')),
            (GroupStatus.IGNORED, _('Ignored')),
        ),
        db_index=True)
    times_seen = BoundedPositiveIntegerField(default=1, db_index=True)
    last_seen = models.DateTimeField(default=timezone.now, db_index=True)
    first_seen = models.DateTimeField(default=timezone.now, db_index=True)
    first_release = FlexibleForeignKey(
        'sentry.Release', null=True, on_delete=models.PROTECT)
    resolved_at = models.DateTimeField(null=True, db_index=True)
    # active_at should be the same as first_seen by default
    active_at = models.DateTimeField(null=True, db_index=True)
    time_spent_total = BoundedIntegerField(default=0)
    time_spent_count = BoundedIntegerField(default=0)
    score = BoundedIntegerField(default=0)
    # deprecated, do not use. GroupShare has superseded
    is_public = models.NullBooleanField(default=False, null=True)
    data = GzippedDictField(blank=True, null=True)
    short_id = BoundedBigIntegerField(null=True)

    objects = GroupManager()

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_groupedmessage'
        verbose_name_plural = _('grouped messages')
        verbose_name = _('grouped message')
        permissions = (("can_view", "Can view"), )
        index_together = (('project', 'first_release'), )
        unique_together = (('project', 'short_id'), )

    __repr__ = sane_repr('project_id')

    def __unicode__(self):
        return "(%s) %s" % (self.times_seen, self.error())

    def save(self, *args, **kwargs):
        # Backfill timestamps so last_seen >= first_seen == active_at holds
        # even when callers omit them.
        if not self.last_seen:
            self.last_seen = timezone.now()
        if not self.first_seen:
            self.first_seen = self.last_seen
        if not self.active_at:
            self.active_at = self.first_seen
        # We limit what we store for the message body
        self.message = strip(self.message)
        if self.message:
            self.message = truncatechars(self.message.splitlines()[0], 255)
        super(Group, self).save(*args, **kwargs)

    def get_absolute_url(self):
        return absolute_uri(
            reverse('sentry-group',
                    args=[self.organization.slug, self.project.slug, self.id]))

    @property
    def qualified_short_id(self):
        # e.g. "PROJECT-2A"; None when no short id was assigned.
        if self.short_id is not None:
            return '%s-%s' % (
                self.project.slug.upper(),
                base32_encode(self.short_id),
            )

    @property
    def event_set(self):
        from sentry.models import Event
        return Event.objects.filter(group_id=self.id)

    def is_over_resolve_age(self):
        # True when the project's auto-resolve window (hours) has elapsed
        # since the group was last seen; False when the option is unset.
        resolve_age = self.project.get_option('sentry:resolve_age', None)
        if not resolve_age:
            return False
        return self.last_seen < timezone.now() - timedelta(
            hours=int(resolve_age))

    def is_ignored(self):
        return self.get_status() == GroupStatus.IGNORED

    # TODO(dcramer): remove in 9.0 / after plugins no long ref is_muted
    is_muted = is_ignored

    def is_resolved(self):
        return self.get_status() == GroupStatus.RESOLVED

    def get_status(self):
        # XXX(dcramer): GroupSerializer reimplements this logic
        from sentry.models import GroupSnooze

        status = self.status

        # An ignored group with an invalid (e.g. expired) snooze reverts
        # to unresolved.
        if status == GroupStatus.IGNORED:
            try:
                snooze = GroupSnooze.objects.get(group=self)
            except GroupSnooze.DoesNotExist:
                pass
            else:
                if not snooze.is_valid(group=self):
                    status = GroupStatus.UNRESOLVED

        if status == GroupStatus.UNRESOLVED and self.is_over_resolve_age():
            return GroupStatus.RESOLVED
        return status

    def get_share_id(self):
        from sentry.models import GroupShare
        try:
            return GroupShare.objects.filter(group_id=self.id, ).values_list(
                'uuid', flat=True)[0]
        except IndexError:
            # Otherwise it has not been shared yet.
            return None

    @classmethod
    def from_share_id(cls, share_id):
        # Share ids are 32-char uuids; anything else cannot match.
        if not share_id or len(share_id) != 32:
            raise cls.DoesNotExist

        from sentry.models import GroupShare
        return cls.objects.get(id=GroupShare.objects.filter(
            uuid=share_id,
        ).values_list('group_id'), )

    def get_score(self):
        # Blends frequency (log-scaled) with recency (epoch seconds).
        return int(
            math.log(self.times_seen) * 600 +
            float(time.mktime(self.last_seen.timetuple())))

    def get_latest_event(self):
        from sentry.models import Event

        # Cached on the instance; re-sorts the 5 newest rows client-side
        # by EVENT_ORDERING_KEY to break datetime ties.
        if not hasattr(self, '_latest_event'):
            latest_events = sorted(
                Event.objects.filter(
                    group_id=self.id,
                ).order_by('-datetime')[0:5],
                key=EVENT_ORDERING_KEY,
                reverse=True,
            )
            try:
                self._latest_event = latest_events[0]
            except IndexError:
                self._latest_event = None
        return self._latest_event

    def get_oldest_event(self):
        from sentry.models import Event

        # Mirror of get_latest_event for the oldest row.
        if not hasattr(self, '_oldest_event'):
            oldest_events = sorted(
                Event.objects.filter(
                    group_id=self.id,
                ).order_by('datetime')[0:5],
                key=EVENT_ORDERING_KEY,
            )
            try:
                self._oldest_event = oldest_events[0]
            except IndexError:
                self._oldest_event = None
        return self._oldest_event

    def get_first_release(self):
        # Fall back to the tag store when no FK was recorded.
        if self.first_release_id is None:
            return tagstore.get_first_release(self.project_id, self.id)
        return self.first_release.version

    def get_last_release(self):
        return tagstore.get_last_release(self.project_id, self.id)

    def get_event_type(self):
        """
        Return the type of this issue.

        See ``sentry.eventtypes``.
        """
        return self.data.get('type', 'default')

    def get_event_metadata(self):
        """
        Return the metadata of this issue.

        See ``sentry.eventtypes``.
        """
        etype = self.data.get('type')
        if etype is None:
            etype = 'default'
        # Rows without stored metadata have it synthesized from the raw
        # message via the event-type machinery.
        if 'metadata' not in self.data:
            data = self.data.copy() if self.data else {}
            data['message'] = self.message
            return eventtypes.get(etype)(data).get_metadata()
        return self.data['metadata']

    @property
    def title(self):
        et = eventtypes.get(self.get_event_type())(self.data)
        return et.to_string(self.get_event_metadata())

    def error(self):
        warnings.warn('Group.error is deprecated, use Group.title',
                      DeprecationWarning)
        return self.title
    error.short_description = _('error')

    @property
    def message_short(self):
        warnings.warn('Group.message_short is deprecated, use Group.title',
                      DeprecationWarning)
        return self.title

    @property
    def organization(self):
        return self.project.organization

    @property
    def checksum(self):
        warnings.warn('Group.checksum is no longer used', DeprecationWarning)
        return ''

    def get_email_subject(self):
        return '%s - %s' % (self.qualified_short_id.encode('utf-8'),
                            self.title.encode('utf-8'))

    def count_users_seen(self):
        return tagstore.get_groups_user_counts(
            self.project_id, [self.id], environment_id=None)[self.id]
class EventBase(Model):
    """
    Abstract base class for both Event and Group.
    """
    project = models.ForeignKey(Project, null=True)
    logger = models.CharField(max_length=64, blank=True, default='root', db_index=True)
    level = BoundedPositiveIntegerField(
        choices=LOG_LEVELS.items(), default=logging.ERROR, blank=True, db_index=True)
    message = models.TextField()
    # Stored in legacy column 'view'.
    culprit = models.CharField(
        max_length=MAX_CULPRIT_LENGTH, blank=True, null=True, db_column='view')
    checksum = models.CharField(max_length=32, db_index=True)
    data = GzippedDictField(blank=True, null=True)
    num_comments = BoundedPositiveIntegerField(default=0, null=True)
    platform = models.CharField(max_length=64, null=True)

    class Meta:
        abstract = True

    def save(self, *args, **kwargs):
        # Truncate over-long logger names to fit the 64-char column.
        if len(self.logger) > 64:
            self.logger = self.logger[0:61] + u"..."
        super(EventBase, self).save(*args, **kwargs)

    def error(self):
        # Display form of the message: truncated to 100 chars, with a
        # placeholder for empty messages.
        message = strip(self.message)
        if message:
            message = truncatechars(message, 100)
        else:
            message = '<unlabeled message>'
        return message
    error.short_description = _('error')

    def has_two_part_message(self):
        message = strip(self.message)
        return '\n' in message or len(message) > 100

    def message_top(self):
        # Prefer the culprit; otherwise the first line of the message.
        culprit = strip(self.culprit)
        if culprit:
            return culprit
        message = strip(self.message)
        if not strip(message):
            return '<unlabeled message>'
        return truncatechars(message.splitlines()[0], 100)

    @property
    def team(self):
        return self.project.team

    @property
    def user_ident(self):
        """
        The identifier from a user is considered from several interfaces.

        In order:

        - User.id
        - User.email
        - User.username
        - Http.env.REMOTE_ADDR
        """
        user_data = self.data.get('sentry.interfaces.User')
        if user_data:
            ident = user_data.get('id')
            if ident:
                return 'id:%s' % (ident, )

            ident = user_data.get('email')
            if ident:
                return 'email:%s' % (ident, )

            ident = user_data.get('username')
            if ident:
                return 'username:%s' % (ident, )

        http_data = self.data.get('sentry.interfaces.Http')
        if http_data:
            if 'env' in http_data:
                ident = http_data['env'].get('REMOTE_ADDR')
                if ident:
                    return 'ip:%s' % (ident, )

        # No identifying information available.
        return None
def normalize(self):
    """Normalize ``self.data`` (a raw client event payload) in place.

    Pipeline, in order: cast loosely-typed top-level values, coerce the
    raw 'message' into the Message interface, schema-validate, convert
    remaining keys into typed interfaces, then default/trim the standard
    attributes. Validation problems are accumulated into
    ``data['errors']`` rather than raised. Returns the mutated dict.
    """
    data = self.data

    errors = data.get('errors', [])

    # Before validating with a schema, attempt to cast values to their desired types
    # so that the schema doesn't have to take every type variation into account.
    text = six.text_type
    fp_types = six.string_types + six.integer_types + (float, )

    def to_values(v):
        # Lists/tuples are shorthand for the {'values': [...]} envelope.
        return {'values': v} if v and isinstance(v, (tuple, list)) else v

    casts = {
        'environment': lambda v: text(v) if v is not None else v,
        'fingerprint': lambda v: list(map(text, v)) if isinstance(v, list) and all(
            isinstance(f, fp_types) for f in v) else v,
        'release': lambda v: text(v) if v is not None else v,
        'dist': lambda v: text(v).strip() if v is not None else v,
        'time_spent': lambda v: int(v) if v is not None else v,
        'tags': lambda v: [(text(v_k).replace(' ', '-').strip(), text(v_v).strip())
                           for (v_k, v_v) in dict(v).items()],
        'timestamp': lambda v: process_timestamp(v),
        'platform': lambda v: v if v in VALID_PLATFORMS else 'other',

        # These can be sent as lists and need to be converted to {'values': [...]}
        'exception': to_values,
        'sentry.interfaces.Exception': to_values,
        'breadcrumbs': to_values,
        'sentry.interfaces.Breadcrumbs': to_values,
        'threads': to_values,
        'sentry.interfaces.Threads': to_values,
    }

    for c in casts:
        if c in data:
            try:
                data[c] = casts[c](data[c])
            except Exception as e:
                # A value that cannot be cast is recorded and dropped.
                errors.append({
                    'type': EventError.INVALID_DATA,
                    'name': c,
                    'value': data[c]
                })
                del data[c]

    # raw 'message' is coerced to the Message interface, as its used for pure index of
    # searchable strings. If both a raw 'message' and a Message interface exist, try and
    # add the former as the 'formatted' attribute of the latter.
    # See GH-3248
    msg_str = data.pop('message', None)
    if msg_str:
        msg_if = data.setdefault('sentry.interfaces.Message', {'message': msg_str})
        if msg_if.get('message') != msg_str:
            msg_if.setdefault('formatted', msg_str)

    # Validate main event body and tags against schema
    is_valid, event_errors = validate_and_default_interface(data, 'event')
    errors.extend(event_errors)
    if 'tags' in data:
        is_valid, tag_errors = validate_and_default_interface(data['tags'], 'tags', name='tags')
        errors.extend(tag_errors)

    # Validate interfaces: every non-reserved key is popped and, if it maps
    # to a known interface, re-inserted under the interface's canonical path.
    for k in list(iter(data)):
        if k in CLIENT_RESERVED_ATTRS:
            continue

        value = data.pop(k)

        if not value:
            self.logger.debug('Ignored empty interface value: %s', k)
            continue

        try:
            interface = get_interface(k)
        except ValueError:
            self.logger.debug('Ignored unknown attribute: %s', k)
            errors.append({
                'type': EventError.INVALID_ATTRIBUTE,
                'name': k
            })
            continue

        try:
            inst = interface.to_python(value)
            data[inst.get_path()] = inst.to_json()
        except Exception as e:
            # Client-caused validation problems are only debug-logged;
            # anything else is a server-side error worth reporting.
            log = self.logger.debug if isinstance(
                e, InterfaceValidationError) else self.logger.error
            log('Discarded invalid value for interface: %s (%r)', k, value, exc_info=True)
            errors.append({
                'type': EventError.INVALID_DATA,
                'name': k,
                'value': value
            })

    # Accept numeric levels (or digit strings) and map everything back to
    # the canonical numeric level.
    level = data.get('level') or DEFAULT_LOG_LEVEL
    if isinstance(level, int) or (isinstance(level, six.string_types) and level.isdigit()):
        level = LOG_LEVELS.get(int(level), DEFAULT_LOG_LEVEL)
    data['level'] = LOG_LEVELS_MAP.get(level, LOG_LEVELS_MAP[DEFAULT_LOG_LEVEL])

    # A dist is meaningless without a release.
    if data.get('dist') and not data.get('release'):
        data['dist'] = None

    timestamp = data.get('timestamp')
    if not timestamp:
        timestamp = timezone.now()

    # TODO (alex) can this all be replaced by utcnow?
    # it looks like the only time that this would even be hit is when timestamp
    # is not defined, as the earlier process_timestamp already converts existing
    # timestamps to floats.
    if isinstance(timestamp, datetime):
        # We must convert date to local time so Django doesn't mess it up
        # based on TIME_ZONE
        if settings.TIME_ZONE:
            if not timezone.is_aware(timestamp):
                timestamp = timestamp.replace(tzinfo=timezone.utc)
        elif timezone.is_aware(timestamp):
            timestamp = timestamp.replace(tzinfo=None)
        timestamp = float(timestamp.strftime('%s'))

    data['timestamp'] = timestamp
    data['received'] = float(timezone.now().strftime('%s'))

    data.setdefault('culprit', None)
    data.setdefault('transaction', None)
    data.setdefault('server_name', None)
    data.setdefault('site', None)
    data.setdefault('checksum', None)
    data.setdefault('fingerprint', None)
    data.setdefault('platform', None)
    data.setdefault('dist', None)
    data.setdefault('environment', None)
    data.setdefault('extra', {})
    data.setdefault('tags', [])

    # Fix case where legacy apps pass 'environment' as a tag
    # instead of a top level key.
    # TODO (alex) save() just reinserts the environment into the tags
    if not data.get('environment'):
        tagsdict = dict(data['tags'])
        if 'environment' in tagsdict:
            data['environment'] = tagsdict['environment']
            del tagsdict['environment']
            data['tags'] = tagsdict.items()

    # the SDKs currently do not describe event types, and we must infer
    # them from available attributes
    data['type'] = eventtypes.infer(data).key
    data['version'] = self.version

    # A lone Stacktrace alongside a single-value Exception is folded into
    # that exception value.
    exception = data.get('sentry.interfaces.Exception')
    stacktrace = data.get('sentry.interfaces.Stacktrace')
    if exception and len(exception['values']) == 1 and stacktrace:
        exception['values'][0]['stacktrace'] = stacktrace
        del data['sentry.interfaces.Stacktrace']

    if 'sentry.interfaces.Http' in data:
        try:
            ip_address = validate_ip(
                data['sentry.interfaces.Http'].get('env', {}).get('REMOTE_ADDR'),
                required=False,
            )
            if ip_address:
                # Only fills in a missing User ip_address; never overwrites.
                data.setdefault('sentry.interfaces.User', {}).setdefault('ip_address', ip_address)
        except ValueError:
            pass

    # Trim values
    logger = data.get('logger', DEFAULT_LOGGER_NAME)
    data['logger'] = trim(logger.strip(), 64)

    trim_dict(data['extra'], max_size=settings.SENTRY_MAX_EXTRA_VARIABLE_SIZE)

    if data['culprit']:
        data['culprit'] = trim(data['culprit'], MAX_CULPRIT_LENGTH)

    if data['transaction']:
        data['transaction'] = trim(data['transaction'], MAX_CULPRIT_LENGTH)

    data['errors'] = errors

    return data
from time import time

from sentry.app import env
from sentry.cache import default_cache
from sentry.constants import (CLIENT_RESERVED_ATTRS, DEFAULT_LOG_LEVEL, LOG_LEVELS,
                              MAX_TAG_VALUE_LENGTH, MAX_TAG_KEY_LENGTH)
from sentry.interfaces.base import get_interface
from sentry.models import EventError, Project, ProjectKey
from sentry.tasks.store import preprocess_event
from sentry.utils import is_float, json
from sentry.utils.auth import parse_auth_header
from sentry.utils.compat import StringIO
from sentry.utils.strings import decompress

# Reverse lookup table: level name -> numeric level.
LOG_LEVEL_REVERSE_MAP = dict((v, k) for k, v in LOG_LEVELS.iteritems())


class APIError(Exception):
    """Base exception for client API failures.

    Subclasses may override the class-level defaults; instances may also
    supply a specific message and error name at construction time.

    :param msg: optional human-readable error message (defaults to the
                class-level ``msg``).
    :param name: optional machine-readable error name.
    """
    http_status = 400
    msg = 'Invalid request'
    name = None

    def __init__(self, msg=None, name=None):
        if msg:
            self.msg = msg
        # BUG FIX: this previously read ``if self.name:``, which is always
        # falsy on the base class, so a ``name`` passed by the caller was
        # silently discarded. Test the argument, not the attribute.
        if name:
            self.name = name

    def __str__(self):
        return self.msg or ''
def serialize(self, obj, attrs, user):
    """Build the REST API representation of a Group.

    Derives an effective status (honoring expired snoozes and the
    project's auto-resolve age), restricts the permalink to members of
    the owning organization, and flattens the precomputed ``attrs`` into
    the response dict.
    """
    effective_status = obj.status
    status_details = {}

    snooze_until = attrs['ignore_duration']
    if snooze_until:
        snooze_expired = snooze_until < timezone.now()
        if snooze_expired and effective_status == GroupStatus.IGNORED:
            # The snooze window has elapsed; present the group as unresolved.
            effective_status = GroupStatus.UNRESOLVED
        else:
            status_details['snoozeUntil' if False else 'ignoreUntil'] = snooze_until
    elif effective_status == GroupStatus.UNRESOLVED and obj.is_over_resolve_age():
        effective_status = GroupStatus.RESOLVED
        status_details['autoResolved'] = True

    if effective_status == GroupStatus.RESOLVED:
        status_label = 'resolved'
        if attrs['pending_resolution']:
            status_details['inNextRelease'] = True
    elif effective_status == GroupStatus.IGNORED:
        status_label = 'ignored'
    elif effective_status in (GroupStatus.PENDING_DELETION, GroupStatus.DELETION_IN_PROGRESS):
        status_label = 'pending_deletion'
    elif effective_status == GroupStatus.PENDING_MERGE:
        status_label = 'pending_merge'
    else:
        status_label = 'unresolved'

    # If user is not logged in and member of the organization,
    # do not return the permalink which contains private information i.e. org name.
    permalink = None
    if user.is_authenticated() and user.get_orgs().filter(id=obj.organization.id).exists():
        permalink = absolute_uri(
            reverse('sentry-group',
                    args=[obj.organization.slug, obj.project.slug, obj.id]))

    is_subscribed, subscription = attrs['subscription']

    subscription_details = None
    if is_subscribed and subscription is not None:
        subscription_details = {
            'reason': SUBSCRIPTION_REASON_MAP.get(subscription.reason, 'unknown'),
        }

    return {
        'id': six.text_type(obj.id),
        'shareId': obj.get_share_id(),
        'shortId': obj.qualified_short_id,
        'count': six.text_type(obj.times_seen),
        'userCount': attrs['user_count'],
        'title': obj.title,
        'culprit': obj.culprit,
        'permalink': permalink,
        'firstSeen': obj.first_seen,
        'lastSeen': obj.last_seen,
        'logger': obj.logger or None,
        'level': LOG_LEVELS.get(obj.level, 'unknown'),
        'status': status_label,
        'statusDetails': status_details,
        'isPublic': obj.is_public,
        'project': {
            'name': obj.project.name,
            'slug': obj.project.slug,
        },
        'type': obj.get_event_type(),
        'metadata': obj.get_event_metadata(),
        'numComments': obj.num_comments,
        'assignedTo': attrs['assigned_to'],
        'isBookmarked': attrs['is_bookmarked'],
        'isSubscribed': is_subscribed,
        'subscriptionDetails': subscription_details,
        'hasSeen': attrs['has_seen'],
        'annotations': attrs['annotations'],
    }
class Group(Model):
    """
    Aggregated message which summarizes a set of Events.

    A Group (issue) tracks first/last occurrence, status
    (unresolved/resolved/ignored), a sorting ``score`` recomputed on
    save, and denormalized event-type metadata in ``data``.
    """
    __core__ = False

    project = FlexibleForeignKey("sentry.Project")
    logger = models.CharField(
        max_length=64, blank=True, default=DEFAULT_LOGGER_NAME, db_index=True)
    level = BoundedPositiveIntegerField(
        choices=LOG_LEVELS.items(), default=logging.ERROR, blank=True, db_index=True)
    message = models.TextField()
    # NOTE: stored in the legacy 'view' column
    culprit = models.CharField(
        max_length=MAX_CULPRIT_LENGTH, blank=True, null=True, db_column="view")
    num_comments = BoundedPositiveIntegerField(default=0, null=True)
    platform = models.CharField(max_length=64, null=True)
    status = BoundedPositiveIntegerField(
        default=0,
        choices=(
            (GroupStatus.UNRESOLVED, _("Unresolved")),
            (GroupStatus.RESOLVED, _("Resolved")),
            (GroupStatus.IGNORED, _("Ignored")),
        ),
        db_index=True,
    )
    times_seen = BoundedPositiveIntegerField(default=1, db_index=True)
    last_seen = models.DateTimeField(default=timezone.now, db_index=True)
    first_seen = models.DateTimeField(default=timezone.now, db_index=True)
    first_release = FlexibleForeignKey("sentry.Release", null=True, on_delete=models.PROTECT)
    resolved_at = models.DateTimeField(null=True, db_index=True)
    # active_at should be the same as first_seen by default
    active_at = models.DateTimeField(null=True, db_index=True)
    time_spent_total = BoundedIntegerField(default=0)
    time_spent_count = BoundedIntegerField(default=0)
    score = BoundedIntegerField(default=0)
    # deprecated, do not use. GroupShare has superseded
    is_public = models.NullBooleanField(default=False, null=True)
    data = GzippedDictField(blank=True, null=True)
    short_id = BoundedBigIntegerField(null=True)

    objects = GroupManager(cache_fields=("id", ))

    class Meta:
        app_label = "sentry"
        db_table = "sentry_groupedmessage"
        verbose_name_plural = _("grouped messages")
        verbose_name = _("grouped message")
        permissions = (("can_view", "Can view"), )
        index_together = [("project", "first_release"), ("project", "id")]
        unique_together = (("project", "short_id"), )

    __repr__ = sane_repr("project_id")

    def __unicode__(self):
        return "(%s) %s" % (self.times_seen, self.error())

    def save(self, *args, **kwargs):
        # Backfill the timestamp chain: last_seen -> first_seen -> active_at.
        if not self.last_seen:
            self.last_seen = timezone.now()
        if not self.first_seen:
            self.first_seen = self.last_seen
        if not self.active_at:
            self.active_at = self.first_seen
        # We limit what we store for the message body
        self.message = strip(self.message)
        if self.message:
            self.message = truncatechars(self.message.splitlines()[0], 255)
        if self.times_seen is None:
            self.times_seen = 1
        # Keep the sort score in sync with frequency/recency on every save.
        self.score = type(self).calculate_score(
            times_seen=self.times_seen, last_seen=self.last_seen)
        super(Group, self).save(*args, **kwargs)

    def get_absolute_url(self, params=None):
        """Return the absolute issue URL, with optional query ``params``."""
        url = reverse("sentry-organization-issue", args=[self.organization.slug, self.id])
        if params:
            url = url + "?" + urlencode(params)
        return absolute_uri(url)

    @property
    def qualified_short_id(self):
        # e.g. "PROJECT-AB3" — None when no short id has been assigned.
        if self.short_id is not None:
            return "%s-%s" % (self.project.slug.upper(), base32_encode(self.short_id))

    def is_over_resolve_age(self):
        """True when the project's 'sentry:resolve_age' (hours) has elapsed
        since the group was last seen."""
        resolve_age = self.project.get_option("sentry:resolve_age", None)
        if not resolve_age:
            return False
        return self.last_seen < timezone.now() - timedelta(hours=int(resolve_age))

    def is_ignored(self):
        return self.get_status() == GroupStatus.IGNORED

    def is_unresolved(self):
        return self.get_status() == GroupStatus.UNRESOLVED

    # TODO(dcramer): remove in 9.0 / after plugins no long ref is_muted
    is_muted = is_ignored

    def is_resolved(self):
        return self.get_status() == GroupStatus.RESOLVED

    def get_status(self):
        """Return the effective status, resolving expired snoozes and the
        project auto-resolve age on top of the raw ``status`` column."""
        # XXX(dcramer): GroupSerializer reimplements this logic
        from sentry.models import GroupSnooze

        status = self.status

        if status == GroupStatus.IGNORED:
            try:
                snooze = GroupSnooze.objects.get_from_cache(group=self)
            except GroupSnooze.DoesNotExist:
                pass
            else:
                # An invalid (expired) snooze means the ignore no longer holds.
                if not snooze.is_valid(group=self):
                    status = GroupStatus.UNRESOLVED

        if status == GroupStatus.UNRESOLVED and self.is_over_resolve_age():
            return GroupStatus.RESOLVED
        return status

    def get_share_id(self):
        """Return the public share UUID for this group, or None."""
        from sentry.models import GroupShare
        try:
            return GroupShare.objects.filter(group_id=self.id).values_list(
                "uuid", flat=True)[0]
        except IndexError:
            # Otherwise it has not been shared yet.
            return None

    @classmethod
    def from_share_id(cls, share_id):
        """Resolve a 32-char share UUID back to its Group.

        Raises ``cls.DoesNotExist`` for malformed or unknown ids.
        """
        if not share_id or len(share_id) != 32:
            raise cls.DoesNotExist

        from sentry.models import GroupShare
        return cls.objects.get(id=GroupShare.objects.filter(
            uuid=share_id).values_list("group_id"))

    def get_score(self):
        return type(self).calculate_score(self.times_seen, self.last_seen)

    def get_latest_event(self):
        # Cached on the instance after the first lookup.
        if not hasattr(self, "_latest_event"):
            self._latest_event = self.get_latest_event_for_environments()
        return self._latest_event

    def get_latest_event_for_environments(self, environments=()):
        return get_oldest_or_latest_event_for_environments(
            EventOrdering.LATEST,
            environments=environments,
            issue_id=self.id,
            project_id=self.project_id,
        )

    def get_oldest_event_for_environments(self, environments=()):
        return get_oldest_or_latest_event_for_environments(
            EventOrdering.OLDEST,
            environments=environments,
            issue_id=self.id,
            project_id=self.project_id,
        )

    def get_first_release(self):
        # Fall back to tagstore when the FK is not populated.
        if self.first_release_id is None:
            return tagstore.get_first_release(self.project_id, self.id)
        return self.first_release.version

    def get_last_release(self):
        return tagstore.get_last_release(self.project_id, self.id)

    def get_event_type(self):
        """
        Return the type of this issue.

        See ``sentry.eventtypes``.
        """
        return self.data.get("type", "default")

    def get_event_metadata(self):
        """
        Return the metadata of this issue.

        See ``sentry.eventtypes``.
        """
        return self.data["metadata"]

    @property
    def title(self):
        et = eventtypes.get(self.get_event_type())()
        return et.get_title(self.get_event_metadata())

    def location(self):
        et = eventtypes.get(self.get_event_type())()
        return et.get_location(self.get_event_metadata())

    def error(self):
        warnings.warn("Group.error is deprecated, use Group.title", DeprecationWarning)
        return self.title
    error.short_description = _("error")

    @property
    def message_short(self):
        warnings.warn("Group.message_short is deprecated, use Group.title", DeprecationWarning)
        return self.title

    @property
    def organization(self):
        return self.project.organization

    @property
    def checksum(self):
        warnings.warn("Group.checksum is no longer used", DeprecationWarning)
        return ""

    def get_email_subject(self):
        return "%s - %s" % (self.qualified_short_id.encode("utf-8"),
                            self.title.encode("utf-8"))

    def count_users_seen(self):
        """Return the number of distinct users that have seen this group."""
        return tagstore.get_groups_user_counts(
            [self.project_id], [self.id],
            environment_ids=None, start=self.first_seen)[self.id]

    @classmethod
    def calculate_score(cls, times_seen, last_seen):
        # Frequency (log-scaled) plus recency (epoch seconds); higher is hotter.
        return math.log(float(times_seen or 1)) * 600 + float(last_seen.strftime("%s"))
def normalize(self, request_env=None):
    """Normalize ``self.data`` (a raw client event payload) in place.

    :param request_env: optional dict of request context; recognized keys
        are ``client_ip`` (used to fill ``{{auto}}`` IP placeholders and
        default the User IP) and ``auth`` (whose ``is_public`` flag also
        gates the User-IP defaulting).

    Validation problems are accumulated into ``data['errors']`` rather
    than raised. Returns the mutated dict.
    """
    request_env = request_env or {}
    data = self.data
    errors = data['errors'] = []

    # Before validating with a schema, attempt to cast values to their desired types
    # so that the schema doesn't have to take every type variation into account.
    text = six.text_type
    fp_types = six.string_types + six.integer_types + (float, )

    def to_values(v):
        # Lists/tuples are shorthand for the {'values': [...]} envelope.
        return {'values': v} if v and isinstance(v, (tuple, list)) else v

    def convert_fingerprint(values):
        # Floats above 2**53 (or non-integral floats) cannot round-trip
        # losslessly; stringify them and count the occurrence.
        rv = values[:]
        bad_float = False
        for idx, item in enumerate(rv):
            if isinstance(item, float) and \
                    (abs(item) >= (1 << 53) or int(item) != item):
                bad_float = True
                rv[idx] = text(item)
        if bad_float:
            metrics.incr(
                'events.bad_float_fingerprint',
                skip_internal=True,
                tags={
                    'project_id': data.get('project'),
                },
            )
        return rv

    casts = {
        'environment': lambda v: text(v) if v is not None else v,
        'fingerprint': lambda v: convert_fingerprint(v) if isinstance(v, list) and all(
            isinstance(f, fp_types) for f in v) else v,
        'release': lambda v: text(v) if v is not None else v,
        'dist': lambda v: text(v).strip() if v is not None else v,
        'time_spent': lambda v: int(v) if v is not None else v,
        'tags': lambda v: [(text(v_k).replace(' ', '-').strip(), text(v_v).strip())
                           for (v_k, v_v) in dict(v).items()],
        'timestamp': lambda v: process_timestamp(v),
        'platform': lambda v: v if v in VALID_PLATFORMS else 'other',
        'sentry.interfaces.Message': lambda v: v if isinstance(v, dict) else {
            'message': v
        },

        # These can be sent as lists and need to be converted to {'values': [...]}
        'exception': to_values,
        'sentry.interfaces.Exception': to_values,
        'breadcrumbs': to_values,
        'sentry.interfaces.Breadcrumbs': to_values,
        'threads': to_values,
        'sentry.interfaces.Threads': to_values,
    }

    for c in casts:
        if c in data:
            try:
                data[c] = casts[c](data[c])
            except InvalidTimestamp as it:
                # Preserve the specific timestamp error type carried in args[0].
                errors.append({
                    'type': it.args[0],
                    'name': c,
                    'value': data[c]
                })
                del data[c]
            except Exception as e:
                errors.append({
                    'type': EventError.INVALID_DATA,
                    'name': c,
                    'value': data[c]
                })
                del data[c]

    # raw 'message' is coerced to the Message interface, as its used for pure index of
    # searchable strings. If both a raw 'message' and a Message interface exist, try and
    # add the former as the 'formatted' attribute of the latter.
    # See GH-3248
    msg_str = data.pop('message', None)
    if msg_str:
        msg_if = data.setdefault('sentry.interfaces.Message', {'message': msg_str})
        if msg_if.get('message') != msg_str:
            msg_if.setdefault('formatted', msg_str)

    # Fill in ip addresses marked as {{auto}}
    client_ip = request_env.get('client_ip')
    if client_ip:
        if get_path(data, ['sentry.interfaces.Http', 'env', 'REMOTE_ADDR']) == '{{auto}}':
            data['sentry.interfaces.Http']['env']['REMOTE_ADDR'] = client_ip

        if get_path(data, ['request', 'env', 'REMOTE_ADDR']) == '{{auto}}':
            data['request']['env']['REMOTE_ADDR'] = client_ip

        if get_path(data, ['sentry.interfaces.User', 'ip_address']) == '{{auto}}':
            data['sentry.interfaces.User']['ip_address'] = client_ip

        if get_path(data, ['user', 'ip_address']) == '{{auto}}':
            data['user']['ip_address'] = client_ip

    # Validate main event body and tags against schema
    is_valid, event_errors = validate_and_default_interface(data, 'event')
    errors.extend(event_errors)
    if 'tags' in data:
        is_valid, tag_errors = validate_and_default_interface(data['tags'], 'tags', name='tags')
        errors.extend(tag_errors)

    # Validate interfaces: every non-reserved key is popped and, if it maps
    # to a known interface, re-inserted under the interface's canonical path.
    for k in list(iter(data)):
        if k in CLIENT_RESERVED_ATTRS:
            continue

        value = data.pop(k)

        if not value:
            self.logger.debug('Ignored empty interface value: %s', k)
            continue

        try:
            interface = get_interface(k)
        except ValueError:
            self.logger.debug('Ignored unknown attribute: %s', k)
            errors.append({
                'type': EventError.INVALID_ATTRIBUTE,
                'name': k
            })
            continue

        try:
            inst = interface.to_python(value)
            data[inst.get_path()] = inst.to_json()
        except Exception as e:
            # Client-caused validation problems are only debug-logged;
            # anything else is a server-side error worth reporting.
            log = self.logger.debug if isinstance(
                e, InterfaceValidationError) else self.logger.error
            log('Discarded invalid value for interface: %s (%r)', k, value, exc_info=True)
            errors.append({
                'type': EventError.INVALID_DATA,
                'name': k,
                'value': value
            })

    # Additional data coercion and defaulting
    level = data.get('level') or DEFAULT_LOG_LEVEL
    if isinstance(level, int) or (isinstance(level, six.string_types) and level.isdigit()):
        level = LOG_LEVELS.get(int(level), DEFAULT_LOG_LEVEL)
    data['level'] = LOG_LEVELS_MAP.get(level, LOG_LEVELS_MAP[DEFAULT_LOG_LEVEL])

    # A dist is meaningless without a release.
    if data.get('dist') and not data.get('release'):
        data['dist'] = None

    timestamp = data.get('timestamp')
    if not timestamp:
        timestamp = timezone.now()

    # TODO (alex) can this all be replaced by utcnow?
    # it looks like the only time that this would even be hit is when timestamp
    # is not defined, as the earlier process_timestamp already converts existing
    # timestamps to floats.
    if isinstance(timestamp, datetime):
        # We must convert date to local time so Django doesn't mess it up
        # based on TIME_ZONE
        if settings.TIME_ZONE:
            if not timezone.is_aware(timestamp):
                timestamp = timestamp.replace(tzinfo=timezone.utc)
        elif timezone.is_aware(timestamp):
            timestamp = timestamp.replace(tzinfo=None)
        timestamp = float(timestamp.strftime('%s'))

    data['timestamp'] = timestamp
    data['received'] = float(timezone.now().strftime('%s'))

    data.setdefault('checksum', None)
    data.setdefault('culprit', None)
    data.setdefault('dist', None)
    data.setdefault('environment', None)
    data.setdefault('extra', {})
    data.setdefault('fingerprint', None)
    data.setdefault('logger', DEFAULT_LOGGER_NAME)
    data.setdefault('platform', None)
    data.setdefault('server_name', None)
    data.setdefault('site', None)
    data.setdefault('tags', [])
    data.setdefault('transaction', None)

    # Fix case where legacy apps pass 'environment' as a tag
    # instead of a top level key.
    # TODO (alex) save() just reinserts the environment into the tags
    if not data.get('environment'):
        tagsdict = dict(data['tags'])
        if 'environment' in tagsdict:
            data['environment'] = tagsdict['environment']
            del tagsdict['environment']
            data['tags'] = tagsdict.items()

    # the SDKs currently do not describe event types, and we must infer
    # them from available attributes
    data['type'] = eventtypes.infer(data).key
    data['version'] = self.version

    # A lone Stacktrace alongside a single-value Exception is folded into
    # that exception value.
    exception = data.get('sentry.interfaces.Exception')
    stacktrace = data.get('sentry.interfaces.Stacktrace')
    if exception and len(exception['values']) == 1 and stacktrace:
        exception['values'][0]['stacktrace'] = stacktrace
        del data['sentry.interfaces.Stacktrace']

    # Exception mechanism needs SDK information to resolve proper names in
    # exception meta (such as signal names). "SDK Information" really means
    # the operating system version the event was generated on. Some
    # normalization still works without sdk_info, such as mach_exception
    # names (they can only occur on macOS).
    if exception:
        sdk_info = get_sdk_from_event(data)
        for ex in exception['values']:
            if 'mechanism' in ex:
                normalize_mechanism_meta(ex['mechanism'], sdk_info)

    # If there is no User ip_address, update it either from the Http interface
    # or the client_ip of the request.
    auth = request_env.get('auth')
    is_public = auth and auth.is_public
    add_ip_platforms = ('javascript', 'cocoa', 'objc')

    http_ip = data.get('sentry.interfaces.Http', {}).get('env', {}).get('REMOTE_ADDR')
    if http_ip:
        data.setdefault('sentry.interfaces.User', {}).setdefault('ip_address', http_ip)
    elif client_ip and (is_public or data.get('platform') in add_ip_platforms):
        data.setdefault('sentry.interfaces.User', {}).setdefault('ip_address', client_ip)

    # Trim values
    data['logger'] = trim(data['logger'].strip(), 64)
    trim_dict(data['extra'], max_size=settings.SENTRY_MAX_EXTRA_VARIABLE_SIZE)

    if data['culprit']:
        data['culprit'] = trim(data['culprit'], MAX_CULPRIT_LENGTH)

    if data['transaction']:
        data['transaction'] = trim(data['transaction'], MAX_CULPRIT_LENGTH)

    return data
class Group(Model):
    """
    Aggregated message which summarizes a set of Events.

    A Group (issue) tracks first/last occurrence and status
    (unresolved/resolved/muted), and exposes helpers for share ids,
    latest/oldest events, and tag summaries.
    """
    __core__ = False

    project = FlexibleForeignKey('sentry.Project', null=True)
    logger = models.CharField(
        max_length=64, blank=True, default=DEFAULT_LOGGER_NAME, db_index=True)
    level = BoundedPositiveIntegerField(
        choices=LOG_LEVELS.items(), default=logging.ERROR, blank=True, db_index=True)
    message = models.TextField()
    # NOTE: stored in the legacy 'view' column
    culprit = models.CharField(
        max_length=MAX_CULPRIT_LENGTH, blank=True, null=True, db_column='view')
    num_comments = BoundedPositiveIntegerField(default=0, null=True)
    platform = models.CharField(max_length=64, null=True)
    status = BoundedPositiveIntegerField(default=0, choices=(
        (GroupStatus.UNRESOLVED, _('Unresolved')),
        (GroupStatus.RESOLVED, _('Resolved')),
        (GroupStatus.MUTED, _('Muted')),
    ), db_index=True)
    times_seen = BoundedPositiveIntegerField(default=1, db_index=True)
    last_seen = models.DateTimeField(default=timezone.now, db_index=True)
    first_seen = models.DateTimeField(default=timezone.now, db_index=True)
    first_release = FlexibleForeignKey('sentry.Release', null=True, on_delete=models.PROTECT)
    resolved_at = models.DateTimeField(null=True, db_index=True)
    # active_at should be the same as first_seen by default
    active_at = models.DateTimeField(null=True, db_index=True)
    time_spent_total = BoundedIntegerField(default=0)
    time_spent_count = BoundedIntegerField(default=0)
    score = BoundedIntegerField(default=0)
    is_public = models.NullBooleanField(default=False, null=True)
    data = GzippedDictField(blank=True, null=True)
    short_id = BoundedBigIntegerField(null=True)

    objects = GroupManager()

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_groupedmessage'
        verbose_name_plural = _('grouped messages')
        verbose_name = _('grouped message')
        permissions = (("can_view", "Can view"), )
        index_together = (('project', 'first_release'), )
        unique_together = (('project', 'short_id'), )

    __repr__ = sane_repr('project_id')

    def __unicode__(self):
        return "(%s) %s" % (self.times_seen, self.error())

    def save(self, *args, **kwargs):
        # Backfill the timestamp chain: last_seen -> first_seen -> active_at.
        if not self.last_seen:
            self.last_seen = timezone.now()
        if not self.first_seen:
            self.first_seen = self.last_seen
        if not self.active_at:
            self.active_at = self.first_seen
        # We limit what we store for the message body
        self.message = strip(self.message)
        if self.message:
            self.message = truncatechars(self.message.splitlines()[0], 255)
        super(Group, self).save(*args, **kwargs)

    def get_absolute_url(self):
        return absolute_uri(
            reverse('sentry-group',
                    args=[self.organization.slug, self.project.slug, self.id]))

    @property
    def qualified_short_id(self):
        # e.g. "PROJECT-AB3" — None when no short id has been assigned.
        if self.short_id is not None:
            return '%s-%s' % (
                self.project.slug.upper(),
                base32_encode(self.short_id),
            )

    @property
    def event_set(self):
        from sentry.models import Event
        return Event.objects.filter(group_id=self.id)

    def is_over_resolve_age(self):
        """True when the project's 'sentry:resolve_age' (hours) has elapsed
        since the group was last seen."""
        resolve_age = self.project.get_option('sentry:resolve_age', None)
        if not resolve_age:
            return False
        return self.last_seen < timezone.now() - timedelta(hours=int(resolve_age))

    def is_muted(self):
        return self.get_status() == GroupStatus.MUTED

    def is_resolved(self):
        return self.get_status() == GroupStatus.RESOLVED

    def get_status(self):
        """Return the effective status, resolving expired snoozes and the
        project auto-resolve age on top of the raw ``status`` column."""
        # XXX(dcramer): GroupSerializer reimplements this logic
        from sentry.models import GroupSnooze

        if self.status == GroupStatus.MUTED:
            try:
                snooze = GroupSnooze.objects.get(group=self)
            except GroupSnooze.DoesNotExist:
                pass
            else:
                # XXX(dcramer): if the snooze row exists then we need
                # to confirm its still valid
                if snooze.until > timezone.now():
                    return GroupStatus.MUTED
                else:
                    return GroupStatus.UNRESOLVED

        if self.status == GroupStatus.UNRESOLVED and self.is_over_resolve_age():
            return GroupStatus.RESOLVED
        return self.status

    def get_share_id(self):
        # Share id is the hex16 encoding of "<project_id>.<group_id>".
        return b16encode(
            ('{}.{}'.format(self.project_id, self.id)).encode('utf-8')).lower().decode('utf-8')

    @classmethod
    def from_share_id(cls, share_id):
        """Decode a share id back to its Group.

        Raises ``cls.DoesNotExist`` for malformed or unknown ids.
        """
        if not share_id:
            raise cls.DoesNotExist

        try:
            project_id, group_id = b16decode(
                share_id.upper()).decode('utf-8').split('.')
        except (ValueError, TypeError):
            raise cls.DoesNotExist
        if not (project_id.isdigit() and group_id.isdigit()):
            raise cls.DoesNotExist
        return cls.objects.get(project=project_id, id=group_id)

    def get_score(self):
        # Frequency (log-scaled) plus recency (epoch seconds); higher is hotter.
        return int(
            math.log(self.times_seen) * 600 +
            float(time.mktime(self.last_seen.timetuple())))

    def get_latest_event(self):
        from sentry.models import Event

        # Cached on the instance. The top 5 by datetime are re-sorted with
        # EVENT_ORDERING_KEY to break ties deterministically.
        if not hasattr(self, '_latest_event'):
            latest_events = sorted(
                Event.objects.filter(
                    group_id=self.id,
                ).order_by('-datetime')[0:5],
                key=EVENT_ORDERING_KEY,
                reverse=True,
            )
            try:
                self._latest_event = latest_events[0]
            except IndexError:
                self._latest_event = None
        return self._latest_event

    def get_oldest_event(self):
        from sentry.models import Event

        # Mirror of get_latest_event for the oldest end of the range.
        if not hasattr(self, '_oldest_event'):
            oldest_events = sorted(
                Event.objects.filter(
                    group_id=self.id,
                ).order_by('datetime')[0:5],
                key=EVENT_ORDERING_KEY,
            )
            try:
                self._oldest_event = oldest_events[0]
            except IndexError:
                self._oldest_event = None
        return self._oldest_event

    def get_unique_tags(self, tag, since=None, order_by='-times_seen'):
        # TODO(dcramer): this has zero test coverage and is a critical path
        from sentry.models import GroupTagValue

        queryset = GroupTagValue.objects.filter(
            group=self,
            key=tag,
        )
        if since:
            queryset = queryset.filter(last_seen__gte=since)
        return queryset.values_list(
            'value',
            'times_seen',
            'first_seen',
            'last_seen',
        ).order_by(order_by)

    def get_tags(self, with_internal=True):
        """Return this group's tag keys as ``[{'key': ..., 'label': ...}]``,
        sorted by label; internal ('sentry:'-prefixed) keys are excluded
        when ``with_internal`` is False. Cached on the instance."""
        from sentry.models import GroupTagKey, TagKey

        if not hasattr(self, '_tag_cache'):
            group_tags = GroupTagKey.objects.filter(
                group=self,
                project=self.project,
            )
            if not with_internal:
                group_tags = group_tags.exclude(key__startswith='sentry:')
            group_tags = list(group_tags.values_list('key', flat=True))

            tag_keys = dict(
                (t.key, t)
                for t in TagKey.objects.filter(project=self.project, key__in=group_tags))

            results = []
            for key in group_tags:
                try:
                    tag_key = tag_keys[key]
                except KeyError:
                    # No TagKey row — derive a human label from the raw key.
                    label = key.replace('_', ' ').title()
                else:
                    label = tag_key.get_label()

                results.append({
                    'key': key,
                    'label': label,
                })

            self._tag_cache = sorted(results, key=lambda x: x['label'])

        return self._tag_cache

    def get_event_type(self):
        """
        Return the type of this issue.

        See ``sentry.eventtypes``.
        """
        return self.data.get('type', 'default')

    def get_event_metadata(self):
        """
        Return the metadata of this issue.

        See ``sentry.eventtypes``.
        """
        etype = self.data.get('type')
        if etype is None:
            etype = 'default'
        if 'metadata' not in self.data:
            # Legacy rows without denormalized metadata: compute it on the
            # fly from the stored message.
            data = self.data.copy() if self.data else {}
            data['message'] = self.message
            return eventtypes.get(etype)(data).get_metadata()
        return self.data['metadata']

    @property
    def title(self):
        et = eventtypes.get(self.get_event_type())(self.data)
        return et.to_string(self.get_event_metadata())

    def error(self):
        warnings.warn('Group.error is deprecated, use Group.title', DeprecationWarning)
        return self.title
    error.short_description = _('error')

    @property
    def message_short(self):
        warnings.warn('Group.message_short is deprecated, use Group.title', DeprecationWarning)
        return self.title

    def has_two_part_message(self):
        warnings.warn('Group.has_two_part_message is no longer used', DeprecationWarning)
        return False

    @property
    def organization(self):
        return self.project.organization

    @property
    def team(self):
        return self.project.team

    @property
    def checksum(self):
        warnings.warn('Group.checksum is no longer used', DeprecationWarning)
        return ''

    def get_email_subject(self):
        return '[%s] %s: %s' % (
            self.project.get_full_name().encode('utf-8'),
            six.text_type(self.get_level_display()).upper().encode('utf-8'),
            self.title.encode('utf-8'))
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import from collections import OrderedDict from django import forms from sentry.constants import LOG_LEVELS, LOG_LEVELS_MAP from sentry.rules.conditions.base import EventCondition LEVEL_CHOICES = OrderedDict( [("{0}".format(k), v) for k, v in sorted(LOG_LEVELS.items(), key=lambda x: x[0], reverse=True)] ) class MatchType(object): EQUAL = "eq" LESS_OR_EQUAL = "lte" GREATER_OR_EQUAL = "gte" MATCH_CHOICES = OrderedDict( [ (MatchType.EQUAL, "equal to"), (MatchType.LESS_OR_EQUAL, "less than or equal to"), (MatchType.GREATER_OR_EQUAL, "greater than or equal to"), ]
def digest(request):
    """
    Render a preview of the digest notification email using random sample data.

    The ``seed`` query parameter (defaulting to the current timestamp) seeds a
    private ``Random`` instance so that a given preview can be reproduced
    exactly by passing the same seed again.
    """
    seed = request.GET.get('seed', str(time.time()))
    # BUG FIX: the original call passed no argument for the ``%s``
    # placeholder, so the log line always printed a literal "%s".
    logger.debug('Using random seed value: %s', seed)

    random = Random(seed)

    now = datetime.utcnow().replace(tzinfo=pytz.utc)

    # TODO: Refactor all of these into something more manageable.
    org = Organization(
        id=1,
        slug='example',
        name='Example Organization',
    )

    team = Team(
        id=1,
        slug='example',
        name='Example Team',
        organization=org,
    )

    project = Project(
        id=1,
        slug='example',
        name='Example Project',
        team=team,
        organization=org,
    )

    # Rule ids intentionally start at 1 so that records can reference them.
    rules = {i: Rule(
        id=i,
        project=project,
        label="Rule #%s" % (i,),
    ) for i in xrange(1, random.randint(2, 4))}

    state = {
        'project': project,
        'groups': {},
        'rules': rules,
        'event_counts': {},
        'user_counts': {},
    }

    records = []

    group_sequence = itertools.count(1)
    event_sequence = itertools.count(1)

    for i in xrange(random.randint(1, 30)):
        group_id = next(group_sequence)

        # Fake a dotted "module.path in function" culprit out of random words.
        culprit = '{module} in {function}'.format(
            module='.'.join(
                ''.join(random.sample(WORDS, random.randint(1, int(random.paretovariate(2.2)))))
                for word in xrange(1, 4)
            ),
            function=random.choice(WORDS),
        )

        group = state['groups'][group_id] = Group(
            id=group_id,
            project=project,
            message=words(int(random.weibullvariate(8, 4)), common=False),
            culprit=culprit,
            level=random.choice(LOG_LEVELS.keys()),
        )

        offset = timedelta(seconds=0)
        for i in xrange(random.randint(1, 10)):
            offset += timedelta(seconds=random.random() * 120)
            event = Event(
                id=next(event_sequence),
                event_id=uuid.uuid4().hex,
                project=project,
                group=group,
                message=group.message,
                data=load_data('python'),
                datetime=now - offset,
            )

            records.append(
                Record(
                    event.event_id,
                    Notification(
                        event,
                        # BUG FIX: sample from the Rule objects, not the dict
                        # itself -- ``random.sample`` indexes its population
                        # with 0-based integers, while the dict is keyed from
                        # 1, so sampling the mapping directly can KeyError.
                        random.sample(state['rules'].values(), random.randint(1, len(state['rules']))),
                    ),
                    to_timestamp(event.datetime),
                )
            )

        # Use integer bounds: ``randint`` with a float upper bound relies on
        # deprecated coercion and breaks on Python 3.
        state['event_counts'][group_id] = random.randint(10, 10000)
        state['user_counts'][group_id] = random.randint(10, 10000)

    digest = build_digest(project, records, state)
    start, end, counts = get_digest_metadata(digest)

    return MailPreview(
        html_template='sentry/emails/digests/body.html',
        text_template='sentry/emails/digests/body.txt',
        context={
            'project': project,
            'counts': counts,
            'digest': digest,
            'start': start,
            'end': end,
        },
    ).render()
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import from collections import OrderedDict from django import forms from sentry.constants import LOG_LEVELS, LOG_LEVELS_MAP from sentry.rules.conditions.base import EventCondition LEVEL_CHOICES = OrderedDict([ ("{0}".format(k), "{0}".format(v.capitalize())) for k, v in sorted(LOG_LEVELS.items(), key=lambda x: x[0], reverse=True) ]) class LevelMatchType(object): EQUAL = 'eq' LESS_OR_EQUAL = 'lte' GREATER_OR_EQUAL = 'gte' class LevelEventForm(forms.Form): level = forms.ChoiceField( choices=LEVEL_CHOICES.items(), initial=30) match = forms.ChoiceField( choices=(
def digest(request):
    """
    Render a preview of the digest notification email using random sample data.

    The ``seed`` query parameter (defaulting to the current timestamp) seeds a
    private ``Random`` instance so that a given preview can be reproduced
    exactly by passing the same seed again.
    """
    seed = request.GET.get('seed', str(time.time()))
    # BUG FIX: the original call passed no argument for the ``%s``
    # placeholder, so the log line always printed a literal "%s".
    logger.debug('Using random seed value: %s', seed)

    random = Random(seed)

    now = datetime.utcnow().replace(tzinfo=pytz.utc)

    # TODO: Refactor all of these into something more manageable.
    org = Organization(
        id=1,
        slug='example',
        name='Example Organization',
    )

    team = Team(
        id=1,
        slug='example',
        name='Example Team',
        organization=org,
    )

    project = Project(
        id=1,
        slug='example',
        name='Example Project',
        team=team,
        organization=org,
    )

    # Rule ids intentionally start at 1 so that records can reference them.
    rules = {i: Rule(
        id=i,
        project=project,
        label="Rule #%s" % (i,),
    ) for i in xrange(1, random.randint(2, 4))}

    state = {
        'project': project,
        'groups': {},
        'rules': rules,
        'event_counts': {},
        'user_counts': {},
    }

    records = []

    group_sequence = itertools.count(1)
    event_sequence = itertools.count(1)

    for i in xrange(random.randint(1, 30)):
        group_id = next(group_sequence)

        group = state['groups'][group_id] = Group(
            id=group_id,
            project=project,
            message=words(int(random.paretovariate(1.05)), common=False),
            culprit=words(int(random.paretovariate(1)), common=False),
            level=random.choice(LOG_LEVELS.keys()),
        )

        offset = timedelta(seconds=0)
        for i in xrange(random.randint(1, 10)):
            offset += timedelta(seconds=random.random() * 120)
            event = Event(
                id=next(event_sequence),
                event_id=uuid.uuid4().hex,
                project=project,
                group=group,
                message=group.message,
                data=load_data('python'),
                datetime=now - offset,
            )

            records.append(
                Record(
                    event.event_id,
                    Notification(
                        event,
                        # BUG FIX: sample from the Rule objects, not the dict
                        # itself -- ``random.sample`` indexes its population
                        # with 0-based integers, while the dict is keyed from
                        # 1, so sampling the mapping directly can KeyError.
                        random.sample(state['rules'].values(), random.randint(1, len(state['rules']))),
                    ),
                    to_timestamp(event.datetime),
                )
            )

        # Use integer bounds: ``randint`` with a float upper bound relies on
        # deprecated coercion and breaks on Python 3.
        state['event_counts'][group_id] = random.randint(10, 10000)
        state['user_counts'][group_id] = random.randint(10, 10000)

    digest = build_digest(project, records, state)

    # TODO(tkaemming): This duplication from ``MailPlugin.notify_digest`` is a code smell
    counts = Counter()
    for rule, groups in digest.iteritems():
        counts.update(groups.keys())

    return MailPreview(
        html_template='sentry/emails/digests/body.html',
        text_template='sentry/emails/digests/body.txt',
        context={
            'project': project,
            'counts': counts,
            'digest': digest,
        },
    ).render()
def digest(request):
    """
    Render a preview of the digest notification email using random sample data.

    The ``seed`` query parameter (defaulting to the current timestamp) seeds a
    private ``Random`` instance so that a given preview can be reproduced
    exactly by passing the same seed again.
    """
    seed = request.GET.get("seed", str(time.time()))
    # BUG FIX: the original call passed no argument for the ``%s``
    # placeholder, so the log line always printed a literal "%s".
    logger.debug("Using random seed value: %s", seed)

    random = Random(seed)

    now = datetime.utcnow().replace(tzinfo=pytz.utc)

    # TODO: Refactor all of these into something more manageable.
    org = Organization(id=1, slug="example", name="Example Organization")
    team = Team(id=1, slug="example", name="Example Team", organization=org)
    project = Project(id=1, slug="example", name="Example Project", team=team, organization=org)

    # Rule ids intentionally start at 1 so that records can reference them.
    rules = {
        i: Rule(id=i, project=project, label="Rule #%s" % (i,))
        for i in xrange(1, random.randint(2, 4))
    }

    state = {"project": project, "groups": {}, "rules": rules, "event_counts": {}, "user_counts": {}}

    records = []

    group_sequence = itertools.count(1)
    event_sequence = itertools.count(1)

    for i in xrange(random.randint(1, 30)):
        group_id = next(group_sequence)

        # Fake a dotted "module.path in function" culprit out of random words.
        culprit = "{module} in {function}".format(
            module=".".join(
                "".join(random.sample(WORDS, random.randint(1, int(random.paretovariate(2.2)))))
                for word in xrange(1, 4)
            ),
            function=random.choice(WORDS),
        )

        group = state["groups"][group_id] = Group(
            id=group_id,
            project=project,
            message=words(int(random.weibullvariate(8, 4)), common=False),
            culprit=culprit,
            level=random.choice(LOG_LEVELS.keys()),
        )

        offset = timedelta(seconds=0)
        for i in xrange(random.randint(1, 10)):
            offset += timedelta(seconds=random.random() * 120)
            event = Event(
                id=next(event_sequence),
                event_id=uuid.uuid4().hex,
                project=project,
                group=group,
                message=group.message,
                data=load_data("python"),
                datetime=now - offset,
            )

            records.append(
                Record(
                    event.event_id,
                    # BUG FIX: sample from the Rule objects, not the dict itself --
                    # ``random.sample`` indexes its population with 0-based integers,
                    # while the dict is keyed from 1, so sampling the mapping
                    # directly can KeyError.
                    Notification(
                        event,
                        random.sample(state["rules"].values(), random.randint(1, len(state["rules"]))),
                    ),
                    to_timestamp(event.datetime),
                )
            )

        # Use integer bounds: ``randint`` with a float upper bound relies on
        # deprecated coercion and breaks on Python 3.
        state["event_counts"][group_id] = random.randint(10, 10000)
        state["user_counts"][group_id] = random.randint(10, 10000)

    digest = build_digest(project, records, state)
    start, end, counts = get_digest_metadata(digest)

    return MailPreview(
        html_template="sentry/emails/digests/body.html",
        text_template="sentry/emails/digests/body.txt",
        context={"project": project, "counts": counts, "digest": digest, "start": start, "end": end},
    ).render()
class Event(Model):
    """
    An individual event.

    Stored in the legacy ``sentry_message`` table; the client-supplied
    ``event_id`` lives in the historical ``message_id`` column.
    """
    # Nullable so an event can exist without (or be detached from) a group.
    group = models.ForeignKey('sentry.Group', blank=True, null=True, related_name="event_set")
    event_id = models.CharField(max_length=32, null=True, db_column="message_id")
    project = models.ForeignKey('sentry.Project', null=True)
    logger = models.CharField(
        max_length=64, blank=True, default='root', db_index=True)
    level = BoundedPositiveIntegerField(
        choices=LOG_LEVELS.items(), default=logging.ERROR, blank=True, db_index=True)
    message = models.TextField()
    # ``view`` is the historical column name for culprit.
    culprit = models.CharField(
        max_length=MAX_CULPRIT_LENGTH, blank=True, null=True, db_column='view')
    checksum = models.CharField(max_length=32, db_index=True)
    num_comments = BoundedPositiveIntegerField(default=0, null=True)
    platform = models.CharField(max_length=64, null=True)
    datetime = models.DateTimeField(default=timezone.now, db_index=True)
    time_spent = BoundedIntegerField(null=True)
    server_name = models.CharField(max_length=128, db_index=True, null=True)
    site = models.CharField(max_length=128, db_index=True, null=True)
    # Full event payload; stored/retrieved through the node store.
    data = NodeField(blank=True, null=True)

    objects = BaseManager()

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_message'
        verbose_name = _('message')
        verbose_name_plural = _('messages')
        unique_together = ('project', 'event_id')

    __repr__ = sane_repr('project_id', 'group_id', 'checksum')

    def error(self):
        """Return the first line of the message, truncated to 100 chars for display."""
        message = strip(self.message)
        if not message:
            message = '<unlabeled message>'
        else:
            message = truncatechars(message.splitlines()[0], 100)
        return message
    error.short_description = _('error')

    def has_two_part_message(self):
        # True when the display form (first line, 100 chars) would drop content.
        message = strip(self.message)
        return '\n' in message or len(message) > 100

    def message_top(self):
        """Return the culprit when set, otherwise the display message."""
        culprit = strip(self.culprit)
        if culprit:
            return culprit
        return self.error()

    @property
    def team(self):
        return self.project.team

    @memoize
    def ip_address(self):
        """
        Best-effort client IP: the HTTP interface's ``REMOTE_ADDR`` first,
        then the user interface's ``ip_address``; ``None`` when neither is set.
        """
        http_data = self.data.get('sentry.interfaces.Http')
        if http_data and 'env' in http_data:
            value = http_data['env'].get('REMOTE_ADDR')
            if value:
                return value

        user_data = self.data.get('sentry.interfaces.User')
        if user_data:
            value = user_data.get('ip_address')
            if value:
                return value

        return None

    @memoize
    def user_ident(self):
        """
        The identifier from a user is considered from several interfaces.

        In order:

        - User.id
        - User.email
        - User.username
        - Http.env.REMOTE_ADDR
        """
        user_data = self.data.get('sentry.interfaces.User')
        if user_data:
            ident = user_data.get('id')
            if ident:
                return 'id:%s' % (ident,)

            ident = user_data.get('email')
            if ident:
                return 'email:%s' % (ident,)

            ident = user_data.get('username')
            if ident:
                return 'username:%s' % (ident,)

        ident = self.ip_address
        if ident:
            return 'ip:%s' % (ident,)

        return None

    @memoize
    def interfaces(self):
        """
        Instantiate interface objects from ``data``, ordered by score (desc).

        Keys containing a dot are treated as dotted import paths; anything
        that fails to import or instantiate is silently skipped.
        """
        result = []
        for key, data in self.data.iteritems():
            if '.' not in key:
                continue

            try:
                cls = import_string(key)
            except ImportError:
                continue  # suppress invalid interfaces

            value = safe_execute(cls, **data)
            if not value:
                continue

            result.append((key, value))

        return SortedDict((k, v) for k, v in sorted(result, key=lambda x: x[1].get_score(), reverse=True))

    def get_version(self):
        """Return ``(module, version)`` from legacy ``__sentry__`` metadata, or None."""
        if not self.data:
            return
        if '__sentry__' not in self.data:
            return
        if 'version' not in self.data['__sentry__']:
            return
        module = self.data['__sentry__'].get('module', 'ver')
        return module, self.data['__sentry__']['version']

    def get_tags(self):
        """Return user-visible ``(key, value)`` tag pairs, dropping ``sentry:`` internals."""
        try:
            return [
                (t, v) for t, v in self.data.get('tags') or ()
                if not t.startswith('sentry:')
            ]
        except ValueError:
            # at one point Sentry allowed invalid tag sets such as (foo, bar)
            # vs ((tag, foo), (tag, bar))
            return []

    def as_dict(self):
        # We use a SortedDict to keep elements ordered for a potential JSON serializer
        data = SortedDict()
        data['id'] = self.event_id
        data['checksum'] = self.checksum
        data['project'] = self.project.slug
        data['logger'] = self.logger
        data['level'] = self.get_level_display()
        data['culprit'] = self.culprit
        data['datetime'] = self.datetime
        data['time_spent'] = self.time_spent
        for k, v in sorted(self.data.iteritems()):
            data[k] = v
        return data

    @property
    def size(self):
        # Rough size estimate: length of the unicode repr of the attribute dict.
        return len(unicode(vars(self)))
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import from collections import OrderedDict from django import forms from sentry.constants import LOG_LEVELS from sentry.rules.conditions.base import EventCondition LEVEL_CHOICES = OrderedDict([ ("{0}".format(k), "{0}".format(v.capitalize())) for k, v in sorted(LOG_LEVELS.items(), key=lambda x: x[0], reverse=True) ]) LOG_LEVEL_REVERSE_MAP = dict((v, k) for k, v in LOG_LEVELS.iteritems()) class LevelMatchType(object): EQUAL = 'eq' LESS_OR_EQUAL = 'lte' GREATER_OR_EQUAL = 'gte' class LevelEventForm(forms.Form): level = forms.ChoiceField(choices=LEVEL_CHOICES.items(), initial=30) match = forms.ChoiceField(choices=((LevelMatchType.EQUAL, 'equal'), (LevelMatchType.LESS_OR_EQUAL, 'less than or equal to'),
:license: BSD, see LICENSE for more details. """ from __future__ import absolute_import from collections import OrderedDict from django import forms from sentry.constants import LOG_LEVELS from sentry.rules.conditions.base import EventCondition LEVEL_CHOICES = OrderedDict( [ ("{0}".format(k), "{0}".format(v.capitalize())) for k, v in sorted(LOG_LEVELS.items(), key=lambda x: x[0], reverse=True) ] ) LOG_LEVEL_REVERSE_MAP = dict((v, k) for k, v in LOG_LEVELS.iteritems()) class LevelMatchType(object): EQUAL = "eq" LESS_OR_EQUAL = "lte" GREATER_OR_EQUAL = "gte" class LevelEventForm(forms.Form): level = forms.ChoiceField(choices=LEVEL_CHOICES.items(), initial=30) match = forms.ChoiceField( choices=(
from sentry.app import cache, env from sentry.constants import DEFAULT_LOG_LEVEL, LOG_LEVELS, MAX_TAG_VALUE_LENGTH, MAX_TAG_KEY_LENGTH from sentry.exceptions import InvalidTimestamp from sentry.interfaces.base import get_interface from sentry.models import Project, ProjectKey from sentry.tasks.store import preprocess_event from sentry.utils import is_float, json from sentry.utils.auth import parse_auth_header from sentry.utils.compat import StringIO from sentry.utils.strings import decompress logger = logging.getLogger("sentry.coreapi") LOG_LEVEL_REVERSE_MAP = dict((v, k) for k, v in LOG_LEVELS.iteritems()) RESERVED_FIELDS = ( "project", "event_id", "message", "checksum", "culprit", "level", "time_spent", "logger", "server_name", "site", "timestamp", "extra", "modules",
class Group(Model):
    """
    Aggregated message which summarizes a set of Events.
    """
    __core__ = False

    project = FlexibleForeignKey('sentry.Project', null=True)
    logger = models.CharField(max_length=64, blank=True, default=DEFAULT_LOGGER_NAME, db_index=True)
    level = BoundedPositiveIntegerField(
        choices=LOG_LEVELS.items(), default=logging.ERROR, blank=True, db_index=True)
    message = models.TextField()
    # ``view`` is the historical column name for culprit.
    culprit = models.CharField(max_length=MAX_CULPRIT_LENGTH, blank=True, null=True, db_column='view')
    num_comments = BoundedPositiveIntegerField(default=0, null=True)
    platform = models.CharField(max_length=64, null=True)
    status = BoundedPositiveIntegerField(default=0, choices=(
        (GroupStatus.UNRESOLVED, _('Unresolved')),
        (GroupStatus.RESOLVED, _('Resolved')),
        (GroupStatus.MUTED, _('Muted')),
    ), db_index=True)
    times_seen = BoundedPositiveIntegerField(default=1, db_index=True)
    last_seen = models.DateTimeField(default=timezone.now, db_index=True)
    first_seen = models.DateTimeField(default=timezone.now, db_index=True)
    first_release = FlexibleForeignKey('sentry.Release', null=True)
    resolved_at = models.DateTimeField(null=True, db_index=True)
    # active_at should be the same as first_seen by default
    active_at = models.DateTimeField(null=True, db_index=True)
    time_spent_total = BoundedIntegerField(default=0)
    time_spent_count = BoundedIntegerField(default=0)
    score = BoundedIntegerField(default=0)
    is_public = models.NullBooleanField(default=False, null=True)
    data = GzippedDictField(blank=True, null=True)

    objects = GroupManager()

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_groupedmessage'
        verbose_name_plural = _('grouped messages')
        verbose_name = _('grouped message')
        permissions = (("can_view", "Can view"), )
        index_together = (('project', 'first_release'), )

    __repr__ = sane_repr('project_id')

    def __unicode__(self):
        return "(%s) %s" % (self.times_seen, self.error())

    def save(self, *args, **kwargs):
        """Backfill timestamp defaults and normalize the message before saving."""
        if not self.last_seen:
            self.last_seen = timezone.now()
        if not self.first_seen:
            self.first_seen = self.last_seen
        if not self.active_at:
            self.active_at = self.first_seen
        if self.message:
            # We limit what we store for the message body
            self.message = self.message.splitlines()[0][:255]
        super(Group, self).save(*args, **kwargs)

    def get_absolute_url(self):
        return absolute_uri(
            reverse('sentry-group', args=[self.organization.slug, self.project.slug, self.id]))

    @property
    def avg_time_spent(self):
        # None when no samples were recorded (avoids division by zero).
        if not self.time_spent_count:
            return
        return float(self.time_spent_total) / self.time_spent_count

    def is_over_resolve_age(self):
        """True when the project's ``sentry:resolve_age`` (hours) has elapsed since last_seen."""
        resolve_age = self.project.get_option('sentry:resolve_age', None)
        if not resolve_age:
            return False
        return self.last_seen < timezone.now() - timedelta(
            hours=int(resolve_age))

    def is_muted(self):
        return self.get_status() == GroupStatus.MUTED

    def is_resolved(self):
        return self.get_status() == GroupStatus.RESOLVED

    def get_status(self):
        # Auto-resolve: an unresolved group past the resolve age reads as resolved.
        if self.status == GroupStatus.UNRESOLVED and self.is_over_resolve_age(
        ):
            return GroupStatus.RESOLVED
        return self.status

    def get_share_id(self):
        # Opaque share token: lowercase base16 of "<project_id>.<group_id>".
        return b16encode('{}.{}'.format(self.project_id, self.id)).lower()

    @classmethod
    def from_share_id(cls, share_id):
        """Inverse of ``get_share_id``; raises ``DoesNotExist`` for malformed ids."""
        try:
            project_id, group_id = b16decode(share_id.upper()).split('.')
        except ValueError:
            raise cls.DoesNotExist
        return cls.objects.get(project=project_id, id=group_id)

    def get_score(self):
        # Frequency-weighted recency score used for ranking.
        return int(
            math.log(self.times_seen) * 600 + float(time.mktime(self.last_seen.timetuple())))

    def get_latest_event(self):
        """Return (and cache on the instance) the most recent Event, or None."""
        from sentry.models import Event

        if not hasattr(self, '_latest_event'):
            try:
                self._latest_event = Event.objects.filter(
                    group=self,
                ).order_by('-datetime')[0]
            except IndexError:
                self._latest_event = None
        return self._latest_event

    def get_unique_tags(self, tag, since=None, order_by='-times_seen'):
        # TODO(dcramer): this has zero test coverage and is a critical path
        from sentry.models import GroupTagValue

        queryset = GroupTagValue.objects.filter(
            group=self,
            key=tag,
        )
        if since:
            queryset = queryset.filter(last_seen__gte=since)
        return queryset.values_list(
            'value',
            'times_seen',
            'first_seen',
            'last_seen',
        ).order_by(order_by)

    def get_tags(self, with_internal=True):
        """
        Return ``[{'key': ..., 'label': ...}]`` for this group's tag keys,
        sorted by label and cached on the instance.
        """
        from sentry.models import GroupTagKey, TagKey

        if not hasattr(self, '_tag_cache'):
            group_tags = GroupTagKey.objects.filter(
                group=self,
                project=self.project,
            )
            if not with_internal:
                group_tags = group_tags.exclude(key__startswith='sentry:')
            group_tags = list(group_tags.values_list('key', flat=True))

            tag_keys = dict(
                (t.key, t)
                for t in TagKey.objects.filter(project=self.project, key__in=group_tags))

            results = []
            for key in group_tags:
                try:
                    tag_key = tag_keys[key]
                except KeyError:
                    # No TagKey row; derive a human label from the raw key.
                    label = key.replace('_', ' ').title()
                else:
                    label = tag_key.get_label()

                results.append({
                    'key': key,
                    'label': label,
                })

            self._tag_cache = sorted(results, key=lambda x: x['label'])

        return self._tag_cache

    def error(self):
        return self.message
    error.short_description = _('error')

    def has_two_part_message(self):
        # True when the display form (first line, 100 chars) would drop content.
        message = strip(self.message)
        return '\n' in message or len(message) > 100

    @property
    def title(self):
        # Prefer the culprit; fall back to the raw message.
        culprit = strip(self.culprit)
        if culprit:
            return culprit
        return self.message

    @property
    def message_short(self):
        """First line of the message, truncated to 100 chars for display."""
        message = strip(self.message)
        if not message:
            message = '<unlabeled message>'
        else:
            message = truncatechars(message.splitlines()[0], 100)
        return message

    @property
    def organization(self):
        return self.project.organization

    @property
    def team(self):
        return self.project.team

    @property
    def checksum(self):
        warnings.warn('Group.checksum is no longer used', DeprecationWarning)
        return ''

    def get_email_subject(self):
        return '[%s] %s: %s' % (
            self.project.get_full_name().encode('utf-8'),
            six.text_type(self.get_level_display()).upper().encode('utf-8'),
            self.message_short.encode('utf-8'))
def serialize(self, obj, attrs, user):
    """
    Serialize a Group for API output.

    ``attrs`` is the precomputed attribute bundle for this group (snooze
    state, counts, resolution, subscription, etc.); ``user`` is the
    requesting user, used for the permission-sensitive permalink.
    """
    status = obj.status
    status_details = {}
    if attrs['ignore_until']:
        snooze = attrs['ignore_until']
        if snooze.is_valid(group=obj):
            # counts return the delta remaining when window is not set
            status_details.update({
                'ignoreCount': (
                    snooze.count - (obj.times_seen - snooze.state['times_seen'])
                    if snooze.count and not snooze.window
                    else snooze.count),
                'ignoreUntil': snooze.until,
                'ignoreUserCount': (
                    snooze.user_count - (attrs['user_count'] - snooze.state['users_seen'])
                    if snooze.user_count and not snooze.user_window
                    else snooze.user_count),
                'ignoreUserWindow': snooze.user_window,
                'ignoreWindow': snooze.window,
                'actor': attrs['ignore_actor'],
            })
        else:
            # Snooze conditions no longer hold; surface as unresolved.
            status = GroupStatus.UNRESOLVED
    if status == GroupStatus.UNRESOLVED and obj.is_over_resolve_age():
        status = GroupStatus.RESOLVED
        status_details['autoResolved'] = True
    if status == GroupStatus.RESOLVED:
        status_label = 'resolved'
        if attrs['resolution_type'] == 'release':
            res_type, res_version, _ = attrs['resolution']
            if res_type in (GroupResolution.Type.in_next_release, None):
                status_details['inNextRelease'] = True
            elif res_type == GroupResolution.Type.in_release:
                status_details['inRelease'] = res_version
            status_details['actor'] = attrs['resolution_actor']
        elif attrs['resolution_type'] == 'commit':
            status_details['inCommit'] = attrs['resolution']
    elif status == GroupStatus.IGNORED:
        status_label = 'ignored'
    elif status in [
        GroupStatus.PENDING_DELETION, GroupStatus.DELETION_IN_PROGRESS
    ]:
        status_label = 'pending_deletion'
    elif status == GroupStatus.PENDING_MERGE:
        status_label = 'pending_merge'
    else:
        status_label = 'unresolved'

    # If user is not logged in and member of the organization,
    # do not return the permalink which contains private information i.e. org name.
    request = env.request
    is_superuser = (request and is_active_superuser(request) and request.user == user)
    if is_superuser or (user.is_authenticated() and user.get_orgs().filter(
            id=obj.organization.id).exists()):
        permalink = obj.get_absolute_url()
    else:
        permalink = None

    subscription_details = None
    # ``disabled`` is a sentinel meaning subscriptions are turned off entirely.
    if attrs['subscription'] is not disabled:
        is_subscribed, subscription = attrs['subscription']
        if subscription is not None and subscription.is_active:
            subscription_details = {
                'reason': SUBSCRIPTION_REASON_MAP.get(
                    subscription.reason,
                    'unknown',
                ),
            }
    else:
        is_subscribed = False
        subscription_details = {
            'disabled': True,
        }

    share_id = attrs['share_id']

    return {
        'id': six.text_type(obj.id),
        'shareId': share_id,
        'shortId': obj.qualified_short_id,
        'count': six.text_type(attrs['times_seen']),
        'userCount': attrs['user_count'],
        'title': obj.title,
        'culprit': obj.culprit,
        'permalink': permalink,
        'firstSeen': attrs['first_seen'],
        'lastSeen': attrs['last_seen'],
        'logger': obj.logger or None,
        'level': LOG_LEVELS.get(obj.level, 'unknown'),
        'status': status_label,
        'statusDetails': status_details,
        # A group is public exactly when a share record exists.
        'isPublic': share_id is not None,
        'platform': obj.platform,
        'project': {
            'id': six.text_type(obj.project.id),
            'name': obj.project.name,
            'slug': obj.project.slug,
            'platform': obj.project.platform,
        },
        'type': obj.get_event_type(),
        'metadata': obj.get_event_metadata(),
        'numComments': obj.num_comments,
        'assignedTo': serialize(attrs['assigned_to'], user, ActorSerializer()),
        'isBookmarked': attrs['is_bookmarked'],
        'isSubscribed': is_subscribed,
        'subscriptionDetails': subscription_details,
        'hasSeen': attrs['has_seen'],
        'annotations': attrs['annotations'],
    }
def serialize(self, obj, attrs, user):
    """
    Serialize a Group for API output.

    ``attrs`` is the precomputed attribute bundle for this group (snooze
    state, counts, resolution, subscription, etc.); ``user`` is the
    requesting user, used for the permission-sensitive permalink.
    """
    status = obj.status
    status_details = {}
    if attrs['ignore_until']:
        snooze = attrs['ignore_until']
        if snooze.is_valid(group=obj):
            # counts return the delta remaining when window is not set
            status_details.update(
                {
                    'ignoreCount': (
                        snooze.count - (obj.times_seen - snooze.state['times_seen'])
                        if snooze.count and not snooze.window else snooze.count
                    ),
                    'ignoreUntil': snooze.until,
                    'ignoreUserCount': (
                        snooze.user_count - (attrs['user_count'] - snooze.state['users_seen'])
                        if snooze.user_count and not snooze.user_window else snooze.user_count
                    ),
                    'ignoreUserWindow': snooze.user_window,
                    'ignoreWindow': snooze.window,
                    'actor': attrs['ignore_actor'],
                }
            )
        else:
            # Snooze conditions no longer hold; surface as unresolved.
            status = GroupStatus.UNRESOLVED
    if status == GroupStatus.UNRESOLVED and obj.is_over_resolve_age():
        status = GroupStatus.RESOLVED
        status_details['autoResolved'] = True
    if status == GroupStatus.RESOLVED:
        status_label = 'resolved'
        if attrs['resolution_type'] == 'release':
            res_type, res_version, _ = attrs['resolution']
            if res_type in (GroupResolution.Type.in_next_release, None):
                status_details['inNextRelease'] = True
            elif res_type == GroupResolution.Type.in_release:
                status_details['inRelease'] = res_version
            status_details['actor'] = attrs['resolution_actor']
        elif attrs['resolution_type'] == 'commit':
            status_details['inCommit'] = attrs['resolution']
    elif status == GroupStatus.IGNORED:
        status_label = 'ignored'
    elif status in [GroupStatus.PENDING_DELETION, GroupStatus.DELETION_IN_PROGRESS]:
        status_label = 'pending_deletion'
    elif status == GroupStatus.PENDING_MERGE:
        status_label = 'pending_merge'
    else:
        status_label = 'unresolved'

    # If user is not logged in and member of the organization,
    # do not return the permalink which contains private information i.e. org name.
    if user.is_authenticated() and user.get_orgs().filter(id=obj.organization.id).exists():
        permalink = absolute_uri(
            reverse('sentry-group', args=[obj.organization.slug, obj.project.slug, obj.id])
        )
    else:
        permalink = None

    subscription_details = None
    # ``disabled`` is a sentinel meaning subscriptions are turned off entirely.
    if attrs['subscription'] is not disabled:
        is_subscribed, subscription = attrs['subscription']
        if subscription is not None and subscription.is_active:
            subscription_details = {
                'reason': SUBSCRIPTION_REASON_MAP.get(
                    subscription.reason,
                    'unknown',
                ),
            }
    else:
        is_subscribed = False
        subscription_details = {
            'disabled': True,
        }

    share_id = attrs['share_id']

    return {
        'id': six.text_type(obj.id),
        'shareId': share_id,
        'shortId': obj.qualified_short_id,
        'count': six.text_type(attrs['times_seen']),
        'userCount': attrs['user_count'],
        'title': obj.title,
        'culprit': obj.culprit,
        'permalink': permalink,
        'firstSeen': attrs['first_seen'],
        'lastSeen': attrs['last_seen'],
        'logger': obj.logger or None,
        'level': LOG_LEVELS.get(obj.level, 'unknown'),
        'status': status_label,
        'statusDetails': status_details,
        # A group is public exactly when a share record exists.
        'isPublic': share_id is not None,
        'project': {
            'id': six.text_type(obj.project.id),
            'name': obj.project.name,
            'slug': obj.project.slug,
        },
        'type': obj.get_event_type(),
        'metadata': obj.get_event_metadata(),
        'numComments': obj.num_comments,
        'assignedTo': serialize(attrs['assigned_to'], user, ActorSerializer()),
        'isBookmarked': attrs['is_bookmarked'],
        'isSubscribed': is_subscribed,
        'subscriptionDetails': subscription_details,
        'hasSeen': attrs['has_seen'],
        'annotations': attrs['annotations'],
    }
class Group(Model):
    """
    Aggregated message which summarizes a set of Events.
    """

    __include_in_export__ = False

    project = FlexibleForeignKey("sentry.Project")
    logger = models.CharField(max_length=64, blank=True, default=str(DEFAULT_LOGGER_NAME), db_index=True)
    level = BoundedPositiveIntegerField(
        choices=[(key, str(val)) for key, val in sorted(LOG_LEVELS.items())],
        default=logging.ERROR,
        blank=True,
        db_index=True,
    )
    message = models.TextField()
    # ``view`` is the historical column name for culprit.
    culprit = models.CharField(max_length=MAX_CULPRIT_LENGTH, blank=True, null=True, db_column="view")
    num_comments = BoundedPositiveIntegerField(default=0, null=True)
    platform = models.CharField(max_length=64, null=True)
    status = BoundedPositiveIntegerField(
        default=0,
        choices=(
            (GroupStatus.UNRESOLVED, _("Unresolved")),
            (GroupStatus.RESOLVED, _("Resolved")),
            (GroupStatus.IGNORED, _("Ignored")),
        ),
        db_index=True,
    )
    times_seen = BoundedPositiveIntegerField(default=1, db_index=True)
    last_seen = models.DateTimeField(default=timezone.now, db_index=True)
    first_seen = models.DateTimeField(default=timezone.now, db_index=True)
    first_release = FlexibleForeignKey("sentry.Release", null=True, on_delete=models.PROTECT)
    resolved_at = models.DateTimeField(null=True, db_index=True)
    # active_at should be the same as first_seen by default
    active_at = models.DateTimeField(null=True, db_index=True)
    time_spent_total = BoundedIntegerField(default=0)
    time_spent_count = BoundedIntegerField(default=0)
    score = BoundedIntegerField(default=0)
    # deprecated, do not use. GroupShare has superseded
    is_public = models.NullBooleanField(default=False, null=True)
    data = GzippedDictField(blank=True, null=True)
    short_id = BoundedBigIntegerField(null=True)

    objects = GroupManager(cache_fields=("id", ))

    class Meta:
        app_label = "sentry"
        db_table = "sentry_groupedmessage"
        verbose_name_plural = _("grouped messages")
        verbose_name = _("grouped message")
        permissions = (("can_view", "Can view"), )
        index_together = [
            ("project", "first_release"),
            ("project", "id"),
            ("project", "status", "last_seen", "id"),
        ]
        unique_together = (
            ("project", "short_id"),
            ("project", "id"),
        )

    __repr__ = sane_repr("project_id")

    def __str__(self):
        return f"({self.times_seen}) {self.error()}"

    def save(self, *args, **kwargs):
        """Backfill timestamp defaults, normalize the message, and recompute the score."""
        if not self.last_seen:
            self.last_seen = timezone.now()
        if not self.first_seen:
            self.first_seen = self.last_seen
        if not self.active_at:
            self.active_at = self.first_seen
        # We limit what we store for the message body
        self.message = strip(self.message)
        if self.message:
            self.message = truncatechars(self.message.splitlines()[0], 255)
        if self.times_seen is None:
            self.times_seen = 1
        self.score = type(self).calculate_score(times_seen=self.times_seen, last_seen=self.last_seen)
        super().save(*args, **kwargs)

    def get_absolute_url(
        self,
        params: Mapping[str, str] | None = None,
        event_id: int | None = None,
        organization_slug: str | None = None,
    ) -> str:
        """Return the absolute issue (optionally event) URL for this group."""
        # Built manually in preference to django.urls.reverse,
        # because reverse has a measured performance impact.
        event_path = f"events/{event_id}/" if event_id else ""
        url = "organizations/{org}/issues/{id}/{event_path}{params}".format(
            # Pass organization_slug if this needs to be called multiple times to avoid n+1 queries
            org=urlquote(self.organization.slug if organization_slug is None else organization_slug),
            id=self.id,
            event_path=event_path,
            params="?" + urlencode(params) if params else "",
        )
        return absolute_uri(url)

    @property
    def qualified_short_id(self):
        # e.g. "MYPROJECT-ABC123"; None when no short_id has been assigned.
        if self.short_id is not None:
            return f"{self.project.slug.upper()}-{base32_encode(self.short_id)}"

    def is_over_resolve_age(self):
        """True when the project's ``sentry:resolve_age`` (hours) has elapsed since last_seen."""
        resolve_age = self.project.get_option("sentry:resolve_age", None)
        if not resolve_age:
            return False
        return self.last_seen < timezone.now() - timedelta(
            hours=int(resolve_age))

    def is_ignored(self):
        return self.get_status() == GroupStatus.IGNORED

    def is_unresolved(self):
        return self.get_status() == GroupStatus.UNRESOLVED

    # TODO(dcramer): remove in 9.0 / after plugins no long ref is_muted
    is_muted = is_ignored

    def is_resolved(self):
        return self.get_status() == GroupStatus.RESOLVED

    def get_status(self):
        # XXX(dcramer): GroupSerializer reimplements this logic
        from sentry.models import GroupSnooze

        status = self.status

        if status == GroupStatus.IGNORED:
            try:
                snooze = GroupSnooze.objects.get_from_cache(group=self)
            except GroupSnooze.DoesNotExist:
                pass
            else:
                # An expired/invalid snooze reads as unresolved again.
                if not snooze.is_valid(group=self):
                    status = GroupStatus.UNRESOLVED

        if status == GroupStatus.UNRESOLVED and self.is_over_resolve_age():
            return GroupStatus.RESOLVED
        return status

    def get_share_id(self):
        """Return the public share UUID for this group, or None if never shared."""
        from sentry.models import GroupShare

        try:
            return GroupShare.objects.filter(group_id=self.id).values_list(
                "uuid", flat=True)[0]
        except IndexError:
            # Otherwise it has not been shared yet.
            return None

    def get_score(self):
        return type(self).calculate_score(self.times_seen, self.last_seen)

    def get_latest_event(self) -> Event | None:
        # Cached on the instance after the first lookup.
        if not hasattr(self, "_latest_event"):
            self._latest_event = self.get_latest_event_for_environments()
        return self._latest_event

    def get_latest_event_for_environments(self, environments=()):
        return get_oldest_or_latest_event_for_environments(
            EventOrdering.LATEST,
            environments=environments,
            issue_id=self.id,
            project_id=self.project_id,
        )

    def get_oldest_event_for_environments(self, environments=()):
        return get_oldest_or_latest_event_for_environments(
            EventOrdering.OLDEST,
            environments=environments,
            issue_id=self.id,
            project_id=self.project_id,
        )

    def _get_cache_key(self, project_id, group_id, first):
        return f"g-r:{group_id}-{project_id}-{first}"

    def __get_release(self, project_id, group_id, first=True, use_cache=True):
        """
        Return the version of this group's first (or last) associated release,
        or None. Results (including the miss) are cached for an hour; a cached
        ``False`` encodes "no release exists".
        """
        from sentry.models import GroupRelease, Release

        orderby = "first_seen" if first else "-last_seen"
        cache_key = self._get_cache_key(project_id, group_id, first)
        try:
            release_version = cache.get(cache_key) if use_cache else None
            if release_version is None:
                release_version = Release.objects.get(
                    id__in=GroupRelease.objects.filter(group_id=group_id).
                    order_by(orderby).values("release_id")[:1]).version
                cache.set(cache_key, release_version, 3600)
            elif release_version is False:
                # Negative-cache marker: a prior lookup found no release.
                release_version = None
            return release_version
        except Release.DoesNotExist:
            cache.set(cache_key, False, 3600)
            return None

    def get_first_release(self):
        if self.first_release_id is None:
            first_release = self.__get_release(self.project_id, self.id, True)
            return first_release

        return self.first_release.version

    def get_last_release(self, use_cache=True):
        return self.__get_release(self.project_id, self.id, False, use_cache=use_cache)

    def get_event_type(self):
        """
        Return the type of this issue.

        See ``sentry.eventtypes``.
        """
        return self.data.get("type", "default")

    def get_event_metadata(self) -> Mapping[str, str]:
        """
        Return the metadata of this issue.

        See ``sentry.eventtypes``.
        """
        return self.data["metadata"]

    @property
    def title(self) -> str:
        et = eventtypes.get(self.get_event_type())()
        return et.get_title(self.get_event_metadata())

    def location(self):
        et = eventtypes.get(self.get_event_type())()
        return et.get_location(self.get_event_metadata())

    def error(self):
        warnings.warn("Group.error is deprecated, use Group.title", DeprecationWarning)
        return self.title
    error.short_description = _("error")

    @property
    def message_short(self):
        warnings.warn("Group.message_short is deprecated, use Group.title", DeprecationWarning)
        return self.title

    @property
    def organization(self):
        return self.project.organization

    @property
    def checksum(self):
        warnings.warn("Group.checksum is no longer used", DeprecationWarning)
        return ""

    def get_email_subject(self):
        return f"{self.qualified_short_id} - {self.title}"

    def count_users_seen(self):
        return tagstore.get_groups_user_counts(
            [self.project_id], [self.id], environment_ids=None, start=self.first_seen)[self.id]

    @classmethod
    def calculate_score(cls, times_seen, last_seen):
        # Frequency-weighted recency score; ``or 1`` guards log(0).
        return math.log(float(times_seen or 1)) * 600 + float(
            last_seen.strftime("%s"))

    @staticmethod
    def issues_mapping(group_ids, project_ids, organization):
        """Create a dictionary of group_ids to their qualified_short_ids"""
        return {
            i.id: i.qualified_short_id
            for i in Group.objects.filter(
                id__in=group_ids, project_id__in=project_ids, project__organization=organization)
        }

    def get_assignee(self) -> Team | User | None:
        """Return the resolved assignee (Team or User), or None if unassigned/stale."""
        from sentry.models import GroupAssignee

        try:
            group_assignee = GroupAssignee.objects.get(group=self)
        except GroupAssignee.DoesNotExist:
            return None

        assigned_actor = group_assignee.assigned_actor()

        try:
            return assigned_actor.resolve()
        except assigned_actor.type.DoesNotExist:
            # Assignee record points at a deleted user/team.
            return None
from __future__ import annotations from collections import OrderedDict from typing import Any, Callable, Tuple from django import forms from sentry.constants import LOG_LEVELS, LOG_LEVELS_MAP from sentry.eventstore.models import Event from sentry.rules import LEVEL_MATCH_CHOICES as MATCH_CHOICES from sentry.rules import EventState, MatchType from sentry.rules.conditions.base import EventCondition key: Callable[[Tuple[int, str]], int] = lambda x: x[0] LEVEL_CHOICES = OrderedDict([ (f"{k}", v) for k, v in sorted(LOG_LEVELS.items(), key=key, reverse=True) ]) class LevelEventForm(forms.Form): # type: ignore level = forms.ChoiceField(choices=list(LEVEL_CHOICES.items())) match = forms.ChoiceField(choices=list(MATCH_CHOICES.items())) class LevelCondition(EventCondition): form_cls = LevelEventForm label = "The event's level is {match} {level}" form_fields = { "level": { "type": "choice", "choices": list(LEVEL_CHOICES.items())
    def normalize(self, request_env=None):
        """Normalize raw event payload ``self.data`` in place and return it.

        Coerces loosely-typed client input into canonical types, validates it
        against the event schema, converts interface payloads, fills defaults,
        and trims oversized values.  Validation problems are collected into
        ``data['errors']`` rather than raised.

        :param request_env: optional dict with request context; ``client_ip``
            and ``auth`` are read from it when present.
        :returns: the normalized ``data`` dict (same object as ``self.data``).
        """
        request_env = request_env or {}
        data = self.data
        errors = data['errors'] = []

        # Ignore event meta data for now.
        data.pop('_meta', None)

        # Before validating with a schema, attempt to cast values to their desired types
        # so that the schema doesn't have to take every type variation into account.
        text = six.text_type
        fp_types = six.string_types + six.integer_types + (float, )

        def to_values(v):
            # Wrap bare lists as {'values': [...]} (schema's canonical shape).
            return {'values': v} if v and isinstance(v, (tuple, list)) else v

        def stringify(f):
            # Floats only stringify cleanly within the 53-bit integer range;
            # anything larger is dropped (returns None).
            if isinstance(f, float):
                return text(int(f)) if abs(f) < (1 << 53) else None
            return text(f)

        # Per-key coercion functions; any exception discards the key below.
        casts = {
            'environment': lambda v: text(v) if v is not None else v,
            'fingerprint': lambda v: list(x for x in map(stringify, v) if x is not None)
            if isinstance(v, list) and all(isinstance(f, fp_types) for f in v)
            else v,
            'release': lambda v: text(v) if v is not None else v,
            'dist': lambda v: text(v).strip() if v is not None else v,
            'time_spent': lambda v: int(v) if v is not None else v,
            'tags': lambda v: [(text(v_k).replace(' ', '-').strip(), text(v_v).strip())
                               for (v_k, v_v) in dict(v).items()],
            'timestamp': lambda v: process_timestamp(v),
            'platform': lambda v: v if v in VALID_PLATFORMS else 'other',
            'logentry': lambda v: v if isinstance(v, dict) else {'message': v},

            # These can be sent as lists and need to be converted to {'values': [...]}
            'exception': to_values,
            'breadcrumbs': to_values,
            'threads': to_values,
        }

        for c in casts:
            if c in data:
                try:
                    data[c] = casts[c](data[c])
                except InvalidTimestamp as it:
                    # Timestamp errors carry their own error type in args[0].
                    errors.append({'type': it.args[0], 'name': c, 'value': data[c]})
                    del data[c]
                except Exception as e:
                    errors.append({'type': EventError.INVALID_DATA, 'name': c, 'value': data[c]})
                    del data[c]

        # raw 'message' is coerced to the Message interface, as its used for pure index of
        # searchable strings. If both a raw 'message' and a Message interface exist, try and
        # add the former as the 'formatted' attribute of the latter.
        # See GH-3248
        msg_str = data.pop('message', None)
        if msg_str:
            msg_if = data.get('logentry')
            msg_meta = data.get('_meta', {}).get('message')

            if not msg_if:
                msg_if = data['logentry'] = {'message': msg_str}
                if msg_meta:
                    data.setdefault('_meta', {}).setdefault('logentry', {})['message'] = msg_meta

            if msg_if.get('message') != msg_str:
                if not msg_if.get('formatted'):
                    msg_if['formatted'] = msg_str
                    if msg_meta:
                        data.setdefault('_meta', {}).setdefault(
                            'logentry', {})['formatted'] = msg_meta

        # Fill in ip addresses marked as {{auto}}
        client_ip = request_env.get('client_ip')
        if client_ip:
            if get_path(data, ['sentry.interfaces.Http', 'env', 'REMOTE_ADDR']) == '{{auto}}':
                data['sentry.interfaces.Http']['env']['REMOTE_ADDR'] = client_ip

            if get_path(data, ['request', 'env', 'REMOTE_ADDR']) == '{{auto}}':
                data['request']['env']['REMOTE_ADDR'] = client_ip

            if get_path(data, ['sentry.interfaces.User', 'ip_address']) == '{{auto}}':
                data['sentry.interfaces.User']['ip_address'] = client_ip

            if get_path(data, ['user', 'ip_address']) == '{{auto}}':
                data['user']['ip_address'] = client_ip

        # Validate main event body and tags against schema.
        # XXX(ja): jsonschema does not like CanonicalKeyDict, so we need to pass
        # in the inner data dict.
        is_valid, event_errors = validate_and_default_interface(data.data, 'event')
        errors.extend(event_errors)
        if 'tags' in data:
            is_valid, tag_errors = validate_and_default_interface(data['tags'], 'tags', name='tags')
            errors.extend(tag_errors)

        # Validate interfaces
        for k in list(iter(data)):
            if k in CLIENT_RESERVED_ATTRS:
                continue

            value = data.pop(k)

            if not value:
                self.logger.debug('Ignored empty interface value: %s', k)
                continue

            try:
                interface = get_interface(k)
            except ValueError:
                self.logger.debug('Ignored unknown attribute: %s', k)
                errors.append({'type': EventError.INVALID_ATTRIBUTE, 'name': k})
                continue

            try:
                inst = interface.to_python(value)
                data[inst.get_path()] = inst.to_json()
            except Exception as e:
                # Client-side validation problems log at debug; anything else
                # is unexpected and logged as an error.
                log = self.logger.debug if isinstance(
                    e, InterfaceValidationError) else self.logger.error
                log('Discarded invalid value for interface: %s (%r)', k, value, exc_info=True)
                errors.append({'type': EventError.INVALID_DATA, 'name': k, 'value': value})

        # Additional data coercion and defaulting
        level = data.get('level') or DEFAULT_LOG_LEVEL
        if isinstance(level, int) or (isinstance(level, six.string_types) and level.isdigit()):
            level = LOG_LEVELS.get(int(level), DEFAULT_LOG_LEVEL)
        data['level'] = LOG_LEVELS_MAP.get(level, LOG_LEVELS_MAP[DEFAULT_LOG_LEVEL])

        # A dist is meaningless without a release.
        if data.get('dist') and not data.get('release'):
            data['dist'] = None

        timestamp = data.get('timestamp')
        if not timestamp:
            timestamp = timezone.now()

        # TODO (alex) can this all be replaced by utcnow?
        # it looks like the only time that this would even be hit is when timestamp
        # is not defined, as the earlier process_timestamp already converts existing
        # timestamps to floats.
        if isinstance(timestamp, datetime):
            # We must convert date to local time so Django doesn't mess it up
            # based on TIME_ZONE
            if settings.TIME_ZONE:
                if not timezone.is_aware(timestamp):
                    timestamp = timestamp.replace(tzinfo=timezone.utc)
            elif timezone.is_aware(timestamp):
                timestamp = timestamp.replace(tzinfo=None)
            timestamp = float(timestamp.strftime('%s'))

        data['timestamp'] = timestamp
        data['received'] = float(timezone.now().strftime('%s'))

        data.setdefault('checksum', None)
        data.setdefault('culprit', None)
        data.setdefault('dist', None)
        data.setdefault('environment', None)
        data.setdefault('extra', {})
        data.setdefault('fingerprint', None)
        data.setdefault('logger', DEFAULT_LOGGER_NAME)
        data.setdefault('platform', None)
        data.setdefault('server_name', None)
        data.setdefault('site', None)
        data.setdefault('tags', [])
        data.setdefault('transaction', None)

        # Fix case where legacy apps pass 'environment' as a tag
        # instead of a top level key.
        # TODO (alex) save() just reinserts the environment into the tags
        if not data.get('environment'):
            tagsdict = dict(data['tags'])
            if 'environment' in tagsdict:
                data['environment'] = tagsdict['environment']
                del tagsdict['environment']
                data['tags'] = tagsdict.items()

        # the SDKs currently do not describe event types, and we must infer
        # them from available attributes
        data['type'] = eventtypes.infer(data).key
        data['version'] = self.version

        exception = data.get('sentry.interfaces.Exception')
        stacktrace = data.get('sentry.interfaces.Stacktrace')
        if exception and len(exception['values']) == 1 and stacktrace:
            # A single exception plus a top-level stacktrace: fold the
            # stacktrace into the exception where it belongs.
            exception['values'][0]['stacktrace'] = stacktrace
            del data['sentry.interfaces.Stacktrace']

        # Exception mechanism needs SDK information to resolve proper names in
        # exception meta (such as signal names). "SDK Information" really means
        # the operating system version the event was generated on. Some
        # normalization still works without sdk_info, such as mach_exception
        # names (they can only occur on macOS).
        if exception:
            sdk_info = get_sdk_from_event(data)
            for ex in exception['values']:
                if 'mechanism' in ex:
                    normalize_mechanism_meta(ex['mechanism'], sdk_info)

        # If there is no User ip_addres, update it either from the Http interface
        # or the client_ip of the request.
        auth = request_env.get('auth')
        is_public = auth and auth.is_public
        add_ip_platforms = ('javascript', 'cocoa', 'objc')

        http_ip = data.get('sentry.interfaces.Http', {}).get('env', {}).get('REMOTE_ADDR')
        if http_ip:
            data.setdefault('sentry.interfaces.User', {}).setdefault('ip_address', http_ip)
        elif client_ip and (is_public or data.get('platform') in add_ip_platforms):
            data.setdefault('sentry.interfaces.User', {}).setdefault('ip_address', client_ip)

        # Trim values
        data['logger'] = trim(data['logger'].strip(), 64)
        trim_dict(data['extra'], max_size=settings.SENTRY_MAX_EXTRA_VARIABLE_SIZE)

        if data['culprit']:
            data['culprit'] = trim(data['culprit'], MAX_CULPRIT_LENGTH)

        if data['transaction']:
            data['transaction'] = trim(data['transaction'], MAX_CULPRIT_LENGTH)

        return data
def serialize(self, obj, attrs, user): status = obj.status status_details = {} if attrs['snooze']: if attrs['snooze'] < timezone.now( ) and status == GroupStatus.MUTED: status = GroupStatus.UNRESOLVED else: status_details['snoozeUntil'] = attrs['snooze'] elif status == GroupStatus.UNRESOLVED and obj.is_over_resolve_age(): status = GroupStatus.RESOLVED status_details['autoResolved'] = True if status == GroupStatus.RESOLVED: status_label = 'resolved' if attrs['pending_resolution']: status_details['inNextRelease'] = True elif status == GroupStatus.MUTED: status_label = 'muted' elif status in [ GroupStatus.PENDING_DELETION, GroupStatus.DELETION_IN_PROGRESS ]: status_label = 'pending_deletion' elif status == GroupStatus.PENDING_MERGE: status_label = 'pending_merge' else: status_label = 'unresolved' permalink = absolute_uri( reverse('sentry-group', args=[obj.organization.slug, obj.project.slug, obj.id])) event_type = obj.data.get('type', 'default') metadata = obj.data.get('metadata') or { 'title': obj.message_short, } # TODO(dcramer): remove in 8.6+ if event_type == 'error': if 'value' in metadata: metadata['value'] = unicode(metadata['value']) if 'type' in metadata: metadata['type'] = unicode(metadata['type']) return { 'id': str(obj.id), 'shareId': obj.get_share_id(), 'shortId': obj.qualified_short_id, 'count': str(obj.times_seen), 'userCount': attrs['user_count'], 'title': obj.message_short, 'culprit': obj.culprit, 'permalink': permalink, 'firstSeen': obj.first_seen, 'lastSeen': obj.last_seen, 'logger': obj.logger or None, 'level': LOG_LEVELS.get(obj.level, 'unknown'), 'status': status_label, 'statusDetails': status_details, 'isPublic': obj.is_public, 'project': { 'name': obj.project.name, 'slug': obj.project.slug, }, 'type': event_type, 'metadata': metadata, 'numComments': obj.num_comments, 'assignedTo': attrs['assigned_to'], 'isBookmarked': attrs['is_bookmarked'], 'hasSeen': attrs['has_seen'], 'annotations': attrs['annotations'], }
    def serialize(self, obj, attrs, user):
        """Build the REST API payload for a Group.

        ``attrs`` carries per-group data precomputed by the serializer's
        attrs pass: snooze state (``ignore_until``), counts, resolution info,
        subscription state, assignee, and share id.
        """
        status = obj.status
        status_details = {}
        if attrs["ignore_until"]:
            snooze = attrs["ignore_until"]
            if snooze.is_valid(group=obj):
                # counts return the delta remaining when window is not set
                status_details.update({
                    "ignoreCount": (snooze.count - (obj.times_seen - snooze.state["times_seen"])
                                    if snooze.count and not snooze.window else snooze.count),
                    "ignoreUntil": snooze.until,
                    "ignoreUserCount": (snooze.user_count - (attrs["user_count"] -
                                                             snooze.state["users_seen"])
                                        if snooze.user_count and not snooze.user_window
                                        else snooze.user_count),
                    "ignoreUserWindow": snooze.user_window,
                    "ignoreWindow": snooze.window,
                    "actor": attrs["ignore_actor"],
                })
            else:
                # Snooze expired: present the group as unresolved again.
                status = GroupStatus.UNRESOLVED
        if status == GroupStatus.UNRESOLVED and obj.is_over_resolve_age():
            status = GroupStatus.RESOLVED
            status_details["autoResolved"] = True
        if status == GroupStatus.RESOLVED:
            status_label = "resolved"
            if attrs["resolution_type"] == "release":
                res_type, res_version, _ = attrs["resolution"]
                if res_type in (GroupResolution.Type.in_next_release, None):
                    status_details["inNextRelease"] = True
                elif res_type == GroupResolution.Type.in_release:
                    status_details["inRelease"] = res_version
                status_details["actor"] = attrs["resolution_actor"]
            elif attrs["resolution_type"] == "commit":
                status_details["inCommit"] = attrs["resolution"]
        elif status == GroupStatus.IGNORED:
            status_label = "ignored"
        elif status in [
            GroupStatus.PENDING_DELETION, GroupStatus.DELETION_IN_PROGRESS
        ]:
            status_label = "pending_deletion"
        elif status == GroupStatus.PENDING_MERGE:
            status_label = "pending_merge"
        else:
            status_label = "unresolved"

        # If user is not logged in and member of the organization,
        # do not return the permalink which contains private information i.e. org name.
        request = env.request
        is_superuser = request and is_active_superuser(
            request) and request.user == user
        # If user is a sentry_app then it's a proxy user meaning we can't do a org lookup via `get_orgs()`
        # because the user isn't an org member. Instead we can use the auth token and the installation
        # it's associated with to find out what organization the token has access to.
        is_valid_sentryapp = False
        if (request and getattr(request.user, "is_sentry_app", False)
                and isinstance(request.auth, ApiToken)):
            is_valid_sentryapp = SentryAppInstallationToken.has_organization_access(
                request.auth, obj.organization)

        if (is_superuser or
                (user.is_authenticated() and
                 user.get_orgs().filter(id=obj.organization.id).exists())
                or is_valid_sentryapp):
            permalink = obj.get_absolute_url()
        else:
            permalink = None

        subscription_details = None
        # `disabled` is a sentinel meaning subscriptions are turned off entirely.
        if attrs["subscription"] is not disabled:
            is_subscribed, subscription = attrs["subscription"]
            if subscription is not None and subscription.is_active:
                subscription_details = {
                    "reason": SUBSCRIPTION_REASON_MAP.get(subscription.reason, "unknown")
                }
        else:
            is_subscribed = False
            subscription_details = {"disabled": True}

        share_id = attrs["share_id"]
        return {
            "id": six.text_type(obj.id),
            "shareId": share_id,
            "shortId": obj.qualified_short_id,
            "count": six.text_type(attrs["times_seen"]),
            "userCount": attrs["user_count"],
            "title": obj.title,
            "culprit": obj.culprit,
            "permalink": permalink,
            "firstSeen": attrs["first_seen"],
            "lastSeen": attrs["last_seen"],
            "logger": obj.logger or None,
            "level": LOG_LEVELS.get(obj.level, "unknown"),
            "status": status_label,
            "statusDetails": status_details,
            # A group is public exactly when a share id exists.
            "isPublic": share_id is not None,
            "platform": obj.platform,
            "project": {
                "id": six.text_type(obj.project.id),
                "name": obj.project.name,
                "slug": obj.project.slug,
                "platform": obj.project.platform,
            },
            "type": obj.get_event_type(),
            "metadata": obj.get_event_metadata(),
            "numComments": obj.num_comments,
            "assignedTo": serialize(attrs["assigned_to"], user, ActorSerializer()),
            "isBookmarked": attrs["is_bookmarked"],
            "isSubscribed": is_subscribed,
            "subscriptionDetails": subscription_details,
            "hasSeen": attrs["has_seen"],
            "annotations": attrs["annotations"],
        }
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import from collections import OrderedDict from django import forms from sentry.constants import LOG_LEVELS, LOG_LEVELS_MAP from sentry.rules.conditions.base import EventCondition LEVEL_CHOICES = OrderedDict( [(u"{0}".format(k), v) for k, v in sorted(LOG_LEVELS.items(), key=lambda x: x[0], reverse=True)] ) class MatchType(object): EQUAL = 'eq' LESS_OR_EQUAL = 'lte' GREATER_OR_EQUAL = 'gte' MATCH_CHOICES = OrderedDict( [ (MatchType.EQUAL, 'equal to'), (MatchType.LESS_OR_EQUAL, 'less than or equal to'), (MatchType.GREATER_OR_EQUAL, 'greater than or equal to') ] )
from collections import OrderedDict from django import forms from sentry.constants import LOG_LEVELS, LOG_LEVELS_MAP from sentry.rules.conditions.base import EventCondition LEVEL_CHOICES = OrderedDict([ ("{0}".format(k), v) for k, v in sorted(LOG_LEVELS.items(), key=lambda x: x[0], reverse=True) ]) class MatchType(object): EQUAL = "eq" LESS_OR_EQUAL = "lte" GREATER_OR_EQUAL = "gte" MATCH_CHOICES = OrderedDict([ (MatchType.EQUAL, "equal to"), (MatchType.LESS_OR_EQUAL, "less than or equal to"), (MatchType.GREATER_OR_EQUAL, "greater than or equal to"), ]) class LevelEventForm(forms.Form): level = forms.ChoiceField(choices=list(LEVEL_CHOICES.items())) match = forms.ChoiceField(choices=list(MATCH_CHOICES.items()))
from sentry.utils import settings from sentry.plugins import Plugin from sentry.conf import server from sentry.utils.http import absolute_uri from sentry.web.helpers import render_to_string from sentry.constants import LOG_LEVELS import sentry_pushover import requests message_template = 'sentry_pushover/error.txt' message_template_alert = 'sentry_pushover/alert.txt' choices_levels = ((i, level.upper()) for i, level in LOG_LEVELS.iteritems()) choices_sounds = (( ('pushover', 'Pushover (default)'), ('bike', 'Bike'), ('bugle', 'Bugle'), ('cashregister', 'Cash Register'), ('classical', 'Classical'), ('cosmic', 'Cosmic'), ('falling', 'Falling'), ('gamelan', 'Gamelan'), ('incoming', 'Incoming'), ('intermission', 'Intermission'), ('magic', 'Magic'), ('mechanical', 'Mechanical'), ('pianobar', 'Piano Bar'), ('siren', 'Siren'),