def serialize(self, obj, attrs, user):
    """Serialize a group (``obj``) plus its precomputed ``attrs`` into an API payload.

    ``attrs`` is assumed to be the dict built by this serializer's attribute
    collection phase (keys used here: ``ignore_duration``,
    ``pending_resolution``, ``user_count``, ``assigned_to``,
    ``is_bookmarked``, ``is_subscribed``, ``has_seen``, ``annotations``)
    -- TODO confirm against the matching ``get_attrs``.
    """
    status = obj.status
    status_details = {}
    if attrs['ignore_duration']:
        # ignore_duration is compared against timezone.now(), so it is an
        # aware datetime: an expired snooze flips the group back to
        # UNRESOLVED, an active one is surfaced as 'ignoreUntil'.
        if attrs['ignore_duration'] < timezone.now(
        ) and status == GroupStatus.IGNORED:
            status = GroupStatus.UNRESOLVED
        else:
            status_details['ignoreUntil'] = attrs['ignore_duration']
    elif status == GroupStatus.UNRESOLVED and obj.is_over_resolve_age():
        # Auto-resolve groups that have outlived the project's resolve age.
        status = GroupStatus.RESOLVED
        status_details['autoResolved'] = True
    if status == GroupStatus.RESOLVED:
        status_label = 'resolved'
        if attrs['pending_resolution']:
            status_details['inNextRelease'] = True
    elif status == GroupStatus.IGNORED:
        status_label = 'ignored'
    elif status in [
        GroupStatus.PENDING_DELETION, GroupStatus.DELETION_IN_PROGRESS
    ]:
        # Both deletion states are reported under a single label.
        status_label = 'pending_deletion'
    elif status == GroupStatus.PENDING_MERGE:
        status_label = 'pending_merge'
    else:
        status_label = 'unresolved'
    permalink = absolute_uri(
        reverse('sentry-group',
                args=[obj.organization.slug, obj.project.slug, obj.id]))
    return {
        # ids and counts are serialized as strings
        'id': six.text_type(obj.id),
        'shareId': obj.get_share_id(),
        'shortId': obj.qualified_short_id,
        'count': six.text_type(obj.times_seen),
        'userCount': attrs['user_count'],
        'title': obj.title,
        'culprit': obj.culprit,
        'permalink': permalink,
        'firstSeen': obj.first_seen,
        'lastSeen': obj.last_seen,
        'logger': obj.logger or None,
        'level': LOG_LEVELS.get(obj.level, 'unknown'),
        'status': status_label,
        'statusDetails': status_details,
        'isPublic': obj.is_public,
        'project': {
            'name': obj.project.name,
            'slug': obj.project.slug,
        },
        'type': obj.get_event_type(),
        'metadata': obj.get_event_metadata(),
        'numComments': obj.num_comments,
        'assignedTo': attrs['assigned_to'],
        'isBookmarked': attrs['is_bookmarked'],
        'isSubscribed': attrs['is_subscribed'],
        'hasSeen': attrs['has_seen'],
        'annotations': attrs['annotations'],
    }
def serialize(self, obj, attrs, user):
    """Serialize a group into an API payload (snooze/MUTED era variant).

    ``attrs`` is assumed to carry ``snooze`` (an aware datetime or falsy),
    ``pending_resolution``, ``user_count``, ``assigned_to``,
    ``is_bookmarked``, ``has_seen`` and ``annotations`` -- TODO confirm
    against the matching attribute collector.
    """
    status = obj.status
    status_details = {}
    if attrs['snooze']:
        # An expired snooze on a MUTED group reverts it to UNRESOLVED;
        # an active snooze is surfaced as 'snoozeUntil'.
        if attrs['snooze'] < timezone.now() and status == GroupStatus.MUTED:
            status = GroupStatus.UNRESOLVED
        else:
            status_details['snoozeUntil'] = attrs['snooze']
    elif status == GroupStatus.UNRESOLVED and obj.is_over_resolve_age():
        # Auto-resolve groups past the project's resolve age.
        status = GroupStatus.RESOLVED
        status_details['autoResolved'] = True
    if status == GroupStatus.RESOLVED:
        status_label = 'resolved'
        if attrs['pending_resolution']:
            status_details['inNextRelease'] = True
    elif status == GroupStatus.MUTED:
        status_label = 'muted'
    elif status in [GroupStatus.PENDING_DELETION, GroupStatus.DELETION_IN_PROGRESS]:
        status_label = 'pending_deletion'
    elif status == GroupStatus.PENDING_MERGE:
        status_label = 'pending_merge'
    else:
        status_label = 'unresolved'
    permalink = absolute_uri(reverse('sentry-group', args=[
        obj.organization.slug, obj.project.slug, obj.id]))
    # Event type/metadata are read straight from the stored event data here
    # (rather than via helper methods), with a title-only metadata fallback.
    event_type = obj.data.get('type', 'default')
    metadata = obj.data.get('metadata') or {
        'title': obj.message_short,
    }
    return {
        'id': str(obj.id),
        'shareId': obj.get_share_id(),
        'shortId': obj.qualified_short_id,
        'count': str(obj.times_seen),
        'userCount': attrs['user_count'],
        'title': obj.message_short,
        'culprit': obj.culprit,
        'permalink': permalink,
        'firstSeen': obj.first_seen,
        'lastSeen': obj.last_seen,
        'logger': obj.logger or None,
        'level': LOG_LEVELS.get(obj.level, 'unknown'),
        'status': status_label,
        'statusDetails': status_details,
        'isPublic': obj.is_public,
        'project': {
            'name': obj.project.name,
            'slug': obj.project.slug,
        },
        'type': event_type,
        'metadata': metadata,
        'numComments': obj.num_comments,
        'assignedTo': attrs['assigned_to'],
        'isBookmarked': attrs['is_bookmarked'],
        'hasSeen': attrs['has_seen'],
        'annotations': attrs['annotations'],
    }
def serialize(self, obj, attrs, user):
    """Serialize ``obj`` into a minimal event-style payload.

    Only identity, severity, message/culprit, event type and the acting
    user (from ``attrs``) are exposed.
    """
    level_name = LOG_LEVELS.get(obj.level, 'unknown')
    payload = {
        'id': six.text_type(obj.id),
        'level': level_name,
        'message': obj.message,
        'culprit': obj.culprit,
        'type': obj.get_event_type(),
        'actor': attrs.get('user'),
    }
    return payload
def serialize(self, obj, attrs, user):
    """Serialize ``obj`` into a minimal event-style payload with metadata.

    Mirrors the slim event representation: identity, severity,
    message/culprit, event type plus metadata, and the acting user.
    """
    payload = {}
    payload['id'] = six.text_type(obj.id)
    payload['level'] = LOG_LEVELS.get(obj.level, 'unknown')
    payload['message'] = obj.message
    payload['culprit'] = obj.culprit
    payload['type'] = obj.get_event_type()
    payload['metadata'] = obj.get_event_metadata()
    payload['actor'] = attrs.get('user')
    return payload
def serialize(self, obj, attrs, user): return { "id": six.text_type(obj.id), "level": LOG_LEVELS.get(obj.level, "unknown"), "message": obj.message, "culprit": obj.culprit, "type": obj.get_event_type(), "metadata": obj.get_event_metadata(), "actor": attrs.get("user"), }
def serialize(self, obj, attrs, user):
    """Serialize a group into an API payload (pre-statusDetails variant).

    Exposes the snooze timestamp directly as ``snoozeUntil`` and only
    builds a permalink when the group's project still has a team attached.
    """
    status = obj.status
    if attrs['snooze']:
        # An expired snooze on a MUTED group reverts it to UNRESOLVED.
        if attrs['snooze'] < timezone.now() and status == GroupStatus.MUTED:
            status = GroupStatus.UNRESOLVED
    elif status == GroupStatus.UNRESOLVED and obj.is_over_resolve_age():
        status = GroupStatus.RESOLVED
    if status == GroupStatus.RESOLVED:
        status_label = 'resolved'
    elif status == GroupStatus.MUTED:
        status_label = 'muted'
    elif status in [GroupStatus.PENDING_DELETION, GroupStatus.DELETION_IN_PROGRESS]:
        status_label = 'pending_deletion'
    elif status == GroupStatus.PENDING_MERGE:
        status_label = 'pending_merge'
    else:
        status_label = 'unresolved'
    if obj.team:
        permalink = absolute_uri(reverse('sentry-group', args=[
            obj.organization.slug, obj.project.slug, obj.id]))
    else:
        # Teamless groups get no permalink.
        permalink = None
    return {
        'id': str(obj.id),
        'shareId': obj.get_share_id(),
        'count': str(obj.times_seen),
        'userCount': attrs['user_count'],
        'title': obj.message_short,
        'culprit': obj.culprit,
        'permalink': permalink,
        'firstSeen': obj.first_seen,
        'lastSeen': obj.last_seen,
        'timeSpent': obj.avg_time_spent,
        'logger': obj.logger or None,
        'level': LOG_LEVELS.get(obj.level, 'unknown'),
        'status': status_label,
        # NOTE(review): the raw snooze value is returned even when the
        # snooze has expired above -- presumably clients treat a past
        # timestamp as inactive; confirm before relying on it.
        'snoozeUntil': attrs['snooze'],
        'isPublic': obj.is_public,
        'project': {
            'name': obj.project.name,
            'slug': obj.project.slug,
        },
        'numComments': obj.num_comments,
        'assignedTo': attrs['assigned_to'],
        'isBookmarked': attrs['is_bookmarked'],
        'hasSeen': attrs['has_seen'],
        'annotations': attrs['annotations'],
    }
def serialize(self, obj, attrs, user):
    """Serialize a group into an API payload.

    The status label is derived from ``obj.get_status()``; the permalink
    is only exposed when the group's project still has a team attached.
    """
    label_by_status = {
        GroupStatus.RESOLVED: 'resolved',
        GroupStatus.MUTED: 'muted',
        GroupStatus.PENDING_DELETION: 'pending_deletion',
        GroupStatus.DELETION_IN_PROGRESS: 'pending_deletion',
        GroupStatus.PENDING_MERGE: 'pending_merge',
    }
    status_label = label_by_status.get(obj.get_status(), 'unresolved')

    permalink = None
    if obj.team:
        group_url = reverse(
            'sentry-group',
            args=[obj.organization.slug, obj.project.slug, obj.id])
        permalink = absolute_uri(group_url)

    return {
        'id': str(obj.id),
        'shareId': obj.get_share_id(),
        'count': str(obj.times_seen),
        'userCount': attrs['user_count'],
        'title': obj.message_short,
        'culprit': obj.culprit,
        'permalink': permalink,
        'firstSeen': obj.first_seen,
        'lastSeen': obj.last_seen,
        'timeSpent': obj.avg_time_spent,
        'logger': obj.logger or None,
        'level': LOG_LEVELS.get(obj.level, 'unknown'),
        'status': status_label,
        'isPublic': obj.is_public,
        'project': {
            'name': obj.project.name,
            'slug': obj.project.slug,
        },
        'numComments': obj.num_comments,
        'assignedTo': attrs['assigned_to'],
        'isBookmarked': attrs['is_bookmarked'],
        'hasSeen': attrs['has_seen'],
        'annotations': attrs['annotations'],
    }
def serialize(self, obj, attrs, user):
    """Serialize a group into an API payload via the serializer's helpers.

    Status, permalink and subscription details are delegated to
    ``_get_status``, ``_get_permalink`` and
    ``get_subscription_from_attributes``; seen stats are merged in only
    when present in ``attrs``.
    """
    status_details, status_label = self._get_status(attrs, obj)
    permalink = self._get_permalink(obj, user)
    is_subscribed, subscription_details = get_subscription_from_attributes(
        attrs)
    share_id = attrs["share_id"]
    group_dict = {
        "id": str(obj.id),
        "shareId": share_id,
        "shortId": obj.qualified_short_id,
        "title": obj.title,
        "culprit": obj.culprit,
        "permalink": permalink,
        "logger": obj.logger or None,
        "level": LOG_LEVELS.get(obj.level, "unknown"),
        "status": status_label,
        "statusDetails": status_details,
        # A group is public exactly when it has a share id.
        "isPublic": share_id is not None,
        "platform": obj.platform,
        "project": {
            "id": str(obj.project.id),
            "name": obj.project.name,
            "slug": obj.project.slug,
            "platform": obj.project.platform,
        },
        "type": obj.get_event_type(),
        "metadata": obj.get_event_metadata(),
        "numComments": obj.num_comments,
        "assignedTo": serialize(attrs["assigned_to"], user, ActorSerializer()),
        "isBookmarked": attrs["is_bookmarked"],
        "isSubscribed": is_subscribed,
        "subscriptionDetails": subscription_details,
        "hasSeen": attrs["has_seen"],
        "annotations": attrs["annotations"],
    }
    # This attribute is currently feature gated
    if "is_unhandled" in attrs:
        group_dict["isUnhandled"] = attrs["is_unhandled"]
    # Seen stats (count/firstSeen/lastSeen/...) are optional and merged in
    # as a block when the attribute collector provided them.
    if "times_seen" in attrs:
        group_dict.update(self._convert_seen_stats(attrs))
    return group_dict
def serialize(self, obj, attrs, user):
    """Serialize a group into an API payload (early three-state variant).

    Only resolved/muted are labeled explicitly; everything else reports
    as 'unresolved'. The permalink requires the project to have a team.
    """
    current = obj.get_status()
    if current == GroupStatus.RESOLVED:
        status_label = 'resolved'
    else:
        status_label = 'muted' if current == GroupStatus.MUTED else 'unresolved'

    permalink = (
        absolute_uri(reverse('sentry-group', args=[
            obj.organization.slug, obj.project.slug, obj.id]))
        if obj.team
        else None
    )

    payload = {
        'id': str(obj.id),
        'shareId': obj.get_share_id(),
        'count': str(obj.times_seen),
        'title': obj.message_short,
        'culprit': obj.culprit,
        'permalink': permalink,
        'firstSeen': obj.first_seen,
        'lastSeen': obj.last_seen,
        'timeSpent': obj.avg_time_spent,
        'logger': obj.logger or None,
        'level': LOG_LEVELS.get(obj.level, 'unknown'),
        'status': status_label,
        'isPublic': obj.is_public,
        'project': {
            'name': obj.project.name,
            'slug': obj.project.slug,
        },
        'numComments': obj.num_comments,
        'assignedTo': attrs['assigned_to'],
        'isBookmarked': attrs['is_bookmarked'],
        'hasSeen': attrs['has_seen'],
        'tags': attrs['tags'],
        'annotations': attrs['annotations'],
    }
    return payload
def serialize(self, obj, attrs, user):
    """Serialize a group into an API payload via the serializer's helpers.

    Counts and timestamps come from ``attrs`` (precomputed), while status,
    permalink and subscription state are delegated to ``_get_status``,
    ``_get_permalink`` and ``_get_subscription``.
    """
    status_details, status_label = self._get_status(attrs, obj)
    permalink = self._get_permalink(obj, user)
    is_subscribed, subscription_details = self._get_subscription(attrs)
    share_id = attrs["share_id"]
    return {
        "id": six.text_type(obj.id),
        "shareId": share_id,
        "shortId": obj.qualified_short_id,
        # Counts are serialized as strings.
        "count": six.text_type(attrs["times_seen"]),
        "userCount": attrs["user_count"],
        "title": obj.title,
        "culprit": obj.culprit,
        "permalink": permalink,
        "firstSeen": attrs["first_seen"],
        "lastSeen": attrs["last_seen"],
        "logger": obj.logger or None,
        "level": LOG_LEVELS.get(obj.level, "unknown"),
        "status": status_label,
        "statusDetails": status_details,
        # A group is public exactly when it has a share id.
        "isPublic": share_id is not None,
        "platform": obj.platform,
        "project": {
            "id": six.text_type(obj.project.id),
            "name": obj.project.name,
            "slug": obj.project.slug,
            "platform": obj.project.platform,
        },
        "type": obj.get_event_type(),
        "metadata": obj.get_event_metadata(),
        "numComments": obj.num_comments,
        "assignedTo": serialize(attrs["assigned_to"], user, ActorSerializer()),
        "isBookmarked": attrs["is_bookmarked"],
        "isSubscribed": is_subscribed,
        "subscriptionDetails": subscription_details,
        "hasSeen": attrs["has_seen"],
        "annotations": attrs["annotations"],
    }
def serialize(self, obj, attrs, user):
    """Serialize a group into an API payload, with snooze/resolution detail.

    ``attrs['ignore_until']`` is assumed to be a GroupSnooze-like object
    (``is_valid``, ``count``, ``window``, ``until``, ``state`` ...) --
    TODO confirm against the attribute collector. Permalink visibility is
    gated on superuser status, org membership, or a sentry-app token with
    access to the group's organization.
    """
    status = obj.status
    status_details = {}
    if attrs["ignore_until"]:
        snooze = attrs["ignore_until"]
        if snooze.is_valid(group=obj):
            # counts return the delta remaining when window is not set
            status_details.update({
                "ignoreCount": (snooze.count - (obj.times_seen - snooze.state["times_seen"])
                                if snooze.count and not snooze.window
                                else snooze.count),
                "ignoreUntil": snooze.until,
                "ignoreUserCount": (snooze.user_count - (attrs["user_count"] - snooze.state["users_seen"])
                                    if snooze.user_count and not snooze.user_window
                                    else snooze.user_count),
                "ignoreUserWindow": snooze.user_window,
                "ignoreWindow": snooze.window,
                "actor": attrs["ignore_actor"],
            })
        else:
            # Snooze no longer valid: treat the group as unresolved.
            status = GroupStatus.UNRESOLVED
    if status == GroupStatus.UNRESOLVED and obj.is_over_resolve_age():
        status = GroupStatus.RESOLVED
        status_details["autoResolved"] = True
    if status == GroupStatus.RESOLVED:
        status_label = "resolved"
        if attrs["resolution_type"] == "release":
            res_type, res_version, _ = attrs["resolution"]
            if res_type in (GroupResolution.Type.in_next_release, None):
                status_details["inNextRelease"] = True
            elif res_type == GroupResolution.Type.in_release:
                status_details["inRelease"] = res_version
            status_details["actor"] = attrs["resolution_actor"]
        elif attrs["resolution_type"] == "commit":
            status_details["inCommit"] = attrs["resolution"]
    elif status == GroupStatus.IGNORED:
        status_label = "ignored"
    elif status in [
        GroupStatus.PENDING_DELETION, GroupStatus.DELETION_IN_PROGRESS
    ]:
        status_label = "pending_deletion"
    elif status == GroupStatus.PENDING_MERGE:
        status_label = "pending_merge"
    else:
        status_label = "unresolved"

    # If user is not logged in and member of the organization,
    # do not return the permalink which contains private information i.e. org name.
    request = env.request
    is_superuser = request and is_active_superuser(
        request) and request.user == user
    # If user is a sentry_app then it's a proxy user meaning we can't do a org lookup via `get_orgs()`
    # because the user isn't an org member. Instead we can use the auth token and the installation
    # it's associated with to find out what organization the token has access to.
    is_valid_sentryapp = False
    if (request and getattr(request.user, "is_sentry_app", False)
            and isinstance(request.auth, ApiToken)):
        is_valid_sentryapp = SentryAppInstallationToken.has_organization_access(
            request.auth, obj.organization)
    if (is_superuser
            or (user.is_authenticated() and user.get_orgs().filter(id=obj.organization.id).exists())
            or is_valid_sentryapp):
        permalink = obj.get_absolute_url()
    else:
        permalink = None

    subscription_details = None
    # attrs["subscription"] is either the sentinel `disabled` or an
    # (is_subscribed, subscription) pair.
    if attrs["subscription"] is not disabled:
        is_subscribed, subscription = attrs["subscription"]
        if subscription is not None and subscription.is_active:
            subscription_details = {
                "reason": SUBSCRIPTION_REASON_MAP.get(subscription.reason, "unknown")
            }
    else:
        is_subscribed = False
        subscription_details = {"disabled": True}

    share_id = attrs["share_id"]
    return {
        "id": six.text_type(obj.id),
        "shareId": share_id,
        "shortId": obj.qualified_short_id,
        "count": six.text_type(attrs["times_seen"]),
        "userCount": attrs["user_count"],
        "title": obj.title,
        "culprit": obj.culprit,
        "permalink": permalink,
        "firstSeen": attrs["first_seen"],
        "lastSeen": attrs["last_seen"],
        "logger": obj.logger or None,
        "level": LOG_LEVELS.get(obj.level, "unknown"),
        "status": status_label,
        "statusDetails": status_details,
        "isPublic": share_id is not None,
        "platform": obj.platform,
        "project": {
            "id": six.text_type(obj.project.id),
            "name": obj.project.name,
            "slug": obj.project.slug,
            "platform": obj.project.platform,
        },
        "type": obj.get_event_type(),
        "metadata": obj.get_event_metadata(),
        "numComments": obj.num_comments,
        "assignedTo": serialize(attrs["assigned_to"], user, ActorSerializer()),
        "isBookmarked": attrs["is_bookmarked"],
        "isSubscribed": is_subscribed,
        "subscriptionDetails": subscription_details,
        "hasSeen": attrs["has_seen"],
        "annotations": attrs["annotations"],
    }
def normalize(self, request_env=None):
    """Normalize raw event data in place and return it.

    Coerces loosely-typed client fields, folds the raw ``message`` into the
    logentry interface, resolves ``{{auto}}`` ip markers, validates against
    the event schema, converts interface payloads to canonical JSON, fills
    defaults, and trims oversized values. Validation problems are
    accumulated into ``data['errors']`` rather than raised.
    """
    request_env = request_env or {}
    data = self.data
    errors = data['errors'] = []

    # Ignore event meta data for now.
    data.pop('_meta', None)

    # Before validating with a schema, attempt to cast values to their desired types
    # so that the schema doesn't have to take every type variation into account.
    text = six.text_type
    fp_types = six.string_types + six.integer_types + (float, )

    def to_values(v):
        # Lists/tuples are wrapped as {'values': [...]}; anything else is
        # passed through unchanged.
        return {'values': v} if v and isinstance(v, (tuple, list)) else v

    def stringify(f):
        # Floats above 2**53 lose integer precision; drop them (None is
        # filtered out by the fingerprint cast below).
        if isinstance(f, float):
            return text(int(f)) if abs(f) < (1 << 53) else None
        return text(f)

    casts = {
        'environment': lambda v: text(v) if v is not None else v,
        'fingerprint': lambda v: list(x for x in map(stringify, v) if x is not None)
        if isinstance(v, list) and all(isinstance(f, fp_types) for f in v)
        else v,
        'release': lambda v: text(v) if v is not None else v,
        'dist': lambda v: text(v).strip() if v is not None else v,
        'time_spent': lambda v: int(v) if v is not None else v,
        'tags': lambda v: [(text(v_k).replace(' ', '-').strip(), text(v_v).strip())
                           for (v_k, v_v) in dict(v).items()],
        'timestamp': lambda v: process_timestamp(v),
        'platform': lambda v: v if v in VALID_PLATFORMS else 'other',
        'logentry': lambda v: v if isinstance(v, dict) else {'message': v},
        # These can be sent as lists and need to be converted to {'values': [...]}
        'exception': to_values,
        'breadcrumbs': to_values,
        'threads': to_values,
    }
    for c in casts:
        if c in data:
            try:
                data[c] = casts[c](data[c])
            except InvalidTimestamp as it:
                # Timestamp errors carry a specific error type in args[0].
                errors.append({'type': it.args[0], 'name': c, 'value': data[c]})
                del data[c]
            except Exception as e:
                # Any other cast failure drops the field with a generic error.
                errors.append({'type': EventError.INVALID_DATA, 'name': c, 'value': data[c]})
                del data[c]

    # raw 'message' is coerced to the Message interface, as its used for pure index of
    # searchable strings. If both a raw 'message' and a Message interface exist, try and
    # add the former as the 'formatted' attribute of the latter.
    # See GH-3248
    # NOTE(review): '_meta' was popped above, so msg_meta below is always
    # falsy in this version -- presumably intentional while meta handling
    # is disabled; confirm before relying on it.
    msg_str = data.pop('message', None)
    if msg_str:
        msg_if = data.get('logentry')
        msg_meta = data.get('_meta', {}).get('message')
        if not msg_if:
            msg_if = data['logentry'] = {'message': msg_str}
            if msg_meta:
                data.setdefault('_meta', {}).setdefault('logentry', {})['message'] = msg_meta
        if msg_if.get('message') != msg_str:
            if not msg_if.get('formatted'):
                msg_if['formatted'] = msg_str
                if msg_meta:
                    data.setdefault('_meta', {}).setdefault(
                        'logentry', {})['formatted'] = msg_meta

    # Fill in ip addresses marked as {{auto}}
    client_ip = request_env.get('client_ip')
    if client_ip:
        if get_path(data, ['sentry.interfaces.Http', 'env', 'REMOTE_ADDR']) == '{{auto}}':
            data['sentry.interfaces.Http']['env']['REMOTE_ADDR'] = client_ip
        if get_path(data, ['request', 'env', 'REMOTE_ADDR']) == '{{auto}}':
            data['request']['env']['REMOTE_ADDR'] = client_ip
        if get_path(data, ['sentry.interfaces.User', 'ip_address']) == '{{auto}}':
            data['sentry.interfaces.User']['ip_address'] = client_ip
        if get_path(data, ['user', 'ip_address']) == '{{auto}}':
            data['user']['ip_address'] = client_ip

    # Validate main event body and tags against schema.
    # XXX(ja): jsonschema does not like CanonicalKeyDict, so we need to pass
    # in the inner data dict.
    is_valid, event_errors = validate_and_default_interface(data.data, 'event')
    errors.extend(event_errors)
    if 'tags' in data:
        is_valid, tag_errors = validate_and_default_interface(data['tags'], 'tags', name='tags')
        errors.extend(tag_errors)

    # Validate interfaces
    for k in list(iter(data)):
        if k in CLIENT_RESERVED_ATTRS:
            continue
        value = data.pop(k)
        if not value:
            self.logger.debug('Ignored empty interface value: %s', k)
            continue
        try:
            interface = get_interface(k)
        except ValueError:
            self.logger.debug('Ignored unknown attribute: %s', k)
            errors.append({'type': EventError.INVALID_ATTRIBUTE, 'name': k})
            continue
        try:
            # Re-key under the interface's canonical path with canonical JSON.
            inst = interface.to_python(value)
            data[inst.get_path()] = inst.to_json()
        except Exception as e:
            # Validation errors are expected and logged at debug; anything
            # else is a real error.
            log = self.logger.debug if isinstance(
                e, InterfaceValidationError) else self.logger.error
            log('Discarded invalid value for interface: %s (%r)', k, value, exc_info=True)
            errors.append({'type': EventError.INVALID_DATA, 'name': k, 'value': value})

    # Additional data coercion and defaulting
    level = data.get('level') or DEFAULT_LOG_LEVEL
    if isinstance(level, int) or (isinstance(level, six.string_types) and level.isdigit()):
        level = LOG_LEVELS.get(int(level), DEFAULT_LOG_LEVEL)
    data['level'] = LOG_LEVELS_MAP.get(level, LOG_LEVELS_MAP[DEFAULT_LOG_LEVEL])

    # A dist without a release is meaningless.
    if data.get('dist') and not data.get('release'):
        data['dist'] = None

    timestamp = data.get('timestamp')
    if not timestamp:
        timestamp = timezone.now()

    # TODO (alex) can this all be replaced by utcnow?
    # it looks like the only time that this would even be hit is when timestamp
    # is not defined, as the earlier process_timestamp already converts existing
    # timestamps to floats.
    if isinstance(timestamp, datetime):
        # We must convert date to local time so Django doesn't mess it up
        # based on TIME_ZONE
        if settings.TIME_ZONE:
            if not timezone.is_aware(timestamp):
                timestamp = timestamp.replace(tzinfo=timezone.utc)
        elif timezone.is_aware(timestamp):
            timestamp = timestamp.replace(tzinfo=None)
        timestamp = float(timestamp.strftime('%s'))

    data['timestamp'] = timestamp
    data['received'] = float(timezone.now().strftime('%s'))

    data.setdefault('checksum', None)
    data.setdefault('culprit', None)
    data.setdefault('dist', None)
    data.setdefault('environment', None)
    data.setdefault('extra', {})
    data.setdefault('fingerprint', None)
    data.setdefault('logger', DEFAULT_LOGGER_NAME)
    data.setdefault('platform', None)
    data.setdefault('server_name', None)
    data.setdefault('site', None)
    data.setdefault('tags', [])
    data.setdefault('transaction', None)

    # Fix case where legacy apps pass 'environment' as a tag
    # instead of a top level key.
    # TODO (alex) save() just reinserts the environment into the tags
    if not data.get('environment'):
        tagsdict = dict(data['tags'])
        if 'environment' in tagsdict:
            data['environment'] = tagsdict['environment']
            del tagsdict['environment']
            data['tags'] = tagsdict.items()

    # the SDKs currently do not describe event types, and we must infer
    # them from available attributes
    data['type'] = eventtypes.infer(data).key
    data['version'] = self.version

    exception = data.get('sentry.interfaces.Exception')
    stacktrace = data.get('sentry.interfaces.Stacktrace')
    if exception and len(exception['values']) == 1 and stacktrace:
        # A lone top-level stacktrace belongs to the single exception value.
        exception['values'][0]['stacktrace'] = stacktrace
        del data['sentry.interfaces.Stacktrace']

    # Exception mechanism needs SDK information to resolve proper names in
    # exception meta (such as signal names). "SDK Information" really means
    # the operating system version the event was generated on. Some
    # normalization still works without sdk_info, such as mach_exception
    # names (they can only occur on macOS).
    if exception:
        sdk_info = get_sdk_from_event(data)
        for ex in exception['values']:
            if 'mechanism' in ex:
                normalize_mechanism_meta(ex['mechanism'], sdk_info)

    # If there is no User ip_addres, update it either from the Http interface
    # or the client_ip of the request.
    auth = request_env.get('auth')
    is_public = auth and auth.is_public
    add_ip_platforms = ('javascript', 'cocoa', 'objc')

    http_ip = data.get('sentry.interfaces.Http', {}).get('env', {}).get('REMOTE_ADDR')
    if http_ip:
        data.setdefault('sentry.interfaces.User', {}).setdefault('ip_address', http_ip)
    elif client_ip and (is_public or data.get('platform') in add_ip_platforms):
        data.setdefault('sentry.interfaces.User', {}).setdefault('ip_address', client_ip)

    # Trim values
    data['logger'] = trim(data['logger'].strip(), 64)
    trim_dict(data['extra'], max_size=settings.SENTRY_MAX_EXTRA_VARIABLE_SIZE)

    if data['culprit']:
        data['culprit'] = trim(data['culprit'], MAX_CULPRIT_LENGTH)

    if data['transaction']:
        data['transaction'] = trim(data['transaction'], MAX_CULPRIT_LENGTH)

    return data
def normalize(self, request_env=None):
    """Normalize raw event data in place and return it (legacy-key variant).

    Same pipeline as the sibling ``normalize``: coerce loosely-typed client
    fields, fold ``message`` into the Message interface, resolve
    ``{{auto}}`` ip markers, schema-validate, canonicalize interfaces,
    fill defaults, and trim. Accepts both modern ('exception') and legacy
    ('sentry.interfaces.Exception') keys in the cast table. Problems are
    accumulated into ``data['errors']`` rather than raised.
    """
    request_env = request_env or {}
    data = self.data
    errors = data['errors'] = []

    # Before validating with a schema, attempt to cast values to their desired types
    # so that the schema doesn't have to take every type variation into account.
    text = six.text_type
    fp_types = six.string_types + six.integer_types + (float, )

    def to_values(v):
        # Lists/tuples are wrapped as {'values': [...]}.
        return {'values': v} if v and isinstance(v, (tuple, list)) else v

    def convert_fingerprint(values):
        # Stringify floats that cannot be represented exactly as ints
        # (>= 2**53 or fractional), and count them for monitoring.
        rv = values[:]
        bad_float = False
        for idx, item in enumerate(rv):
            if isinstance(item, float) and \
                    (abs(item) >= (1 << 53) or int(item) != item):
                bad_float = True
                rv[idx] = text(item)
        if bad_float:
            metrics.incr(
                'events.bad_float_fingerprint',
                skip_internal=True,
                tags={
                    'project_id': data.get('project'),
                },
            )
        return rv

    casts = {
        'environment': lambda v: text(v) if v is not None else v,
        'fingerprint': lambda v: convert_fingerprint(v)
        if isinstance(v, list) and all(isinstance(f, fp_types) for f in v)
        else v,
        'release': lambda v: text(v) if v is not None else v,
        'dist': lambda v: text(v).strip() if v is not None else v,
        'time_spent': lambda v: int(v) if v is not None else v,
        'tags': lambda v: [(text(v_k).replace(' ', '-').strip(), text(v_v).strip())
                           for (v_k, v_v) in dict(v).items()],
        'timestamp': lambda v: process_timestamp(v),
        'platform': lambda v: v if v in VALID_PLATFORMS else 'other',
        'sentry.interfaces.Message': lambda v: v if isinstance(v, dict) else {
            'message': v
        },
        # These can be sent as lists and need to be converted to {'values': [...]}
        'exception': to_values,
        'sentry.interfaces.Exception': to_values,
        'breadcrumbs': to_values,
        'sentry.interfaces.Breadcrumbs': to_values,
        'threads': to_values,
        'sentry.interfaces.Threads': to_values,
    }
    for c in casts:
        if c in data:
            try:
                data[c] = casts[c](data[c])
            except InvalidTimestamp as it:
                # Timestamp errors carry a specific error type in args[0].
                errors.append({
                    'type': it.args[0],
                    'name': c,
                    'value': data[c]
                })
                del data[c]
            except Exception as e:
                # Any other cast failure drops the field with a generic error.
                errors.append({
                    'type': EventError.INVALID_DATA,
                    'name': c,
                    'value': data[c]
                })
                del data[c]

    # raw 'message' is coerced to the Message interface, as its used for pure index of
    # searchable strings. If both a raw 'message' and a Message interface exist, try and
    # add the former as the 'formatted' attribute of the latter.
    # See GH-3248
    msg_str = data.pop('message', None)
    if msg_str:
        msg_if = data.setdefault('sentry.interfaces.Message', {'message': msg_str})
        if msg_if.get('message') != msg_str:
            msg_if.setdefault('formatted', msg_str)

    # Fill in ip addresses marked as {{auto}}
    client_ip = request_env.get('client_ip')
    if client_ip:
        if get_path(data, ['sentry.interfaces.Http', 'env', 'REMOTE_ADDR']) == '{{auto}}':
            data['sentry.interfaces.Http']['env']['REMOTE_ADDR'] = client_ip
        if get_path(data, ['request', 'env', 'REMOTE_ADDR']) == '{{auto}}':
            data['request']['env']['REMOTE_ADDR'] = client_ip
        if get_path(data, ['sentry.interfaces.User', 'ip_address']) == '{{auto}}':
            data['sentry.interfaces.User']['ip_address'] = client_ip
        if get_path(data, ['user', 'ip_address']) == '{{auto}}':
            data['user']['ip_address'] = client_ip

    # Validate main event body and tags against schema
    is_valid, event_errors = validate_and_default_interface(data, 'event')
    errors.extend(event_errors)
    if 'tags' in data:
        is_valid, tag_errors = validate_and_default_interface(data['tags'], 'tags', name='tags')
        errors.extend(tag_errors)

    # Validate interfaces
    for k in list(iter(data)):
        if k in CLIENT_RESERVED_ATTRS:
            continue
        value = data.pop(k)
        if not value:
            self.logger.debug('Ignored empty interface value: %s', k)
            continue
        try:
            interface = get_interface(k)
        except ValueError:
            self.logger.debug('Ignored unknown attribute: %s', k)
            errors.append({
                'type': EventError.INVALID_ATTRIBUTE,
                'name': k
            })
            continue
        try:
            # Re-key under the interface's canonical path with canonical JSON.
            inst = interface.to_python(value)
            data[inst.get_path()] = inst.to_json()
        except Exception as e:
            # Validation errors are expected and logged at debug; anything
            # else is a real error.
            log = self.logger.debug if isinstance(
                e, InterfaceValidationError) else self.logger.error
            log('Discarded invalid value for interface: %s (%r)', k, value, exc_info=True)
            errors.append({
                'type': EventError.INVALID_DATA,
                'name': k,
                'value': value
            })

    # Additional data coercion and defaulting
    level = data.get('level') or DEFAULT_LOG_LEVEL
    if isinstance(level, int) or (isinstance(level, six.string_types) and level.isdigit()):
        level = LOG_LEVELS.get(int(level), DEFAULT_LOG_LEVEL)
    data['level'] = LOG_LEVELS_MAP.get(level, LOG_LEVELS_MAP[DEFAULT_LOG_LEVEL])

    # A dist without a release is meaningless.
    if data.get('dist') and not data.get('release'):
        data['dist'] = None

    timestamp = data.get('timestamp')
    if not timestamp:
        timestamp = timezone.now()

    # TODO (alex) can this all be replaced by utcnow?
    # it looks like the only time that this would even be hit is when timestamp
    # is not defined, as the earlier process_timestamp already converts existing
    # timestamps to floats.
    if isinstance(timestamp, datetime):
        # We must convert date to local time so Django doesn't mess it up
        # based on TIME_ZONE
        if settings.TIME_ZONE:
            if not timezone.is_aware(timestamp):
                timestamp = timestamp.replace(tzinfo=timezone.utc)
        elif timezone.is_aware(timestamp):
            timestamp = timestamp.replace(tzinfo=None)
        timestamp = float(timestamp.strftime('%s'))

    data['timestamp'] = timestamp
    data['received'] = float(timezone.now().strftime('%s'))

    data.setdefault('checksum', None)
    data.setdefault('culprit', None)
    data.setdefault('dist', None)
    data.setdefault('environment', None)
    data.setdefault('extra', {})
    data.setdefault('fingerprint', None)
    data.setdefault('logger', DEFAULT_LOGGER_NAME)
    data.setdefault('platform', None)
    data.setdefault('server_name', None)
    data.setdefault('site', None)
    data.setdefault('tags', [])
    data.setdefault('transaction', None)

    # Fix case where legacy apps pass 'environment' as a tag
    # instead of a top level key.
    # TODO (alex) save() just reinserts the environment into the tags
    if not data.get('environment'):
        tagsdict = dict(data['tags'])
        if 'environment' in tagsdict:
            data['environment'] = tagsdict['environment']
            del tagsdict['environment']
            data['tags'] = tagsdict.items()

    # the SDKs currently do not describe event types, and we must infer
    # them from available attributes
    data['type'] = eventtypes.infer(data).key
    data['version'] = self.version

    exception = data.get('sentry.interfaces.Exception')
    stacktrace = data.get('sentry.interfaces.Stacktrace')
    if exception and len(exception['values']) == 1 and stacktrace:
        # A lone top-level stacktrace belongs to the single exception value.
        exception['values'][0]['stacktrace'] = stacktrace
        del data['sentry.interfaces.Stacktrace']

    # Exception mechanism needs SDK information to resolve proper names in
    # exception meta (such as signal names). "SDK Information" really means
    # the operating system version the event was generated on. Some
    # normalization still works without sdk_info, such as mach_exception
    # names (they can only occur on macOS).
    if exception:
        sdk_info = get_sdk_from_event(data)
        for ex in exception['values']:
            if 'mechanism' in ex:
                normalize_mechanism_meta(ex['mechanism'], sdk_info)

    # If there is no User ip_addres, update it either from the Http interface
    # or the client_ip of the request.
    auth = request_env.get('auth')
    is_public = auth and auth.is_public
    add_ip_platforms = ('javascript', 'cocoa', 'objc')

    http_ip = data.get('sentry.interfaces.Http', {}).get('env', {}).get('REMOTE_ADDR')
    if http_ip:
        data.setdefault('sentry.interfaces.User', {}).setdefault('ip_address', http_ip)
    elif client_ip and (is_public or data.get('platform') in add_ip_platforms):
        data.setdefault('sentry.interfaces.User', {}).setdefault('ip_address', client_ip)

    # Trim values
    data['logger'] = trim(data['logger'].strip(), 64)
    trim_dict(data['extra'], max_size=settings.SENTRY_MAX_EXTRA_VARIABLE_SIZE)

    if data['culprit']:
        data['culprit'] = trim(data['culprit'], MAX_CULPRIT_LENGTH)

    if data['transaction']:
        data['transaction'] = trim(data['transaction'], MAX_CULPRIT_LENGTH)

    return data
def serialize(self, obj, attrs, user):
    """Serialize a group into an API payload, with snooze/resolution detail.

    ``attrs['ignore_until']`` is assumed to be a GroupSnooze-like object
    (``is_valid``, ``count``, ``window``, ``until``, ``state`` ...) --
    TODO confirm against the attribute collector. The permalink is only
    exposed to active superusers or members of the group's organization.
    """
    status = obj.status
    status_details = {}
    if attrs['ignore_until']:
        snooze = attrs['ignore_until']
        if snooze.is_valid(group=obj):
            # counts return the delta remaining when window is not set
            status_details.update({
                'ignoreCount': (snooze.count - (obj.times_seen - snooze.state['times_seen'])
                                if snooze.count and not snooze.window
                                else snooze.count),
                'ignoreUntil': snooze.until,
                'ignoreUserCount': (snooze.user_count - (attrs['user_count'] - snooze.state['users_seen'])
                                    if snooze.user_count and not snooze.user_window
                                    else snooze.user_count),
                'ignoreUserWindow': snooze.user_window,
                'ignoreWindow': snooze.window,
                'actor': attrs['ignore_actor'],
            })
        else:
            # Snooze no longer valid: treat the group as unresolved.
            status = GroupStatus.UNRESOLVED
    if status == GroupStatus.UNRESOLVED and obj.is_over_resolve_age():
        status = GroupStatus.RESOLVED
        status_details['autoResolved'] = True
    if status == GroupStatus.RESOLVED:
        status_label = 'resolved'
        if attrs['resolution_type'] == 'release':
            res_type, res_version, _ = attrs['resolution']
            if res_type in (GroupResolution.Type.in_next_release, None):
                status_details['inNextRelease'] = True
            elif res_type == GroupResolution.Type.in_release:
                status_details['inRelease'] = res_version
            status_details['actor'] = attrs['resolution_actor']
        elif attrs['resolution_type'] == 'commit':
            status_details['inCommit'] = attrs['resolution']
    elif status == GroupStatus.IGNORED:
        status_label = 'ignored'
    elif status in [
        GroupStatus.PENDING_DELETION, GroupStatus.DELETION_IN_PROGRESS
    ]:
        status_label = 'pending_deletion'
    elif status == GroupStatus.PENDING_MERGE:
        status_label = 'pending_merge'
    else:
        status_label = 'unresolved'

    # If user is not logged in and member of the organization,
    # do not return the permalink which contains private information i.e. org name.
    request = env.request
    is_superuser = (request and is_active_superuser(request) and request.user == user)
    if is_superuser or (user.is_authenticated() and user.get_orgs().filter(
            id=obj.organization.id).exists()):
        permalink = obj.get_absolute_url()
    else:
        permalink = None

    subscription_details = None
    # attrs['subscription'] is either the sentinel `disabled` or an
    # (is_subscribed, subscription) pair.
    if attrs['subscription'] is not disabled:
        is_subscribed, subscription = attrs['subscription']
        if subscription is not None and subscription.is_active:
            subscription_details = {
                'reason': SUBSCRIPTION_REASON_MAP.get(
                    subscription.reason,
                    'unknown',
                ),
            }
    else:
        is_subscribed = False
        subscription_details = {
            'disabled': True,
        }

    share_id = attrs['share_id']
    return {
        'id': six.text_type(obj.id),
        'shareId': share_id,
        'shortId': obj.qualified_short_id,
        'count': six.text_type(attrs['times_seen']),
        'userCount': attrs['user_count'],
        'title': obj.title,
        'culprit': obj.culprit,
        'permalink': permalink,
        'firstSeen': attrs['first_seen'],
        'lastSeen': attrs['last_seen'],
        'logger': obj.logger or None,
        'level': LOG_LEVELS.get(obj.level, 'unknown'),
        'status': status_label,
        'statusDetails': status_details,
        # A group is public exactly when it has a share id.
        'isPublic': share_id is not None,
        'platform': obj.platform,
        'project': {
            'id': six.text_type(obj.project.id),
            'name': obj.project.name,
            'slug': obj.project.slug,
            'platform': obj.project.platform,
        },
        'type': obj.get_event_type(),
        'metadata': obj.get_event_metadata(),
        'numComments': obj.num_comments,
        'assignedTo': serialize(attrs['assigned_to'], user, ActorSerializer()),
        'isBookmarked': attrs['is_bookmarked'],
        'isSubscribed': is_subscribed,
        'subscriptionDetails': subscription_details,
        'hasSeen': attrs['has_seen'],
        'annotations': attrs['annotations'],
    }
def serialize(self, obj, attrs, user):
    """Build the JSON-serializable representation of a Group (issue),
    including this fork's extras (redmine, blame, follower, server_name
    tag).
    """
    status = obj.status
    status_details = {}

    snooze_until = attrs['snooze']
    if snooze_until:
        if status == GroupStatus.MUTED and snooze_until < timezone.now():
            # The snooze window has elapsed; report the group as unresolved.
            status = GroupStatus.UNRESOLVED
        else:
            status_details['snoozeUntil'] = snooze_until
    elif status == GroupStatus.UNRESOLVED and obj.is_over_resolve_age():
        status = GroupStatus.RESOLVED
        status_details['autoResolved'] = True

    if status == GroupStatus.RESOLVED:
        status_label = 'resolved'
        if attrs['pending_resolution']:
            status_details['inNextRelease'] = True
    elif status == GroupStatus.MUTED:
        status_label = 'muted'
    elif status in (GroupStatus.PENDING_DELETION, GroupStatus.DELETION_IN_PROGRESS):
        status_label = 'pending_deletion'
    elif status == GroupStatus.PENDING_MERGE:
        status_label = 'pending_merge'
    else:
        status_label = 'unresolved'

    permalink = absolute_uri(
        reverse('sentry-group', args=[obj.organization.slug, obj.project.slug, obj.id])
    )

    # add by hzwangzhiwei @ 20160406 for show 'server_name' instead of 'USER'
    unique_tags = obj.get_unique_tags('server_name') or [['', 0, '', '']]
    tags_dict = {'server_name': unique_tags[0][0]}

    follower = obj.follower
    return {
        'id': str(obj.id),
        'shareId': obj.get_share_id(),
        'count': str(obj.times_seen),
        'userCount': attrs['user_count'],
        'title': obj.message_short,
        'culprit': obj.culprit,
        'permalink': permalink,
        'firstSeen': obj.first_seen,
        'lastSeen': obj.last_seen,
        'timeSpent': obj.avg_time_spent,
        'logger': obj.logger or None,
        'level': LOG_LEVELS.get(obj.level, 'unknown'),
        'status': status_label,
        'statusDetails': status_details,
        'isPublic': obj.is_public,
        'redmineId': obj.redmine_id,  # add by hzwangzhiwei @20160411 redmine id
        'blameInfo': obj.blame_info,  # add by hzwangzhiwei @20160612 blame information
        'follower': {
            'id': follower and follower.id,
            'name': follower and follower.name,
            'email': follower and follower.email,
        },  # add by hzwangzhiwei @201600824 follower information
        'followerId': obj.follower_id,
        'project': {
            'name': obj.project.name,
            'slug': obj.project.slug,
            'redmine': obj.project.redmine,  # add by hzwangzhiwei @20160411 redmine URL
        },
        'numComments': obj.num_comments,
        'assignedTo': attrs['assigned_to'],
        'isBookmarked': attrs['is_bookmarked'],
        'hasSeen': attrs['has_seen'],
        'annotations': attrs['annotations'],
        'tags': tags_dict,
    }
def serialize(self, obj, attrs, user):
    """Build the JSON-serializable representation of a Group (issue).

    ``attrs`` is the bulk-fetched attribute dict for this group (snooze
    state, resolution info, subscription state, share id, counts, ...) —
    presumably produced by this serializer's ``get_attrs``; not visible
    here.
    """
    status = obj.status
    status_details = {}

    snooze = attrs['ignore_until']
    if snooze:
        if not snooze.is_valid(group=obj):
            # The snooze no longer applies; surface the group as unresolved.
            status = GroupStatus.UNRESOLVED
        else:
            # counts return the delta remaining when window is not set
            ignore_count = snooze.count
            if snooze.count and not snooze.window:
                ignore_count = snooze.count - (obj.times_seen - snooze.state['times_seen'])
            ignore_user_count = snooze.user_count
            if snooze.user_count and not snooze.user_window:
                ignore_user_count = snooze.user_count - (
                    attrs['user_count'] - snooze.state['users_seen']
                )
            status_details.update({
                'ignoreCount': ignore_count,
                'ignoreUntil': snooze.until,
                'ignoreUserCount': ignore_user_count,
                'ignoreUserWindow': snooze.user_window,
                'ignoreWindow': snooze.window,
                'actor': attrs['ignore_actor'],
            })

    if status == GroupStatus.UNRESOLVED and obj.is_over_resolve_age():
        status = GroupStatus.RESOLVED
        status_details['autoResolved'] = True

    if status == GroupStatus.RESOLVED:
        status_label = 'resolved'
        if attrs['resolution_type'] == 'release':
            res_type, res_version, _ = attrs['resolution']
            if res_type in (GroupResolution.Type.in_next_release, None):
                status_details['inNextRelease'] = True
            elif res_type == GroupResolution.Type.in_release:
                status_details['inRelease'] = res_version
            status_details['actor'] = attrs['resolution_actor']
        elif attrs['resolution_type'] == 'commit':
            status_details['inCommit'] = attrs['resolution']
    elif status == GroupStatus.IGNORED:
        status_label = 'ignored'
    elif status in (GroupStatus.PENDING_DELETION, GroupStatus.DELETION_IN_PROGRESS):
        status_label = 'pending_deletion'
    elif status == GroupStatus.PENDING_MERGE:
        status_label = 'pending_merge'
    else:
        status_label = 'unresolved'

    # The permalink embeds private information (the org name), so only
    # reveal it to authenticated members of the organization.
    permalink = None
    if user.is_authenticated() and user.get_orgs().filter(id=obj.organization.id).exists():
        permalink = absolute_uri(
            reverse('sentry-group', args=[obj.organization.slug, obj.project.slug, obj.id])
        )

    if attrs['subscription'] is disabled:
        # Workflow notifications are disabled for this user entirely.
        is_subscribed = False
        subscription_details = {'disabled': True}
    else:
        is_subscribed, subscription = attrs['subscription']
        subscription_details = None
        if subscription is not None and subscription.is_active:
            subscription_details = {
                'reason': SUBSCRIPTION_REASON_MAP.get(subscription.reason, 'unknown'),
            }

    share_id = attrs['share_id']
    return {
        'id': six.text_type(obj.id),
        'shareId': share_id,
        'shortId': obj.qualified_short_id,
        'count': six.text_type(attrs['times_seen']),
        'userCount': attrs['user_count'],
        'title': obj.title,
        'culprit': obj.culprit,
        'permalink': permalink,
        'firstSeen': attrs['first_seen'],
        'lastSeen': attrs['last_seen'],
        'logger': obj.logger or None,
        'level': LOG_LEVELS.get(obj.level, 'unknown'),
        'status': status_label,
        'statusDetails': status_details,
        'isPublic': share_id is not None,
        'project': {
            'id': six.text_type(obj.project.id),
            'name': obj.project.name,
            'slug': obj.project.slug,
        },
        'type': obj.get_event_type(),
        'metadata': obj.get_event_metadata(),
        'numComments': obj.num_comments,
        'assignedTo': serialize(attrs['assigned_to'], user, ActorSerializer()),
        'isBookmarked': attrs['is_bookmarked'],
        'isSubscribed': is_subscribed,
        'subscriptionDetails': subscription_details,
        'hasSeen': attrs['has_seen'],
        'annotations': attrs['annotations'],
    }
def normalize(self):
    """Coerce, validate, and default the raw event payload in ``self.data``.

    Returns the normalized data dict. Validation problems are not raised;
    they are accumulated into ``data['errors']`` and offending values are
    dropped.
    """
    data = self.data
    errors = data.get('errors', [])

    # Before validating with a schema, attempt to cast values to their
    # desired types so that the schema doesn't have to take every type
    # variation into account.
    text = six.text_type
    fp_types = six.string_types + six.integer_types + (float, )

    def to_values(v):
        # Bare lists/tuples get wrapped as {'values': [...]}.
        return {'values': v} if v and isinstance(v, (tuple, list)) else v

    casts = {
        'environment': lambda v: text(v) if v is not None else v,
        'fingerprint': lambda v: list(map(text, v))
        if isinstance(v, list) and all(isinstance(f, fp_types) for f in v)
        else v,
        'release': lambda v: text(v) if v is not None else v,
        'dist': lambda v: text(v).strip() if v is not None else v,
        'time_spent': lambda v: int(v) if v is not None else v,
        'tags': lambda v: [
            (text(v_k).replace(' ', '-').strip(), text(v_v).strip())
            for (v_k, v_v) in dict(v).items()
        ],
        'timestamp': lambda v: process_timestamp(v),
        'platform': lambda v: v if v in VALID_PLATFORMS else 'other',
        # These can be sent as lists and need to be converted to {'values': [...]}
        'exception': to_values,
        'sentry.interfaces.Exception': to_values,
        'breadcrumbs': to_values,
        'sentry.interfaces.Breadcrumbs': to_values,
        'threads': to_values,
        'sentry.interfaces.Threads': to_values,
    }

    for key in casts:
        if key not in data:
            continue
        try:
            data[key] = casts[key](data[key])
        except Exception:
            # Value could not be coerced; record the problem and drop it.
            errors.append({
                'type': EventError.INVALID_DATA,
                'name': key,
                'value': data[key],
            })
            del data[key]

    # raw 'message' is coerced to the Message interface, as its used for
    # pure index of searchable strings. If both a raw 'message' and a
    # Message interface exist, try and add the former as the 'formatted'
    # attribute of the latter.
    # See GH-3248
    msg_str = data.pop('message', None)
    if msg_str:
        msg_if = data.setdefault('sentry.interfaces.Message', {'message': msg_str})
        if msg_if.get('message') != msg_str:
            msg_if.setdefault('formatted', msg_str)

    # Validate main event body and tags against schema
    is_valid, event_errors = validate_and_default_interface(data, 'event')
    errors.extend(event_errors)
    if 'tags' in data:
        is_valid, tag_errors = validate_and_default_interface(data['tags'], 'tags', name='tags')
        errors.extend(tag_errors)

    # Validate interfaces: every non-reserved key is popped, resolved to an
    # interface, and re-inserted under the interface's canonical path.
    for k in list(iter(data)):
        if k in CLIENT_RESERVED_ATTRS:
            continue

        value = data.pop(k)
        if not value:
            self.logger.debug('Ignored empty interface value: %s', k)
            continue

        try:
            interface = get_interface(k)
        except ValueError:
            self.logger.debug('Ignored unknown attribute: %s', k)
            errors.append({
                'type': EventError.INVALID_ATTRIBUTE,
                'name': k,
            })
            continue

        try:
            inst = interface.to_python(value)
            data[inst.get_path()] = inst.to_json()
        except Exception as e:
            # Client-caused validation failures are logged quietly; anything
            # else is unexpected and logged as an error.
            if isinstance(e, InterfaceValidationError):
                log = self.logger.debug
            else:
                log = self.logger.error
            log('Discarded invalid value for interface: %s (%r)', k, value, exc_info=True)
            errors.append({
                'type': EventError.INVALID_DATA,
                'name': k,
                'value': value,
            })

    # Numeric levels (or digit strings) are first mapped to their names,
    # then every level is stored as its numeric LOG_LEVELS_MAP value.
    level = data.get('level') or DEFAULT_LOG_LEVEL
    if isinstance(level, int) or (isinstance(level, six.string_types) and level.isdigit()):
        level = LOG_LEVELS.get(int(level), DEFAULT_LOG_LEVEL)
    data['level'] = LOG_LEVELS_MAP.get(level, LOG_LEVELS_MAP[DEFAULT_LOG_LEVEL])

    # A dist without a release is meaningless; discard it.
    if data.get('dist') and not data.get('release'):
        data['dist'] = None

    timestamp = data.get('timestamp')
    if not timestamp:
        timestamp = timezone.now()

    # TODO (alex) can this all be replaced by utcnow?
    # it looks like the only time that this would even be hit is when
    # timestamp is not defined, as the earlier process_timestamp already
    # converts existing timestamps to floats.
    if isinstance(timestamp, datetime):
        # We must convert date to local time so Django doesn't mess it up
        # based on TIME_ZONE
        if settings.TIME_ZONE:
            if not timezone.is_aware(timestamp):
                timestamp = timestamp.replace(tzinfo=timezone.utc)
        elif timezone.is_aware(timestamp):
            timestamp = timestamp.replace(tzinfo=None)
        timestamp = float(timestamp.strftime('%s'))

    data['timestamp'] = timestamp
    data['received'] = float(timezone.now().strftime('%s'))

    for attr in (
        'culprit', 'transaction', 'server_name', 'site', 'checksum',
        'fingerprint', 'platform', 'dist', 'environment',
    ):
        data.setdefault(attr, None)
    data.setdefault('extra', {})
    data.setdefault('tags', [])

    # Fix case where legacy apps pass 'environment' as a tag
    # instead of a top level key.
    # TODO (alex) save() just reinserts the environment into the tags
    if not data.get('environment'):
        tagsdict = dict(data['tags'])
        if 'environment' in tagsdict:
            data['environment'] = tagsdict.pop('environment')
            data['tags'] = tagsdict.items()

    # the SDKs currently do not describe event types, and we must infer
    # them from available attributes
    data['type'] = eventtypes.infer(data).key
    data['version'] = self.version

    # A lone exception with a separate top-level stacktrace gets the
    # stacktrace folded into it.
    exception = data.get('sentry.interfaces.Exception')
    stacktrace = data.get('sentry.interfaces.Stacktrace')
    if exception and len(exception['values']) == 1 and stacktrace:
        exception['values'][0]['stacktrace'] = stacktrace
        del data['sentry.interfaces.Stacktrace']

    if 'sentry.interfaces.Http' in data:
        try:
            ip_address = validate_ip(
                data['sentry.interfaces.Http'].get('env', {}).get('REMOTE_ADDR'),
                required=False,
            )
        except ValueError:
            pass
        else:
            if ip_address:
                data.setdefault('sentry.interfaces.User', {}).setdefault('ip_address', ip_address)

    # Trim values
    data['logger'] = trim(data.get('logger', DEFAULT_LOGGER_NAME).strip(), 64)

    trim_dict(data['extra'], max_size=settings.SENTRY_MAX_EXTRA_VARIABLE_SIZE)

    if data['culprit']:
        data['culprit'] = trim(data['culprit'], MAX_CULPRIT_LENGTH)

    if data['transaction']:
        data['transaction'] = trim(data['transaction'], MAX_CULPRIT_LENGTH)

    data['errors'] = errors

    return data
def serialize(self, obj, attrs, user):
    """Build the JSON-serializable representation of a Group (issue).

    ``attrs`` is the bulk-fetched attribute dict for this group (snooze
    state, counts, assignment, ...) — presumably produced by this
    serializer's ``get_attrs``; not visible here.
    """
    status = obj.status
    status_details = {}
    if attrs['snooze']:
        if attrs['snooze'] < timezone.now() and status == GroupStatus.MUTED:
            # The snooze window has elapsed; report the group as unresolved.
            status = GroupStatus.UNRESOLVED
        else:
            status_details['snoozeUntil'] = attrs['snooze']
    elif status == GroupStatus.UNRESOLVED and obj.is_over_resolve_age():
        status = GroupStatus.RESOLVED
        status_details['autoResolved'] = True
    if status == GroupStatus.RESOLVED:
        status_label = 'resolved'
        if attrs['pending_resolution']:
            status_details['inNextRelease'] = True
    elif status == GroupStatus.MUTED:
        status_label = 'muted'
    elif status in [GroupStatus.PENDING_DELETION, GroupStatus.DELETION_IN_PROGRESS]:
        status_label = 'pending_deletion'
    elif status == GroupStatus.PENDING_MERGE:
        status_label = 'pending_merge'
    else:
        status_label = 'unresolved'
    permalink = absolute_uri(
        reverse('sentry-group', args=[obj.organization.slug, obj.project.slug, obj.id])
    )

    event_type = obj.data.get('type', 'default')
    metadata = obj.data.get('metadata') or {
        'title': obj.message_short,
    }
    # TODO(dcramer): remove in 8.6+
    if event_type == 'error':
        # BUGFIX: these coercions previously used the Python-2-only
        # ``unicode`` builtin; use ``six.text_type`` like the sibling
        # serializers in this file so the code also runs on Python 3.
        # NOTE(review): when 'metadata' exists, this mutates the dict
        # stored on ``obj.data`` in place — confirm that is intended.
        if 'value' in metadata:
            metadata['value'] = six.text_type(metadata['value'])
        if 'type' in metadata:
            metadata['type'] = six.text_type(metadata['type'])

    return {
        # six.text_type (not str) for consistency with the other
        # serializer variants in this file; equivalent once serialized.
        'id': six.text_type(obj.id),
        'shareId': obj.get_share_id(),
        'shortId': obj.qualified_short_id,
        'count': six.text_type(obj.times_seen),
        'userCount': attrs['user_count'],
        'title': obj.message_short,
        'culprit': obj.culprit,
        'permalink': permalink,
        'firstSeen': obj.first_seen,
        'lastSeen': obj.last_seen,
        'logger': obj.logger or None,
        'level': LOG_LEVELS.get(obj.level, 'unknown'),
        'status': status_label,
        'statusDetails': status_details,
        'isPublic': obj.is_public,
        'project': {
            'name': obj.project.name,
            'slug': obj.project.slug,
        },
        'type': event_type,
        'metadata': metadata,
        'numComments': obj.num_comments,
        'assignedTo': attrs['assigned_to'],
        'isBookmarked': attrs['is_bookmarked'],
        'hasSeen': attrs['has_seen'],
        'annotations': attrs['annotations'],
    }
def serialize(self, obj, attrs, user):
    """Build the JSON-serializable representation of a Group (issue).

    ``attrs`` is the bulk-fetched attribute dict for this group (ignore
    duration, subscription tuple, counts, ...) — presumably produced by
    this serializer's ``get_attrs``; not visible here.
    """
    status = obj.status
    status_details = {}

    ignore_until = attrs['ignore_duration']
    if ignore_until:
        if status == GroupStatus.IGNORED and ignore_until < timezone.now():
            # The ignore window has elapsed; report the group as unresolved.
            status = GroupStatus.UNRESOLVED
        else:
            status_details['ignoreUntil'] = ignore_until
    elif status == GroupStatus.UNRESOLVED and obj.is_over_resolve_age():
        status = GroupStatus.RESOLVED
        status_details['autoResolved'] = True

    if status == GroupStatus.RESOLVED:
        status_label = 'resolved'
        if attrs['pending_resolution']:
            status_details['inNextRelease'] = True
    elif status == GroupStatus.IGNORED:
        status_label = 'ignored'
    elif status in (GroupStatus.PENDING_DELETION, GroupStatus.DELETION_IN_PROGRESS):
        status_label = 'pending_deletion'
    elif status == GroupStatus.PENDING_MERGE:
        status_label = 'pending_merge'
    else:
        status_label = 'unresolved'

    # The permalink embeds private information (the org name), so only
    # reveal it to authenticated members of the organization.
    permalink = None
    if user.is_authenticated() and user.get_orgs().filter(id=obj.organization.id).exists():
        permalink = absolute_uri(
            reverse('sentry-group', args=[obj.organization.slug, obj.project.slug, obj.id])
        )

    is_subscribed, subscription = attrs['subscription']
    subscription_details = None
    if is_subscribed and subscription is not None:
        subscription_details = {
            'reason': SUBSCRIPTION_REASON_MAP.get(subscription.reason, 'unknown'),
        }

    return {
        'id': six.text_type(obj.id),
        'shareId': obj.get_share_id(),
        'shortId': obj.qualified_short_id,
        'count': six.text_type(obj.times_seen),
        'userCount': attrs['user_count'],
        'title': obj.title,
        'culprit': obj.culprit,
        'permalink': permalink,
        'firstSeen': obj.first_seen,
        'lastSeen': obj.last_seen,
        'logger': obj.logger or None,
        'level': LOG_LEVELS.get(obj.level, 'unknown'),
        'status': status_label,
        'statusDetails': status_details,
        'isPublic': obj.is_public,
        'project': {
            'name': obj.project.name,
            'slug': obj.project.slug,
        },
        'type': obj.get_event_type(),
        'metadata': obj.get_event_metadata(),
        'numComments': obj.num_comments,
        'assignedTo': attrs['assigned_to'],
        'isBookmarked': attrs['is_bookmarked'],
        'isSubscribed': is_subscribed,
        'subscriptionDetails': subscription_details,
        'hasSeen': attrs['has_seen'],
        'annotations': attrs['annotations'],
    }