def get(self, request: HttpRequest, organization: Organization, trace_id: str) -> HttpResponse:
    if not self.has_feature(organization, request):
        return Response(status=404)

    try:
        # The trace view isn't useful without global views, so skipping the check here
        params = self.get_snuba_params(request, organization, check_global_views=False)
    except NoProjects:
        return Response(status=404)

    detailed: bool = request.GET.get("detailed", "0") == "1"
    event_id: Optional[str] = request.GET.get("event_id")

    # Only need to validate event_id as trace_id is validated in the URL
    if event_id and not is_event_id(event_id):
        return Response({"detail": INVALID_EVENT_DETAILS.format("Event")}, status=400)

    with self.handle_query_errors():
        transactions, errors = query_trace_data(trace_id, params)
        if len(transactions) == 0:
            return Response(status=404)

    len_transactions = len(transactions)
    sentry_sdk.set_tag("trace_view.transactions", len_transactions)
    sentry_sdk.set_tag(
        "trace_view.transactions.grouped",
        "<10" if len_transactions < 10 else "<100" if len_transactions < 100 else ">100",
    )

    warning_extra: Dict[str, str] = {"trace": trace_id, "organization": organization}

    # Look for the roots
    roots: List[SnubaTransaction] = []
    for item in transactions:
        if is_root(item):
            roots.append(item)
        else:
            break
    if len(roots) > 1:
        sentry_sdk.set_tag("discover.trace-view.warning", "root.extra-found")
        logger.warning(
            "discover.trace-view.root.extra-found",
            {"extra_roots": len(roots), **warning_extra},
        )

    return Response(
        self.serialize(transactions, errors, roots, warning_extra, event_id, detailed)
    )
def is_event_id(self):
    """Return whether the current value is a valid event id

    Empty strings are valid, so that it can be used for has:id queries
    """
    if not isinstance(self.raw_value, str):
        return False
    return is_event_id(self.raw_value) or self.raw_value == ""
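The method above sits on a search value wrapper; the empty-string allowance is what lets ``has:id``-style queries pass validation even though '' is not itself a valid event id. A minimal hypothetical wrapper (the class below is an illustrative stand-in, not the real one) demonstrating the behavior:

class SearchValue:
    # Hypothetical stand-in for the real search value wrapper.
    def __init__(self, raw_value):
        self.raw_value = raw_value

    def is_event_id(self):
        if not isinstance(self.raw_value, str):
            return False
        return is_event_id(self.raw_value) or self.raw_value == ""


assert SearchValue("b802415f7531431caa27f5c0bf923302").is_event_id()
assert SearchValue("").is_event_id()  # empty string allowed for has:id queries
assert not SearchValue("not-an-id").is_event_id()
assert not SearchValue(1234).is_event_id()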
def get(self, request, organization, trace_id):
    if not self.has_feature(organization, request):
        return Response(status=404)

    try:
        # The trace view isn't useful without global views, so skipping the check here
        params = self.get_snuba_params(request, organization, check_global_views=False)
    except NoProjects:
        return Response(status=404)

    detailed = request.GET.get("detailed", "0") == "1"
    event_id = request.GET.get("event_id")

    # Only need to validate event_id as trace_id is validated in the URL
    if event_id and not is_event_id(event_id):
        return Response({"detail": INVALID_EVENT_DETAILS.format("Event")}, status=400)

    with self.handle_query_errors():
        transactions, errors = query_trace_data(trace_id, params)
        if len(transactions) == 0:
            return Response(status=404)

    len_transactions = len(transactions)
    sentry_sdk.set_tag("trace_view.transactions", len_transactions)
    sentry_sdk.set_tag(
        "trace_view.transactions.grouped",
        "<10" if len_transactions < 10 else "<100" if len_transactions < 100 else ">100",
    )

    warning_extra = {"trace": trace_id, "organization": organization}

    root = transactions[0] if is_root(transactions[0]) else None

    # Look for extra roots
    extra_roots = 0
    for item in transactions[1:]:
        if is_root(item):
            extra_roots += 1
        else:
            break
    if extra_roots > 0:
        sentry_sdk.set_tag("discover.trace-view.warning", "root.extra-found")
        logger.warning(
            "discover.trace-view.root.extra-found",
            {"extra_roots": extra_roots, **warning_extra},
        )

    return Response(
        self.serialize(transactions, errors, root, warning_extra, event_id, detailed)
    )
def _get_events_legacy(
    self,
    request,
    group,
    environments,
    query,
    tags,
    start,
    end,
):
    events = Event.objects.filter(group_id=group.id)

    if query:
        q = Q(message__icontains=query)
        if is_event_id(query):
            q |= Q(event_id__exact=query)
        events = events.filter(q)

    if tags:
        event_filter = tagstore.get_group_event_filter(
            group.project_id,
            group.id,
            [env.id for env in environments],
            tags,
            start,
            end,
        )
        if not event_filter:
            return Response([])
        events = events.filter(**event_filter)

    # Filter start/end here in case we didn't filter by tags at all
    if start:
        events = events.filter(datetime__gte=start)
    if end:
        events = events.filter(datetime__lte=end)

    # filter out events which are beyond the retention period
    retention = quotas.get_event_retention(organization=group.project.organization)
    if retention:
        events = events.filter(datetime__gte=timezone.now() - timedelta(days=retention))

    return self.paginate(
        request=request,
        queryset=events,
        order_by='-datetime',
        on_results=lambda x: serialize(x, request.user),
        paginator_cls=DateTimePaginator,
    )
def get(self, request, project, event_id):
    """
    Retrieve an Event for a Project
    ```````````````````````````````

    Return details on an individual event.

    :pparam string organization_slug: the slug of the organization the
                                      event belongs to.
    :pparam string project_slug: the slug of the project the event
                                 belongs to.
    :pparam string event_id: the id of the event to retrieve (either the
                             numeric primary-key or the hexadecimal id as
                             reported by the raven client)
    :auth: required
    """
    event = None
    # If it's a numeric string, check if it's an event Primary Key first
    if event_id.isdigit():
        try:
            event = Event.objects.get(
                id=event_id,
                project_id=project.id,
            )
        except Event.DoesNotExist:
            pass
    # If it was not found as a PK, and it's a possible event_id, search by that instead.
    if event is None and is_event_id(event_id):
        try:
            event = Event.objects.get(
                event_id=event_id,
                project_id=project.id,
            )
        except Event.DoesNotExist:
            pass

    if event is None:
        return Response({'detail': 'Event not found'}, status=404)

    Event.objects.bind_nodes([event], 'data')

    data = serialize(event, request.user, DetailedEventSerializer())

    next_event = event.next_event
    prev_event = event.prev_event
    # TODO this is inconsistent with the event_details API which uses the
    # `id` instead of the `event_id`
    data['nextEventID'] = next_event and six.text_type(next_event.event_id)
    data['previousEventID'] = prev_event and six.text_type(prev_event.event_id)

    return Response(data)
def get(self, request, organization):
    # Check for a direct hit on event ID
    query = request.GET.get('query', '').strip()

    if is_event_id(query):
        try:
            snuba_args = get_snuba_query_args(
                query=u'id:{}'.format(query),
                params=self.get_filter_params(request, organization))

            results = raw_query(
                selected_columns=SnubaEvent.selected_columns,
                referrer='api.organization-events',
                **snuba_args
            )['data']

            if len(results) == 1:
                response = Response(
                    serialize([SnubaEvent(row) for row in results], request.user)
                )
                response['X-Sentry-Direct-Hit'] = '1'
                return response
        except (OrganizationEventsError, NoProjects):
            pass

    try:
        snuba_args = self.get_snuba_query_args(request, organization)
    except OrganizationEventsError as exc:
        return Response({'detail': exc.message}, status=400)
    except NoProjects:
        # return empty result if org doesn't have projects
        # or user doesn't have access to projects in org
        data_fn = lambda *args, **kwargs: []
    else:
        data_fn = partial(
            # extract 'data' from raw_query result
            lambda *args, **kwargs: raw_query(*args, **kwargs)['data'],
            selected_columns=SnubaEvent.selected_columns,
            orderby='-timestamp',
            referrer='api.organization-events',
            **snuba_args
        )

    return self.paginate(
        request=request,
        on_results=lambda results: serialize(
            [SnubaEvent(row) for row in results], request.user),
        paginator=GenericOffsetPaginator(data_fn=data_fn),
    )
def _get_events_snuba(self, request, group, environments, query, tags, start, end):
    conditions = []
    if query:
        msg_substr = ['positionCaseInsensitive', ['message', "'%s'" % (query,)]]
        message_condition = [msg_substr, '!=', 0]
        if is_event_id(query):
            or_condition = [message_condition, ['event_id', '=', query]]
            conditions.append(or_condition)
        else:
            conditions.append(message_condition)

    if tags:
        for tag_name, tag_val in tags.items():
            operator = 'IN' if isinstance(tag_val, list) else '='
            conditions.append([u'tags[{}]'.format(tag_name), operator, tag_val])

    default_end = timezone.now()
    default_start = default_end - timedelta(days=90)
    data_fn = partial(
        # extract 'data' from raw_query result
        lambda *args, **kwargs: raw_query(*args, **kwargs)['data'],
        start=max(start, default_start) if start else default_start,
        end=min(end, default_end) if end else default_end,
        conditions=conditions,
        filter_keys={'project_id': [group.project_id], 'issue': [group.id]},
        selected_columns=SnubaEvent.selected_columns,
        orderby='-timestamp',
        referrer='api.group-events',
    )

    serializer = SimpleEventSerializer()
    return self.paginate(
        request=request,
        on_results=lambda results: serialize(
            [SnubaEvent(row) for row in results], request.user, serializer),
        paginator=GenericOffsetPaginator(data_fn=data_fn),
    )
def get(self, request: Request, organization, event_id) -> Response:
    """
    Resolve an Event ID
    ```````````````````

    This resolves an event ID to the project slug and internal issue ID
    and internal event ID.

    :pparam string organization_slug: the slug of the organization the
                                      event ID should be looked up in.
    :param string event_id: the event ID to look up. validated by a
                            regex in the URL.
    :auth: required
    """
    if event_id and not is_event_id(event_id):
        return Response({"detail": INVALID_ID_DETAILS.format("Event ID")}, status=400)

    project_slugs_by_id = dict(
        Project.objects.filter(organization=organization).values_list("id", "slug")
    )

    try:
        snuba_filter = eventstore.Filter(
            conditions=[["event.type", "!=", "transaction"]],
            project_ids=list(project_slugs_by_id.keys()),
            event_ids=[event_id],
        )
        event = eventstore.get_events(filter=snuba_filter, limit=1)[0]
    except IndexError:
        raise ResourceDoesNotExist()
    else:
        return Response(
            {
                "organizationSlug": organization.slug,
                "projectSlug": project_slugs_by_id[event.project_id],
                "groupId": str(event.group_id),
                "eventId": str(event.event_id),
                "event": serialize(event, request.user),
            }
        )
def _get_events_snuba(self, request, group, environment, query, tags):
    conditions = []
    if query:
        msg_substr = ['positionCaseInsensitive', ['message', "'%s'" % (query,)]]
        message_condition = [msg_substr, '!=', 0]
        if is_event_id(query):
            or_condition = [message_condition, ['event_id', '=', query]]
            conditions.append(or_condition)
        else:
            conditions.append(message_condition)

    if tags:
        conditions.extend([[u'tags[{}]'.format(k), '=', v] for (k, v) in tags.items()])

    now = timezone.now()
    data_fn = partial(
        # extract 'data' from raw_query result
        lambda *args, **kwargs: raw_query(*args, **kwargs)['data'],
        start=now - timedelta(days=90),
        end=now,
        conditions=conditions,
        filter_keys={'project_id': [group.project_id], 'issue': [group.id]},
        selected_columns=SnubaEvent.selected_columns + ['tags.key', 'tags.value'],
        orderby='-timestamp',
        referrer='api.group-events',
    )

    return self.paginate(
        request=request,
        on_results=lambda results: serialize(
            [SnubaEvent(row) for row in results], request.user),
        paginator=GenericOffsetPaginator(data_fn=data_fn),
    )
def from_event_id(self, id_or_event_id, project_id):
    """
    Get an Event by either its id primary key or its hex event_id.

    Will automatically try to infer the type of id, and grab the correct
    event. If the provided id is a hex event_id, the project_id must also
    be provided to disambiguate it.

    Returns None if the event cannot be found under either scheme.
    """
    # TODO (alexh) instrument this to report any times we are still trying
    # to get events by id.
    # TODO (alexh) deprecate lookup by id so we can move to snuba.

    event = None

    if id_or_event_id.isdigit() and int(id_or_event_id) <= BoundedBigIntegerField.MAX_VALUE:
        # If it's a numeric string, check if it's an event Primary Key first
        try:
            if project_id is None:
                event = self.get(
                    id=id_or_event_id,
                )
            else:
                event = self.get(
                    id=id_or_event_id,
                    project_id=project_id,
                )
        except ObjectDoesNotExist:
            pass
    # If it was not found as a PK, and it's a possible event_id, search by that instead.
    if project_id is not None and event is None and is_event_id(id_or_event_id):
        try:
            event = self.get(
                event_id=id_or_event_id,
                project_id=project_id,
            )
        except ObjectDoesNotExist:
            pass

    return event
def get_direct_hit_response(request, query, snuba_params, referrer):
    """
    Checks whether a query is a direct hit for an event, and if so returns
    a response. Otherwise returns None
    """
    if is_event_id(query):
        snuba_args = get_snuba_query_args(
            query=u'id:{}'.format(query),
            params=snuba_params)

        results = raw_query(
            selected_columns=SnubaEvent.selected_columns,
            referrer=referrer,
            **snuba_args
        )['data']

        if len(results) == 1:
            response = Response(
                serialize([SnubaEvent(row) for row in results], request.user)
            )
            response['X-Sentry-Direct-Hit'] = '1'
            return response
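A typical caller tries this direct-hit helper first and falls back to a full search only when it returns None, which is the same pattern the organization events endpoint above inlines. A minimal sketch under assumed names (get_filter_params and _full_search are illustrative, not from the source):

def get(self, request, organization):
    query = request.GET.get('query', '').strip()
    snuba_params = self.get_filter_params(request, organization)  # assumed helper

    # Direct hit: a single event matched the ID; the helper has already
    # set the X-Sentry-Direct-Hit header on the response it returns.
    direct_hit = get_direct_hit_response(
        request, query, snuba_params, referrer='api.organization-events')
    if direct_hit is not None:
        return direct_hit

    # Otherwise run the normal paginated search.
    return self._full_search(request, organization, query)  # hypothetical fallback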
def from_event_id(self, id_or_event_id, project_id):
    """
    Get a SnubaEvent by either its id primary key or its hex event_id.

    Returns None if the event cannot be found under either scheme.

    Log any attempt to fetch a SnubaEvent by primary key and eventually
    remove.
    """
    from sentry.models import SnubaEvent, Event

    if not is_event_id(id_or_event_id):
        logger.warning('Attempt to fetch SnubaEvent by primary key',
                       exc_info=True, extra={'stack': True})
        event = Event.objects.from_event_id(id_or_event_id, project_id)
        if not event:
            return None
        id_or_event_id = event.event_id

    return SnubaEvent.get_event(project_id, id_or_event_id)
def test_is_event_id():
    assert is_event_id('b802415f7531431caa27f5c0bf923302')
    assert is_event_id('B802415F7531431CAA27F5C0BF923302')
    assert is_event_id('b802415f-7531-431c-aa27-f5c0bf923302')
    assert is_event_id('B802415F-7531-431C-AA27-F5C0BF923302')
    assert is_event_id(b'b802415f7531431caa27f5c0bf923302')

    assert not is_event_id('')
    assert not is_event_id('b802415f7531431caa')
    assert not is_event_id('XXXX415f7531431caa27f5c0bf92XXXX')
    assert not is_event_id(4711)
    assert not is_event_id(False)
    assert not is_event_id(None)
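For reference, here is a sketch of an is_event_id implementation that satisfies every assertion in the test above. This is a hypothetical reconstruction, not the actual validator source: it accepts 32 hex digits, bare or in dashed 8-4-4-4-12 UUID form, decodes bytes input, and rejects any non-string value.

import re

# Hypothetical pattern: 32 hex digits, bare or in dashed UUID form (8-4-4-4-12).
_EVENT_ID_RE = re.compile(
    r'^[0-9a-fA-F]{32}$'
    r'|^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$'
)


def is_event_id(value):
    """Sketch: return True if ``value`` looks like a hex event id."""
    # bytes are tolerated by decoding them first, per the test above
    if isinstance(value, bytes):
        try:
            value = value.decode('ascii')
        except UnicodeDecodeError:
            return False
    # ints, bools, and None are never event ids
    if not isinstance(value, str):
        return False
    return bool(_EVENT_ID_RE.match(value))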
def validate_data(self, project, data):
    # TODO(dcramer): move project out of the data packet
    data['project'] = project.id

    data['errors'] = []

    if data.get('culprit'):
        if not isinstance(data['culprit'], six.string_types):
            raise APIForbidden('Invalid value for culprit')

    if not data.get('event_id'):
        data['event_id'] = uuid.uuid4().hex
    elif not isinstance(data['event_id'], six.string_types):
        raise APIForbidden('Invalid value for event_id')

    if len(data['event_id']) > 32:
        self.log.debug(
            'Discarded value for event_id due to length (%d chars)',
            len(data['event_id']))
        data['errors'].append({
            'type': EventError.VALUE_TOO_LONG,
            'name': 'event_id',
            'value': data['event_id'],
        })
        data['event_id'] = uuid.uuid4().hex
    elif not is_event_id(data['event_id']):
        self.log.debug('Discarded invalid value for event_id: %r',
                       data['event_id'], exc_info=True)
        data['errors'].append({
            'type': EventError.INVALID_DATA,
            'name': 'event_id',
            'value': data['event_id'],
        })
        data['event_id'] = uuid.uuid4().hex

    if 'timestamp' in data:
        try:
            self._process_data_timestamp(data)
        except InvalidTimestamp as e:
            self.log.debug('Discarded invalid value for timestamp: %r',
                           data['timestamp'], exc_info=True)
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'timestamp',
                'value': data['timestamp'],
            })
            del data['timestamp']

    if 'fingerprint' in data:
        try:
            self._process_fingerprint(data)
        except InvalidFingerprint as e:
            self.log.debug('Discarded invalid value for fingerprint: %r',
                           data['fingerprint'], exc_info=True)
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'fingerprint',
                'value': data['fingerprint'],
            })
            del data['fingerprint']

    if 'platform' not in data or data['platform'] not in VALID_PLATFORMS:
        data['platform'] = 'other'

    if data.get('modules') and type(data['modules']) != dict:
        self.log.debug('Discarded invalid type for modules: %s',
                       type(data['modules']))
        data['errors'].append({
            'type': EventError.INVALID_DATA,
            'name': 'modules',
            'value': data['modules'],
        })
        del data['modules']

    if data.get('extra') is not None and type(data['extra']) != dict:
        self.log.debug('Discarded invalid type for extra: %s',
                       type(data['extra']))
        data['errors'].append({
            'type': EventError.INVALID_DATA,
            'name': 'extra',
            'value': data['extra'],
        })
        del data['extra']

    if data.get('tags') is not None:
        if type(data['tags']) == dict:
            data['tags'] = list(data['tags'].items())
        elif not isinstance(data['tags'], (list, tuple)):
            self.log.debug('Discarded invalid type for tags: %s',
                           type(data['tags']))
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'tags',
                'value': data['tags'],
            })
            del data['tags']

    if data.get('tags'):
        # remove any values which are over 32 characters
        tags = []
        for pair in data['tags']:
            try:
                k, v = pair
            except ValueError:
                self.log.debug('Discarded invalid tag value: %r', pair)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': pair,
                })
                continue

            if not isinstance(k, six.string_types):
                try:
                    k = six.text_type(k)
                except Exception:
                    self.log.debug('Discarded invalid tag key: %r', type(k))
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

            if not isinstance(v, six.string_types):
                try:
                    v = six.text_type(v)
                except Exception:
                    self.log.debug('Discarded invalid tag value: %s=%r',
                                   k, type(v))
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

            if len(k) > MAX_TAG_KEY_LENGTH or len(v) > MAX_TAG_VALUE_LENGTH:
                self.log.debug('Discarded invalid tag: %s=%s', k, v)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': pair,
                })
                continue

            # support tags with spaces by converting them
            k = k.replace(' ', '-')

            if tagstore.is_reserved_key(k):
                self.log.debug('Discarding reserved tag key: %s', k)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': pair,
                })
                continue

            if not tagstore.is_valid_key(k):
                self.log.debug('Discarded invalid tag key: %s', k)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': pair,
                })
                continue

            if not tagstore.is_valid_value(v):
                self.log.debug('Discard invalid tag value: %s', v)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': pair,
                })
                continue

            tags.append((k, v))
        data['tags'] = tags

    for k in list(iter(data)):
        if k in CLIENT_RESERVED_ATTRS:
            continue

        value = data.pop(k)

        if not value:
            self.log.debug('Ignored empty interface value: %s', k)
            continue

        try:
            interface = get_interface(k)
        except ValueError:
            self.log.debug('Ignored unknown attribute: %s', k)
            data['errors'].append({
                'type': EventError.INVALID_ATTRIBUTE,
                'name': k,
            })
            continue

        if type(value) != dict:
            # HACK(dcramer): the exception/breadcrumbs interface supports a
            # list as the value. We should change this in a new protocol
            # version.
            if type(value) in (list, tuple):
                value = {'values': value}
            else:
                self.log.debug('Invalid parameter for value: %s (%r)',
                               k, type(value))
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': k,
                    'value': value,
                })
                continue

        try:
            inst = interface.to_python(value)
            data[inst.get_path()] = inst.to_json()
        except Exception as e:
            if isinstance(e, InterfaceValidationError):
                log = self.log.debug
            else:
                log = self.log.error
            log('Discarded invalid value for interface: %s (%r)',
                k, value, exc_info=True)
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': k,
                'value': value,
            })

    # TODO(dcramer): ideally this logic would happen in normalize, but today
    # we don't do "validation" there (create errors)

    # message is coerced to an interface, as it's used for a pure
    # index of searchable strings
    # See GH-3248
    message = data.pop('message', None)
    if message:
        if 'sentry.interfaces.Message' not in data:
            value = {
                'message': message,
            }
        elif not data['sentry.interfaces.Message'].get('formatted'):
            value = data['sentry.interfaces.Message']
            value['formatted'] = message
        else:
            value = None

        if value is not None:
            k = 'sentry.interfaces.Message'
            interface = get_interface(k)
            try:
                inst = interface.to_python(value)
                data[inst.get_path()] = inst.to_json()
            except Exception as e:
                if isinstance(e, InterfaceValidationError):
                    log = self.log.debug
                else:
                    log = self.log.error
                log('Discarded invalid value for interface: %s (%r)',
                    k, value, exc_info=True)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': k,
                    'value': value,
                })

    level = data.get('level') or DEFAULT_LOG_LEVEL
    if isinstance(level, six.string_types) and not level.isdigit():
        # assume it's something like 'warning'
        try:
            data['level'] = LOG_LEVELS_MAP[level]
        except KeyError as e:
            self.log.debug('Discarded invalid logger value: %s', level)
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'level',
                'value': level,
            })
            data['level'] = LOG_LEVELS_MAP.get(DEFAULT_LOG_LEVEL,
                                               DEFAULT_LOG_LEVEL)

    if data.get('release'):
        data['release'] = six.text_type(data['release'])
        if len(data['release']) > 64:
            data['errors'].append({
                'type': EventError.VALUE_TOO_LONG,
                'name': 'release',
                'value': data['release'],
            })
            del data['release']

    if data.get('dist'):
        data['dist'] = six.text_type(data['dist']).strip()
        if not data.get('release'):
            data['dist'] = None
        elif len(data['dist']) > 64:
            data['errors'].append({
                'type': EventError.VALUE_TOO_LONG,
                'name': 'dist',
                'value': data['dist'],
            })
            del data['dist']
        elif _dist_re.match(data['dist']) is None:
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'dist',
                'value': data['dist'],
            })
            del data['dist']

    if data.get('environment'):
        data['environment'] = six.text_type(data['environment'])
        if len(data['environment']) > 64:
            data['errors'].append({
                'type': EventError.VALUE_TOO_LONG,
                'name': 'environment',
                'value': data['environment'],
            })
            del data['environment']

    if data.get('time_spent'):
        try:
            data['time_spent'] = int(data['time_spent'])
        except (ValueError, TypeError):
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'time_spent',
                'value': data['time_spent'],
            })
            del data['time_spent']
        else:
            if data['time_spent'] > BoundedIntegerField.MAX_VALUE:
                data['errors'].append({
                    'type': EventError.VALUE_TOO_LONG,
                    'name': 'time_spent',
                    'value': data['time_spent'],
                })
                del data['time_spent']

    return data
def dispatch(self, request):
    try:
        event_id = request.GET['eventId']
    except KeyError:
        return self._smart_response(
            request, {'eventId': 'Missing or invalid parameter.'}, status=400)

    if event_id and not is_event_id(event_id):
        return self._smart_response(
            request, {'eventId': 'Missing or invalid parameter.'}, status=400)

    # XXX(dcramer): enforce case insensitivity by coercing this to a lowercase string
    event_id = event_id.lower()

    key = self._get_project_key(request)
    if not key:
        return self._smart_response(
            request, {'dsn': 'Missing or invalid parameter.'}, status=404)

    origin = self._get_origin(request)
    if not is_valid_origin(origin, key.project):
        return self._smart_response(request, status=403)

    if request.method == 'OPTIONS':
        return self._smart_response(request)

    # customization options
    options = DEFAULT_OPTIONS.copy()
    for name in six.iterkeys(options):
        if name in request.GET:
            options[name] = six.text_type(request.GET[name])

    # TODO(dcramer): since we can't use a csrf cookie we should at the very
    # least sign the request / add some kind of nonce
    initial = {
        'name': request.GET.get('name'),
        'email': request.GET.get('email'),
    }

    form = UserReportForm(
        request.POST if request.method == 'POST' else None, initial=initial)
    if form.is_valid():
        # TODO(dcramer): move this to post to the internal API
        report = form.save(commit=False)
        report.project = key.project
        report.event_id = event_id

        try:
            event = Event.objects.filter(project_id=report.project.id,
                                         event_id=report.event_id)[0]
        except IndexError:
            try:
                report.group = Group.objects.from_event_id(
                    report.project, report.event_id)
            except Group.DoesNotExist:
                pass
        else:
            Event.objects.bind_nodes([event])
            report.environment = event.get_environment()
            report.group = event.group

        try:
            with transaction.atomic():
                report.save()
        except IntegrityError:
            # There was a duplicate, so just overwrite the existing
            # row with the new one. The only way this ever happens is
            # if someone is messing around with the API, or doing
            # something wrong with the SDK, but this behavior is
            # more reasonable than just hard erroring and is more
            # expected.
            UserReport.objects.filter(
                project=report.project,
                event_id=report.event_id,
            ).update(
                name=report.name,
                email=report.email,
                comments=report.comments,
                date_added=timezone.now(),
            )
        else:
            if report.group:
                report.notify()

        user_feedback_received.send(project=report.project,
                                    group=report.group, sender=self)

        return self._smart_response(request)
    elif request.method == 'POST':
        return self._smart_response(request, {
            "errors": dict(form.errors),
        }, status=400)

    show_branding = ProjectOption.objects.get_value(
        project=key.project, key='feedback:branding', default='1') == '1'

    template = render_to_string(
        'sentry/error-page-embed.html', {
            'form': form,
            'show_branding': show_branding,
            'title': options['title'],
            'subtitle': options['subtitle'],
            'subtitle2': options['subtitle2'],
            'name_label': options['labelName'],
            'email_label': options['labelEmail'],
            'comments_label': options['labelComments'],
            'submit_label': options['labelSubmit'],
            'close_label': options['labelClose'],
        })

    context = {
        'endpoint': mark_safe('*/' + json.dumps(request.build_absolute_uri()) + ';/*'),
        'template': mark_safe('*/' + json.dumps(template) + ';/*'),
        'strings': json.dumps_htmlsafe({
            'generic_error': six.text_type(options['errorGeneric']),
            'form_error': six.text_type(options['errorFormEntry']),
            'sent_message': six.text_type(options['successMessage']),
        }),
    }

    return render_to_response('sentry/error-page-embed.js', context, request,
                              content_type='text/javascript')
def get(self, request, project):
    """
    List a Project's Issues
    ```````````````````````

    Return a list of issues (groups) bound to a project.  All parameters
    are supplied as query string parameters.

    A default query of ``is:unresolved`` is applied. To return results
    with other statuses send a new query value (i.e. ``?query=`` for all
    results).

    The ``statsPeriod`` parameter can be used to select the timeline
    stats which should be present. Possible values are: '' (disable),
    '24h', '14d'

    :qparam string statsPeriod: an optional stat period (can be one of
                                ``"24h"``, ``"14d"``, and ``""``).
    :qparam bool shortIdLookup: if this is set to true then short IDs are
                                looked up by this function as well.  This
                                can cause the return value of the function
                                to return an event issue of a different
                                project which is why this is an opt-in.
                                Set to `1` to enable.
    :qparam querystring query: an optional Sentry structured search
                               query.  If not provided an implied
                               ``"is:unresolved"`` is assumed.
    :pparam string organization_slug: the slug of the organization the
                                      issues belong to.
    :pparam string project_slug: the slug of the project the issues
                                 belong to.
    :auth: required
    """
    stats_period = request.GET.get('statsPeriod')
    if stats_period not in (None, '', '24h', '14d'):
        return Response({"detail": ERR_INVALID_STATS_PERIOD}, status=400)
    elif stats_period is None:
        # default
        stats_period = '24h'
    elif stats_period == '':
        # disable stats
        stats_period = None

    serializer = functools.partial(
        StreamGroupSerializer,
        environment_func=self._get_environment_func(request, project.organization_id),
        stats_period=stats_period,
    )

    query = request.GET.get('query', '').strip()
    if query:
        matching_group = None
        matching_event = None
        if is_event_id(query):
            # check to see if we've got an event ID
            try:
                matching_group = Group.objects.from_event_id(project, query)
            except Group.DoesNotExist:
                pass
            else:
                matching_event = Event.objects.from_event_id(query, project.id)
                if matching_event is not None:
                    Event.objects.bind_nodes([matching_event], 'data')
        elif matching_group is None:
            matching_group = get_by_short_id(
                project.organization_id,
                request.GET.get('shortIdLookup'),
                query,
            )
            if matching_group is not None and matching_group.project_id != project.id:
                matching_group = None

        if matching_group is not None:
            matching_event_environment = None

            try:
                matching_event_environment = \
                    matching_event.get_environment().name if matching_event else None
            except Environment.DoesNotExist:
                pass

            response = Response(
                serialize(
                    [matching_group], request.user, serializer(
                        matching_event_id=getattr(matching_event, 'id', None),
                        matching_event_environment=matching_event_environment,
                    )
                )
            )
            response['X-Sentry-Direct-Hit'] = '1'
            return response

    try:
        cursor_result, query_kwargs = self._search(request, project, {'count_hits': True})
    except ValidationError as exc:
        return Response({'detail': six.text_type(exc)}, status=400)

    results = list(cursor_result)

    context = serialize(results, request.user, serializer())

    # HACK: remove auto resolved entries
    if query_kwargs.get('status') == GroupStatus.UNRESOLVED:
        context = [r for r in context if r['status'] == 'unresolved']

    response = Response(context)

    self.add_cursor_headers(request, response, cursor_result)

    if results and query not in DEFAULT_SAVED_SEARCH_QUERIES:
        advanced_search.send(project=project, sender=request.user)
        analytics.record('project_issue.searched',
                         user_id=request.user.id,
                         organization_id=project.organization_id,
                         project_id=project.id,
                         query=query)

    return response
def get(self, request, organization):
    """
    List an Organization's Issues
    `````````````````````````````

    Return a list of issues (groups) bound to an organization.  All
    parameters are supplied as query string parameters.

    A default query of ``is:unresolved`` is applied. To return results
    with other statuses send a new query value (i.e. ``?query=`` for all
    results).

    The ``groupStatsPeriod`` parameter can be used to select the timeline
    stats which should be present. Possible values are: '' (disable),
    '24h', '14d'

    The ``statsPeriod`` parameter can be used to select a date window
    starting from now. Ex. ``14d``.

    The ``start`` and ``end`` parameters can be used to select an
    absolute date period to fetch issues from.

    :qparam string statsPeriod: an optional stat period (can be one of
                                ``"24h"``, ``"14d"``, and ``""``).
    :qparam string groupStatsPeriod: an optional stat period (can be one
                                     of ``"24h"``, ``"14d"``, and ``""``).
    :qparam string start: Beginning date. You must also provide ``end``.
    :qparam string end: End date. You must also provide ``start``.
    :qparam bool shortIdLookup: if this is set to true then short IDs are
                                looked up by this function as well.  This
                                can cause the return value of the function
                                to return an event issue of a different
                                project which is why this is an opt-in.
                                Set to `1` to enable.
    :qparam querystring query: an optional Sentry structured search
                               query.  If not provided an implied
                               ``"is:unresolved"`` is assumed.
    :pparam string organization_slug: the slug of the organization the
                                      issues belong to.
    :auth: required
    """
    stats_period = request.GET.get('groupStatsPeriod')
    if stats_period not in (None, '', '24h', '14d'):
        return Response({"detail": ERR_INVALID_STATS_PERIOD}, status=400)
    elif stats_period is None:
        # default
        stats_period = '24h'
    elif stats_period == '':
        # disable stats
        stats_period = None

    environments = self.get_environments(request, organization)

    serializer = functools.partial(
        StreamGroupSerializerSnuba,
        environment_ids=[env.id for env in environments],
        stats_period=stats_period,
    )

    projects = self.get_projects(request, organization)
    project_ids = [p.id for p in projects]

    if not projects:
        return Response([])

    if len(projects) > 1 and not features.has(
            'organizations:global-views', organization, actor=request.user):
        return Response({
            'detail': 'You do not have the multi project stream feature enabled'
        }, status=400)

    # we ignore date range for both short id and event ids
    query = request.GET.get('query', '').strip()
    if query:
        # check to see if we've got an event ID
        if is_event_id(query):
            groups = list(
                Group.objects.filter_by_event_id(project_ids, query)
            )

            if len(groups) == 1:
                response = Response(
                    serialize(groups, request.user,
                              serializer(matching_event_id=query)))
                response['X-Sentry-Direct-Hit'] = '1'
                return response

            if groups:
                return Response(serialize(groups, request.user, serializer()))

        group = get_by_short_id(organization.id,
                                request.GET.get('shortIdLookup'), query)
        if group is not None:
            # check all projects user has access to
            if request.access.has_project_access(group.project):
                response = Response(
                    serialize([group], request.user, serializer()))
                response['X-Sentry-Direct-Hit'] = '1'
                return response

    try:
        start, end = get_date_range_from_params(request.GET)
    except InvalidParams as exc:
        return Response({'detail': exc.message}, status=400)

    try:
        cursor_result, query_kwargs = self._search(
            request, organization, projects, environments, {
                'count_hits': True,
                'date_to': end,
                'date_from': start,
            })
    except ValidationError as exc:
        return Response({'detail': six.text_type(exc)}, status=400)

    results = list(cursor_result)

    context = serialize(results, request.user, serializer())

    # HACK: remove auto resolved entries
    if query_kwargs.get('status') == GroupStatus.UNRESOLVED:
        context = [r for r in context if r['status'] == 'unresolved']

    response = Response(context)

    self.add_cursor_headers(request, response, cursor_result)

    # TODO(jess): add metrics that are similar to project endpoint here

    return response
def get(self, request, organization, trace_id):
    if not self.has_feature(organization, request):
        return Response(status=404)

    try:
        # The trace view isn't useful without global views, so skipping the check here
        params = self.get_snuba_params(request, organization, check_global_views=False)
    except NoProjects:
        return Response(status=404)

    detailed = request.GET.get("detailed", "0") == "1"
    event_id = request.GET.get("event_id")

    # Only need to validate event_id as trace_id is validated in the URL
    if event_id and not is_event_id(event_id):
        return Response({"detail": INVALID_EVENT_DETAILS.format("Event")}, status=400)

    # selected_columns is a set list, since we only want to include the minimum to render the trace
    selected_columns = [
        "id",
        "timestamp",
        "transaction.duration",
        "transaction.op",
        "transaction",
        # project gets the slug, and project.id gets added automatically
        "project",
        "trace.span",
        "trace.parent_span",
        'to_other(trace.parent_span, "", 0, 1) AS root',
    ]
    # but if we're getting the detailed view load some extra columns
    if detailed:
        # TODO(wmak): Move op and timestamp here once we pass detailed for trace summary
        selected_columns += [
            "transaction.status",
        ]

    with self.handle_query_errors():
        result = discover.query(
            selected_columns=selected_columns,
            # We want to guarantee at least getting the root, and hopefully events near it with timestamp
            # id is just for consistent results
            orderby=["-root", "-timestamp", "id"],
            params=params,
            query=f"event.type:transaction trace:{trace_id}",
            limit=MAX_TRACE_SIZE,
            referrer="api.trace-view.get-ids",
        )
        if len(result["data"]) == 0:
            return Response(status=404)

    len_transactions = len(result["data"])
    sentry_sdk.set_tag("trace_view.num_transactions", len_transactions)
    sentry_sdk.set_tag(
        "trace_view.num_transactions.grouped",
        "<10" if len_transactions < 10 else "<100" if len_transactions < 100 else ">100",
    )

    warning_extra = {"trace": trace_id, "organization": organization}

    root = result["data"][0] if is_root(result["data"][0]) else None

    # Look for extra roots
    extra_roots = 0
    for item in result["data"][1:]:
        if is_root(item):
            extra_roots += 1
        else:
            break
    if extra_roots > 0:
        sentry_sdk.set_tag("discover.trace-view.warning", "root.extra-found")
        logger.warning(
            "discover.trace-view.root.extra-found",
            {"extra_roots": extra_roots, **warning_extra},
        )

    current_transaction = find_event(result["data"], lambda t: t["id"] == event_id)
    errors = self.get_errors(organization, trace_id, params, current_transaction, event_id)

    return Response(
        self.serialize(result["data"], errors, root, warning_extra, event_id, detailed)
    )
def validate_data(self, project, data):
    # TODO(dcramer): move project out of the data packet
    data['project'] = project.id

    data['errors'] = []

    if data.get('culprit'):
        if not isinstance(data['culprit'], six.string_types):
            raise APIForbidden('Invalid value for culprit')

    if not data.get('event_id'):
        data['event_id'] = uuid.uuid4().hex
    elif not isinstance(data['event_id'], six.string_types):
        raise APIForbidden('Invalid value for event_id')

    if len(data['event_id']) > 32:
        self.log.debug(
            'Discarded value for event_id due to length (%d chars)',
            len(data['event_id']))
        data['errors'].append({
            'type': EventError.VALUE_TOO_LONG,
            'name': 'event_id',
            'value': data['event_id'],
        })
        data['event_id'] = uuid.uuid4().hex
    elif not is_event_id(data['event_id']):
        self.log.debug(
            'Discarded invalid value for event_id: %r',
            data['event_id'], exc_info=True)
        data['errors'].append({
            'type': EventError.INVALID_DATA,
            'name': 'event_id',
            'value': data['event_id'],
        })
        data['event_id'] = uuid.uuid4().hex

    if 'timestamp' in data:
        try:
            self._process_data_timestamp(data)
        except InvalidTimestamp as e:
            self.log.debug(
                'Discarded invalid value for timestamp: %r',
                data['timestamp'], exc_info=True)
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'timestamp',
                'value': data['timestamp'],
            })
            del data['timestamp']

    if 'fingerprint' in data:
        try:
            self._process_fingerprint(data)
        except InvalidFingerprint as e:
            self.log.debug(
                'Discarded invalid value for fingerprint: %r',
                data['fingerprint'], exc_info=True)
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'fingerprint',
                'value': data['fingerprint'],
            })
            del data['fingerprint']

    if 'platform' not in data or data['platform'] not in VALID_PLATFORMS:
        data['platform'] = 'other'

    if data.get('modules') and type(data['modules']) != dict:
        self.log.debug(
            'Discarded invalid type for modules: %s',
            type(data['modules']))
        data['errors'].append({
            'type': EventError.INVALID_DATA,
            'name': 'modules',
            'value': data['modules'],
        })
        del data['modules']

    if data.get('extra') is not None and type(data['extra']) != dict:
        self.log.debug(
            'Discarded invalid type for extra: %s',
            type(data['extra']))
        data['errors'].append({
            'type': EventError.INVALID_DATA,
            'name': 'extra',
            'value': data['extra'],
        })
        del data['extra']

    if data.get('tags') is not None:
        if type(data['tags']) == dict:
            data['tags'] = list(data['tags'].items())
        elif not isinstance(data['tags'], (list, tuple)):
            self.log.debug(
                'Discarded invalid type for tags: %s',
                type(data['tags']))
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'tags',
                'value': data['tags'],
            })
            del data['tags']

    if data.get('tags'):
        # remove any values which are over 32 characters
        tags = []
        for pair in data['tags']:
            try:
                k, v = pair
            except ValueError:
                self.log.debug('Discarded invalid tag value: %r', pair)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': pair,
                })
                continue

            if not isinstance(k, six.string_types):
                try:
                    k = six.text_type(k)
                except Exception:
                    self.log.debug('Discarded invalid tag key: %r', type(k))
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

            if not isinstance(v, six.string_types):
                try:
                    v = six.text_type(v)
                except Exception:
                    self.log.debug('Discarded invalid tag value: %s=%r',
                                   k, type(v))
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

            if len(k) > MAX_TAG_KEY_LENGTH or len(v) > MAX_TAG_VALUE_LENGTH:
                self.log.debug('Discarded invalid tag: %s=%s', k, v)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': pair,
                })
                continue

            # support tags with spaces by converting them
            k = k.replace(' ', '-')

            if TagKey.is_reserved_key(k):
                self.log.debug('Discarding reserved tag key: %s', k)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': pair,
                })
                continue

            if not TagKey.is_valid_key(k):
                self.log.debug('Discarded invalid tag key: %s', k)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': pair,
                })
                continue

            if not TagValue.is_valid_value(v):
                self.log.debug('Discard invalid tag value: %s', v)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': pair,
                })
                continue

            tags.append((k, v))
        data['tags'] = tags

    for k in list(iter(data)):
        if k in CLIENT_RESERVED_ATTRS:
            continue

        value = data.pop(k)

        if not value:
            self.log.debug('Ignored empty interface value: %s', k)
            continue

        try:
            interface = get_interface(k)
        except ValueError:
            self.log.debug('Ignored unknown attribute: %s', k)
            data['errors'].append({
                'type': EventError.INVALID_ATTRIBUTE,
                'name': k,
            })
            continue

        if type(value) != dict:
            # HACK(dcramer): the exception/breadcrumbs interface supports a
            # list as the value. We should change this in a new protocol
            # version.
            if type(value) in (list, tuple):
                value = {'values': value}
            else:
                self.log.debug(
                    'Invalid parameter for value: %s (%r)', k, type(value))
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': k,
                    'value': value,
                })
                continue

        try:
            inst = interface.to_python(value)
            data[inst.get_path()] = inst.to_json()
        except Exception as e:
            if isinstance(e, InterfaceValidationError):
                log = self.log.debug
            else:
                log = self.log.error
            log('Discarded invalid value for interface: %s (%r)',
                k, value, exc_info=True)
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': k,
                'value': value,
            })

    # TODO(dcramer): ideally this logic would happen in normalize, but today
    # we don't do "validation" there (create errors)

    # message is coerced to an interface, as it's used for a pure
    # index of searchable strings
    # See GH-3248
    message = data.pop('message', None)
    if message:
        if 'sentry.interfaces.Message' not in data:
            value = {
                'message': message,
            }
        elif not data['sentry.interfaces.Message'].get('formatted'):
            value = data['sentry.interfaces.Message']
            value['formatted'] = message
        else:
            value = None

        if value is not None:
            k = 'sentry.interfaces.Message'
            interface = get_interface(k)
            try:
                inst = interface.to_python(value)
                data[inst.get_path()] = inst.to_json()
            except Exception as e:
                if isinstance(e, InterfaceValidationError):
                    log = self.log.debug
                else:
                    log = self.log.error
                log('Discarded invalid value for interface: %s (%r)',
                    k, value, exc_info=True)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': k,
                    'value': value,
                })

    level = data.get('level') or DEFAULT_LOG_LEVEL
    if isinstance(level, six.string_types) and not level.isdigit():
        # assume it's something like 'warning'
        try:
            data['level'] = LOG_LEVELS_MAP[level]
        except KeyError as e:
            self.log.debug('Discarded invalid logger value: %s', level)
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'level',
                'value': level,
            })
            data['level'] = LOG_LEVELS_MAP.get(
                DEFAULT_LOG_LEVEL, DEFAULT_LOG_LEVEL)

    if data.get('release'):
        data['release'] = six.text_type(data['release'])
        if len(data['release']) > 64:
            data['errors'].append({
                'type': EventError.VALUE_TOO_LONG,
                'name': 'release',
                'value': data['release'],
            })
            del data['release']

    if data.get('dist'):
        data['dist'] = six.text_type(data['dist']).strip()
        if not data.get('release'):
            data['dist'] = None
        elif len(data['dist']) > 64:
            data['errors'].append({
                'type': EventError.VALUE_TOO_LONG,
                'name': 'dist',
                'value': data['dist'],
            })
            del data['dist']
        elif _dist_re.match(data['dist']) is None:
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'dist',
                'value': data['dist'],
            })
            del data['dist']

    if data.get('environment'):
        data['environment'] = six.text_type(data['environment'])
        if len(data['environment']) > 64:
            data['errors'].append({
                'type': EventError.VALUE_TOO_LONG,
                'name': 'environment',
                'value': data['environment'],
            })
            del data['environment']

    if data.get('time_spent'):
        try:
            data['time_spent'] = int(data['time_spent'])
        except (ValueError, TypeError):
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'time_spent',
                'value': data['time_spent'],
            })
            del data['time_spent']
        else:
            if data['time_spent'] > BoundedIntegerField.MAX_VALUE:
                data['errors'].append({
                    'type': EventError.VALUE_TOO_LONG,
                    'name': 'time_spent',
                    'value': data['time_spent'],
                })
                del data['time_spent']

    return data
def dispatch(self, request):
    try:
        event_id = request.GET['eventId']
    except KeyError:
        return self._json_response(request, status=400)

    if not is_event_id(event_id):
        return self._json_response(request, status=400)

    key = self._get_project_key(request)
    if not key:
        return self._json_response(request, status=404)

    origin = self._get_origin(request)
    if not origin:
        return self._json_response(request, status=403)

    if not is_valid_origin(origin, key.project):
        return HttpResponse(status=403)

    if request.method == 'OPTIONS':
        return self._json_response(request)

    # TODO(dcramer): since we can't use a csrf cookie we should at the very
    # least sign the request / add some kind of nonce
    initial = {
        'name': request.GET.get('name'),
        'email': request.GET.get('email'),
    }

    form = UserReportForm(
        request.POST if request.method == 'POST' else None, initial=initial)
    if form.is_valid():
        # TODO(dcramer): move this to post to the internal API
        report = form.save(commit=False)
        report.project = key.project
        report.event_id = event_id
        try:
            mapping = EventMapping.objects.get(
                event_id=report.event_id,
                project_id=key.project_id,
            )
        except EventMapping.DoesNotExist:
            # XXX(dcramer): the system should fill this in later
            pass
        else:
            report.group = Group.objects.get(id=mapping.group_id)

        try:
            with transaction.atomic():
                report.save()
        except IntegrityError:
            # There was a duplicate, so just overwrite the existing
            # row with the new one. The only way this ever happens is
            # if someone is messing around with the API, or doing
            # something wrong with the SDK, but this behavior is
            # more reasonable than just hard erroring and is more
            # expected.
            UserReport.objects.filter(
                project=report.project,
                event_id=report.event_id,
            ).update(
                name=report.name,
                email=report.email,
                comments=report.comments,
                date_added=timezone.now(),
            )

        user_feedback_received.send(project=report.project,
                                    group=report.group, sender=self)

        return self._json_response(request)
    elif request.method == 'POST':
        return self._json_response(request, {
            "errors": dict(form.errors),
        }, status=400)

    show_branding = ProjectOption.objects.get_value(
        project=key.project, key='feedback:branding', default='1') == '1'

    template = render_to_string('sentry/error-page-embed.html', {
        'form': form,
        'show_branding': show_branding,
    })

    context = {
        'endpoint': mark_safe('*/' + json.dumps(request.build_absolute_uri()) + ';/*'),
        'template': mark_safe('*/' + json.dumps(template) + ';/*'),
        'strings': json.dumps_htmlsafe({
            'generic_error': six.text_type(GENERIC_ERROR),
            'form_error': six.text_type(FORM_ERROR),
            'sent_message': six.text_type(SENT_MESSAGE),
        }),
    }

    return render_to_response('sentry/error-page-embed.js', context, request,
                              content_type='text/javascript')
def get(self, request, organization):
    """
    List an Organization's Issues
    `````````````````````````````

    Return a list of issues (groups) bound to an organization.  All
    parameters are supplied as query string parameters.

    A default query of ``is:unresolved`` is applied. To return results
    with other statuses send a new query value (i.e. ``?query=`` for all
    results).

    The ``groupStatsPeriod`` parameter can be used to select the timeline
    stats which should be present. Possible values are: '' (disable),
    '24h', '14d'

    The ``statsPeriod`` parameter can be used to select a date window
    starting from now. Ex. ``14d``.

    The ``start`` and ``end`` parameters can be used to select an
    absolute date period to fetch issues from.

    :qparam string statsPeriod: an optional stat period (can be one of
                                ``"24h"``, ``"14d"``, and ``""``).
    :qparam string groupStatsPeriod: an optional stat period (can be one
                                     of ``"24h"``, ``"14d"``, and ``""``).
    :qparam string start: Beginning date. You must also provide ``end``.
    :qparam string end: End date. You must also provide ``start``.
    :qparam bool shortIdLookup: if this is set to true then short IDs are
                                looked up by this function as well.  This
                                can cause the return value of the function
                                to return an event issue of a different
                                project which is why this is an opt-in.
                                Set to `1` to enable.
    :qparam querystring query: an optional Sentry structured search
                               query.  If not provided an implied
                               ``"is:unresolved"`` is assumed.
    :pparam string organization_slug: the slug of the organization the
                                      issues belong to.
    :auth: required
    """
    stats_period = request.GET.get('groupStatsPeriod')
    if stats_period not in (None, '', '24h', '14d'):
        return Response({"detail": ERR_INVALID_STATS_PERIOD}, status=400)
    elif stats_period is None:
        # default
        stats_period = '24h'
    elif stats_period == '':
        # disable stats
        stats_period = None

    environments = self.get_environments(request, organization)

    serializer = functools.partial(
        StreamGroupSerializerSnuba,
        environment_ids=[env.id for env in environments],
        stats_period=stats_period,
    )

    projects = self.get_projects(request, organization)
    project_ids = [p.id for p in projects]

    if not projects:
        return Response([])

    if len(projects) > 1 and not features.has(
            'organizations:global-views', organization, actor=request.user):
        return Response({
            'detail': 'You do not have the multi project stream feature enabled'
        }, status=400)

    # we ignore date range for both short id and event ids
    query = request.GET.get('query', '').strip()
    if query:
        # check to see if we've got an event ID
        if is_event_id(query):
            groups = list(
                Group.objects.filter_by_event_id(project_ids, query)
            )

            if len(groups) == 1:
                response = Response(
                    serialize(groups, request.user,
                              serializer(matching_event_id=query)))
                response['X-Sentry-Direct-Hit'] = '1'
                return response

            if groups:
                return Response(serialize(groups, request.user, serializer()))

        group = get_by_short_id(organization.id,
                                request.GET.get('shortIdLookup'), query)
        if group is not None:
            # check all projects user has access to
            if request.access.has_project_access(group.project):
                response = Response(
                    serialize([group], request.user, serializer()))
                response['X-Sentry-Direct-Hit'] = '1'
                return response

    try:
        start, end = get_date_range_from_params(request.GET)
    except InvalidParams as exc:
        return Response({'detail': exc.message}, status=400)

    try:
        cursor_result, query_kwargs = self._search(
            request, organization, projects, environments, {
                'count_hits': True,
                'date_to': end,
                'date_from': start,
            })
    except ValidationError as exc:
        return Response({'detail': six.text_type(exc)}, status=400)

    results = list(cursor_result)

    context = serialize(results, request.user, serializer())

    # HACK: remove auto resolved entries
    # TODO: We should try to integrate this into the search backend, since
    # this can cause us to arbitrarily return fewer results than requested.
    status = [
        search_filter
        for search_filter in query_kwargs.get('search_filters', [])
        if search_filter.key.name == 'status'
    ]
    if status and status[0].value.raw_value == GroupStatus.UNRESOLVED:
        context = [r for r in context if r['status'] == 'unresolved']

    response = Response(context)

    self.add_cursor_headers(request, response, cursor_result)

    # TODO(jess): add metrics that are similar to project endpoint here

    return response
def dispatch(self, request):
    try:
        event_id = request.GET['eventId']
    except KeyError:
        return self._json_response(request, status=400)

    if not is_event_id(event_id):
        return self._json_response(request, status=400)

    key = self._get_project_key(request)
    if not key:
        return self._json_response(request, status=404)

    origin = self._get_origin(request)
    if not origin:
        return self._json_response(request, status=403)

    if not is_valid_origin(origin, key.project):
        return HttpResponse(status=403)

    if request.method == 'OPTIONS':
        return self._json_response(request)

    # TODO(dcramer): since we can't use a csrf cookie we should at the very
    # least sign the request / add some kind of nonce
    initial = {
        'name': request.GET.get('name'),
        'email': request.GET.get('email'),
    }

    form = UserReportForm(
        request.POST if request.method == 'POST' else None, initial=initial)
    if form.is_valid():
        # TODO(dcramer): move this to post to the internal API
        report = form.save(commit=False)
        report.project = key.project
        report.event_id = event_id
        try:
            mapping = EventMapping.objects.get(
                event_id=report.event_id,
                project_id=key.project_id,
            )
        except EventMapping.DoesNotExist:
            # XXX(dcramer): the system should fill this in later
            pass
        else:
            report.group = Group.objects.get(id=mapping.group_id)

        try:
            with transaction.atomic():
                report.save()
        except IntegrityError:
            # There was a duplicate, so just overwrite the existing
            # row with the new one. The only way this ever happens is
            # if someone is messing around with the API, or doing
            # something wrong with the SDK, but this behavior is
            # more reasonable than just hard erroring and is more
            # expected.
            UserReport.objects.filter(
                project=report.project,
                event_id=report.event_id,
            ).update(
                name=report.name,
                email=report.email,
                comments=report.comments,
                date_added=timezone.now(),
            )

        return self._json_response(request)
    elif request.method == 'POST':
        return self._json_response(request, {
            "errors": dict(form.errors),
        }, status=400)

    show_branding = ProjectOption.objects.get_value(
        project=key.project, key='feedback:branding', default='1') == '1'

    template = render_to_string('sentry/error-page-embed.html', {
        'form': form,
        'show_branding': show_branding,
    })

    context = {
        'endpoint': mark_safe('*/' + json.dumps(request.build_absolute_uri()) + ';/*'),
        'template': mark_safe('*/' + json.dumps(template) + ';/*'),
        'strings': json.dumps_htmlsafe({
            'generic_error': six.text_type(GENERIC_ERROR),
            'form_error': six.text_type(FORM_ERROR),
            'sent_message': six.text_type(SENT_MESSAGE),
        }),
    }

    return render_to_response('sentry/error-page-embed.js', context, request,
                              content_type='text/javascript')
def get(self, request, project):
    """
    List a Project's Issues
    ```````````````````````

    Return a list of issues (groups) bound to a project.  All parameters
    are supplied as query string parameters.

    A default query of ``is:unresolved`` is applied. To return results
    with other statuses send a new query value (i.e. ``?query=`` for all
    results).

    The ``statsPeriod`` parameter can be used to select the timeline
    stats which should be present. Possible values are: ``""`` (disable),
    ``"24h"``, ``"14d"``.

    :qparam string statsPeriod: an optional stat period (can be one of
                                ``"24h"``, ``"14d"``, and ``""``).
    :qparam bool shortIdLookup: if this is set to true then short IDs are
                                looked up by this function as well.  This
                                can cause the return value of the function
                                to return an event issue of a different
                                project which is why this is an opt-in.
                                Set to `1` to enable.
    :qparam querystring query: an optional Sentry structured search
                               query.  If not provided an implied
                               ``"is:unresolved"`` is assumed.
    :pparam string organization_slug: the slug of the organization the
                                      issues belong to.
    :pparam string project_slug: the slug of the project the issues
                                 belong to.
    :auth: required
    """
    stats_period = request.GET.get('statsPeriod')
    if stats_period not in (None, '', '24h', '14d'):
        return Response({"detail": ERR_INVALID_STATS_PERIOD}, status=400)
    elif stats_period is None:
        # default
        stats_period = '24h'
    elif stats_period == '':
        # disable stats
        stats_period = None

    serializer = functools.partial(
        StreamGroupSerializer,
        environment_func=self._get_environment_func(request, project.organization_id),
        stats_period=stats_period,
    )

    query = request.GET.get('query', '').strip()
    if query:
        matching_group = None
        matching_event = None
        if is_event_id(query):
            # check to see if we've got an event ID
            try:
                matching_group = Group.objects.from_event_id(project, query)
            except Group.DoesNotExist:
                pass
            else:
                matching_event = Event.objects.from_event_id(query, project.id)
                if matching_event is not None:
                    Event.objects.bind_nodes([matching_event], 'data')
        elif matching_group is None:
            matching_group = get_by_short_id(
                project.organization_id,
                request.GET.get('shortIdLookup'),
                query,
            )
            if matching_group is not None and matching_group.project_id != project.id:
                matching_group = None

        if matching_group is not None:
            matching_event_environment = None

            try:
                matching_event_environment = \
                    matching_event.get_environment().name if matching_event else None
            except Environment.DoesNotExist:
                pass

            response = Response(
                serialize(
                    [matching_group], request.user, serializer(
                        matching_event_id=getattr(matching_event, 'id', None),
                        matching_event_environment=matching_event_environment,
                    )
                )
            )
            response['X-Sentry-Direct-Hit'] = '1'
            return response

    try:
        cursor_result, query_kwargs = self._search(request, project, {'count_hits': True})
    except ValidationError as exc:
        return Response({'detail': six.text_type(exc)}, status=400)

    results = list(cursor_result)

    context = serialize(results, request.user, serializer())

    # HACK: remove auto resolved entries
    # TODO: We should try to integrate this into the search backend, since
    # this can cause us to arbitrarily return fewer results than requested.
    status = [
        search_filter
        for search_filter in query_kwargs.get('search_filters', [])
        if search_filter.key.name == 'status'
    ]
    if status and status[0].value.raw_value == GroupStatus.UNRESOLVED:
        context = [r for r in context if r['status'] == 'unresolved']

    response = Response(context)

    self.add_cursor_headers(request, response, cursor_result)

    if results and query not in DEFAULT_SAVED_SEARCH_QUERIES:
        advanced_search.send(project=project, sender=request.user)
        analytics.record(
            'project_issue.searched',
            user_id=request.user.id,
            organization_id=project.organization_id,
            project_id=project.id,
            query=query,
        )

    return response
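For illustration, a minimal client-side sketch of the direct-hit behaviour above, assuming the standard ``/api/0/projects/<org>/<project>/issues/`` route; the host, slugs, and token are hypothetical placeholders:

import requests

# All values here are hypothetical placeholders.
resp = requests.get(
    'https://sentry.example.com/api/0/projects/my-org/my-project/issues/',
    headers={'Authorization': 'Bearer <auth-token>'},
    params={
        'statsPeriod': '24h',
        # A full hexadecimal event id in ``query`` triggers the
        # direct-hit lookup in the endpoint above.
        'query': 'b802415f7531431caa27f5c0bf923302',
    },
)
# On a direct hit the endpoint returns a single group and sets this header.
print(resp.headers.get('X-Sentry-Direct-Hit'))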
def dispatch(self, request):
    try:
        event_id = request.GET['eventId']
    except KeyError:
        return self._smart_response(
            request, {'eventId': 'Missing or invalid parameter.'}, status=400)

    if event_id and not is_event_id(event_id):
        return self._smart_response(
            request, {'eventId': 'Missing or invalid parameter.'}, status=400)

    key = self._get_project_key(request)
    if not key:
        return self._smart_response(
            request, {'dsn': 'Missing or invalid parameter.'}, status=404)

    origin = self._get_origin(request)
    if not is_valid_origin(origin, key.project):
        return self._smart_response(request, status=403)

    if request.method == 'OPTIONS':
        return self._smart_response(request)

    # customization options
    options = DEFAULT_OPTIONS.copy()
    for name in six.iterkeys(options):
        if name in request.GET:
            options[name] = six.text_type(request.GET[name])

    # TODO(dcramer): since we cant use a csrf cookie we should at the very
    # least sign the request / add some kind of nonce
    initial = {
        'name': request.GET.get('name'),
        'email': request.GET.get('email'),
    }

    form = UserReportForm(request.POST if request.method == 'POST' else None,
                          initial=initial)
    if form.is_valid():
        # TODO(dcramer): move this to post to the internal API
        report = form.save(commit=False)
        report.project = key.project
        report.event_id = event_id
        try:
            event = Event.objects.filter(
                project_id=report.project.id,
                event_id=report.event_id,
            )[0]
        except IndexError:
            try:
                report.group = Group.objects.from_event_id(report.project, report.event_id)
            except Group.DoesNotExist:
                pass
        else:
            report.environment = event.get_environment()
            report.group = event.group

        try:
            with transaction.atomic():
                report.save()
        except IntegrityError:
            # There was a duplicate, so just overwrite the existing
            # row with the new one. The only way this ever happens is
            # if someone is messing around with the API, or doing
            # something wrong with the SDK, but this behavior is
            # more reasonable than just hard erroring and is more
            # expected.
            UserReport.objects.filter(
                project=report.project,
                event_id=report.event_id,
            ).update(
                name=report.name,
                email=report.email,
                comments=report.comments,
                date_added=timezone.now(),
            )
        else:
            if report.group:
                report.notify()

        user_feedback_received.send(project=report.project, group=report.group, sender=self)

        return self._smart_response(request)
    elif request.method == 'POST':
        return self._smart_response(request, {
            "errors": dict(form.errors),
        }, status=400)

    show_branding = ProjectOption.objects.get_value(
        project=key.project,
        key='feedback:branding',
        default='1',
    ) == '1'

    template = render_to_string(
        'sentry/error-page-embed.html', {
            'form': form,
            'show_branding': show_branding,
            'title': options['title'],
            'subtitle': options['subtitle'],
            'subtitle2': options['subtitle2'],
            'name_label': options['labelName'],
            'email_label': options['labelEmail'],
            'comments_label': options['labelComments'],
            'submit_label': options['labelSubmit'],
            'close_label': options['labelClose'],
        }
    )

    context = {
        'endpoint': mark_safe('*/' + json.dumps(request.build_absolute_uri()) + ';/*'),
        'template': mark_safe('*/' + json.dumps(template) + ';/*'),
        'strings': json.dumps_htmlsafe({
            'generic_error': six.text_type(options['errorGeneric']),
            'form_error': six.text_type(options['errorFormEntry']),
            'sent_message': six.text_type(options['successMessage']),
        }),
    }

    return render_to_response(
        'sentry/error-page-embed.js', context, request,
        content_type='text/javascript'
    )
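The dispatch above copies ``DEFAULT_OPTIONS`` and lets callers override each key per-request via query parameters. A plausible shape for that mapping, inferred from the keys the view reads; the default strings are illustrative, not the actual values:

# Illustrative defaults only -- inferred from the keys read by the view.
DEFAULT_OPTIONS = {
    'title': "It looks like we're having issues.",
    'subtitle': 'Our team has been notified.',
    'subtitle2': "If you'd like to help, tell us what happened below.",
    'labelName': 'Name',
    'labelEmail': 'Email',
    'labelComments': 'What happened?',
    'labelSubmit': 'Submit Crash Report',
    'labelClose': 'Close',
    'errorGeneric': 'An unknown error occurred while submitting your report. Please try again.',
    'errorFormEntry': 'Some fields were invalid. Please correct the errors and try again.',
    'successMessage': 'Your feedback has been sent. Thank you!',
}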
def test_is_event_id():
    assert is_event_id("b802415f7531431caa27f5c0bf923302")
    assert is_event_id("B802415F7531431CAA27F5C0BF923302")
    assert is_event_id("b802415f-7531-431c-aa27-f5c0bf923302")
    assert is_event_id("B802415F-7531-431C-AA27-F5C0BF923302")
    assert is_event_id(b"b802415f7531431caa27f5c0bf923302")

    assert not is_event_id("")
    assert not is_event_id("b802415f7531431caa")
    assert not is_event_id("XXXX415f7531431caa27f5c0bf92XXXX")
    assert not is_event_id(4711)
    assert not is_event_id(False)
    assert not is_event_id(None)
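A validator satisfying exactly these assertions could look like the sketch below; it illustrates the contract the test pins down and is not necessarily the actual implementation:

import re

import six

# Compact 32-char hex form and the dashed UUID form, case-insensitive.
_COMPACT_RE = re.compile(r'^[0-9a-fA-F]{32}$')
_DASHED_RE = re.compile(
    r'^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}'
    r'-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$'
)


def is_event_id(value):
    # Bytes are accepted when they decode to a valid id; anything that is
    # neither bytes nor text (ints, bools, None) is rejected outright.
    if isinstance(value, bytes):
        try:
            value = value.decode('ascii')
        except UnicodeDecodeError:
            return False
    if not isinstance(value, six.string_types):
        return False
    return bool(_COMPACT_RE.match(value) or _DASHED_RE.match(value))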
def get(self, request, project, event_id):
    """
    Retrieve an Event for a Project
    ```````````````````````````````

    Return details on an individual event.

    :pparam string organization_slug: the slug of the organization the
                                      event belongs to.
    :pparam string project_slug: the slug of the project the event
                                 belongs to.
    :pparam string event_id: the id of the event to retrieve (either the
                             numeric primary-key or the hexadecimal id as
                             reported by the raven client)
    :auth: required
    """
    event = None

    # If it's a numeric string, check if it's an event Primary Key first
    if event_id.isdigit():
        try:
            event = Event.objects.get(
                id=event_id,
                project_id=project.id,
            )
        except Event.DoesNotExist:
            pass

    # If it was not found as a PK, and it's a possible event_id, search by that instead.
    if event is None and is_event_id(event_id):
        try:
            event = Event.objects.get(
                event_id=event_id,
                project_id=project.id,
            )
        except Event.DoesNotExist:
            pass

    if event is None:
        return Response({'detail': 'Event not found'}, status=404)

    Event.objects.bind_nodes([event], 'data')

    # HACK(dcramer): work around lack of unique sorting on datetime
    base_qs = Event.objects.filter(
        group_id=event.group_id,
    ).exclude(id=event.id)
    try:
        next_event = sorted(
            base_qs.filter(datetime__gte=event.datetime).order_by('datetime')[0:5],
            key=lambda x: (x.datetime, x.id),
        )[0]
    except IndexError:
        next_event = None
    try:
        prev_event = sorted(
            base_qs.filter(
                datetime__lte=event.datetime,
            ).order_by('-datetime')[0:5],
            key=lambda x: (x.datetime, x.id),
            reverse=True,
        )[0]
    except IndexError:
        prev_event = None

    data = serialize(event, request.user, DetailedEventSerializer())

    if next_event:
        data['nextEventID'] = six.text_type(next_event.event_id)
    else:
        data['nextEventID'] = None
    if prev_event:
        data['previousEventID'] = six.text_type(prev_event.event_id)
    else:
        data['previousEventID'] = None

    return Response(data)