def get(self, request, project, key):
    """
    List a Tag's Values
    ```````````````````

    Return a list of values associated with this key.  The `query`
    parameter can be used to perform a "contains" match on values.

    :pparam string organization_slug: the slug of the organization.
    :pparam string project_slug: the slug of the project.
    :pparam string key: the tag key to look up.
    :auth: required
    """
    # Internal reserved tags are stored with a ``sentry:`` prefix.
    if TagKey.is_reserved_key(key):
        lookup_key = 'sentry:{0}'.format(key)
    else:
        lookup_key = key

    try:
        tagkey = TagKey.objects.get(
            project=project,
            key=lookup_key,
            status=TagKeyStatus.VISIBLE,
        )
    except TagKey.DoesNotExist:
        raise ResourceDoesNotExist

    base_queryset = TagValue.objects.filter(
        project=project,
        key=tagkey.key,
    )

    query = request.GET.get('query')
    if query:
        if is_postgres():
            # not quite optimal, but best we can do with ORM
            queryset = TagValue.objects.filter(
                id__in=base_queryset.order_by('-times_seen')[:10000]
            )
        else:
            # MySQL can't handle an `IN` with a `LIMIT` clause
            queryset = base_queryset
        queryset = queryset.filter(value__contains=query)
    else:
        # No search filter requested; use the unrestricted queryset
        # (previously rebuilt an identical queryset from scratch).
        queryset = base_queryset

    return self.paginate(
        request=request,
        queryset=queryset,
        order_by='-times_seen',
        on_results=lambda x: serialize(x, request.user),
    )
def get(self, request, group, key):
    """
    List a Tag's Values
    ```````````````````

    Return a list of values associated with this key for an issue.

    :pparam string issue_id: the ID of the issue to retrieve.
    :pparam string key: the tag key to look the values up for.
    :auth: required
    """
    # XXX(dcramer): kill sentry prefix for internal reserved tags
    lookup_key = 'sentry:{0}'.format(key) if TagKey.is_reserved_key(key) else key

    visible = TagKey.objects.filter(
        project=group.project_id,
        key=lookup_key,
        status=TagKeyStatus.VISIBLE,
    )
    if not visible.exists():
        raise ResourceDoesNotExist

    queryset = GroupTagValue.objects.filter(group=group, key=lookup_key)

    # Translate the requested sort mode into an ordering + paginator pair.
    sort = request.GET.get('sort')
    if sort == 'date':
        order_by, paginator_cls = '-last_seen', DateTimePaginator
    elif sort == 'age':
        order_by, paginator_cls = '-first_seen', DateTimePaginator
    elif sort == 'freq':
        order_by, paginator_cls = '-times_seen', OffsetPaginator
    else:
        order_by, paginator_cls = '-id', Paginator

    # User tags get a dedicated serializer; everything else uses the default.
    serializer_cls = UserTagValueSerializer() if key == 'user' else None

    return self.paginate(
        request=request,
        queryset=queryset,
        order_by=order_by,
        paginator_cls=paginator_cls,
        on_results=lambda x: serialize(x, request.user, serializer_cls),
    )
def get(self, request, group, key):
    """
    List a Tag's Values
    ```````````````````

    Return a list of values associated with this key for an issue.

    :pparam string issue_id: the ID of the issue to retrieve.
    :pparam string key: the tag key to look the values up for.
    :auth: required
    """
    # XXX(dcramer): kill sentry prefix for internal reserved tags
    if TagKey.is_reserved_key(key):
        lookup_key = 'sentry:{0}'.format(key)
    else:
        lookup_key = key

    key_is_visible = TagKey.objects.filter(
        project=group.project_id,
        key=lookup_key,
        status=TagKeyStatus.VISIBLE,
    ).exists()
    if not key_is_visible:
        raise ResourceDoesNotExist

    queryset = GroupTagValue.objects.filter(
        group_id=group.id,
        key=lookup_key,
    )

    # Dispatch table: sort mode -> (ordering, paginator class).
    sort_modes = {
        'date': ('-last_seen', DateTimePaginator),
        'age': ('-first_seen', DateTimePaginator),
        'freq': ('-times_seen', OffsetPaginator),
    }
    order_by, paginator_cls = sort_modes.get(
        request.GET.get('sort'), ('-id', Paginator)
    )

    # ``user`` tags are rendered with a richer serializer.
    if key == 'user':
        serializer_cls = UserTagValueSerializer()
    else:
        serializer_cls = None

    return self.paginate(
        request=request,
        queryset=queryset,
        order_by=order_by,
        paginator_cls=paginator_cls,
        on_results=lambda x: serialize(x, request.user, serializer_cls),
    )
def get(self, request, organization, project, team, group_id, key):
    """Stream every value of a group's tag key as a CSV attachment."""
    try:
        # TODO(tkaemming): This should *actually* redirect, see similar
        # comment in ``GroupEndpoint.convert_args``.
        group, _ = get_group_with_redirect(
            group_id,
            queryset=Group.objects.filter(project=project),
        )
    except Group.DoesNotExist:
        raise Http404

    # Reserved tag keys are stored internally with a ``sentry:`` prefix.
    lookup_key = 'sentry:{0}'.format(key) if TagKey.is_reserved_key(key) else key

    # Validate existence, as the key may have been deleted.
    try:
        TagKey.objects.get(
            project=group.project_id,
            key=lookup_key,
            status=TagKeyStatus.VISIBLE,
        )
    except TagKey.DoesNotExist:
        raise Http404

    queryset = GroupTagValue.objects.filter(group=group, key=lookup_key)

    def row_iter():
        # Emit the header first, then one row per stored tag value.
        yield ('value', 'times_seen', 'last_seen', 'first_seen')
        for row in queryset.iterator():
            yield (
                row.value.encode('utf-8'),
                str(row.times_seen),
                row.last_seen.strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
                row.first_seen.strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
            )

    # ``Echo`` lets csv.writer hand each formatted row straight back so it
    # can be streamed instead of buffered.
    writer = csv.writer(Echo())
    response = StreamingHttpResponse(
        (writer.writerow(r) for r in row_iter()),
        content_type='text/csv',
    )
    response['Content-Disposition'] = 'attachment; filename="{}-{}.csv"'.format(
        group.qualified_short_id or group.id,
        slugify(key),
    )
    return response
def get(self, request, group, key):
    """
    Retrieve Tag Details
    ````````````````````

    Returns details for given tag key related to an issue.

    :pparam string issue_id: the ID of the issue to retrieve.
    :pparam string key: the tag key to look the values up for.
    :auth: required
    """
    # XXX(dcramer): kill sentry prefix for internal reserved tags
    lookup_key = 'sentry:{0}'.format(key) if TagKey.is_reserved_key(key) else key

    # Both the project-level key and the per-group key must be present.
    try:
        tag_key = TagKey.objects.get(
            project=group.project_id,
            key=lookup_key,
            status=TagKeyStatus.VISIBLE,
        )
        group_tag_key = GroupTagKey.objects.get(group=group, key=lookup_key)
    except (TagKey.DoesNotExist, GroupTagKey.DoesNotExist):
        raise ResourceDoesNotExist

    total_values = GroupTagValue.get_value_count(group.id, lookup_key)
    top_values = GroupTagValue.get_top_values(group.id, lookup_key, limit=3)

    return Response({
        'id': str(tag_key.id),
        'key': key,
        'name': tag_key.get_label(),
        'uniqueValues': group_tag_key.values_seen,
        'totalValues': total_values,
        'topValues': serialize(top_values, request.user),
    })
def get(self, request, organization, project, team, group_id, key):
    """Export a group's tag values as a streamed CSV download."""
    try:
        # TODO(tkaemming): This should *actually* redirect, see similar
        # comment in ``GroupEndpoint.convert_args``.
        group, _ = get_group_with_redirect(
            group_id,
            queryset=Group.objects.filter(project=project),
        )
    except Group.DoesNotExist:
        raise Http404

    if TagKey.is_reserved_key(key):
        lookup_key = 'sentry:{0}'.format(key)
    else:
        lookup_key = key

    # The key may have been deleted; confirm it is still visible.
    try:
        TagKey.objects.get(
            project=group.project_id,
            key=lookup_key,
            status=TagKeyStatus.VISIBLE,
        )
    except TagKey.DoesNotExist:
        raise Http404

    queryset = GroupTagValue.objects.filter(
        group=group,
        key=lookup_key,
    )

    def row_iter():
        header = ('value', 'times_seen', 'last_seen', 'first_seen')
        yield header
        for row in queryset.iterator():
            value = row.value.encode('utf-8')
            last = row.last_seen.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
            first = row.first_seen.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
            yield (value, str(row.times_seen), last, first)

    # csv.writer writes into ``Echo``, which echoes each row back so the
    # response can stream rows one at a time.
    pseudo_buffer = Echo()
    writer = csv.writer(pseudo_buffer)
    rows = (writer.writerow(r) for r in row_iter())
    response = StreamingHttpResponse(rows, content_type='text/csv')

    filename_base = group.qualified_short_id or group.id
    disposition = 'attachment; filename="{}-{}.csv"'.format(
        filename_base, slugify(key))
    response['Content-Disposition'] = disposition
    return response
def get(self, request, group, key):
    """
    Retrieve Tag Details
    ````````````````````

    Returns details for given tag key related to an issue.

    :pparam string issue_id: the ID of the issue to retrieve.
    :pparam string key: the tag key to look the values up for.
    :auth: required
    """
    # XXX(dcramer): kill sentry prefix for internal reserved tags
    if TagKey.is_reserved_key(key):
        lookup_key = 'sentry:{0}'.format(key)
    else:
        lookup_key = key

    try:
        tag_key = TagKey.objects.get(
            project=group.project_id,
            key=lookup_key,
            status=TagKeyStatus.VISIBLE,
        )
    except TagKey.DoesNotExist:
        raise ResourceDoesNotExist

    try:
        group_tag_key = GroupTagKey.objects.get(
            group=group,
            key=lookup_key,
        )
    except GroupTagKey.DoesNotExist:
        raise ResourceDoesNotExist

    # Aggregate stats for this key on the group.
    total_values = GroupTagValue.get_value_count(group.id, lookup_key)
    top_values = GroupTagValue.get_top_values(group.id, lookup_key, limit=9)

    payload = {
        'id': str(tag_key.id),
        'key': key,
        'name': tag_key.get_label(),
        'uniqueValues': group_tag_key.values_seen,
        'totalValues': total_values,
        'topValues': serialize(top_values, request.user),
    }
    return Response(payload)
def get(self, request, project, key):
    """Serialize details for a single visible tag key of a project."""
    # Reserved keys are stored internally with a ``sentry:`` prefix.
    lookup_key = 'sentry:{0}'.format(key) if TagKey.is_reserved_key(key) else key

    try:
        tagkey = TagKey.objects.get(
            project_id=project.id,
            key=lookup_key,
            status=TagKeyStatus.VISIBLE,
        )
    except TagKey.DoesNotExist:
        raise ResourceDoesNotExist

    serialized = serialize(tagkey, request.user)
    return Response(serialized)
def get(self, request, project, key):
    """Return the serialized tag key, or 404 if hidden/missing."""
    if TagKey.is_reserved_key(key):
        # Internal reserved tags carry a ``sentry:`` prefix in storage.
        lookup_key = 'sentry:{0}'.format(key)
    else:
        lookup_key = key

    try:
        tagkey = TagKey.objects.get(
            key=lookup_key,
            project=project,
            status=TagKeyStatus.VISIBLE,
        )
    except TagKey.DoesNotExist:
        raise ResourceDoesNotExist

    return Response(serialize(tagkey, request.user))
def get(self, request, organization, project, team, group_id, key):
    """Export the values of a group's tag key via the shared CSV helper."""
    try:
        # TODO(tkaemming): This should *actually* redirect, see similar
        # comment in ``GroupEndpoint.convert_args``.
        group, _ = get_group_with_redirect(
            group_id,
            queryset=Group.objects.filter(project=project),
        )
    except Group.DoesNotExist:
        raise Http404

    lookup_key = 'sentry:{0}'.format(key) if TagKey.is_reserved_key(key) else key

    # Validate existence, as the key may have been deleted.
    try:
        TagKey.objects.get(
            project_id=group.project_id,
            key=lookup_key,
            status=TagKeyStatus.VISIBLE,
        )
    except TagKey.DoesNotExist:
        raise Http404

    # User tags are enriched with EventUser data while iterating.
    callbacks = [attach_eventuser(project.id)] if key == 'user' else []

    queryset = RangeQuerySetWrapper(
        GroupTagValue.objects.filter(group_id=group.id, key=lookup_key),
        callbacks=callbacks,
    )

    filename = '{}-{}'.format(group.qualified_short_id or group.id, key)
    return self.to_csv_response(queryset, filename, key=key)
def get(self, request, organization, project, team, group_id, key):
    """Stream a CSV of the group's values for ``key``."""
    try:
        # TODO(tkaemming): This should *actually* redirect, see similar
        # comment in ``GroupEndpoint.convert_args``.
        group, _ = get_group_with_redirect(
            group_id,
            queryset=Group.objects.filter(project=project),
        )
    except Group.DoesNotExist:
        raise Http404

    if TagKey.is_reserved_key(key):
        lookup_key = 'sentry:{0}'.format(key)
    else:
        lookup_key = key

    # Validate existence, as the key may have been deleted.
    try:
        TagKey.objects.get(
            project=group.project_id,
            key=lookup_key,
            status=TagKeyStatus.VISIBLE,
        )
    except TagKey.DoesNotExist:
        raise Http404

    if key == 'user':
        # Attach EventUser details while rows are iterated.
        callbacks = [attach_eventuser(project.id)]
    else:
        callbacks = []

    values = GroupTagValue.objects.filter(
        group_id=group.id,
        key=lookup_key,
    )
    queryset = RangeQuerySetWrapper(values, callbacks=callbacks)

    filename = '{}-{}'.format(
        group.qualified_short_id or group.id,
        key,
    )
    return self.to_csv_response(queryset, filename, key=key)
def get(self, request, project, key):
    """
    List a Tag's Values
    ```````````````````

    Return a list of values associated with this key.  The `query`
    parameter can be used to to perform a "contains" match on
    values.

    :pparam string organization_slug: the slug of the organization.
    :pparam string project_slug: the slug of the project.
    :pparam string key: the tag key to look up.
    :auth: required
    """
    # Reserved keys are stored internally with a ``sentry:`` prefix.
    lookup_key = 'sentry:{0}'.format(key) if TagKey.is_reserved_key(key) else key

    try:
        tagkey = TagKey.objects.get(
            project_id=project.id,
            key=lookup_key,
            status=TagKeyStatus.VISIBLE,
        )
    except TagKey.DoesNotExist:
        raise ResourceDoesNotExist

    queryset = TagValue.objects.filter(project_id=project.id, key=tagkey.key)

    # Optional substring search over the values.
    query = request.GET.get('query')
    if query:
        queryset = queryset.filter(value__contains=query)

    return self.paginate(
        request=request,
        queryset=queryset,
        order_by='-last_seen',
        paginator_cls=DateTimePaginator,
        on_results=lambda x: serialize(x, request.user),
    )
def get(self, request, project, key):
    """
    List a Tag's Values
    ```````````````````

    Return a list of values associated with this key.  The `query`
    parameter can be used to perform a "contains" match on values.

    :pparam string organization_slug: the slug of the organization.
    :pparam string project_slug: the slug of the project.
    :pparam string key: the tag key to look up.
    :auth: required
    """
    # Internal reserved tags are stored with a ``sentry:`` prefix.
    if TagKey.is_reserved_key(key):
        lookup_key = "sentry:{0}".format(key)
    else:
        lookup_key = key

    try:
        tagkey = TagKey.objects.get(project=project, key=lookup_key, status=TagKeyStatus.VISIBLE)
    except TagKey.DoesNotExist:
        raise ResourceDoesNotExist

    base_queryset = TagValue.objects.filter(project=project, key=tagkey.key)

    query = request.GET.get("query")
    if query:
        if is_postgres():
            # not quite optimal, but best we can do with ORM
            queryset = TagValue.objects.filter(id__in=base_queryset.order_by("-times_seen")[:10000])
        else:
            # MySQL can't handle an `IN` with a `LIMIT` clause
            queryset = base_queryset
        queryset = queryset.filter(value__contains=query)
    else:
        # No search requested: reuse the base queryset rather than
        # rebuilding an identical one.
        queryset = base_queryset

    return self.paginate(
        request=request, queryset=queryset, order_by="-times_seen", on_results=lambda x: serialize(x, request.user)
    )
def get(self, request, group, key):
    """
    List a Tag's Values
    ```````````````````

    Return a list of values associated with this key for an issue.

    :pparam string issue_id: the ID of the issue to retrieve.
    :pparam string key: the tag key to look the values up for.
    :auth: required
    """
    # XXX(dcramer): kill sentry prefix for internal reserved tags
    lookup_key = 'sentry:{0}'.format(key) if TagKey.is_reserved_key(key) else key

    key_visible = TagKey.objects.filter(
        project=group.project_id,
        key=lookup_key,
        status=TagKeyStatus.VISIBLE,
    ).exists()
    if not key_visible:
        raise ResourceDoesNotExist

    queryset = GroupTagValue.objects.filter(group=group, key=lookup_key)

    return self.paginate(
        request=request,
        queryset=queryset,
        order_by='-id',
        on_results=lambda x: serialize(x, request.user),
    )
def delete(self, request, project, key):
    """
    Remove all occurrences of the given tag key.

        {method} {path}

    """
    # Reserved keys are stored internally with a ``sentry:`` prefix.
    lookup_key = 'sentry:{0}'.format(key) if TagKey.is_reserved_key(key) else key

    try:
        tagkey = TagKey.objects.get(project=project, key=lookup_key)
    except TagKey.DoesNotExist:
        raise ResourceDoesNotExist

    # Only the request that actually transitions the row from VISIBLE to
    # PENDING_DELETION schedules the deletion task and writes the audit log.
    updated = TagKey.objects.filter(
        id=tagkey.id,
        status=TagKeyStatus.VISIBLE,
    ).update(status=TagKeyStatus.PENDING_DELETION)

    if updated:
        delete_tag_key.delay(object_id=tagkey.id)
        self.create_audit_entry(
            request=request,
            organization=project.organization,
            target_object=tagkey.id,
            event=AuditLogEntryEvent.TAGKEY_REMOVE,
            data=tagkey.get_audit_log_data(),
        )

    return Response(status=204)
def delete(self, request, project, key):
    """
    Remove all occurrences of the given tag key.

        {method} {path}

    """
    if TagKey.is_reserved_key(key):
        lookup_key = 'sentry:{0}'.format(key)
    else:
        lookup_key = key

    try:
        tagkey = TagKey.objects.get(
            key=lookup_key,
            project_id=project.id,
        )
    except TagKey.DoesNotExist:
        raise ResourceDoesNotExist

    # Conditional update: returns 0 if another request already moved the
    # key out of VISIBLE, so the deletion is only scheduled once.
    updated = TagKey.objects.filter(
        id=tagkey.id,
        status=TagKeyStatus.VISIBLE,
    ).update(status=TagKeyStatus.PENDING_DELETION)

    if updated:
        delete_tag_key.delay(object_id=tagkey.id)

        self.create_audit_entry(
            request=request,
            organization=project.organization,
            target_object=tagkey.id,
            event=AuditLogEntryEvent.TAGKEY_REMOVE,
            data=tagkey.get_audit_log_data(),
        )

    return Response(status=204)
def validate_data(self, project, data):
    """Validate and normalize an incoming event payload in place.

    Fields that can be repaired are coerced; fields that cannot are
    dropped, with each problem recorded under ``data['errors']``.
    Only a non-string ``culprit`` or ``event_id`` is fatal
    (``APIForbidden``).  Returns the mutated ``data`` dict.
    """
    # TODO(dcramer): move project out of the data packet
    data['project'] = project.id

    # Accumulates non-fatal validation problems for the client.
    data['errors'] = []

    if data.get('culprit'):
        if not isinstance(data['culprit'], six.string_types):
            raise APIForbidden('Invalid value for culprit')

    # A missing event_id is generated; a non-string one is rejected.
    if not data.get('event_id'):
        data['event_id'] = uuid.uuid4().hex
    elif not isinstance(data['event_id'], six.string_types):
        raise APIForbidden('Invalid value for event_id')

    # Over-long or malformed event ids are replaced with a fresh uuid,
    # with the original recorded in the error list.
    if len(data['event_id']) > 32:
        self.log.debug(
            'Discarded value for event_id due to length (%d chars)',
            len(data['event_id']))
        data['errors'].append({
            'type': EventError.VALUE_TOO_LONG,
            'name': 'event_id',
            'value': data['event_id'],
        })
        data['event_id'] = uuid.uuid4().hex
    elif not is_event_id(data['event_id']):
        self.log.debug('Discarded invalid value for event_id: %r',
                       data['event_id'], exc_info=True)
        data['errors'].append({
            'type': EventError.INVALID_DATA,
            'name': 'event_id',
            'value': data['event_id'],
        })
        data['event_id'] = uuid.uuid4().hex

    # Timestamp / fingerprint are delegated to helpers; invalid values
    # are dropped rather than rejected.
    if 'timestamp' in data:
        try:
            self._process_data_timestamp(data)
        except InvalidTimestamp as e:
            self.log.debug('Discarded invalid value for timestamp: %r',
                           data['timestamp'], exc_info=True)
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'timestamp',
                'value': data['timestamp'],
            })
            del data['timestamp']

    if 'fingerprint' in data:
        try:
            self._process_fingerprint(data)
        except InvalidFingerprint as e:
            self.log.debug('Discarded invalid value for fingerprint: %r',
                           data['fingerprint'], exc_info=True)
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'fingerprint',
                'value': data['fingerprint'],
            })
            del data['fingerprint']

    # Unknown platforms are coerced to 'other'.
    if 'platform' not in data or data['platform'] not in VALID_PLATFORMS:
        data['platform'] = 'other'

    if data.get('modules') and type(data['modules']) != dict:
        self.log.debug('Discarded invalid type for modules: %s',
                       type(data['modules']))
        data['errors'].append({
            'type': EventError.INVALID_DATA,
            'name': 'modules',
            'value': data['modules'],
        })
        del data['modules']

    if data.get('extra') is not None and type(data['extra']) != dict:
        self.log.debug('Discarded invalid type for extra: %s',
                       type(data['extra']))
        data['errors'].append({
            'type': EventError.INVALID_DATA,
            'name': 'extra',
            'value': data['extra'],
        })
        del data['extra']

    # Tags may arrive as a dict; normalize to a list of pairs.
    if data.get('tags') is not None:
        if type(data['tags']) == dict:
            data['tags'] = list(data['tags'].items())
        elif not isinstance(data['tags'], (list, tuple)):
            self.log.debug('Discarded invalid type for tags: %s',
                           type(data['tags']))
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'tags',
                'value': data['tags'],
            })
            del data['tags']

    if data.get('tags'):
        # remove any values which are over 32 characters
        tags = []
        for pair in data['tags']:
            # Each entry must unpack to a (key, value) pair.
            try:
                k, v = pair
            except ValueError:
                self.log.debug('Discarded invalid tag value: %r', pair)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': pair,
                })
                continue
            # Coerce key and value to text where possible.
            if not isinstance(k, six.string_types):
                try:
                    k = six.text_type(k)
                except Exception:
                    self.log.debug('Discarded invalid tag key: %r', type(k))
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue
            if not isinstance(v, six.string_types):
                try:
                    v = six.text_type(v)
                except Exception:
                    self.log.debug('Discarded invalid tag value: %s=%r',
                                   k, type(v))
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue
            if len(k) > MAX_TAG_KEY_LENGTH or len(
                    v) > MAX_TAG_VALUE_LENGTH:
                self.log.debug('Discarded invalid tag: %s=%s', k, v)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': pair,
                })
                continue
            # support tags with spaces by converting them
            k = k.replace(' ', '-')
            if TagKey.is_reserved_key(k):
                self.log.debug('Discarding reserved tag key: %s', k)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': pair,
                })
                continue
            if not TagKey.is_valid_key(k):
                self.log.debug('Discarded invalid tag key: %s', k)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': pair,
                })
                continue
            if not TagValue.is_valid_value(v):
                self.log.debug('Discard invalid tag value: %s', v)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': pair,
                })
                continue
            tags.append((k, v))
        data['tags'] = tags

    # Every remaining non-reserved top-level attribute is treated as an
    # interface payload; it is popped, parsed, and re-stored under the
    # interface's canonical path.
    for k in list(iter(data)):
        if k in CLIENT_RESERVED_ATTRS:
            continue

        value = data.pop(k)

        if not value:
            self.log.debug('Ignored empty interface value: %s', k)
            continue

        try:
            interface = get_interface(k)
        except ValueError:
            self.log.debug('Ignored unknown attribute: %s', k)
            data['errors'].append({
                'type': EventError.INVALID_ATTRIBUTE,
                'name': k,
            })
            continue

        if type(value) != dict:
            # HACK(dcramer): the exception/breadcrumbs interface supports a
            # list as the value. We should change this in a new protocol
            # version.
            if type(value) in (list, tuple):
                value = {'values': value}
            else:
                self.log.debug('Invalid parameter for value: %s (%r)',
                               k, type(value))
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': k,
                    'value': value,
                })
                continue

        try:
            inst = interface.to_python(value)
            data[inst.get_path()] = inst.to_json()
        except Exception as e:
            # Validation failures are expected (debug); anything else is
            # logged as an error, but both only drop the interface value.
            if isinstance(e, InterfaceValidationError):
                log = self.log.debug
            else:
                log = self.log.error
            log('Discarded invalid value for interface: %s (%r)', k, value,
                exc_info=True)
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': k,
                'value': value,
            })

    # TODO(dcramer): ideally this logic would happen in normalize, but today
    # we don't do "validation" there (create errors)
    # message is coerced to an interface, as its used for pure
    # index of searchable strings
    # See GH-3248
    message = data.pop('message', None)
    if message:
        if 'sentry.interfaces.Message' not in data:
            value = {
                'message': message,
            }
        elif not data['sentry.interfaces.Message'].get('formatted'):
            value = data['sentry.interfaces.Message']
            value['formatted'] = message
        else:
            value = None

        if value is not None:
            k = 'sentry.interfaces.Message'
            interface = get_interface(k)
            try:
                inst = interface.to_python(value)
                data[inst.get_path()] = inst.to_json()
            except Exception as e:
                if isinstance(e, InterfaceValidationError):
                    log = self.log.debug
                else:
                    log = self.log.error
                log('Discarded invalid value for interface: %s (%r)', k,
                    value, exc_info=True)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': k,
                    'value': value,
                })

    # Map textual levels (e.g. 'warning') to their numeric value, falling
    # back to the default level on unknown names.
    level = data.get('level') or DEFAULT_LOG_LEVEL
    if isinstance(level, six.string_types) and not level.isdigit():
        # assume it's something like 'warning'
        try:
            data['level'] = LOG_LEVELS_MAP[level]
        except KeyError as e:
            self.log.debug('Discarded invalid logger value: %s', level)
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'level',
                'value': level,
            })
            data['level'] = LOG_LEVELS_MAP.get(DEFAULT_LOG_LEVEL,
                                               DEFAULT_LOG_LEVEL)

    # Releases longer than 64 characters are dropped.
    if data.get('release'):
        data['release'] = six.text_type(data['release'])
        if len(data['release']) > 64:
            data['errors'].append({
                'type': EventError.VALUE_TOO_LONG,
                'name': 'release',
                'value': data['release'],
            })
            del data['release']

    return data
def validate_data(self, project, data):
    """Validate and normalize an incoming event payload in place.

    Repairs or drops individual fields, recording every discarded or
    rewritten value under ``data['errors']``; only a non-string
    ``culprit`` or ``event_id`` raises ``APIForbidden``.  Extends the
    base validation with ``dist``, ``environment`` and ``time_spent``
    handling.  Returns the mutated ``data`` dict.
    """
    # TODO(dcramer): move project out of the data packet
    data['project'] = project.id

    # Accumulates non-fatal validation problems for the client.
    data['errors'] = []

    if data.get('culprit'):
        if not isinstance(data['culprit'], six.string_types):
            raise APIForbidden('Invalid value for culprit')

    # A missing event_id is generated; a non-string one is rejected.
    if not data.get('event_id'):
        data['event_id'] = uuid.uuid4().hex
    elif not isinstance(data['event_id'], six.string_types):
        raise APIForbidden('Invalid value for event_id')

    # Over-long or malformed event ids are replaced with a fresh uuid.
    if len(data['event_id']) > 32:
        self.log.debug(
            'Discarded value for event_id due to length (%d chars)',
            len(data['event_id']))
        data['errors'].append({
            'type': EventError.VALUE_TOO_LONG,
            'name': 'event_id',
            'value': data['event_id'],
        })
        data['event_id'] = uuid.uuid4().hex
    elif not is_event_id(data['event_id']):
        self.log.debug(
            'Discarded invalid value for event_id: %r',
            data['event_id'], exc_info=True)
        data['errors'].append({
            'type': EventError.INVALID_DATA,
            'name': 'event_id',
            'value': data['event_id'],
        })
        data['event_id'] = uuid.uuid4().hex

    # Timestamp / fingerprint validation is delegated to helpers; invalid
    # values are dropped rather than rejected.
    if 'timestamp' in data:
        try:
            self._process_data_timestamp(data)
        except InvalidTimestamp as e:
            self.log.debug(
                'Discarded invalid value for timestamp: %r',
                data['timestamp'], exc_info=True)
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'timestamp',
                'value': data['timestamp'],
            })
            del data['timestamp']

    if 'fingerprint' in data:
        try:
            self._process_fingerprint(data)
        except InvalidFingerprint as e:
            self.log.debug(
                'Discarded invalid value for fingerprint: %r',
                data['fingerprint'], exc_info=True)
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'fingerprint',
                'value': data['fingerprint'],
            })
            del data['fingerprint']

    # Unknown platforms are coerced to 'other'.
    if 'platform' not in data or data['platform'] not in VALID_PLATFORMS:
        data['platform'] = 'other'

    if data.get('modules') and type(data['modules']) != dict:
        self.log.debug(
            'Discarded invalid type for modules: %s',
            type(data['modules']))
        data['errors'].append({
            'type': EventError.INVALID_DATA,
            'name': 'modules',
            'value': data['modules'],
        })
        del data['modules']

    if data.get('extra') is not None and type(data['extra']) != dict:
        self.log.debug(
            'Discarded invalid type for extra: %s',
            type(data['extra']))
        data['errors'].append({
            'type': EventError.INVALID_DATA,
            'name': 'extra',
            'value': data['extra'],
        })
        del data['extra']

    # Tags may arrive as a dict; normalize to a list of pairs.
    if data.get('tags') is not None:
        if type(data['tags']) == dict:
            data['tags'] = list(data['tags'].items())
        elif not isinstance(data['tags'], (list, tuple)):
            self.log.debug(
                'Discarded invalid type for tags: %s', type(data['tags']))
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'tags',
                'value': data['tags'],
            })
            del data['tags']

    if data.get('tags'):
        # remove any values which are over 32 characters
        tags = []
        for pair in data['tags']:
            # Each entry must unpack to a (key, value) pair.
            try:
                k, v = pair
            except ValueError:
                self.log.debug('Discarded invalid tag value: %r', pair)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': pair,
                })
                continue
            # Coerce key and value to text where possible.
            if not isinstance(k, six.string_types):
                try:
                    k = six.text_type(k)
                except Exception:
                    self.log.debug('Discarded invalid tag key: %r', type(k))
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue
            if not isinstance(v, six.string_types):
                try:
                    v = six.text_type(v)
                except Exception:
                    self.log.debug('Discarded invalid tag value: %s=%r',
                                   k, type(v))
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue
            if len(k) > MAX_TAG_KEY_LENGTH or len(v) > MAX_TAG_VALUE_LENGTH:
                self.log.debug('Discarded invalid tag: %s=%s', k, v)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': pair,
                })
                continue
            # support tags with spaces by converting them
            k = k.replace(' ', '-')
            if TagKey.is_reserved_key(k):
                self.log.debug('Discarding reserved tag key: %s', k)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': pair,
                })
                continue
            if not TagKey.is_valid_key(k):
                self.log.debug('Discarded invalid tag key: %s', k)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': pair,
                })
                continue
            if not TagValue.is_valid_value(v):
                self.log.debug('Discard invalid tag value: %s', v)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': pair,
                })
                continue
            tags.append((k, v))
        data['tags'] = tags

    # Every remaining non-reserved top-level attribute is treated as an
    # interface payload; it is popped, parsed, and re-stored under the
    # interface's canonical path.
    for k in list(iter(data)):
        if k in CLIENT_RESERVED_ATTRS:
            continue

        value = data.pop(k)

        if not value:
            self.log.debug('Ignored empty interface value: %s', k)
            continue

        try:
            interface = get_interface(k)
        except ValueError:
            self.log.debug('Ignored unknown attribute: %s', k)
            data['errors'].append({
                'type': EventError.INVALID_ATTRIBUTE,
                'name': k,
            })
            continue

        if type(value) != dict:
            # HACK(dcramer): the exception/breadcrumbs interface supports a
            # list as the value. We should change this in a new protocol
            # version.
            if type(value) in (list, tuple):
                value = {'values': value}
            else:
                self.log.debug(
                    'Invalid parameter for value: %s (%r)', k, type(value))
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': k,
                    'value': value,
                })
                continue

        try:
            inst = interface.to_python(value)
            data[inst.get_path()] = inst.to_json()
        except Exception as e:
            # Validation failures are expected (debug); anything else is
            # logged as an error, but both only drop the interface value.
            if isinstance(e, InterfaceValidationError):
                log = self.log.debug
            else:
                log = self.log.error
            log('Discarded invalid value for interface: %s (%r)', k, value,
                exc_info=True)
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': k,
                'value': value,
            })

    # TODO(dcramer): ideally this logic would happen in normalize, but today
    # we don't do "validation" there (create errors)
    # message is coerced to an interface, as its used for pure
    # index of searchable strings
    # See GH-3248
    message = data.pop('message', None)
    if message:
        if 'sentry.interfaces.Message' not in data:
            value = {
                'message': message,
            }
        elif not data['sentry.interfaces.Message'].get('formatted'):
            value = data['sentry.interfaces.Message']
            value['formatted'] = message
        else:
            value = None

        if value is not None:
            k = 'sentry.interfaces.Message'
            interface = get_interface(k)
            try:
                inst = interface.to_python(value)
                data[inst.get_path()] = inst.to_json()
            except Exception as e:
                if isinstance(e, InterfaceValidationError):
                    log = self.log.debug
                else:
                    log = self.log.error
                log('Discarded invalid value for interface: %s (%r)', k,
                    value, exc_info=True)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': k,
                    'value': value,
                })

    # Map textual levels (e.g. 'warning') to their numeric value, falling
    # back to the default level on unknown names.
    level = data.get('level') or DEFAULT_LOG_LEVEL
    if isinstance(level, six.string_types) and not level.isdigit():
        # assume it's something like 'warning'
        try:
            data['level'] = LOG_LEVELS_MAP[level]
        except KeyError as e:
            self.log.debug(
                'Discarded invalid logger value: %s', level)
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'level',
                'value': level,
            })
            data['level'] = LOG_LEVELS_MAP.get(
                DEFAULT_LOG_LEVEL, DEFAULT_LOG_LEVEL)

    # Releases longer than 64 characters are dropped.
    if data.get('release'):
        data['release'] = six.text_type(data['release'])
        if len(data['release']) > 64:
            data['errors'].append({
                'type': EventError.VALUE_TOO_LONG,
                'name': 'release',
                'value': data['release'],
            })
            del data['release']

    # ``dist`` only makes sense alongside a release; it is also length-
    # and pattern-checked against ``_dist_re``.
    if data.get('dist'):
        data['dist'] = six.text_type(data['dist']).strip()
        if not data.get('release'):
            data['dist'] = None
        elif len(data['dist']) > 64:
            data['errors'].append({
                'type': EventError.VALUE_TOO_LONG,
                'name': 'dist',
                'value': data['dist'],
            })
            del data['dist']
        elif _dist_re.match(data['dist']) is None:
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'dist',
                'value': data['dist'],
            })
            del data['dist']

    # Environments longer than 64 characters are dropped.
    if data.get('environment'):
        data['environment'] = six.text_type(data['environment'])
        if len(data['environment']) > 64:
            data['errors'].append({
                'type': EventError.VALUE_TOO_LONG,
                'name': 'environment',
                'value': data['environment'],
            })
            del data['environment']

    # time_spent must be an int within the bounded-field range.
    if data.get('time_spent'):
        try:
            data['time_spent'] = int(data['time_spent'])
        except (ValueError, TypeError):
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'time_spent',
                'value': data['time_spent'],
            })
            del data['time_spent']
        else:
            if data['time_spent'] > BoundedIntegerField.MAX_VALUE:
                data['errors'].append({
                    'type': EventError.VALUE_TOO_LONG,
                    'name': 'time_spent',
                    'value': data['time_spent'],
                })
                del data['time_spent']

    return data
def validate_data(self, project, data):
    """Validate and normalize an incoming event payload in place.

    Bad values are handled leniently wherever possible: the problem is
    logged, a record is appended to ``data['errors']`` and the offending
    field is coerced or dropped, so a single bad field does not reject
    the whole event.  Only invalid *types* for ``message``, ``culprit``
    and ``event_id`` abort the request.

    :param project: the project the event belongs to (only ``.id`` is read)
    :param data: the decoded event payload; mutated in place
    :returns: the same ``data`` dict, normalized
    :raises APIForbidden: on non-string message/culprit/event_id
    """
    # TODO(dcramer): move project out of the data packet
    data['project'] = project.id
    data['errors'] = []

    if not data.get('message'):
        data['message'] = '<no message value>'
    elif not isinstance(data['message'], six.string_types):
        raise APIForbidden('Invalid value for message')

    if data.get('culprit'):
        if not isinstance(data['culprit'], six.string_types):
            raise APIForbidden('Invalid value for culprit')

    if not data.get('event_id'):
        data['event_id'] = uuid.uuid4().hex
    elif not isinstance(data['event_id'], six.string_types):
        raise APIForbidden('Invalid value for event_id')

    if len(data['event_id']) > 32:
        self.log.info(
            'Discarded value for event_id due to length (%d chars)',
            len(data['event_id']))
        data['errors'].append({
            'type': EventError.VALUE_TOO_LONG,
            'name': 'event_id',
            'value': data['event_id'],
        })
        # replace the oversized id rather than rejecting the event
        data['event_id'] = uuid.uuid4().hex

    if 'timestamp' in data:
        try:
            self._process_data_timestamp(data)
        except InvalidTimestamp:
            self.log.info(
                'Discarded invalid value for timestamp: %r',
                data['timestamp'], exc_info=True)
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'timestamp',
                'value': data['timestamp'],
            })
            del data['timestamp']

    if 'fingerprint' in data:
        try:
            self._process_fingerprint(data)
        except InvalidFingerprint:
            self.log.info(
                'Discarded invalid value for fingerprint: %r',
                data['fingerprint'], exc_info=True)
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'fingerprint',
                'value': data['fingerprint'],
            })
            del data['fingerprint']

    if 'platform' not in data or data['platform'] not in VALID_PLATFORMS:
        data['platform'] = 'other'

    if data.get('modules') and type(data['modules']) != dict:
        self.log.info(
            'Discarded invalid type for modules: %s', type(data['modules']))
        data['errors'].append({
            'type': EventError.INVALID_DATA,
            'name': 'modules',
            'value': data['modules'],
        })
        del data['modules']

    if data.get('extra') is not None and type(data['extra']) != dict:
        self.log.info(
            'Discarded invalid type for extra: %s', type(data['extra']))
        data['errors'].append({
            'type': EventError.INVALID_DATA,
            'name': 'extra',
            'value': data['extra'],
        })
        del data['extra']

    if data.get('tags') is not None:
        if type(data['tags']) == dict:
            data['tags'] = data['tags'].items()
        elif not isinstance(data['tags'], (list, tuple)):
            self.log.info(
                'Discarded invalid type for tags: %s', type(data['tags']))
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'tags',
                'value': data['tags'],
            })
            del data['tags']

    if data.get('tags'):
        # remove any values which are over 32 characters
        tags = []
        for pair in data['tags']:
            try:
                k, v = pair
            except ValueError:
                self.log.info('Discarded invalid tag value: %r', pair)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': pair,
                })
                continue

            # coerce non-string keys/values to text where possible
            if not isinstance(k, six.string_types):
                try:
                    k = six.text_type(k)
                except Exception:
                    self.log.info('Discarded invalid tag key: %r', type(k))
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

            if not isinstance(v, six.string_types):
                try:
                    v = six.text_type(v)
                except Exception:
                    self.log.info(
                        'Discarded invalid tag value: %s=%r', k, type(v))
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

            if len(k) > MAX_TAG_KEY_LENGTH or len(v) > MAX_TAG_VALUE_LENGTH:
                self.log.info('Discarded invalid tag: %s=%s', k, v)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': pair,
                })
                continue

            # support tags with spaces by converting them
            k = k.replace(' ', '-')

            if TagKey.is_reserved_key(k):
                self.log.info('Discarding reserved tag key: %s', k)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': pair,
                })
                continue

            if not TagKey.is_valid_key(k):
                self.log.info('Discarded invalid tag key: %s', k)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': pair,
                })
                continue

            tags.append((k, v))
        data['tags'] = tags

    # Resolve every non-reserved top-level attribute to an interface.
    # Iterate a snapshot of the keys: the loop body pops and re-adds
    # entries, and mutating a dict while iterating its live key view
    # raises RuntimeError on Python 3 (bug fix: was ``data.keys()``).
    for k in list(data.keys()):
        if k in CLIENT_RESERVED_ATTRS:
            continue

        value = data.pop(k)
        if not value:
            self.log.info('Ignored empty interface value: %s', k)
            continue

        try:
            interface = get_interface(k)
        except ValueError:
            self.log.info('Ignored unknown attribute: %s', k)
            data['errors'].append({
                'type': EventError.INVALID_ATTRIBUTE,
                'name': k,
            })
            continue

        if type(value) != dict:
            # HACK(dcramer): the exception/breadcrumbs interface supports a
            # list as the value. We should change this in a new protocol
            # version.
            if type(value) in (list, tuple):
                value = {'values': value}
            else:
                self.log.info(
                    'Invalid parameter for value: %s (%r)', k, type(value))
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': k,
                    'value': value,
                })
                continue

        try:
            inst = interface.to_python(value)
            data[inst.get_path()] = inst.to_json()
        except Exception as e:
            # client-supplied data errors are expected (info); anything
            # else is a bug in the interface code (error)
            if isinstance(e, InterfaceValidationError):
                log = self.log.info
            else:
                log = self.log.error
            log('Discarded invalid value for interface: %s (%r)',
                k, value, exc_info=True)
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': k,
                'value': value,
            })

    level = data.get('level') or DEFAULT_LOG_LEVEL
    if isinstance(level, six.string_types) and not level.isdigit():
        # assume it's something like 'warning'
        try:
            data['level'] = LOG_LEVEL_REVERSE_MAP[level]
        except KeyError:
            self.log.info('Discarded invalid logger value: %s', level)
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'level',
                'value': level,
            })
            data['level'] = LOG_LEVEL_REVERSE_MAP.get(
                DEFAULT_LOG_LEVEL, DEFAULT_LOG_LEVEL)

    if data.get('release'):
        # bug fix: use six.text_type (as the tag handling above does)
        # instead of the Python 2-only ``unicode`` builtin
        data['release'] = six.text_type(data['release'])
        if len(data['release']) > 64:
            data['errors'].append({
                'type': EventError.VALUE_TOO_LONG,
                'name': 'release',
                'value': data['release'],
            })
            del data['release']

    return data
def validate_data(self, project, data):
    """Validate and normalize an incoming event payload in place.

    Lenient by design: each problem is logged, recorded in
    ``data["errors"]`` and the offending field is coerced or dropped,
    so one bad field does not reject the whole event.  Only invalid
    *types* for ``message``, ``culprit`` and ``event_id`` abort.

    :param project: the project the event belongs to (only ``.id`` is read)
    :param data: the decoded event payload; mutated in place
    :returns: the same ``data`` dict, normalized
    :raises APIForbidden: on non-string message/culprit/event_id
    """
    # TODO(dcramer): move project out of the data packet
    data["project"] = project.id
    data["errors"] = []

    if not data.get("message"):
        data["message"] = "<no message value>"
    elif not isinstance(data["message"], six.string_types):
        raise APIForbidden("Invalid value for message")

    if data.get("culprit"):
        if not isinstance(data["culprit"], six.string_types):
            raise APIForbidden("Invalid value for culprit")

    if not data.get("event_id"):
        data["event_id"] = uuid.uuid4().hex
    elif not isinstance(data["event_id"], six.string_types):
        raise APIForbidden("Invalid value for event_id")

    if len(data["event_id"]) > 32:
        self.log.info("Discarded value for event_id due to length (%d chars)", len(data["event_id"]))
        data["errors"].append({"type": EventError.VALUE_TOO_LONG, "name": "event_id", "value": data["event_id"]})
        # replace the oversized id rather than rejecting the event
        data["event_id"] = uuid.uuid4().hex

    if "timestamp" in data:
        try:
            self._process_data_timestamp(data)
        except InvalidTimestamp:
            self.log.info("Discarded invalid value for timestamp: %r", data["timestamp"], exc_info=True)
            data["errors"].append(
                {"type": EventError.INVALID_DATA, "name": "timestamp", "value": data["timestamp"]}
            )
            del data["timestamp"]

    if "fingerprint" in data:
        try:
            self._process_fingerprint(data)
        except InvalidFingerprint:
            self.log.info("Discarded invalid value for fingerprint: %r", data["fingerprint"], exc_info=True)
            data["errors"].append(
                {"type": EventError.INVALID_DATA, "name": "fingerprint", "value": data["fingerprint"]}
            )
            del data["fingerprint"]

    if "platform" not in data or data["platform"] not in VALID_PLATFORMS:
        data["platform"] = "other"

    if data.get("modules") and type(data["modules"]) != dict:
        self.log.info("Discarded invalid type for modules: %s", type(data["modules"]))
        data["errors"].append({"type": EventError.INVALID_DATA, "name": "modules", "value": data["modules"]})
        del data["modules"]

    if data.get("extra") is not None and type(data["extra"]) != dict:
        self.log.info("Discarded invalid type for extra: %s", type(data["extra"]))
        data["errors"].append({"type": EventError.INVALID_DATA, "name": "extra", "value": data["extra"]})
        del data["extra"]

    if data.get("tags") is not None:
        if type(data["tags"]) == dict:
            data["tags"] = data["tags"].items()
        elif not isinstance(data["tags"], (list, tuple)):
            self.log.info("Discarded invalid type for tags: %s", type(data["tags"]))
            data["errors"].append({"type": EventError.INVALID_DATA, "name": "tags", "value": data["tags"]})
            del data["tags"]

    if data.get("tags"):
        # remove any values which are over 32 characters
        tags = []
        for pair in data["tags"]:
            try:
                k, v = pair
            except ValueError:
                self.log.info("Discarded invalid tag value: %r", pair)
                data["errors"].append({"type": EventError.INVALID_DATA, "name": "tags", "value": pair})
                continue

            # coerce non-string keys/values to text where possible
            if not isinstance(k, six.string_types):
                try:
                    k = six.text_type(k)
                except Exception:
                    self.log.info("Discarded invalid tag key: %r", type(k))
                    data["errors"].append({"type": EventError.INVALID_DATA, "name": "tags", "value": pair})
                    continue

            if not isinstance(v, six.string_types):
                try:
                    v = six.text_type(v)
                except Exception:
                    self.log.info("Discarded invalid tag value: %s=%r", k, type(v))
                    data["errors"].append({"type": EventError.INVALID_DATA, "name": "tags", "value": pair})
                    continue

            if len(k) > MAX_TAG_KEY_LENGTH or len(v) > MAX_TAG_VALUE_LENGTH:
                self.log.info("Discarded invalid tag: %s=%s", k, v)
                data["errors"].append({"type": EventError.INVALID_DATA, "name": "tags", "value": pair})
                continue

            # support tags with spaces by converting them
            k = k.replace(" ", "-")

            if TagKey.is_reserved_key(k):
                self.log.info("Discarding reserved tag key: %s", k)
                data["errors"].append({"type": EventError.INVALID_DATA, "name": "tags", "value": pair})
                continue

            if not TagKey.is_valid_key(k):
                self.log.info("Discarded invalid tag key: %s", k)
                data["errors"].append({"type": EventError.INVALID_DATA, "name": "tags", "value": pair})
                continue

            tags.append((k, v))
        data["tags"] = tags

    # Resolve every non-reserved top-level attribute to an interface.
    # Iterate a snapshot of the keys: the loop body pops and re-adds
    # entries, and mutating a dict while iterating its live key view
    # raises RuntimeError on Python 3 (bug fix: was ``data.keys()``).
    for k in list(data.keys()):
        if k in CLIENT_RESERVED_ATTRS:
            continue

        value = data.pop(k)
        if not value:
            self.log.info("Ignored empty interface value: %s", k)
            continue

        try:
            interface = get_interface(k)
        except ValueError:
            self.log.info("Ignored unknown attribute: %s", k)
            data["errors"].append({"type": EventError.INVALID_ATTRIBUTE, "name": k})
            continue

        if type(value) != dict:
            # HACK(dcramer): the exception interface supports a list as the
            # value. We should change this in a new protocol version.
            if type(value) in (list, tuple):
                value = {"values": value}
            else:
                self.log.info("Invalid parameter for value: %s (%r)", k, type(value))
                data["errors"].append({"type": EventError.INVALID_DATA, "name": k, "value": value})
                continue

        try:
            inst = interface.to_python(value)
            data[inst.get_path()] = inst.to_json()
        except Exception as e:
            # client-supplied data errors are expected (info); anything
            # else is a bug in the interface code (error)
            if isinstance(e, InterfaceValidationError):
                log = self.log.info
            else:
                log = self.log.error
            log("Discarded invalid value for interface: %s (%r)", k, value, exc_info=True)
            data["errors"].append({"type": EventError.INVALID_DATA, "name": k, "value": value})

    level = data.get("level") or DEFAULT_LOG_LEVEL
    if isinstance(level, six.string_types) and not level.isdigit():
        # assume it's something like 'warning'
        try:
            data["level"] = LOG_LEVEL_REVERSE_MAP[level]
        except KeyError:
            self.log.info("Discarded invalid logger value: %s", level)
            data["errors"].append({"type": EventError.INVALID_DATA, "name": "level", "value": level})
            data["level"] = LOG_LEVEL_REVERSE_MAP.get(DEFAULT_LOG_LEVEL, DEFAULT_LOG_LEVEL)

    if data.get("release"):
        # bug fix: use six.text_type (as the tag handling above does)
        # instead of the Python 2-only ``unicode`` builtin
        data["release"] = six.text_type(data["release"])
        if len(data["release"]) > 64:
            data["errors"].append({"type": EventError.VALUE_TOO_LONG, "name": "release", "value": data["release"]})
            del data["release"]

    return data