Example #1
0
 def serialize(self, obj, attrs, user):
     return {
         'id': six.text_type(obj.id),
         'key': TagKey.get_standardized_key(obj.key),
         'name': obj.get_label(),
         'uniqueValues': obj.values_seen,
     }
Example #2
0
    def get(self, request, group):
        tag_keys = TagKey.objects.filter(
            project=group.project,
            status=TagKeyStatus.VISIBLE,
            key__in=GroupTagKey.objects.filter(group=group).values('key'),
        )

        # O(N) db access
        data = []
        all_top_values = []
        for tag_key in tag_keys:
            total_values = GroupTagValue.get_value_count(group.id, tag_key.key)
            top_values = GroupTagValue.get_top_values(group.id,
                                                      tag_key.key,
                                                      limit=10)

            all_top_values.extend(top_values)

            data.append({
                'id': six.text_type(tag_key.id),
                'key': TagKey.get_standardized_key(tag_key.key),
                'name': tag_key.get_label(),
                'uniqueValues': tag_key.values_seen,
                'totalValues': total_values,
            })

        # Serialize all of the values at once to avoid O(n) serialize/db queries
        top_values_by_key = defaultdict(list)
        for value in serialize(all_top_values, request.user):
            top_values_by_key[value['key']].append(value)

        for d in data:
            d['topValues'] = top_values_by_key[d['key']]

        return Response(data)
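Note: the final assembly step above is plain Python once the values are serialized. A minimal, runnable sketch of the same defaultdict grouping, with hypothetical dictionaries standing in for the serialized GroupTagValue entries:

    from collections import defaultdict

    # Hypothetical pre-serialized values; in the endpoint above these come
    # from serialize(all_top_values, request.user).
    serialized_values = [
        {'key': 'browser', 'value': 'Chrome', 'count': 10},
        {'key': 'browser', 'value': 'Firefox', 'count': 4},
        {'key': 'os', 'value': 'Linux', 'count': 7},
    ]

    top_values_by_key = defaultdict(list)
    for value in serialized_values:
        top_values_by_key[value['key']].append(value)

    data = [{'key': 'browser'}, {'key': 'os'}]
    for d in data:
        d['topValues'] = top_values_by_key[d['key']]
    # data[0]['topValues'] now holds both browser entries;
    # data[1]['topValues'] holds the single os entry.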
Example #3
0
    def get(self, request, group):
        tag_keys = TagKey.objects.filter(
            project=group.project,
            status=TagKeyStatus.VISIBLE,
            key__in=GroupTagKey.objects.filter(
                group=group,
            ).values('key'),
        )

        # O(N) db access
        data = []
        all_top_values = []
        for tag_key in tag_keys:
            total_values = GroupTagValue.get_value_count(group.id, tag_key.key)
            top_values = GroupTagValue.get_top_values(group.id, tag_key.key, limit=10)

            all_top_values.extend(top_values)

            data.append({
                'id': six.text_type(tag_key.id),
                'key': TagKey.get_standardized_key(tag_key.key),
                'name': tag_key.get_label(),
                'uniqueValues': tag_key.values_seen,
                'totalValues': total_values,
            })

        # Serialize all of the values at once to avoid O(n) serialize/db queries
        top_values_by_key = defaultdict(list)
        for value in serialize(all_top_values, request.user):
            top_values_by_key[value['key']].append(value)

        for d in data:
            d['topValues'] = top_values_by_key[d['key']]

        return Response(data)
Example #4
0
    def get(self, request, project, key):
        """
        List a Tag's Values
        ```````````````````

        Return a list of values associated with this key.  The `query`
        parameter can be used to perform a "contains" match on
        values.

        :pparam string organization_slug: the slug of the organization.
        :pparam string project_slug: the slug of the project.
        :pparam string key: the tag key to look up.
        :auth: required
        """
        if TagKey.is_reserved_key(key):
            lookup_key = 'sentry:{0}'.format(key)
        else:
            lookup_key = key

        try:
            tagkey = TagKey.objects.get(
                project=project,
                key=lookup_key,
                status=TagKeyStatus.VISIBLE,
            )
        except TagKey.DoesNotExist:
            raise ResourceDoesNotExist

        base_queryset = TagValue.objects.filter(
            project=project,
            key=tagkey.key,
        )

        query = request.GET.get('query')
        if query:
            if is_postgres():
                # not quite optimal, but best we can do with ORM
                queryset = TagValue.objects.filter(
                    id__in=base_queryset.order_by('-times_seen')[:10000]
                )
            else:
                # MySQL can't handle an `IN` with a `LIMIT` clause
                queryset = base_queryset
            queryset = queryset.filter(value__contains=query)

        else:
            queryset = TagValue.objects.filter(
                project=project,
                key=tagkey.key,
            )

        return self.paginate(
            request=request,
            queryset=queryset,
            order_by='-times_seen',
            on_results=lambda x: serialize(x, request.user),
        )
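Note: the Postgres branch above restricts the `contains` filter to the ids of the 10,000 most-seen values, while MySQL cannot run an `IN` subquery that carries a `LIMIT`. A toy, runnable illustration of that query shape, using SQLite and a hypothetical schema in place of the real TagValue table:

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute(
        'CREATE TABLE tagvalue (id INTEGER PRIMARY KEY, value TEXT, times_seen INTEGER)'
    )
    conn.executemany(
        'INSERT INTO tagvalue (value, times_seen) VALUES (?, ?)',
        [('chrome', 50), ('chromium', 5), ('firefox', 30)],
    )
    # Constrain the substring match to the top-2 rows by times_seen,
    # mirroring id__in=base_queryset.order_by('-times_seen')[:10000].
    rows = conn.execute(
        "SELECT value FROM tagvalue "
        "WHERE id IN (SELECT id FROM tagvalue ORDER BY times_seen DESC LIMIT 2) "
        "AND value LIKE '%chrom%'"
    ).fetchall()
    print(rows)  # [('chrome',)] -- 'chromium' falls outside the top-2 window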
Example #5
0
 def serialize(self, obj, attrs, user):
     return {
         'key': TagKey.get_standardized_key(obj.key),
         'name': attrs['name'],
         'value': obj.value,
         'count': obj.times_seen,
         'lastSeen': obj.last_seen,
         'firstSeen': obj.first_seen,
     }
Example #6
0
    def get(self, request, group, key):
        """
        List a Tag's Values
        ```````````````````

        Return a list of values associated with this key for an issue.

        :pparam string issue_id: the ID of the issue to retrieve.
        :pparam string key: the tag key to look the values up for.
        :auth: required
        """
        # XXX(dcramer): kill sentry prefix for internal reserved tags
        if TagKey.is_reserved_key(key):
            lookup_key = 'sentry:{0}'.format(key)
        else:
            lookup_key = key

        tagkey = TagKey.objects.filter(
            project=group.project_id,
            key=lookup_key,
            status=TagKeyStatus.VISIBLE,
        )
        if not tagkey.exists():
            raise ResourceDoesNotExist

        queryset = GroupTagValue.objects.filter(
            group_id=group.id,
            key=lookup_key,
        )

        sort = request.GET.get('sort')
        if sort == 'date':
            order_by = '-last_seen'
            paginator_cls = DateTimePaginator
        elif sort == 'age':
            order_by = '-first_seen'
            paginator_cls = DateTimePaginator
        elif sort == 'freq':
            order_by = '-times_seen'
            paginator_cls = OffsetPaginator
        else:
            order_by = '-id'
            paginator_cls = Paginator

        if key == 'user':
            serializer_cls = UserTagValueSerializer()
        else:
            serializer_cls = None

        return self.paginate(
            request=request,
            queryset=queryset,
            order_by=order_by,
            paginator_cls=paginator_cls,
            on_results=lambda x: serialize(x, request.user, serializer_cls),
        )
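Note: the sort handling above is a plain dispatch on the `sort` query parameter. As an aside, the same mapping can be written as a lookup table; this is only a sketch, and the paginator names are strings standing in for the real classes:

    # Hypothetical lookup table; falls back to ('-id', 'Paginator'),
    # matching the else branch above.
    SORT_OPTIONS = {
        'date': ('-last_seen', 'DateTimePaginator'),
        'age': ('-first_seen', 'DateTimePaginator'),
        'freq': ('-times_seen', 'OffsetPaginator'),
    }

    def resolve_sort(sort):
        return SORT_OPTIONS.get(sort, ('-id', 'Paginator'))

    print(resolve_sort('freq'))  # ('-times_seen', 'OffsetPaginator')
    print(resolve_sort(None))    # ('-id', 'Paginator')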
Example #7
0
    def get(self, request, group, key):
        """
        List a Tag's Values
        ```````````````````

        Return a list of values associated with this key for an issue.

        :pparam string issue_id: the ID of the issue to retrieve.
        :pparam string key: the tag key to look the values up for.
        :auth: required
        """
        # XXX(dcramer): kill sentry prefix for internal reserved tags
        if TagKey.is_reserved_key(key):
            lookup_key = 'sentry:{0}'.format(key)
        else:
            lookup_key = key

        tagkey = TagKey.objects.filter(
            project=group.project_id,
            key=lookup_key,
            status=TagKeyStatus.VISIBLE,
        )
        if not tagkey.exists():
            raise ResourceDoesNotExist

        queryset = GroupTagValue.objects.filter(
            group=group,
            key=lookup_key,
        )

        sort = request.GET.get('sort')
        if sort == 'date':
            order_by = '-last_seen'
            paginator_cls = DateTimePaginator
        elif sort == 'age':
            order_by = '-first_seen'
            paginator_cls = DateTimePaginator
        elif sort == 'freq':
            order_by = '-times_seen'
            paginator_cls = OffsetPaginator
        else:
            order_by = '-id'
            paginator_cls = Paginator

        if key == 'user':
            serializer_cls = UserTagValueSerializer()
        else:
            serializer_cls = None

        return self.paginate(
            request=request,
            queryset=queryset,
            order_by=order_by,
            paginator_cls=paginator_cls,
            on_results=lambda x: serialize(x, request.user, serializer_cls),
        )
Example #8
0
 def serialize(self, obj, attrs, user):
     return {
         "id": six.text_type(obj.id),
         "key": TagKey.get_standardized_key(obj.key),
         "name": attrs["name"],
         "value": obj.value,
         "count": obj.times_seen,
         "lastSeen": obj.last_seen,
         "firstSeen": obj.first_seen,
     }
Example #9
0
    def passes(self, event, state, **kwargs):
        key = self.get_option('key')
        match = self.get_option('match')
        value = self.get_option('value')

        if not (key and match and value):
            return False

        value = value.lower()
        key = key.lower()

        tags = (
            v.lower()
            for k, v in event.get_tags()
            if k.lower() == key or TagKey.get_standardized_key(k) == key
        )

        if match == MatchType.EQUAL:
            for t_value in tags:
                if t_value == value:
                    return True
            return False

        elif match == MatchType.NOT_EQUAL:
            for t_value in tags:
                if t_value == value:
                    return False
            return True

        elif match == MatchType.STARTS_WITH:
            for t_value in tags:
                if t_value.startswith(value):
                    return True
            return False

        elif match == MatchType.ENDS_WITH:
            for t_value in tags:
                if t_value.endswith(value):
                    return True
            return False

        elif match == MatchType.CONTAINS:
            for t_value in tags:
                if value in t_value:
                    return True
            return False

        elif match == MatchType.NOT_CONTAINS:
            for t_value in tags:
                if value in t_value:
                    return False
            return True
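Note: each negated MatchType above simply inverts the result of its positive counterpart. A self-contained sketch of that structure, using hypothetical match codes rather than Sentry's MatchType constants:

    # Positive predicates; 'not_'-prefixed codes invert the any() result.
    PREDICATES = {
        'equal': lambda tag, value: tag == value,
        'starts_with': lambda tag, value: tag.startswith(value),
        'ends_with': lambda tag, value: tag.endswith(value),
        'contains': lambda tag, value: value in tag,
    }

    def passes(tags, match, value):
        negate = match.startswith('not_')
        predicate = PREDICATES[match[4:] if negate else match]
        hit = any(predicate(t, value) for t in tags)
        return not hit if negate else hit

    print(passes(['prod', 'staging'], 'contains', 'stag'))      # True
    print(passes(['prod', 'staging'], 'not_equal', 'staging'))  # False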
Example #10
0
    def get(self, request, organization, project, team, group_id, key):
        try:
            # TODO(tkaemming): This should *actually* redirect, see similar
            # comment in ``GroupEndpoint.convert_args``.
            group, _ = get_group_with_redirect(
                group_id,
                queryset=Group.objects.filter(project=project),
            )
        except Group.DoesNotExist:
            raise Http404

        if TagKey.is_reserved_key(key):
            lookup_key = 'sentry:{0}'.format(key)
        else:
            lookup_key = key

        # validate existence as it may be deleted
        try:
            TagKey.objects.get(
                project=group.project_id,
                key=lookup_key,
                status=TagKeyStatus.VISIBLE,
            )
        except TagKey.DoesNotExist:
            raise Http404

        queryset = GroupTagValue.objects.filter(
            group=group,
            key=lookup_key,
        )

        def row_iter():
            yield ('value', 'times_seen', 'last_seen', 'first_seen')
            for row in queryset.iterator():
                yield (
                    row.value.encode('utf-8'),
                    str(row.times_seen),
                    row.last_seen.strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
                    row.first_seen.strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
                )

        pseudo_buffer = Echo()
        writer = csv.writer(pseudo_buffer)
        response = StreamingHttpResponse(
            (writer.writerow(r) for r in row_iter()),
            content_type='text/csv'
        )
        response['Content-Disposition'] = 'attachment; filename="{}-{}.csv"'.format(
            group.qualified_short_id or group.id, slugify(key)
        )
        return response
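Note: Echo is referenced above but not defined in the snippet. A common minimal implementation, following the streaming-CSV pattern from Django's documentation, looks like this:

    import csv

    class Echo(object):
        """A file-like object that returns, rather than buffers, what is written."""
        def write(self, value):
            return value

    writer = csv.writer(Echo())
    # Each writerow() call now yields one fully formatted CSV line, which is
    # what the StreamingHttpResponse generator above streams to the client.
    print(repr(writer.writerow(['value', 'times_seen'])))  # 'value,times_seen\r\n'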
Example #11
0
    def get(self, request, group, key):
        """
        Retrieve Tag Details
        ````````````````````

        Returns details for a given tag key related to an issue.

        :pparam string issue_id: the ID of the issue to retrieve.
        :pparam string key: the tag key to look the values up for.
        :auth: required
        """
        # XXX(dcramer): kill sentry prefix for internal reserved tags
        if TagKey.is_reserved_key(key):
            lookup_key = 'sentry:{0}'.format(key)
        else:
            lookup_key = key

        try:
            tag_key = TagKey.objects.get(
                project=group.project_id,
                key=lookup_key,
                status=TagKeyStatus.VISIBLE,
            )
        except TagKey.DoesNotExist:
            raise ResourceDoesNotExist

        try:
            group_tag_key = GroupTagKey.objects.get(
                group=group,
                key=lookup_key,
            )
        except GroupTagKey.DoesNotExist:
            raise ResourceDoesNotExist

        total_values = GroupTagValue.get_value_count(group.id, lookup_key)

        top_values = GroupTagValue.get_top_values(group.id,
                                                  lookup_key,
                                                  limit=3)

        data = {
            'id': str(tag_key.id),
            'key': key,
            'name': tag_key.get_label(),
            'uniqueValues': group_tag_key.values_seen,
            'totalValues': total_values,
            'topValues': serialize(top_values, request.user),
        }

        return Response(data)
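Note: for reference, the payload built above has the following shape; the values here are illustrative only, and topValues holds at most three serialized entries because of limit=3:

    # Hypothetical example of the response shape (values are illustrative).
    example_response = {
        'id': '42',           # str(tag_key.id)
        'key': 'browser',     # the requested key, without the sentry: prefix
        'name': 'Browser',    # tag_key.get_label()
        'uniqueValues': 12,   # group_tag_key.values_seen
        'totalValues': 375,   # GroupTagValue.get_value_count(...)
        'topValues': [],      # serialize(top_values, request.user), at most 3 items
    }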
Example #12
0
    def get(self, request, organization, project, team, group_id, key):
        try:
            # TODO(tkaemming): This should *actually* redirect, see similar
            # comment in ``GroupEndpoint.convert_args``.
            group, _ = get_group_with_redirect(
                group_id,
                queryset=Group.objects.filter(project=project),
            )
        except Group.DoesNotExist:
            raise Http404

        if TagKey.is_reserved_key(key):
            lookup_key = 'sentry:{0}'.format(key)
        else:
            lookup_key = key

        # validate existence as it may be deleted
        try:
            TagKey.objects.get(
                project=group.project_id,
                key=lookup_key,
                status=TagKeyStatus.VISIBLE,
            )
        except TagKey.DoesNotExist:
            raise Http404

        queryset = GroupTagValue.objects.filter(
            group=group,
            key=lookup_key,
        )

        def row_iter():
            yield ('value', 'times_seen', 'last_seen', 'first_seen')
            for row in queryset.iterator():
                yield (
                    row.value.encode('utf-8'),
                    str(row.times_seen),
                    row.last_seen.strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
                    row.first_seen.strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
                )

        pseudo_buffer = Echo()
        writer = csv.writer(pseudo_buffer)
        response = StreamingHttpResponse(
            (writer.writerow(r) for r in row_iter()),
            content_type='text/csv'
        )
        response['Content-Disposition'] = 'attachment; filename="{}-{}.csv"'.format(
            group.qualified_short_id or group.id, slugify(key)
        )
        return response
Example #13
0
    def get(self, request, group, key):
        """
        Retrieve Tag Details
        ````````````````````

        Returns details for a given tag key related to an issue.

        :pparam string issue_id: the ID of the issue to retrieve.
        :pparam string key: the tag key to look the values up for.
        :auth: required
        """
        # XXX(dcramer): kill sentry prefix for internal reserved tags
        if TagKey.is_reserved_key(key):
            lookup_key = 'sentry:{0}'.format(key)
        else:
            lookup_key = key

        try:
            tag_key = TagKey.objects.get(
                project=group.project_id,
                key=lookup_key,
                status=TagKeyStatus.VISIBLE,
            )
        except TagKey.DoesNotExist:
            raise ResourceDoesNotExist

        try:
            group_tag_key = GroupTagKey.objects.get(
                group=group,
                key=lookup_key,
            )
        except GroupTagKey.DoesNotExist:
            raise ResourceDoesNotExist

        total_values = GroupTagValue.get_value_count(group.id, lookup_key)

        top_values = GroupTagValue.get_top_values(group.id, lookup_key, limit=9)

        data = {
            'id': str(tag_key.id),
            'key': key,
            'name': tag_key.get_label(),
            'uniqueValues': group_tag_key.values_seen,
            'totalValues': total_values,
            'topValues': serialize(top_values, request.user),
        }

        return Response(data)
Example #14
0
    def passes(self, event, state, **kwargs):
        key = self.get_option('key')
        match = self.get_option('match')
        value = self.get_option('value')

        if not (key and match and value):
            return False

        value = value.lower()
        key = key.lower()

        tags = (v.lower() for k, v in event.get_tags() if k.lower() == key or TagKey.get_standardized_key(k) == key)

        if match == MatchType.EQUAL:
            for t_value in tags:
                if t_value == value:
                    return True
            return False

        elif match == MatchType.NOT_EQUAL:
            for t_value in tags:
                if t_value == value:
                    return False
            return True

        elif match == MatchType.STARTS_WITH:
            for t_value in tags:
                if t_value.startswith(value):
                    return True
            return False

        elif match == MatchType.ENDS_WITH:
            for t_value in tags:
                if t_value.endswith(value):
                    return True
            return False

        elif match == MatchType.CONTAINS:
            for t_value in tags:
                if value in t_value:
                    return True
            return False

        elif match == MatchType.NOT_CONTAINS:
            for t_value in tags:
                if value in t_value:
                    return False
            return True
Example #15
0
    def get(self, request, project, key):
        if TagKey.is_reserved_key(key):
            lookup_key = 'sentry:{0}'.format(key)
        else:
            lookup_key = key

        try:
            tagkey = TagKey.objects.get(
                project=project,
                key=lookup_key,
                status=TagKeyStatus.VISIBLE,
            )
        except TagKey.DoesNotExist:
            raise ResourceDoesNotExist

        return Response(serialize(tagkey, request.user))
Example #16
0
    def get(self, request, project):
        tag_keys = TagKey.objects.filter(
            project=project,
            status=TagKeyStatus.VISIBLE,
        )

        data = []
        for tag_key in tag_keys:
            data.append({
                'id': str(tag_key.id),
                'key': TagKey.get_standardized_key(tag_key.key),
                'name': tag_key.get_label(),
                'uniqueValues': tag_key.values_seen,
            })

        return Response(data)
Example #17
0
    def get(self, request, project, key):
        if TagKey.is_reserved_key(key):
            lookup_key = 'sentry:{0}'.format(key)
        else:
            lookup_key = key

        try:
            tagkey = TagKey.objects.get(
                project_id=project.id,
                key=lookup_key,
                status=TagKeyStatus.VISIBLE,
            )
        except TagKey.DoesNotExist:
            raise ResourceDoesNotExist

        return Response(serialize(tagkey, request.user))
Example #18
0
    def get(self, request, organization, project, team, group_id, key):
        try:
            # TODO(tkaemming): This should *actually* redirect, see similar
            # comment in ``GroupEndpoint.convert_args``.
            group, _ = get_group_with_redirect(
                group_id,
                queryset=Group.objects.filter(project=project),
            )
        except Group.DoesNotExist:
            raise Http404

        if TagKey.is_reserved_key(key):
            lookup_key = 'sentry:{0}'.format(key)
        else:
            lookup_key = key

        # validate existence as it may be deleted
        try:
            TagKey.objects.get(
                project=group.project_id,
                key=lookup_key,
                status=TagKeyStatus.VISIBLE,
            )
        except TagKey.DoesNotExist:
            raise Http404

        if key == 'user':
            callbacks = [attach_eventuser(project.id)]
        else:
            callbacks = []

        queryset = RangeQuerySetWrapper(
            GroupTagValue.objects.filter(
                group_id=group.id,
                key=lookup_key,
            ),
            callbacks=callbacks,
        )

        filename = '{}-{}'.format(
            group.qualified_short_id or group.id,
            key,
        )

        return self.to_csv_response(queryset, filename, key=key)
Example #19
0
    def get(self, request, organization, project, team, group_id, key):
        try:
            # TODO(tkaemming): This should *actually* redirect, see similar
            # comment in ``GroupEndpoint.convert_args``.
            group, _ = get_group_with_redirect(
                group_id,
                queryset=Group.objects.filter(project=project),
            )
        except Group.DoesNotExist:
            raise Http404

        if TagKey.is_reserved_key(key):
            lookup_key = 'sentry:{0}'.format(key)
        else:
            lookup_key = key

        # validate existence as it may be deleted
        try:
            TagKey.objects.get(
                project_id=group.project_id,
                key=lookup_key,
                status=TagKeyStatus.VISIBLE,
            )
        except TagKey.DoesNotExist:
            raise Http404

        if key == 'user':
            callbacks = [attach_eventuser(project.id)]
        else:
            callbacks = []

        queryset = RangeQuerySetWrapper(
            GroupTagValue.objects.filter(
                group_id=group.id,
                key=lookup_key,
            ),
            callbacks=callbacks,
        )

        filename = '{}-{}'.format(
            group.qualified_short_id or group.id,
            key,
        )

        return self.to_csv_response(queryset, filename, key=key)
Example #20
0
    def get(self, request, project, key):
        """
        List a Tag's Values
        ```````````````````

        Return a list of values associated with this key.  The `query`
        parameter can be used to perform a "contains" match on
        values.

        :pparam string organization_slug: the slug of the organization.
        :pparam string project_slug: the slug of the project.
        :pparam string key: the tag key to look up.
        :auth: required
        """
        if TagKey.is_reserved_key(key):
            lookup_key = 'sentry:{0}'.format(key)
        else:
            lookup_key = key

        try:
            tagkey = TagKey.objects.get(
                project_id=project.id,
                key=lookup_key,
                status=TagKeyStatus.VISIBLE,
            )
        except TagKey.DoesNotExist:
            raise ResourceDoesNotExist

        queryset = TagValue.objects.filter(
            project_id=project.id,
            key=tagkey.key,
        )

        query = request.GET.get('query')
        if query:
            queryset = queryset.filter(value__contains=query)

        return self.paginate(
            request=request,
            queryset=queryset,
            order_by='-last_seen',
            paginator_cls=DateTimePaginator,
            on_results=lambda x: serialize(x, request.user),
        )
Example #21
0
    def get_attrs(self, item_list, user):
        tag_labels = {
            t.key: t.get_label()
            for t in TagKey.objects.filter(project=item_list[0].project,
                                           key__in=[i.key for i in item_list])
        }

        result = {}
        for item in item_list:
            key = TagKey.get_standardized_key(item.key)
            try:
                label = tag_labels[item.key]
            except KeyError:
                label = key
            result[item] = {
                'name': label,
                'key': key,
            }
        return result
Example #22
0
    def get(self, request, project, key):
        """
        List a Tag's Values
        ```````````````````

        Return a list of values associated with this key.  The `query`
        parameter can be used to perform a "contains" match on
        values.

        :pparam string organization_slug: the slug of the organization.
        :pparam string project_slug: the slug of the project.
        :pparam string key: the tag key to look up.
        :auth: required
        """
        if TagKey.is_reserved_key(key):
            lookup_key = "sentry:{0}".format(key)
        else:
            lookup_key = key

        try:
            tagkey = TagKey.objects.get(project=project, key=lookup_key, status=TagKeyStatus.VISIBLE)
        except TagKey.DoesNotExist:
            raise ResourceDoesNotExist

        base_queryset = TagValue.objects.filter(project=project, key=tagkey.key)

        query = request.GET.get("query")
        if query:
            if is_postgres():
                # not quite optimal, but best we can do with ORM
                queryset = TagValue.objects.filter(id__in=base_queryset.order_by("-times_seen")[:10000])
            else:
                # MySQL can't handle an `IN` with a `LIMIT` clause
                queryset = base_queryset
            queryset = queryset.filter(value__contains=query)

        else:
            queryset = TagValue.objects.filter(project=project, key=tagkey.key)

        return self.paginate(
            request=request, queryset=queryset, order_by="-times_seen", on_results=lambda x: serialize(x, request.user)
        )
Example #23
0
    def get_attrs(self, item_list, user):
        tag_labels = {
            t.key: t.get_label()
            for t in TagKey.objects.filter(
                project=item_list[0].project,
                key__in=[i.key for i in item_list]
            )
        }

        result = {}
        for item in item_list:
            key = TagKey.get_standardized_key(item.key)
            try:
                label = tag_labels[item.key]
            except KeyError:
                label = key
            result[item] = {
                'name': label,
                'key': key,
            }
        return result
Example #24
0
    def delete(self, request, project, key):
        """
        Remove all occurrences of the given tag key.

            {method} {path}

        """
        if TagKey.is_reserved_key(key):
            lookup_key = 'sentry:{0}'.format(key)
        else:
            lookup_key = key

        try:
            tagkey = TagKey.objects.get(
                project=project,
                key=lookup_key,
            )
        except TagKey.DoesNotExist:
            raise ResourceDoesNotExist

        updated = TagKey.objects.filter(
            id=tagkey.id,
            status=TagKeyStatus.VISIBLE,
        ).update(status=TagKeyStatus.PENDING_DELETION)
        if updated:
            delete_tag_key.delay(object_id=tagkey.id)

            self.create_audit_entry(
                request=request,
                organization=project.organization,
                target_object=tagkey.id,
                event=AuditLogEntryEvent.TAGKEY_REMOVE,
                data=tagkey.get_audit_log_data(),
            )

        return Response(status=204)
Example #25
0
    def get(self, request, group, key):
        """
        List a Tag's Values
        ```````````````````

        Return a list of values associated with this key for an issue.

        :pparam string issue_id: the ID of the issue to retrieve.
        :pparam string key: the tag key to look the values up for.
        :auth: required
        """
        # XXX(dcramer): kill sentry prefix for internal reserved tags
        if TagKey.is_reserved_key(key):
            lookup_key = 'sentry:{0}'.format(key)
        else:
            lookup_key = key

        tagkey = TagKey.objects.filter(
            project=group.project_id,
            key=lookup_key,
            status=TagKeyStatus.VISIBLE,
        )
        if not tagkey.exists():
            raise ResourceDoesNotExist

        queryset = GroupTagValue.objects.filter(
            group=group,
            key=lookup_key,
        )

        return self.paginate(
            request=request,
            queryset=queryset,
            order_by='-id',
            on_results=lambda x: serialize(x, request.user),
        )
Example #26
0
    def delete(self, request, project, key):
        """
        Remove all occurrences of the given tag key.

            {method} {path}

        """
        if TagKey.is_reserved_key(key):
            lookup_key = 'sentry:{0}'.format(key)
        else:
            lookup_key = key

        try:
            tagkey = TagKey.objects.get(
                project_id=project.id,
                key=lookup_key,
            )
        except TagKey.DoesNotExist:
            raise ResourceDoesNotExist

        updated = TagKey.objects.filter(
            id=tagkey.id,
            status=TagKeyStatus.VISIBLE,
        ).update(status=TagKeyStatus.PENDING_DELETION)
        if updated:
            delete_tag_key.delay(object_id=tagkey.id)

            self.create_audit_entry(
                request=request,
                organization=project.organization,
                target_object=tagkey.id,
                event=AuditLogEntryEvent.TAGKEY_REMOVE,
                data=tagkey.get_audit_log_data(),
            )

        return Response(status=204)
Example #27
0
    def get(self, request, group, key):
        """
        List a Tag's Values
        ```````````````````

        Return a list of values associated with this key for an issue.

        :pparam string issue_id: the ID of the issue to retrieve.
        :pparam string key: the tag key to look the values up for.
        :auth: required
        """
        # XXX(dcramer): kill sentry prefix for internal reserved tags
        if TagKey.is_reserved_key(key):
            lookup_key = 'sentry:{0}'.format(key)
        else:
            lookup_key = key

        tagkey = TagKey.objects.filter(
            project=group.project_id,
            key=lookup_key,
            status=TagKeyStatus.VISIBLE,
        )
        if not tagkey.exists():
            raise ResourceDoesNotExist

        queryset = GroupTagValue.objects.filter(
            group=group,
            key=lookup_key,
        )

        return self.paginate(
            request=request,
            queryset=queryset,
            order_by='-id',
            on_results=lambda x: serialize(x, request.user),
        )
Example #28
0
    def normalize(self):
        # TODO(dcramer): store http.env.REMOTE_ADDR as user.ip
        # First we pull out our top-level (non-data attr) kwargs
        data = self.data

        if not isinstance(data.get('level'), (six.string_types, int)):
            data['level'] = logging.ERROR
        elif data['level'] not in LOG_LEVELS:
            data['level'] = logging.ERROR

        if not data.get('logger'):
            data['logger'] = DEFAULT_LOGGER_NAME
        else:
            logger = trim(data['logger'].strip(), 64)
            if TagKey.is_valid_key(logger):
                data['logger'] = logger
            else:
                data['logger'] = DEFAULT_LOGGER_NAME

        if data.get('platform'):
            data['platform'] = trim(data['platform'], 64)

        current_timestamp = timezone.now()
        timestamp = data.get('timestamp')
        if not timestamp:
            timestamp = current_timestamp

        if isinstance(timestamp, datetime):
            # We must convert date to local time so Django doesn't mess it up
            # based on TIME_ZONE
            if settings.TIME_ZONE:
                if not timezone.is_aware(timestamp):
                    timestamp = timestamp.replace(tzinfo=timezone.utc)
            elif timezone.is_aware(timestamp):
                timestamp = timestamp.replace(tzinfo=None)
            timestamp = float(timestamp.strftime('%s'))

        data['timestamp'] = timestamp
        data['received'] = float(timezone.now().strftime('%s'))

        if not data.get('event_id'):
            data['event_id'] = uuid4().hex

        data.setdefault('culprit', None)
        data.setdefault('server_name', None)
        data.setdefault('site', None)
        data.setdefault('checksum', None)
        data.setdefault('fingerprint', None)
        data.setdefault('platform', None)
        data.setdefault('environment', None)
        data.setdefault('extra', {})
        data.setdefault('errors', [])

        tags = data.get('tags')
        if not tags:
            tags = []
        # full support for dict syntax
        elif isinstance(tags, dict):
            tags = list(tags.items())
        # prevent [tag, tag, tag] (invalid) syntax
        elif not all(len(t) == 2 for t in tags):
            tags = []
        else:
            tags = list(tags)

        data['tags'] = []
        for key, value in tags:
            key = six.text_type(key).strip()
            value = six.text_type(value).strip()
            if not (key and value):
                continue

            # XXX(dcramer): many legacy apps are using the environment tag
            # rather than the key itself
            if key == 'environment' and not data.get('environment'):
                data['environment'] = value
            else:
                data['tags'].append((key, value))

        if not isinstance(data['extra'], dict):
            # throw it away
            data['extra'] = {}

        trim_dict(
            data['extra'], max_size=settings.SENTRY_MAX_EXTRA_VARIABLE_SIZE)

        # TODO(dcramer): more of validate data needs stuffed into the manager
        for key in list(iter(data)):
            if key in CLIENT_RESERVED_ATTRS:
                continue

            value = data.pop(key)

            try:
                interface = get_interface(key)()
            except ValueError:
                continue

            try:
                inst = interface.to_python(value)
                data[inst.get_path()] = inst.to_json()
            except Exception:
                # XXX: we should consider logging this.
                pass

        # TODO(dcramer): this logic is duplicated in ``validate_data`` from
        # coreapi

        # message is coerced to an interface, as it's used purely as an
        # index of searchable strings
        # See GH-3248
        message = data.pop('message', None)
        if message:
            if 'sentry.interfaces.Message' not in data:
                interface = get_interface('sentry.interfaces.Message')
                try:
                    inst = interface.to_python({
                        'message': message,
                    })
                    data[inst.get_path()] = inst.to_json()
                except Exception:
                    pass
            elif not data['sentry.interfaces.Message'].get('formatted'):
                interface = get_interface('sentry.interfaces.Message')
                try:
                    inst = interface.to_python(dict(
                        data['sentry.interfaces.Message'],
                        formatted=message,
                    ))
                    data[inst.get_path()] = inst.to_json()
                except Exception:
                    pass

        # the SDKs currently do not describe event types, and we must infer
        # them from available attributes
        data['type'] = eventtypes.infer(data).key

        data['version'] = self.version

        # TODO(dcramer): find a better place for this logic
        exception = data.get('sentry.interfaces.Exception')
        stacktrace = data.get('sentry.interfaces.Stacktrace')
        if exception and len(exception['values']) == 1 and stacktrace:
            exception['values'][0]['stacktrace'] = stacktrace
            del data['sentry.interfaces.Stacktrace']

        if 'sentry.interfaces.Http' in data:
            try:
                ip_address = validate_ip(
                    data['sentry.interfaces.Http'].get(
                        'env', {}).get('REMOTE_ADDR'),
                    required=False,
                )
            except ValueError:
                ip_address = None
            if ip_address:
                data.setdefault('sentry.interfaces.User', {})
                data['sentry.interfaces.User'].setdefault(
                    'ip_address', ip_address)

        if data['culprit']:
            data['culprit'] = trim(data['culprit'], MAX_CULPRIT_LENGTH)

        return data
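Note: `strftime('%s')` used above is a platform-specific extension rather than a standard format code. A hedged sketch of a portable equivalent for a UTC datetime (assuming, as the tz handling above does, that naive timestamps are treated as UTC):

    import calendar
    from datetime import datetime

    def to_epoch(ts):
        # Seconds since the epoch for a UTC datetime, keeping microseconds.
        return calendar.timegm(ts.utctimetuple()) + ts.microsecond / 1e6

    print(to_epoch(datetime(2017, 1, 1, 12, 0, 0)))  # 1483272000.0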
Example #29
0
    def notify(self, notification):
        event = notification.event
        group = event.group
        project = group.project

        if not self.is_configured(project):
            return

        webhook = self.get_option('webhook', project)
        username = (self.get_option('username', project) or 'Sentry').strip()
        icon_url = self.get_option('icon_url', project)
        channel = (self.get_option('channel', project) or '').strip()
        sort_on_tag = self.get_option('sort_on_tag', project)
        send_to_root_too = self.get_option('send_to_root_too', project)

        title = event.message_short.encode('utf-8')
        # TODO(dcramer): we'd like this to be the event culprit, but Sentry
        # does not currently retain it
        if group.culprit:
            culprit = group.culprit.encode('utf-8')
        else:
            culprit = None
        project_name = get_project_full_name(project).encode('utf-8')

        fields = []

        # Title and culprit can be identical (e.g. when there is no culprit),
        # so only add the culprit field when it differs from the title
        if culprit and title != culprit:
            fields.append({
                'title': 'Culprit',
                'value': culprit,
                'short': False,
            })

        fields.append({
            'title': 'Project',
            'value': project_name,
            'short': True,
        })

        if self.get_option('include_rules', project):
            rules = []
            for rule in notification.rules:
                rule_link = reverse('sentry-edit-project-rule', args=[
                    group.organization.slug, project.slug, rule.id
                ])
                # Make sure it's an absolute uri since we're sending this
                # outside of Sentry into Slack
                rule_link = absolute_uri(rule_link)
                rules.append((rule_link, rule.label.encode('utf-8')))

            if rules:
                fields.append({
                    'title': 'Triggered By',
                    'value': ', '.join('<%s | %s>' % r for r in rules),
                    'short': False,
                })

        if self.get_option('include_tags', project):
            included_tags = set(self.get_tag_list('included_tag_keys', project) or [])
            excluded_tags = set(self.get_tag_list('excluded_tag_keys', project) or [])
            for tag_key, tag_value in self._get_tags(event):
                key = tag_key.lower()
                std_key = TagKey.get_standardized_key(key)
                if included_tags and key not in included_tags and std_key not in included_tags:
                    continue
                if excluded_tags and (key in excluded_tags or std_key in excluded_tags):
                    continue
                fields.append({
                    'title': tag_key.encode('utf-8'),
                    'value': tag_value.encode('utf-8'),
                    'short': True,
                })

        payload = {
            'parse': 'none',
            'attachments': [{
                'fallback': '[%s] %s' % (project_name, title),
                'title': title,
                'title_link': group.get_absolute_url(),
                'color': self.color_for_group(group),
                'fields': fields,
            }]
        }

        # Apparently we've stored some bad data from before we used `URLField`.
        webhook = webhook.strip(' ')

        if username:
            payload['username'] = username.encode('utf-8')

        if channel:
            payload['channel'] = channel

        if icon_url:
            payload['icon_url'] = icon_url

        if sort_on_tag:
            if send_to_root_too:
                http.safe_urlopen(webhook, method='POST', data={'payload': json.dumps(payload)})

            sort_on_tag_key = (self.get_option('sort_on_tag_key', project) or 'application_name').strip()
            groups = [
                {"tag_values": (self.get_option('group_1_tag_values', project).split(',') or []),
                 "channel": (self.get_option('group_1_channel', project) or '').strip()},
                {"tag_values": (self.get_option('group_2_tag_values', project).split(',') or []),
                 "channel": (self.get_option('group_2_channel', project) or '').strip()},
                {"tag_values": (self.get_option('group_3_tag_values', project).split(',') or []),
                 "channel": (self.get_option('group_3_channel', project) or '').strip()}]

            for key, value in self._get_tags(event):
                if sort_on_tag_key == key:
                    tag_value = value
                    break
            else:
                # Tag does not exist, no need to check the groups.
                return

            for group in groups:
                if tag_value in group["tag_values"]:
                    payload['channel'] = group["channel"]

                    http.safe_urlopen(webhook, method='POST', data={'payload': json.dumps(payload)})
            return
        else:
            return http.safe_urlopen(webhook, method='POST', data={'payload': json.dumps(payload)})
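Note: the tag lookup near the end of the method relies on Python's for/else construct, where the else block runs only if the loop completes without a break. A minimal illustration:

    def find_tag(tags, wanted_key):
        for key, value in tags:
            if key == wanted_key:
                found = value
                break
        else:
            # No break occurred: the tag is absent.
            found = None
        return found

    print(find_tag([('env', 'prod')], 'env'))      # 'prod'
    print(find_tag([('env', 'prod')], 'release'))  # None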
Example #30
0
    def validate_data(self, project, data):
        # TODO(dcramer): move project out of the data packet
        data['project'] = project.id

        data['errors'] = []

        if data.get('culprit'):
            if not isinstance(data['culprit'], six.string_types):
                raise APIForbidden('Invalid value for culprit')

        if not data.get('event_id'):
            data['event_id'] = uuid.uuid4().hex
        elif not isinstance(data['event_id'], six.string_types):
            raise APIForbidden('Invalid value for event_id')

        if len(data['event_id']) > 32:
            self.log.debug(
                'Discarded value for event_id due to length (%d chars)',
                len(data['event_id']))
            data['errors'].append({
                'type': EventError.VALUE_TOO_LONG,
                'name': 'event_id',
                'value': data['event_id'],
            })
            data['event_id'] = uuid.uuid4().hex
        elif not is_event_id(data['event_id']):
            self.log.debug('Discarded invalid value for event_id: %r',
                           data['event_id'],
                           exc_info=True)
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'event_id',
                'value': data['event_id'],
            })
            data['event_id'] = uuid.uuid4().hex

        if 'timestamp' in data:
            try:
                self._process_data_timestamp(data)
            except InvalidTimestamp as e:
                self.log.debug('Discarded invalid value for timestamp: %r',
                               data['timestamp'],
                               exc_info=True)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'timestamp',
                    'value': data['timestamp'],
                })
                del data['timestamp']

        if 'fingerprint' in data:
            try:
                self._process_fingerprint(data)
            except InvalidFingerprint as e:
                self.log.debug('Discarded invalid value for fingerprint: %r',
                               data['fingerprint'],
                               exc_info=True)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'fingerprint',
                    'value': data['fingerprint'],
                })
                del data['fingerprint']

        if 'platform' not in data or data['platform'] not in VALID_PLATFORMS:
            data['platform'] = 'other'

        if data.get('modules') and type(data['modules']) != dict:
            self.log.debug('Discarded invalid type for modules: %s',
                           type(data['modules']))
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'modules',
                'value': data['modules'],
            })
            del data['modules']

        if data.get('extra') is not None and type(data['extra']) != dict:
            self.log.debug('Discarded invalid type for extra: %s',
                           type(data['extra']))
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'extra',
                'value': data['extra'],
            })
            del data['extra']

        if data.get('tags') is not None:
            if type(data['tags']) == dict:
                data['tags'] = list(data['tags'].items())
            elif not isinstance(data['tags'], (list, tuple)):
                self.log.debug('Discarded invalid type for tags: %s',
                               type(data['tags']))
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': data['tags'],
                })
                del data['tags']

        if data.get('tags'):
            # remove any values which are over 32 characters
            tags = []
            for pair in data['tags']:
                try:
                    k, v = pair
                except ValueError:
                    self.log.debug('Discarded invalid tag value: %r', pair)
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

                if not isinstance(k, six.string_types):
                    try:
                        k = six.text_type(k)
                    except Exception:
                        self.log.debug('Discarded invalid tag key: %r',
                                       type(k))
                        data['errors'].append({
                            'type': EventError.INVALID_DATA,
                            'name': 'tags',
                            'value': pair,
                        })
                        continue

                if not isinstance(v, six.string_types):
                    try:
                        v = six.text_type(v)
                    except Exception:
                        self.log.debug('Discarded invalid tag value: %s=%r', k,
                                       type(v))
                        data['errors'].append({
                            'type': EventError.INVALID_DATA,
                            'name': 'tags',
                            'value': pair,
                        })
                        continue

                if len(k) > MAX_TAG_KEY_LENGTH or len(v) > MAX_TAG_VALUE_LENGTH:
                    self.log.debug('Discarded invalid tag: %s=%s', k, v)
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

                # support tags with spaces by converting them
                k = k.replace(' ', '-')

                if TagKey.is_reserved_key(k):
                    self.log.debug('Discarding reserved tag key: %s', k)
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

                if not TagKey.is_valid_key(k):
                    self.log.debug('Discarded invalid tag key: %s', k)
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

                if not TagValue.is_valid_value(v):
                    self.log.debug('Discarded invalid tag value: %s', v)
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

                tags.append((k, v))
            data['tags'] = tags

        for k in list(iter(data)):
            if k in CLIENT_RESERVED_ATTRS:
                continue

            value = data.pop(k)

            if not value:
                self.log.debug('Ignored empty interface value: %s', k)
                continue

            try:
                interface = get_interface(k)
            except ValueError:
                self.log.debug('Ignored unknown attribute: %s', k)
                data['errors'].append({
                    'type': EventError.INVALID_ATTRIBUTE,
                    'name': k,
                })
                continue

            if type(value) != dict:
                # HACK(dcramer): the exception/breadcrumbs interface supports a
                # list as the value. We should change this in a new protocol
                # version.
                if type(value) in (list, tuple):
                    value = {'values': value}
                else:
                    self.log.debug('Invalid parameter for value: %s (%r)', k,
                                   type(value))
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': k,
                        'value': value,
                    })
                    continue

            try:
                inst = interface.to_python(value)
                data[inst.get_path()] = inst.to_json()
            except Exception as e:
                if isinstance(e, InterfaceValidationError):
                    log = self.log.debug
                else:
                    log = self.log.error
                log('Discarded invalid value for interface: %s (%r)',
                    k,
                    value,
                    exc_info=True)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': k,
                    'value': value,
                })

        # TODO(dcramer): ideally this logic would happen in normalize, but today
        # we don't do "validation" there (create errors)

        # message is coerced to an interface, as it's used purely as an
        # index of searchable strings
        # See GH-3248
        message = data.pop('message', None)
        if message:
            if 'sentry.interfaces.Message' not in data:
                value = {
                    'message': message,
                }
            elif not data['sentry.interfaces.Message'].get('formatted'):
                value = data['sentry.interfaces.Message']
                value['formatted'] = message
            else:
                value = None

            if value is not None:
                k = 'sentry.interfaces.Message'
                interface = get_interface(k)
                try:
                    inst = interface.to_python(value)
                    data[inst.get_path()] = inst.to_json()
                except Exception as e:
                    if isinstance(e, InterfaceValidationError):
                        log = self.log.debug
                    else:
                        log = self.log.error
                    log('Discarded invalid value for interface: %s (%r)',
                        k,
                        value,
                        exc_info=True)
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': k,
                        'value': value,
                    })

        level = data.get('level') or DEFAULT_LOG_LEVEL
        if isinstance(level, six.string_types) and not level.isdigit():
            # assume it's something like 'warning'
            try:
                data['level'] = LOG_LEVELS_MAP[level]
            except KeyError as e:
                self.log.debug('Discarded invalid level value: %s', level)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'level',
                    'value': level,
                })
                data['level'] = LOG_LEVELS_MAP.get(DEFAULT_LOG_LEVEL,
                                                   DEFAULT_LOG_LEVEL)

        if data.get('release'):
            data['release'] = six.text_type(data['release'])
            if len(data['release']) > 64:
                data['errors'].append({
                    'type': EventError.VALUE_TOO_LONG,
                    'name': 'release',
                    'value': data['release'],
                })
                del data['release']
        return data
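Note: as a compact summary of the tag sanitation above, here is a toy sketch; the length limits are hypothetical stand-ins for MAX_TAG_KEY_LENGTH and MAX_TAG_VALUE_LENGTH, and the reserved-key, validity, and error-bookkeeping steps are omitted:

    MAX_TAG_KEY_LENGTH = 32     # stand-in value
    MAX_TAG_VALUE_LENGTH = 200  # stand-in value

    def clean_tags(pairs):
        cleaned = []
        for pair in pairs:
            try:
                k, v = pair
            except ValueError:
                continue  # malformed pair; the real code also records an error
            k, v = str(k), str(v)
            if len(k) > MAX_TAG_KEY_LENGTH or len(v) > MAX_TAG_VALUE_LENGTH:
                continue
            cleaned.append((k.replace(' ', '-'), v))
        return cleaned

    print(clean_tags([('my key', 'value'), ('bad',), ('x' * 40, 'dropped')]))
    # [('my-key', 'value')]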
Example #31
0
    def normalize(self):
        # TODO(dcramer): store http.env.REMOTE_ADDR as user.ip
        # First we pull out our top-level (non-data attr) kwargs
        data = self.data

        if not isinstance(data.get('level'), (six.string_types, int)):
            data['level'] = logging.ERROR
        elif data['level'] not in LOG_LEVELS:
            data['level'] = logging.ERROR

        if not data.get('logger'):
            data['logger'] = DEFAULT_LOGGER_NAME
        else:
            logger = trim(data['logger'].strip(), 64)
            if TagKey.is_valid_key(logger):
                data['logger'] = logger
            else:
                data['logger'] = DEFAULT_LOGGER_NAME

        if data.get('platform'):
            data['platform'] = trim(data['platform'], 64)

        timestamp = data.get('timestamp')
        if not timestamp:
            timestamp = timezone.now()

        if isinstance(timestamp, datetime):
            # We must convert date to local time so Django doesn't mess it up
            # based on TIME_ZONE
            if settings.TIME_ZONE:
                if not timezone.is_aware(timestamp):
                    timestamp = timestamp.replace(tzinfo=timezone.utc)
            elif timezone.is_aware(timestamp):
                timestamp = timestamp.replace(tzinfo=None)
            timestamp = float(timestamp.strftime('%s'))

        data['timestamp'] = timestamp

        if not data.get('event_id'):
            data['event_id'] = uuid4().hex

        data.setdefault('message', '')
        data.setdefault('culprit', None)
        data.setdefault('time_spent', None)
        data.setdefault('server_name', None)
        data.setdefault('site', None)
        data.setdefault('checksum', None)
        data.setdefault('fingerprint', None)
        data.setdefault('platform', None)
        data.setdefault('environment', None)
        data.setdefault('extra', {})
        data.setdefault('errors', [])

        tags = data.get('tags')
        if not tags:
            tags = []
        # full support for dict syntax
        elif isinstance(tags, dict):
            tags = tags.items()
        # prevent [tag, tag, tag] (invalid) syntax
        elif not all(len(t) == 2 for t in tags):
            tags = []
        else:
            tags = list(tags)

        data['tags'] = []
        for key, value in tags:
            key = six.text_type(key).strip()
            value = six.text_type(value).strip()
            if not (key and value):
                continue

            data['tags'].append((key, value))

        if not isinstance(data['extra'], dict):
            # throw it away
            data['extra'] = {}

        trim_dict(
            data['extra'], max_size=settings.SENTRY_MAX_EXTRA_VARIABLE_SIZE)

        # TODO(dcramer): more of validate data needs stuffed into the manager
        # iterate over a copy of the keys: the loop pops interface entries from data
        for key in list(data.keys()):
            if key in CLIENT_RESERVED_ATTRS:
                continue

            value = data.pop(key)

            try:
                interface = get_interface(key)()
            except ValueError:
                continue

            try:
                inst = interface.to_python(value)
                data[inst.get_path()] = inst.to_json()
            except Exception:
                pass

        data['version'] = self.version

        # TODO(dcramer): find a better place for this logic
        exception = data.get('sentry.interfaces.Exception')
        stacktrace = data.get('sentry.interfaces.Stacktrace')
        if exception and len(exception['values']) == 1 and stacktrace:
            exception['values'][0]['stacktrace'] = stacktrace
            del data['sentry.interfaces.Stacktrace']

        if 'sentry.interfaces.Http' in data:
            try:
                ip_address = validate_ip(
                    data['sentry.interfaces.Http'].get(
                        'env', {}).get('REMOTE_ADDR'),
                    required=False,
                )
            except ValueError:
                ip_address = None
            if ip_address:
                data.setdefault('sentry.interfaces.User', {})
                data['sentry.interfaces.User'].setdefault(
                    'ip_address', ip_address)

        if data['time_spent']:
            data['time_spent'] = int(data['time_spent'])

        if data['culprit']:
            data['culprit'] = trim(data['culprit'], MAX_CULPRIT_LENGTH)

        if data['message']:
            data['message'] = trim(
                data['message'], settings.SENTRY_MAX_MESSAGE_LENGTH)

        return data
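
The tag handling in normalize() accepts either a mapping or an iterable of (key, value) pairs, rejects any other shape, and keeps only non-empty stripped strings. A standalone sketch of just that step (the helper name is illustrative, not part of Sentry's API):

    def coerce_tags(tags):
        """Normalize incoming tags to a list of non-empty (key, value) string pairs."""
        if not tags:
            pairs = []
        elif isinstance(tags, dict):
            # dict syntax is fully supported
            pairs = list(tags.items())
        elif not all(len(t) == 2 for t in tags):
            # reject the invalid [tag, tag, tag] syntax outright
            pairs = []
        else:
            pairs = list(tags)

        result = []
        for key, value in pairs:
            key = str(key).strip()
            value = str(value).strip()
            if key and value:
                result.append((key, value))
        return result

    coerce_tags({'browser': 'Chrome 55', 'level': 'error'})   # dict form
    coerce_tags([('os', 'linux'), ('empty', '')])             # empty value dropped
    coerce_tags(['just', 'strings'])                          # invalid shape -> []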
Example #33
    def notify(self, notification):
        event = notification.event
        group = event.group
        project = group.project

        if not self.is_configured(project):
            return

        webhook = self.get_option('webhook', project)
        username = (self.get_option('username', project) or 'Sentry').strip()
        icon_url = self.get_option('icon_url', project)
        channel = (self.get_option('channel', project) or '').strip()

        title = event.message_short.encode('utf-8')
        # TODO(dcramer): we'd like this to be the event culprit, but Sentry
        # does not currently retain it
        if group.culprit:
            culprit = group.culprit.encode('utf-8')
        else:
            culprit = None
        project_name = project.get_full_name().encode('utf-8')

        fields = []

        # They can be the same if there is no culprit
        # So we set culprit to an empty string instead of duplicating the text
        if (not self.get_option('exclude_culprit', project)
                and culprit and title != culprit):
            fields.append({
                'title': 'Culprit',
                'value': culprit,
                'short': False,
            })
        if not self.get_option('exclude_project', project):
            fields.append({
                'title': 'Project',
                'value': project_name,
                'short': True,
            })

        if self.get_option('include_rules', project):
            rules = []
            for rule in notification.rules:
                rule_link = reverse(
                    'sentry-edit-project-rule',
                    args=[group.organization.slug, project.slug, rule.id])
                # Make sure it's an absolute uri since we're sending this
                # outside of Sentry into Slack
                rule_link = absolute_uri(rule_link)
                rules.append((rule_link, rule.label.encode('utf-8')))

            if rules:
                fields.append({
                    'title': 'Triggered By',
                    'value': ', '.join('<%s | %s>' % r for r in rules),
                    'short': False,
                })

        if self.get_option('include_tags', project):
            included_tags = set(
                self.get_tag_list('included_tag_keys', project) or [])
            excluded_tags = set(
                self.get_tag_list('excluded_tag_keys', project) or [])
            for tag_key, tag_value in self._get_tags(event):
                key = tag_key.lower()
                std_key = TagKey.get_standardized_key(key)
                if included_tags and key not in included_tags and std_key not in included_tags:
                    continue
                if excluded_tags and (key in excluded_tags
                                      or std_key in excluded_tags):
                    continue
                fields.append({
                    'title': tag_key.encode('utf-8'),
                    'value': tag_value.encode('utf-8'),
                    'short': True,
                })

        payload = {
            'attachments': [{
                'fallback': '[%s] %s' % (project_name, title),
                'title': title,
                'title_link': self.add_notification_referrer_param(
                    group.get_absolute_url()),
                'color': self.color_for_event(event),
                'fields': fields,
            }]
        }

        if username:
            payload['username'] = username.encode('utf-8')

        if channel:
            payload['channel'] = channel

        if icon_url:
            payload['icon_url'] = icon_url

        values = {'payload': json.dumps(payload)}

        # Apparently we've stored some bad data from before we used `URLField`.
        webhook = webhook.strip(' ')
        return http.safe_urlopen(webhook, method='POST', data=values)
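
The include/exclude checks above compare both the raw tag key and its standardized form, so configuring 'release' also matches the reserved 'sentry:release' key. A standalone sketch of that filtering, with a plain dict standing in for TagKey.get_standardized_key (the mapping shown is illustrative):

    STANDARD_KEYS = {'sentry:release': 'release', 'sentry:user': 'user'}

    def get_standardized_key(key):
        # stand-in for TagKey.get_standardized_key
        return STANDARD_KEYS.get(key, key)

    def filter_tags(tags, included=(), excluded=()):
        """Yield (key, value) pairs whose key passes the include/exclude filters."""
        included = set(included)
        excluded = set(excluded)
        for tag_key, tag_value in tags:
            key = tag_key.lower()
            std_key = get_standardized_key(key)
            if included and key not in included and std_key not in included:
                continue
            if excluded and (key in excluded or std_key in excluded):
                continue
            yield tag_key, tag_value

    tags = [('browser', 'Chrome'), ('sentry:release', 'abc123')]
    list(filter_tags(tags, included={'release'}))   # -> [('sentry:release', 'abc123')]
    list(filter_tags(tags, excluded={'browser'}))   # -> [('sentry:release', 'abc123')]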
Example #34
    def validate_data(self, project, data):
        # TODO(dcramer): move project out of the data packet
        data['project'] = project.id

        data['errors'] = []

        if data.get('culprit'):
            if not isinstance(data['culprit'], six.string_types):
                raise APIForbidden('Invalid value for culprit')

        if not data.get('event_id'):
            data['event_id'] = uuid.uuid4().hex
        elif not isinstance(data['event_id'], six.string_types):
            raise APIForbidden('Invalid value for event_id')

        if len(data['event_id']) > 32:
            self.log.debug(
                'Discarded value for event_id due to length (%d chars)',
                len(data['event_id']))
            data['errors'].append({
                'type': EventError.VALUE_TOO_LONG,
                'name': 'event_id',
                'value': data['event_id'],
            })
            data['event_id'] = uuid.uuid4().hex
        elif not is_event_id(data['event_id']):
            self.log.debug(
                'Discarded invalid value for event_id: %r',
                data['event_id'], exc_info=True)
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'event_id',
                'value': data['event_id'],
            })
            data['event_id'] = uuid.uuid4().hex

        if 'timestamp' in data:
            try:
                self._process_data_timestamp(data)
            except InvalidTimestamp as e:
                self.log.debug(
                    'Discarded invalid value for timestamp: %r',
                    data['timestamp'], exc_info=True)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'timestamp',
                    'value': data['timestamp'],
                })
                del data['timestamp']

        if 'fingerprint' in data:
            try:
                self._process_fingerprint(data)
            except InvalidFingerprint as e:
                self.log.debug(
                    'Discarded invalid value for fingerprint: %r',
                    data['fingerprint'], exc_info=True)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'fingerprint',
                    'value': data['fingerprint'],
                })
                del data['fingerprint']

        if 'platform' not in data or data['platform'] not in VALID_PLATFORMS:
            data['platform'] = 'other'

        if data.get('modules') and type(data['modules']) != dict:
            self.log.debug(
                'Discarded invalid type for modules: %s',
                type(data['modules']))
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'modules',
                'value': data['modules'],
            })
            del data['modules']

        if data.get('extra') is not None and type(data['extra']) != dict:
            self.log.debug(
                'Discarded invalid type for extra: %s',
                type(data['extra']))
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'extra',
                'value': data['extra'],
            })
            del data['extra']

        if data.get('tags') is not None:
            if type(data['tags']) == dict:
                data['tags'] = list(data['tags'].items())
            elif not isinstance(data['tags'], (list, tuple)):
                self.log.debug(
                    'Discarded invalid type for tags: %s', type(data['tags']))
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': data['tags'],
                })
                del data['tags']

        if data.get('tags'):
            # drop tags whose key or value exceeds the allowed length
            tags = []
            for pair in data['tags']:
                try:
                    k, v = pair
                except ValueError:
                    self.log.debug('Discarded invalid tag value: %r', pair)
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

                if not isinstance(k, six.string_types):
                    try:
                        k = six.text_type(k)
                    except Exception:
                        self.log.debug('Discarded invalid tag key: %r', type(k))
                        data['errors'].append({
                            'type': EventError.INVALID_DATA,
                            'name': 'tags',
                            'value': pair,
                        })
                        continue

                if not isinstance(v, six.string_types):
                    try:
                        v = six.text_type(v)
                    except Exception:
                        self.log.debug('Discarded invalid tag value: %s=%r',
                                      k, type(v))
                        data['errors'].append({
                            'type': EventError.INVALID_DATA,
                            'name': 'tags',
                            'value': pair,
                        })
                        continue

                if len(k) > MAX_TAG_KEY_LENGTH or len(v) > MAX_TAG_VALUE_LENGTH:
                    self.log.debug('Discarded invalid tag: %s=%s', k, v)
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

                # support tags with spaces by converting them
                k = k.replace(' ', '-')

                if TagKey.is_reserved_key(k):
                    self.log.debug('Discarding reserved tag key: %s', k)
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

                if not TagKey.is_valid_key(k):
                    self.log.debug('Discarded invalid tag key: %s', k)
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

                if not TagValue.is_valid_value(v):
                    self.log.debug('Discarded invalid tag value: %s', v)
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

                tags.append((k, v))
            data['tags'] = tags

        for k in list(iter(data)):
            if k in CLIENT_RESERVED_ATTRS:
                continue

            value = data.pop(k)

            if not value:
                self.log.debug('Ignored empty interface value: %s', k)
                continue

            try:
                interface = get_interface(k)
            except ValueError:
                self.log.debug('Ignored unknown attribute: %s', k)
                data['errors'].append({
                    'type': EventError.INVALID_ATTRIBUTE,
                    'name': k,
                })
                continue

            if type(value) != dict:
                # HACK(dcramer): the exception/breadcrumbs interface supports a
                # list as the value. We should change this in a new protocol
                # version.
                if type(value) in (list, tuple):
                    value = {'values': value}
                else:
                    self.log.debug(
                        'Invalid parameter for value: %s (%r)', k, type(value))
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': k,
                        'value': value,
                    })
                    continue

            try:
                inst = interface.to_python(value)
                data[inst.get_path()] = inst.to_json()
            except Exception as e:
                if isinstance(e, InterfaceValidationError):
                    log = self.log.debug
                else:
                    log = self.log.error
                log('Discarded invalid value for interface: %s (%r)', k, value,
                    exc_info=True)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': k,
                    'value': value,
                })

        # TODO(dcramer): ideally this logic would happen in normalize, but today
        # we don't do "validation" there (create errors)

        # message is coerced to an interface, as its used for pure
        # index of searchable strings
        # See GH-3248
        message = data.pop('message', None)
        if message:
            if 'sentry.interfaces.Message' not in data:
                value = {
                    'message': message,
                }
            elif not data['sentry.interfaces.Message'].get('formatted'):
                value = data['sentry.interfaces.Message']
                value['formatted'] = message
            else:
                value = None

            if value is not None:
                k = 'sentry.interfaces.Message'
                interface = get_interface(k)
                try:
                    inst = interface.to_python(value)
                    data[inst.get_path()] = inst.to_json()
                except Exception as e:
                    if isinstance(e, InterfaceValidationError):
                        log = self.log.debug
                    else:
                        log = self.log.error
                    log('Discarded invalid value for interface: %s (%r)', k, value,
                        exc_info=True)
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': k,
                        'value': value,
                    })

        level = data.get('level') or DEFAULT_LOG_LEVEL
        if isinstance(level, six.string_types) and not level.isdigit():
            # assume it's something like 'warning'
            try:
                data['level'] = LOG_LEVELS_MAP[level]
            except KeyError as e:
                self.log.debug(
                    'Discarded invalid level value: %s', level)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'level',
                    'value': level,
                })
                data['level'] = LOG_LEVELS_MAP.get(
                    DEFAULT_LOG_LEVEL, DEFAULT_LOG_LEVEL)

        if data.get('release'):
            data['release'] = six.text_type(data['release'])
            if len(data['release']) > 64:
                data['errors'].append({
                    'type': EventError.VALUE_TOO_LONG,
                    'name': 'release',
                    'value': data['release'],
                })
                del data['release']

        if data.get('dist'):
            data['dist'] = six.text_type(data['dist']).strip()
            if not data.get('release'):
                data['dist'] = None
            elif len(data['dist']) > 64:
                data['errors'].append({
                    'type': EventError.VALUE_TOO_LONG,
                    'name': 'dist',
                    'value': data['dist'],
                })
                del data['dist']
            elif _dist_re.match(data['dist']) is None:
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'dist',
                    'value': data['dist'],
                })
                del data['dist']

        if data.get('environment'):
            data['environment'] = six.text_type(data['environment'])
            if len(data['environment']) > 64:
                data['errors'].append({
                    'type': EventError.VALUE_TOO_LONG,
                    'name': 'environment',
                    'value': data['environment'],
                })
                del data['environment']

        if data.get('time_spent'):
            try:
                data['time_spent'] = int(data['time_spent'])
            except (ValueError, TypeError):
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'time_spent',
                    'value': data['time_spent'],
                })
                del data['time_spent']
            else:
                if data['time_spent'] > BoundedIntegerField.MAX_VALUE:
                    data['errors'].append({
                        'type': EventError.VALUE_TOO_LONG,
                        'name': 'time_spent',
                        'value': data['time_spent'],
                    })
                    del data['time_spent']

        return data
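
A standalone sketch of the per-tag checks above, with simple stand-ins for TagKey.is_reserved_key, TagKey.is_valid_key and the length constants (the 32/200 limits, reserved set and key pattern are assumptions for illustration; only the constant names appear in the snippet):

    import re

    MAX_TAG_KEY_LENGTH = 32
    MAX_TAG_VALUE_LENGTH = 200
    RESERVED_KEYS = frozenset(['release', 'user', 'filename', 'function'])
    _key_re = re.compile(r'^[a-zA-Z0-9_\.:-]+$')

    def validate_tag(pair):
        """Return a cleaned (key, value) pair, or None if the tag should be discarded."""
        try:
            k, v = pair
        except ValueError:
            return None
        k, v = str(k), str(v)
        if len(k) > MAX_TAG_KEY_LENGTH or len(v) > MAX_TAG_VALUE_LENGTH:
            return None
        k = k.replace(' ', '-')              # support tags with spaces
        if k in RESERVED_KEYS:               # stand-in for TagKey.is_reserved_key
            return None
        if not _key_re.match(k):             # stand-in for TagKey.is_valid_key
            return None
        return (k, v)

    validate_tag(('my tag', 'value'))    # -> ('my-tag', 'value')
    validate_tag(('release', '1.0'))     # reserved key -> None
    validate_tag(('k' * 40, 'v'))        # key too long -> None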
Example #35
    def validate_data(self, project, data):
        # TODO(dcramer): move project out of the data packet
        data["project"] = project.id

        data["errors"] = []

        if not data.get("message"):
            data["message"] = "<no message value>"
        elif not isinstance(data["message"], six.string_types):
            raise APIForbidden("Invalid value for message")

        if data.get("culprit"):
            if not isinstance(data["culprit"], six.string_types):
                raise APIForbidden("Invalid value for culprit")

        if not data.get("event_id"):
            data["event_id"] = uuid.uuid4().hex
        elif not isinstance(data["event_id"], six.string_types):
            raise APIForbidden("Invalid value for event_id")

        if len(data["event_id"]) > 32:
            self.log.info("Discarded value for event_id due to length (%d chars)", len(data["event_id"]))
            data["errors"].append({"type": EventError.VALUE_TOO_LONG, "name": "event_id", "value": data["event_id"]})
            data["event_id"] = uuid.uuid4().hex

        if "timestamp" in data:
            try:
                self._process_data_timestamp(data)
            except InvalidTimestamp as e:
                self.log.info("Discarded invalid value for timestamp: %r", data["timestamp"], exc_info=True)
                data["errors"].append(
                    {"type": EventError.INVALID_DATA, "name": "timestamp", "value": data["timestamp"]}
                )
                del data["timestamp"]

        if "fingerprint" in data:
            try:
                self._process_fingerprint(data)
            except InvalidFingerprint as e:
                self.log.info("Discarded invalid value for fingerprint: %r", data["fingerprint"], exc_info=True)
                data["errors"].append(
                    {"type": EventError.INVALID_DATA, "name": "fingerprint", "value": data["fingerprint"]}
                )

        if "platform" not in data or data["platform"] not in VALID_PLATFORMS:
            data["platform"] = "other"

        if data.get("modules") and type(data["modules"]) != dict:
            self.log.info("Discarded invalid type for modules: %s", type(data["modules"]))
            data["errors"].append({"type": EventError.INVALID_DATA, "name": "modules", "value": data["modules"]})
            del data["modules"]

        if data.get("extra") is not None and type(data["extra"]) != dict:
            self.log.info("Discarded invalid type for extra: %s", type(data["extra"]))
            data["errors"].append({"type": EventError.INVALID_DATA, "name": "extra", "value": data["extra"]})
            del data["extra"]

        if data.get("tags") is not None:
            if type(data["tags"]) == dict:
                data["tags"] = data["tags"].items()
            elif not isinstance(data["tags"], (list, tuple)):
                self.log.info("Discarded invalid type for tags: %s", type(data["tags"]))
                data["errors"].append({"type": EventError.INVALID_DATA, "name": "tags", "value": data["tags"]})
                del data["tags"]

        if data.get("tags"):
            # drop tags whose key or value exceeds the allowed length
            tags = []
            for pair in data["tags"]:
                try:
                    k, v = pair
                except ValueError:
                    self.log.info("Discarded invalid tag value: %r", pair)
                    data["errors"].append({"type": EventError.INVALID_DATA, "name": "tags", "value": pair})
                    continue

                if not isinstance(k, six.string_types):
                    try:
                        k = six.text_type(k)
                    except Exception:
                        self.log.info("Discarded invalid tag key: %r", type(k))
                        data["errors"].append({"type": EventError.INVALID_DATA, "name": "tags", "value": pair})
                        continue

                if not isinstance(v, six.string_types):
                    try:
                        v = six.text_type(v)
                    except Exception:
                        self.log.info("Discarded invalid tag value: %s=%r", k, type(v))
                        data["errors"].append({"type": EventError.INVALID_DATA, "name": "tags", "value": pair})
                        continue

                if len(k) > MAX_TAG_KEY_LENGTH or len(v) > MAX_TAG_VALUE_LENGTH:
                    self.log.info("Discarded invalid tag: %s=%s", k, v)
                    data["errors"].append({"type": EventError.INVALID_DATA, "name": "tags", "value": pair})
                    continue

                # support tags with spaces by converting them
                k = k.replace(" ", "-")

                if not TagKey.is_valid_key(k):
                    self.log.info("Discarded invalid tag key: %s", k)
                    data["errors"].append({"type": EventError.INVALID_DATA, "name": "tags", "value": pair})
                    continue

                tags.append((k, v))
            data["tags"] = tags

        # iterate over a copy of the keys: the loop pops interface entries from data
        for k in list(data.keys()):
            if k in CLIENT_RESERVED_ATTRS:
                continue

            value = data.pop(k)

            if not value:
                self.log.info("Ignored empty interface value: %s", k)
                continue

            try:
                interface = get_interface(k)
            except ValueError:
                self.log.info("Ignored unknown attribute: %s", k)
                data["errors"].append({"type": EventError.INVALID_ATTRIBUTE, "name": k})
                continue

            if type(value) != dict:
                # HACK(dcramer): the exception interface supports a list as the
                # value. We should change this in a new protocol version.
                if type(value) in (list, tuple):
                    value = {"values": value}
                else:
                    self.log.info("Invalid parameter for value: %s (%r)", k, type(value))
                    data["errors"].append({"type": EventError.INVALID_DATA, "name": k, "value": value})
                    continue

            try:
                inst = interface.to_python(value)
                data[inst.get_path()] = inst.to_json()
            except Exception as e:
                if isinstance(e, InterfaceValidationError):
                    log = self.log.info
                else:
                    log = self.log.error
                log("Discarded invalid value for interface: %s (%r)", k, value, exc_info=True)
                data["errors"].append({"type": EventError.INVALID_DATA, "name": k, "value": value})

        level = data.get("level") or DEFAULT_LOG_LEVEL
        if isinstance(level, six.string_types) and not level.isdigit():
            # assume it's something like 'warning'
            try:
                data["level"] = LOG_LEVEL_REVERSE_MAP[level]
            except KeyError as e:
                self.log.info("Discarded invalid logger value: %s", level)
                data["errors"].append({"type": EventError.INVALID_DATA, "name": "level", "value": level})
                data["level"] = LOG_LEVEL_REVERSE_MAP.get(DEFAULT_LOG_LEVEL, DEFAULT_LOG_LEVEL)

        if data.get("release"):
            data["release"] = unicode(data["release"])
            if len(data["release"]) > 64:
                data["errors"].append({"type": EventError.VALUE_TOO_LONG, "name": "release", "value": data["release"]})
                del data["release"]

        return data
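
Every variant regenerates event_id when the supplied value is unusable. A standalone sketch of that rule (the snippets raise APIForbidden for non-string ids and record an error entry before regenerating; this sketch simply regenerates, and the hex check is an assumption modelled on the is_event_id call in Example #34):

    import uuid

    def ensure_event_id(data):
        """Keep a client-supplied event_id only if it is a hex string of at most 32 chars."""
        event_id = data.get('event_id')
        if not isinstance(event_id, str) or len(event_id) > 32:
            data['event_id'] = uuid.uuid4().hex
            return data
        try:
            int(event_id, 16)        # rough hex check, standing in for is_event_id
        except ValueError:
            data['event_id'] = uuid.uuid4().hex
        return data

    ensure_event_id({})                           # generates a new id
    ensure_event_id({'event_id': 'a' * 32})       # kept as-is
    ensure_event_id({'event_id': 'not-hex!'})     # regenerated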
Example #36
    def validate_data(self, project, data):
        # TODO(dcramer): move project out of the data packet
        data['project'] = project.id

        data['errors'] = []

        if not data.get('message'):
            data['message'] = '<no message value>'
        elif not isinstance(data['message'], six.string_types):
            raise APIForbidden('Invalid value for message')

        if data.get('culprit'):
            if not isinstance(data['culprit'], six.string_types):
                raise APIForbidden('Invalid value for culprit')

        if not data.get('event_id'):
            data['event_id'] = uuid.uuid4().hex
        elif not isinstance(data['event_id'], six.string_types):
            raise APIForbidden('Invalid value for event_id')

        if len(data['event_id']) > 32:
            self.log.info(
                'Discarded value for event_id due to length (%d chars)',
                len(data['event_id']))
            data['errors'].append({
                'type': EventError.VALUE_TOO_LONG,
                'name': 'event_id',
                'value': data['event_id'],
            })
            data['event_id'] = uuid.uuid4().hex

        if 'timestamp' in data:
            try:
                self._process_data_timestamp(data)
            except InvalidTimestamp as e:
                self.log.info(
                    'Discarded invalid value for timestamp: %r',
                    data['timestamp'], exc_info=True)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'timestamp',
                    'value': data['timestamp'],
                })
                del data['timestamp']

        if 'fingerprint' in data:
            try:
                self._process_fingerprint(data)
            except InvalidFingerprint as e:
                self.log.info(
                    'Discarded invalid value for fingerprint: %r',
                    data['fingerprint'], exc_info=True)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'fingerprint',
                    'value': data['fingerprint'],
                })
                del data['fingerprint']

        if 'platform' not in data or data['platform'] not in VALID_PLATFORMS:
            data['platform'] = 'other'

        if data.get('modules') and type(data['modules']) != dict:
            self.log.info(
                'Discarded invalid type for modules: %s',
                type(data['modules']))
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'modules',
                'value': data['modules'],
            })
            del data['modules']

        if data.get('extra') is not None and type(data['extra']) != dict:
            self.log.info(
                'Discarded invalid type for extra: %s',
                type(data['extra']))
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'extra',
                'value': data['extra'],
            })
            del data['extra']

        if data.get('tags') is not None:
            if type(data['tags']) == dict:
                data['tags'] = data['tags'].items()
            elif not isinstance(data['tags'], (list, tuple)):
                self.log.info(
                    'Discarded invalid type for tags: %s', type(data['tags']))
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': data['tags'],
                })
                del data['tags']

        if data.get('tags'):
            # drop tags whose key or value exceeds the allowed length
            tags = []
            for pair in data['tags']:
                try:
                    k, v = pair
                except ValueError:
                    self.log.info('Discarded invalid tag value: %r', pair)
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

                if not isinstance(k, six.string_types):
                    try:
                        k = six.text_type(k)
                    except Exception:
                        self.log.info('Discarded invalid tag key: %r', type(k))
                        data['errors'].append({
                            'type': EventError.INVALID_DATA,
                            'name': 'tags',
                            'value': pair,
                        })
                        continue

                if not isinstance(v, six.string_types):
                    try:
                        v = six.text_type(v)
                    except Exception:
                        self.log.info('Discarded invalid tag value: %s=%r',
                                      k, type(v))
                        data['errors'].append({
                            'type': EventError.INVALID_DATA,
                            'name': 'tags',
                            'value': pair,
                        })
                        continue

                if len(k) > MAX_TAG_KEY_LENGTH or len(v) > MAX_TAG_VALUE_LENGTH:
                    self.log.info('Discarded invalid tag: %s=%s', k, v)
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

                # support tags with spaces by converting them
                k = k.replace(' ', '-')

                if TagKey.is_reserved_key(k):
                    self.log.info('Discarding reserved tag key: %s', k)
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

                if not TagKey.is_valid_key(k):
                    self.log.info('Discarded invalid tag key: %s', k)
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

                tags.append((k, v))
            data['tags'] = tags

        # iterate over a copy of the keys: the loop pops interface entries from data
        for k in list(data.keys()):
            if k in CLIENT_RESERVED_ATTRS:
                continue

            value = data.pop(k)

            if not value:
                self.log.info('Ignored empty interface value: %s', k)
                continue

            try:
                interface = get_interface(k)
            except ValueError:
                self.log.info('Ignored unknown attribute: %s', k)
                data['errors'].append({
                    'type': EventError.INVALID_ATTRIBUTE,
                    'name': k,
                })
                continue

            if type(value) != dict:
                # HACK(dcramer): the exception/breadcrumbs interface supports a
                # list as the value. We should change this in a new protocol
                # version.
                if type(value) in (list, tuple):
                    value = {'values': value}
                else:
                    self.log.info(
                        'Invalid parameter for value: %s (%r)', k, type(value))
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': k,
                        'value': value,
                    })
                    continue

            try:
                inst = interface.to_python(value)
                data[inst.get_path()] = inst.to_json()
            except Exception as e:
                if isinstance(e, InterfaceValidationError):
                    log = self.log.info
                else:
                    log = self.log.error
                log('Discarded invalid value for interface: %s (%r)', k, value,
                    exc_info=True)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': k,
                    'value': value,
                })

        level = data.get('level') or DEFAULT_LOG_LEVEL
        if isinstance(level, six.string_types) and not level.isdigit():
            # assume it's something like 'warning'
            try:
                data['level'] = LOG_LEVEL_REVERSE_MAP[level]
            except KeyError as e:
                self.log.info(
                    'Discarded invalid level value: %s', level)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'level',
                    'value': level,
                })
                data['level'] = LOG_LEVEL_REVERSE_MAP.get(
                    DEFAULT_LOG_LEVEL, DEFAULT_LOG_LEVEL)

        if data.get('release'):
            data['release'] = six.text_type(data['release'])
            if len(data['release']) > 64:
                data['errors'].append({
                    'type': EventError.VALUE_TOO_LONG,
                    'name': 'release',
                    'value': data['release'],
                })
                del data['release']

        return data
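
Non-reserved attributes are rehydrated through interface classes, and bare lists are wrapped so that interfaces such as exception/breadcrumbs still receive a dict payload. A standalone sketch of just that value-shaping step (the helper name is illustrative):

    def shape_interface_value(value):
        """Return a dict payload for an interface, wrapping list/tuple values,
        or None when the value cannot be used."""
        if isinstance(value, dict):
            return value
        if isinstance(value, (list, tuple)):
            # the exception/breadcrumbs interfaces accept a bare list of values
            return {'values': list(value)}
        return None

    shape_interface_value([{'type': 'ValueError'}])        # -> {'values': [{'type': 'ValueError'}]}
    shape_interface_value({'url': 'http://example.com'})   # passed through unchanged
    shape_interface_value('bogus')                         # -> None (recorded as INVALID_DATA)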
Example #37
    def normalize(self):
        # TODO(dcramer): store http.env.REMOTE_ADDR as user.ip
        # First we pull out our top-level (non-data attr) kwargs
        data = self.data

        if not isinstance(data.get("level"), (six.string_types, int)):
            data["level"] = logging.ERROR
        elif data["level"] not in LOG_LEVELS:
            data["level"] = logging.ERROR

        if not data.get("logger"):
            data["logger"] = DEFAULT_LOGGER_NAME
        else:
            logger = trim(data["logger"].strip(), 64)
            if TagKey.is_valid_key(logger):
                data["logger"] = logger
            else:
                data["logger"] = DEFAULT_LOGGER_NAME

        if data.get("platform"):
            data["platform"] = trim(data["platform"], 64)

        current_timestamp = timezone.now()
        timestamp = data.get("timestamp")
        if not timestamp:
            timestamp = current_timestamp

        if isinstance(timestamp, datetime):
            # We must convert date to local time so Django doesn't mess it up
            # based on TIME_ZONE
            if settings.TIME_ZONE:
                if not timezone.is_aware(timestamp):
                    timestamp = timestamp.replace(tzinfo=timezone.utc)
            elif timezone.is_aware(timestamp):
                timestamp = timestamp.replace(tzinfo=None)
            timestamp = float(timestamp.strftime("%s"))

        data["timestamp"] = timestamp
        data["received"] = float(timezone.now().strftime("%s"))

        if not data.get("event_id"):
            data["event_id"] = uuid4().hex

        data.setdefault("message", "")
        data.setdefault("culprit", None)
        data.setdefault("server_name", None)
        data.setdefault("site", None)
        data.setdefault("checksum", None)
        data.setdefault("fingerprint", None)
        data.setdefault("platform", None)
        data.setdefault("environment", None)
        data.setdefault("extra", {})
        data.setdefault("errors", [])

        tags = data.get("tags")
        if not tags:
            tags = []
        # full support for dict syntax
        elif isinstance(tags, dict):
            tags = tags.items()
        # prevent [tag, tag, tag] (invalid) syntax
        elif not all(len(t) == 2 for t in tags):
            tags = []
        else:
            tags = list(tags)

        data["tags"] = []
        for key, value in tags:
            key = six.text_type(key).strip()
            value = six.text_type(value).strip()
            if not (key and value):
                continue

            data["tags"].append((key, value))

        if not isinstance(data["extra"], dict):
            # throw it away
            data["extra"] = {}

        trim_dict(data["extra"], max_size=settings.SENTRY_MAX_EXTRA_VARIABLE_SIZE)

        # TODO(dcramer): more of validate data needs stuffed into the manager
        # iterate over a copy of the keys: the loop pops interface entries from data
        for key in list(data.keys()):
            if key in CLIENT_RESERVED_ATTRS:
                continue

            value = data.pop(key)

            try:
                interface = get_interface(key)()
            except ValueError:
                continue

            try:
                inst = interface.to_python(value)
                data[inst.get_path()] = inst.to_json()
            except Exception:
                pass

        # the SDKs currently do not describe event types, and we must infer
        # them from available attributes
        data["type"] = eventtypes.infer(data).key

        data["version"] = self.version

        # TODO(dcramer): find a better place for this logic
        exception = data.get("sentry.interfaces.Exception")
        stacktrace = data.get("sentry.interfaces.Stacktrace")
        if exception and len(exception["values"]) == 1 and stacktrace:
            exception["values"][0]["stacktrace"] = stacktrace
            del data["sentry.interfaces.Stacktrace"]

        if "sentry.interfaces.Http" in data:
            try:
                ip_address = validate_ip(
                    data["sentry.interfaces.Http"].get("env", {}).get("REMOTE_ADDR"), required=False
                )
            except ValueError:
                ip_address = None
            if ip_address:
                data.setdefault("sentry.interfaces.User", {})
                data["sentry.interfaces.User"].setdefault("ip_address", ip_address)

        if data["culprit"]:
            data["culprit"] = trim(data["culprit"], MAX_CULPRIT_LENGTH)

        if data["message"]:
            data["message"] = trim(data["message"], settings.SENTRY_MAX_MESSAGE_LENGTH)

        return data
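
normalize() converts datetime timestamps to epoch seconds via strftime('%s'), a glibc extension that is not portable and interprets the broken-down time as local time. A standalone sketch of the intended conversion using only portable stdlib calls (an assumption about intent, not a drop-in replacement for the snippet above):

    import calendar
    from datetime import datetime, timezone

    def to_epoch(timestamp):
        """Convert a naive-UTC or aware datetime (or a number) to float epoch seconds."""
        if isinstance(timestamp, datetime):
            if timestamp.tzinfo is None:
                # treat naive datetimes as UTC, as the normalize() branch does
                timestamp = timestamp.replace(tzinfo=timezone.utc)
            return calendar.timegm(timestamp.utctimetuple()) + timestamp.microsecond / 1e6
        return float(timestamp)

    to_epoch(datetime(2016, 1, 1))                          # 1451606400.0
    to_epoch(datetime(2016, 1, 1, tzinfo=timezone.utc))     # 1451606400.0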