class ProjectEventsEndpoint(ProjectEndpoint):

    rate_limits = {
        "GET": {
            RateLimitCategory.IP: RateLimit(5, 1),
            RateLimitCategory.USER: RateLimit(5, 1),
            RateLimitCategory.ORGANIZATION: RateLimit(5, 1),
        }
    }

    @rate_limit_endpoint(limit=5, window=1)
    def get(self, request: Request, project) -> Response:
        """
        List a Project's Events
        ```````````````````````

        Return a list of events bound to a project.

        Note: This endpoint is experimental and may be removed without notice.

        :qparam bool full: if this is set to true then the event payload will
                           include the full event body, including the stacktrace.
                           Set to 1 to enable.

        :pparam string organization_slug: the slug of the organization the
                                          groups belong to.
        :pparam string project_slug: the slug of the project the groups
                                     belong to.
        """
        from sentry.api.paginator import GenericOffsetPaginator

        query = request.GET.get("query")
        conditions = []
        if query:
            conditions.append(
                [["positionCaseInsensitive", ["message", f"'{query}'"]], "!=",
                 0])

        event_filter = eventstore.Filter(conditions=conditions,
                                         project_ids=[project.id])
        if features.has("organizations:project-event-date-limit",
                        project.organization,
                        actor=request.user):
            event_filter.start = timezone.now() - timedelta(days=7)

        full = request.GET.get("full", False)

        data_fn = partial(
            eventstore.get_events,
            filter=event_filter,
            referrer="api.project-events",
        )

        serializer = EventSerializer() if full else SimpleEventSerializer()
        return self.paginate(
            request=request,
            on_results=lambda results: serialize(results, request.user,
                                                 serializer),
            paginator=GenericOffsetPaginator(data_fn=data_fn),
        )
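The rate_limits mapping and the @rate_limit_endpoint(limit=5, window=1) decorator above express the same budget: at most five GET requests per one-second window, tracked separately per IP, per user, and per organization. As a rough mental model only (not Sentry's actual limiter), a fixed-window counter behaves like the sketch below; the class name and helper are purely illustrative.

# Illustrative sketch only -- mimics the semantics of RateLimit(limit, window):
# at most `limit` requests per `window` seconds for a given key such as
# "ip:1.2.3.4" or "user:42".
import time
from collections import defaultdict


class FixedWindowLimiter:
    def __init__(self):
        self._counts = defaultdict(int)

    def is_limited(self, key, limit, window):
        # Every request inside the same window shares one bucket for this key.
        bucket = (key, int(time.time() // window))
        self._counts[bucket] += 1
        return self._counts[bucket] > limit


limiter = FixedWindowLimiter()
# A 60-second window keeps this usage example deterministic.
allowed = [not limiter.is_limited("ip:127.0.0.1", limit=5, window=60) for _ in range(6)]
assert allowed == [True, True, True, True, True, False]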
class ProjectGroupStatsEndpoint(ProjectEndpoint, EnvironmentMixin, StatsMixin):

    rate_limits = {
        "GET": {
            RateLimitCategory.IP: RateLimit(20, 1),
            RateLimitCategory.USER: RateLimit(20, 1),
            RateLimitCategory.ORGANIZATION: RateLimit(20, 1),
        }
    }

    @rate_limit_endpoint(limit=20, window=1)
    def get(self, request: Request, project) -> Response:
        try:
            environment_id = self._get_environment_id_from_request(
                request, project.organization_id)
        except Environment.DoesNotExist:
            raise ResourceDoesNotExist

        group_ids = request.GET.getlist("id")
        if not group_ids:
            return Response(status=204)

        group_list = Group.objects.filter(project=project, id__in=group_ids)
        group_ids = [g.id for g in group_list]

        if not group_ids:
            return Response(status=204)

        data = tsdb.get_range(model=tsdb.models.group,
                              keys=group_ids,
                              **self._parse_args(request, environment_id))

        return Response({str(k): v for k, v in data.items()})
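Assuming tsdb.get_range returns a mapping of group ID to a list of (timestamp, count) pairs, as the iteration in the other examples in this listing suggests, the endpoint simply re-keys that mapping with string IDs. A hedged illustration with invented numbers:

# Hypothetical illustration of the response shape, not produced by real tsdb data.
data = {
    123: [(946684800, 4), (946688400, 7)],
    456: [(946684800, 0), (946688400, 2)],
}
response_body = {str(k): v for k, v in data.items()}
# -> {"123": [(946684800, 4), (946688400, 7)], "456": [(946684800, 0), (946688400, 2)]}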
Example #3
    def test_override_rate_limit(self):
        """Override one or more of the default rate limits"""
        class TestEndpoint(Endpoint):
            rate_limits = {
                "GET": {
                    RateLimitCategory.IP: RateLimit(100, 5)
                },
                "POST": {
                    RateLimitCategory.USER: RateLimit(20, 4)
                },
            }

        assert get_rate_limit_value("GET", TestEndpoint,
                                    "ip") == RateLimit(100, 5)
        # GET is not overridden for user, hence we use the default
        assert get_rate_limit_value(
            "GET", TestEndpoint, "user") == get_default_rate_limits_for_group(
                "default", category=RateLimitCategory.USER)
        # POST is not overridden for IP, hence we use the default
        assert get_rate_limit_value("POST", TestEndpoint,
                                    "ip") == get_default_rate_limits_for_group(
                                        "default",
                                        category=RateLimitCategory.IP)
        assert get_rate_limit_value("POST", TestEndpoint,
                                    "user") == RateLimit(20, 4)
class UserEmailsConfirmEndpoint(UserEndpoint):
    rate_limits = {
        "POST": {
            RateLimitCategory.USER: RateLimit(10, 60),
            RateLimitCategory.ORGANIZATION: RateLimit(10, 60),
        }
    }

    def post(self, request: Request, user) -> Response:
        """
        Sends a confirmation email to user
        ``````````````````````````````````

        :auth required:
        """

        from sentry.app import ratelimiter

        if ratelimiter.is_limited(
                f"auth:confirm-email:{user.id}",
                limit=10,
                window=60,  # 10 per minute should be enough for anyone
        ):
            return self.respond(
                {
                    "detail":
                    "You have made too many email confirmation requests. Please try again later."
                },
                status=status.HTTP_429_TOO_MANY_REQUESTS,
            )

        serializer = EmailSerializer(data=request.data)

        if not serializer.is_valid():
            return InvalidEmailResponse()

        # If email is specified then try to only send one confirmation email
        try:
            email_to_send = UserEmail.objects.get(
                user=user,
                email__iexact=serializer.validated_data["email"].strip())
        except UserEmail.DoesNotExist:
            return InvalidEmailResponse()
        else:
            if email_to_send.is_verified:
                return self.respond({"detail": "Email is already verified"},
                                    status=status.HTTP_400_BAD_REQUEST)

            user.send_confirm_email_singular(email_to_send)

            logger.info(
                "user.email.start_confirm",
                extra={
                    "user_id": user.id,
                    "ip_address": request.META["REMOTE_ADDR"],
                    "email": email_to_send,
                },
            )
            return self.respond(status=status.HTTP_204_NO_CONTENT)
Example #5
class TestEndpoint(Endpoint):
    rate_limits = {
        "GET": {RateLimitCategory.IP: RateLimit(100, 5)},
        "POST": {RateLimitCategory.USER: RateLimit(20, 4)},
    }
class ProjectKeyStatsEndpoint(ProjectEndpoint, StatsMixin):

    rate_limits = {
        "GET": {
            RateLimitCategory.IP: RateLimit(20, 1),
            RateLimitCategory.USER: RateLimit(20, 1),
            RateLimitCategory.ORGANIZATION: RateLimit(20, 1),
        }
    }

    @rate_limit_endpoint(limit=20, window=1)
    def get(self, request: Request, project, key_id) -> Response:
        try:
            key = ProjectKey.objects.get(project=project,
                                         public_key=key_id,
                                         roles=F("roles").bitor(
                                             ProjectKey.roles.store))
        except ProjectKey.DoesNotExist:
            raise ResourceDoesNotExist

        try:
            stat_args = self._parse_args(request)
        except ValueError:
            return Response({"detail": "Invalid request data"}, status=400)

        stats = OrderedDict()
        for model, name in (
            (tsdb.models.key_total_received, "total"),
            (tsdb.models.key_total_blacklisted, "filtered"),
            (tsdb.models.key_total_rejected, "dropped"),
        ):
            # XXX (alex, 08/05/19) key stats were being stored under either key_id or str(key_id)
            # so merge both of those back into one stats result.
            result = tsdb.get_range(model=model,
                                    keys=[key.id, str(key.id)],
                                    **stat_args)
            for key_id, points in result.items():
                for ts, count in points:
                    bucket = stats.setdefault(int(ts), {})
                    bucket.setdefault(name, 0)
                    bucket[name] += count

        return Response(
            [
                {
                    "ts": ts,
                    "total": data["total"],
                    "dropped": data["dropped"],
                    "filtered": data["filtered"],
                    "accepted": data["total"] - data["dropped"] - data["filtered"],
                }
                for ts, data in stats.items()
            ]
        )
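The merge loop above folds the two result sets (stored under key.id and str(key.id)) into per-timestamp buckets, and the accepted count is then derived as total minus dropped minus filtered. A self-contained walkthrough of that folding with made-up numbers:

# Stand-alone illustration of the bucket merge, using invented counts.
from collections import OrderedDict

results_by_model = {
    "total": {17: [(1000, 5)], "17": [(1000, 3)]},     # stats stored under both key forms
    "filtered": {17: [(1000, 1)], "17": [(1000, 0)]},
    "dropped": {17: [(1000, 2)], "17": [(1000, 1)]},
}

stats = OrderedDict()
for name, result in results_by_model.items():
    for _key, points in result.items():
        for ts, count in points:
            bucket = stats.setdefault(int(ts), {})
            bucket.setdefault(name, 0)
            bucket[name] += count

assert stats[1000] == {"total": 8, "filtered": 1, "dropped": 3}
# accepted = total - dropped - filtered = 8 - 3 - 1 = 4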
class EventIdLookupEndpoint(OrganizationEndpoint):

    rate_limits = {
        "GET": {
            RateLimitCategory.IP: RateLimit(1, 1),
            RateLimitCategory.USER: RateLimit(1, 1),
            RateLimitCategory.ORGANIZATION: RateLimit(1, 1),
        }
    }

    @rate_limit_endpoint(limit=1, window=1)
    def get(self, request: Request, organization, event_id) -> Response:
        """
        Resolve an Event ID
        ```````````````````

        This resolves an event ID to the project slug, internal issue ID,
        and internal event ID.

        :pparam string organization_slug: the slug of the organization the
                                          event ID should be looked up in.
        :param string event_id: the event ID to look up; validated by a
                                regex in the URL.
        :auth: required
        """
        if event_id and not is_event_id(event_id):
            return Response({"detail": INVALID_ID_DETAILS.format("Event ID")},
                            status=400)

        project_slugs_by_id = dict(
            Project.objects.filter(organization=organization).values_list(
                "id", "slug"))

        try:
            snuba_filter = eventstore.Filter(
                conditions=[["event.type", "!=", "transaction"]],
                project_ids=list(project_slugs_by_id.keys()),
                event_ids=[event_id],
            )
            event = eventstore.get_events(filter=snuba_filter, limit=1)[0]
        except IndexError:
            raise ResourceDoesNotExist()
        else:
            return Response(
                {
                    "organizationSlug": organization.slug,
                    "projectSlug": project_slugs_by_id[event.project_id],
                    "groupId": str(event.group_id),
                    "eventId": str(event.event_id),
                    "event": serialize(event, request.user),
                }
            )
Example #8
class OrganizationJoinRequestEndpoint(OrganizationEndpoint):
    # Disable authentication and permission requirements.
    permission_classes = []

    rate_limits = {
        "POST": {
            RateLimitCategory.IP: RateLimit(5, 86400),
            RateLimitCategory.USER: RateLimit(5, 86400),
            RateLimitCategory.ORGANIZATION: RateLimit(5, 86400),
        }
    }

    def post(self, request: Request, organization) -> Response:
        if organization.get_option("sentry:join_requests") is False:
            return Response(
                {"detail": "Your organization does not allow join requests."},
                status=403)

        # users can already join organizations with SSO enabled without an invite
        # so they should join that way and not through a request to the admins
        if AuthProvider.objects.filter(organization=organization).exists():
            return Response(status=403)

        ip_address = request.META["REMOTE_ADDR"]

        if ratelimiter.is_limited(
                f"org-join-request:ip:{ip_address}",
                limit=5,
                window=86400,  # 5 per day, 60 x 60 x 24
        ):
            return Response({"detail": "Rate limit exceeded."}, status=429)

        serializer = JoinRequestSerializer(data=request.data)

        if not serializer.is_valid():
            return Response(serializer.errors, status=400)

        result = serializer.validated_data
        email = result["email"]

        member = create_organization_join_request(organization, email,
                                                  ip_address)

        if member:
            async_send_notification(JoinRequestNotification, member,
                                    request.user)
            # legacy analytics
            join_request_created.send_robust(sender=self, member=member)

        return Response(status=204)
Example #9
    def test_negative_rate_limit_check(self, default_rate_limit_mock):
        request = self.factory.get("/")
        default_rate_limit_mock.return_value = RateLimit(10, 100)
        self.middleware.process_view(request, self._test_endpoint, [], {})
        assert not request.will_be_rate_limited

        # Requests outside the current window should not be rate limited
        default_rate_limit_mock.return_value = RateLimit(1, 1)
        with freeze_time("2000-01-01") as frozen_time:
            self.middleware.process_view(request, self._test_endpoint, [], {})
            assert not request.will_be_rate_limited
            frozen_time.tick(1)
            self.middleware.process_view(request, self._test_endpoint, [], {})
            assert not request.will_be_rate_limited
Example #10
    def test_override(self):
        config = RateLimitConfig(
            group="default",
            limit_overrides={"GET": {RateLimitCategory.IP: RateLimit(1, 1)}},
        )
        assert config.get_rate_limit("GET", RateLimitCategory.IP) == RateLimit(1, 1)
        assert config.get_rate_limit(
            "POST", RateLimitCategory.IP
        ) == get_default_rate_limits_for_group("default", RateLimitCategory.IP)
        assert config.get_rate_limit(
            "GET", RateLimitCategory.ORGANIZATION
        ) == get_default_rate_limits_for_group("default", RateLimitCategory.ORGANIZATION)
Example #11
    def test_above_rate_limit_check(self):

        return_val = above_rate_limit_check("foo", RateLimit(10, 100))
        assert return_val == dict(is_limited=False,
                                  current=1,
                                  limit=10,
                                  window=100)
class RateLimitTestEndpoint(Endpoint):
    permission_classes = (AllowAny,)

    rate_limits = {"GET": {RateLimitCategory.IP: RateLimit(1, 100)}}

    def get(self, request):
        return Response({"ok": True})
Example #13
    def test_positive_rate_limit_check(self, default_rate_limit_mock):
        request = self.factory.get("/")
        with freeze_time("2000-01-01"):
            default_rate_limit_mock.return_value = RateLimit(0, 100)
            self.middleware.process_view(request, self._test_endpoint, [], {})
            assert request.will_be_rate_limited

        with freeze_time("2000-01-02"):
            # the 11th request in a 10-request window should get rate limited
            default_rate_limit_mock.return_value = RateLimit(10, 100)
            for _ in range(10):
                self.middleware.process_view(request, self._test_endpoint, [], {})
                assert not request.will_be_rate_limited

            self.middleware.process_view(request, self._test_endpoint, [], {})
            assert request.will_be_rate_limited
class RateLimitedEndpoint(Endpoint):
    permission_classes = (AllowAny,)
    enforce_rate_limit = True
    rate_limits = RateLimitConfig(
        group="foo",
        limit_overrides={
            "GET": {
                RateLimitCategory.IP: RateLimit(0, 1),
                RateLimitCategory.USER: RateLimit(0, 1),
                RateLimitCategory.ORGANIZATION: RateLimit(0, 1),
            },
        },
    )

    def get(self, request):
        return Response({"ok": True})
Example #15
    def test_fails_open(self, default_rate_limit_mock):
        """Test that if something goes wrong in the rate limit middleware,
        the request still goes through"""
        request = self.factory.get("/")
        with freeze_time("2000-01-01"):
            default_rate_limit_mock.return_value = RateLimit(0, 100)
            self.middleware.process_view(request, self._test_endpoint, [], {})
Example #16
    def test_window_and_concurrent_limit(self):
        """Test that if there is a window limit and a concurrent limit, the
        FIXED_WINDOW limit takes precedence"""
        return_val = above_rate_limit_check("xar", RateLimit(0, 100, 0), "request_uid")
        assert return_val.rate_limit_type == RateLimitType.FIXED_WINDOW
        assert return_val.concurrent_remaining is None
Example #17
class RaceConditionEndpoint(Endpoint):
    permission_classes = (AllowAny,)

    enforce_rate_limit = False
    rate_limits = {"GET": {RateLimitCategory.IP: RateLimit(40, 100)}}

    def get(self, request):
        return Response({"ok": True})
Example #18
class GroupEventsLatestEndpoint(GroupEndpoint):
    enforce_rate_limit = True
    rate_limits = {
        "GET": {
            RateLimitCategory.IP: RateLimit(15, 1),
            RateLimitCategory.USER: RateLimit(15, 1),
            RateLimitCategory.ORGANIZATION: RateLimit(15, 1),
        }
    }

    def get(self, request: Request, group) -> Response:
        """
        Retrieve the Latest Event for an Issue
        ``````````````````````````````````````

        Retrieves the details of the latest event for an issue.

        :pparam string group_id: the ID of the issue
        """
        environments = [
            e.name
            for e in get_environments(request, group.project.organization)
        ]

        event = group.get_latest_event_for_environments(environments)

        if not event:
            return Response({"detail": "No events found for group"},
                            status=404)

        collapse = request.GET.getlist("collapse", [])
        if "stacktraceOnly" in collapse:
            return Response(serialize(event, request.user, EventSerializer()))

        try:
            return client.get(
                f"/projects/{event.organization.slug}/{event.project.slug}/events/{event.event_id}/",
                request=request,
                data={
                    "environment": environments,
                    "group_id": event.group_id
                },
            )
        except client.ApiError as e:
            return Response(e.body, status=e.status_code)
Example #19
    def test_above_rate_limit_check(self):
        with freeze_time("2000-01-01"):
            expected_reset_time = int(time() + 100)
            return_val = above_rate_limit_check("foo", RateLimit(10, 100),
                                                "request_uid")
            assert return_val == RateLimitMeta(
                rate_limit_type=RateLimitType.NOT_LIMITED,
                current=1,
                limit=10,
                window=100,
                reset_time=expected_reset_time,
                remaining=9,
                concurrent_limit=None,
                concurrent_requests=None,
            )
            for i in range(10):
                return_val = above_rate_limit_check("foo", RateLimit(10, 100),
                                                    f"request_uid{i}")
            assert return_val == RateLimitMeta(
                rate_limit_type=RateLimitType.FIXED_WINDOW,
                current=11,
                limit=10,
                window=100,
                reset_time=expected_reset_time,
                remaining=0,
                concurrent_limit=None,
                concurrent_requests=None,
            )

            for i in range(10):
                return_val = above_rate_limit_check("bar",
                                                    RateLimit(120, 100, 9),
                                                    f"request_uid{i}")
            assert return_val == RateLimitMeta(
                rate_limit_type=RateLimitType.CONCURRENT,
                current=10,
                limit=120,
                window=100,
                reset_time=expected_reset_time,
                remaining=110,
                concurrent_limit=9,
                concurrent_requests=9,
            )
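The third positional argument in RateLimit(120, 100, 9) appears to act as a concurrent-request cap layered on top of the fixed window, reported back via concurrent_limit and concurrent_requests on RateLimitMeta. A rough, stand-alone sketch of such an in-flight counter (names are illustrative; this is not Sentry's implementation):

# Illustrative in-flight (concurrent) limiter sketch. Each request has to be
# released when it finishes, mirroring the above_rate_limit_check(...) /
# finish_request(...) pair used elsewhere in this listing.
class ConcurrentLimiter:
    def __init__(self, concurrent_limit):
        self.concurrent_limit = concurrent_limit
        self.in_flight = set()

    def start(self, request_uid):
        if len(self.in_flight) >= self.concurrent_limit:
            return False  # would exceed the concurrent cap
        self.in_flight.add(request_uid)
        return True

    def finish(self, request_uid):
        self.in_flight.discard(request_uid)


limiter = ConcurrentLimiter(concurrent_limit=9)
accepted = [limiter.start(f"request_uid{i}") for i in range(10)]
assert accepted.count(True) == 9 and accepted[-1] is False
limiter.finish("request_uid0")
assert limiter.start("request_uid10") is True  # a slot freed up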
Example #20
    def test_override_rate_limit(self):
        """Override one or more of the default rate limits."""

        class TestEndpoint(Endpoint):
            rate_limits = {
                "GET": {RateLimitCategory.IP: RateLimit(100, 5)},
                "POST": {RateLimitCategory.USER: RateLimit(20, 4)},
            }

        assert get_rate_limit_value("GET", TestEndpoint, RateLimitCategory.IP) == RateLimit(100, 5)
        assert get_rate_limit_value(
            "GET", TestEndpoint, RateLimitCategory.USER
        ) == get_default_rate_limits_for_group("default", RateLimitCategory.USER)
        assert get_rate_limit_value(
            "POST", TestEndpoint, RateLimitCategory.IP
        ) == get_default_rate_limits_for_group("default", RateLimitCategory.IP)
        assert get_rate_limit_value("POST", TestEndpoint, RateLimitCategory.USER) == RateLimit(
            20, 4
        )
Example #21
class ConcurrentRateLimitedEndpoint(Endpoint):
    permission_classes = (AllowAny,)
    enforce_rate_limit = True
    rate_limits = RateLimitConfig(
        group="foo",
        limit_overrides={
            "GET": {
                RateLimitCategory.IP: RateLimit(20, 1, CONCURRENT_RATE_LIMIT),
                RateLimitCategory.USER: RateLimit(20, 1, CONCURRENT_RATE_LIMIT),
                RateLimitCategory.ORGANIZATION: RateLimit(20, 1, CONCURRENT_RATE_LIMIT),
            },
        },
    )

    def get(self, request):
        sleep(CONCURRENT_ENDPOINT_DURATION)
        return Response({"ok": True})
Example #22
class RateLimitHeaderTestEndpoint(Endpoint):
    permission_classes = (AllowAny,)

    enforce_rate_limit = True
    rate_limits = {"GET": {RateLimitCategory.IP: RateLimit(2, 100)}}

    def inject_call(self):
        return

    def get(self, request):
        self.inject_call()
        return Response({"ok": True})
    def test_above_rate_limit_check(self):
        with freeze_time("2000-01-01"):
            expected_reset_time = int(time() + 100)
            return_val = above_rate_limit_check("foo", RateLimit(10, 100))
            assert return_val == RateLimitMeta(
                is_limited=False,
                current=1,
                limit=10,
                window=100,
                reset_time=expected_reset_time,
                remaining=9,
            )
            for _ in range(10):
                return_val = above_rate_limit_check("foo", RateLimit(10, 100))
            assert return_val == RateLimitMeta(
                is_limited=True,
                current=11,
                limit=10,
                window=100,
                reset_time=expected_reset_time,
                remaining=0,
            )
Example #24
    def test_override_rate_limit(self):
        """Override one or more of the default rate limits"""
        class TestEndpoint(Endpoint):
            rate_limits = {
                "GET": {
                    RateLimitCategory.IP: RateLimit(100, 5)
                },
                "POST": {
                    RateLimitCategory.USER: RateLimit(20, 4)
                },
            }

        assert get_rate_limit_value("GET", TestEndpoint,
                                    "ip") == RateLimit(100, 5)
        assert (get_rate_limit_value(
            "GET", TestEndpoint,
            "user") == settings.SENTRY_RATELIMITER_DEFAULTS["user"])
        assert (get_rate_limit_value(
            "POST", TestEndpoint,
            "ip") == settings.SENTRY_RATELIMITER_DEFAULTS["ip"])
        assert get_rate_limit_value("POST", TestEndpoint,
                                    "user") == RateLimit(20, 4)
Example #25
class TestRateLimitConfig(TestCase):
    @mock.patch(
        "sentry.ratelimits.config._get_group_defaults",
        return_value={
            "blz": {
                RateLimitCategory.ORGANIZATION: RateLimit(420, 69)
            }
        },
    )
    def test_grouping(self, *m):
        config = RateLimitConfig(group="blz")
        assert config.get_rate_limit(
            "GET", RateLimitCategory.ORGANIZATION) == RateLimit(420, 69)

    def test_defaults(self):
        config = RateLimitConfig()
        for c in RateLimitCategory:
            for method in ("POST", "GET", "PUT", "DELETE"):
                assert isinstance(config.get_rate_limit(method, c), RateLimit)

    def test_override(self):
        config = RateLimitConfig(
            group="default",
            limit_overrides={"GET": {
                RateLimitCategory.IP: RateLimit(1, 1)
            }})
        assert config.get_rate_limit("GET",
                                     RateLimitCategory.IP) == RateLimit(1, 1)
        assert config.get_rate_limit(
            "POST", RateLimitCategory.IP) == get_default_rate_limits_for_group(
                "default", RateLimitCategory.IP)
        assert config.get_rate_limit("GET", RateLimitCategory.ORGANIZATION
                                     ) == get_default_rate_limits_for_group(
                                         "default",
                                         RateLimitCategory.ORGANIZATION)

    def test_backwards_compatibility(self):
        override_dict = {"GET": {RateLimitCategory.IP: RateLimit(1, 1)}}
        assert RateLimitConfig.from_rate_limit_override_dict(
            override_dict) == RateLimitConfig(group="default",
                                              limit_overrides=override_dict)

    def test_invalid_config(self):
        config = RateLimitConfig(
            group="default", limit_overrides={"GET": {
                "invalid": "invalid"
            }})  # type: ignore
        assert config.get_rate_limit(
            "bloop", "badcategory") == get_default_rate_limits_for_group(
                "default", RateLimitCategory.ORGANIZATION)
Example #26
class GroupFirstLastReleaseEndpoint(GroupEndpoint, EnvironmentMixin):
    enforce_rate_limit = True
    rate_limits = {
        "GET": {
            RateLimitCategory.IP: RateLimit(5, 1),
            RateLimitCategory.USER: RateLimit(5, 1),
            RateLimitCategory.ORGANIZATION: RateLimit(5, 1),
        }
    }

    def get(self, request: Request, group) -> Response:
        """Get the first and last release for a group.

        This data used to be returned by default in group_details.py, but now that we
        can collapse it, we're providing this endpoint to fetch the data separately.
        """
        first_release, last_release = get_first_last_release(request, group)
        data = {
            "id": str(group.id),
            "firstRelease": first_release,
            "lastRelease": last_release,
        }
        return Response(data)
Example #27
    def test_multiple_inheritance(self):
        class ParentEndpoint(Endpoint):
            rate_limits = {"GET": {RateLimitCategory.IP: RateLimit(100, 5)}}

        class Mixin:
            rate_limits = {"GET": {RateLimitCategory.IP: RateLimit(2, 4)}}

        class ChildEndpoint(ParentEndpoint, Mixin):
            pass

        class ChildEndpointReverse(Mixin, ParentEndpoint):
            pass

        assert get_rate_limit_value("GET", ChildEndpoint, RateLimitCategory.IP) == RateLimit(100, 5)
        assert get_rate_limit_value("GET", ChildEndpointReverse, RateLimitCategory.IP) == RateLimit(
            2, 4
        )
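Because rate_limits is a plain class attribute, the limit a child endpoint inherits is decided by Python's method resolution order, exactly as the test above asserts. A minimal, framework-free demonstration:

# Plain-Python MRO demonstration, independent of Sentry's Endpoint class.
class Parent:
    rate_limits = {"GET": "parent-limit"}


class Mixin:
    rate_limits = {"GET": "mixin-limit"}


class Child(Parent, Mixin):
    pass


class ChildReverse(Mixin, Parent):
    pass


assert Child.rate_limits["GET"] == "parent-limit"        # Parent comes first in the MRO
assert ChildReverse.rate_limits["GET"] == "mixin-limit"  # Mixin wins when listed first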
Example #28
class GroupDetailsEndpoint(GroupEndpoint, EnvironmentMixin):
    enforce_rate_limit = True
    rate_limits = {
        "GET": {
            RateLimitCategory.IP: RateLimit(5, 1),
            RateLimitCategory.USER: RateLimit(5, 1),
            RateLimitCategory.ORGANIZATION: RateLimit(5, 1),
        },
        "PUT": {
            RateLimitCategory.IP: RateLimit(5, 1),
            RateLimitCategory.USER: RateLimit(5, 1),
            RateLimitCategory.ORGANIZATION: RateLimit(5, 1),
        },
        "DELETE": {
            RateLimitCategory.IP: RateLimit(5, 5),
            RateLimitCategory.USER: RateLimit(5, 5),
            RateLimitCategory.ORGANIZATION: RateLimit(5, 5),
        },
    }

    def _get_activity(self, request: Request, group, num):
        return Activity.objects.get_activities_for_group(group, num)

    def _get_seen_by(self, request: Request, group):
        seen_by = list(
            GroupSeen.objects.filter(
                group=group).select_related("user").order_by("-last_seen"))
        return serialize(seen_by, request.user)

    def _get_actions(self, request: Request, group):
        project = group.project

        action_list = []
        for plugin in plugins.for_project(project, version=1):
            if is_plugin_deprecated(plugin, project):
                continue

            results = safe_execute(plugin.actions,
                                   request,
                                   group,
                                   action_list,
                                   _with_transaction=False)

            if not results:
                continue

            action_list = results

        for plugin in plugins.for_project(project, version=2):
            if is_plugin_deprecated(plugin, project):
                continue
            for action in (safe_execute(
                    plugin.get_actions, request, group,
                    _with_transaction=False) or ()):
                action_list.append(action)

        return action_list

    def _get_available_issue_plugins(self, request: Request, group):
        project = group.project

        plugin_issues = []
        for plugin in plugins.for_project(project, version=1):
            if isinstance(plugin, IssueTrackingPlugin2):
                if is_plugin_deprecated(plugin, project):
                    continue
                plugin_issues = safe_execute(plugin.plugin_issues,
                                             request,
                                             group,
                                             plugin_issues,
                                             _with_transaction=False)
        return plugin_issues

    def _get_context_plugins(self, request: Request, group):
        project = group.project
        return serialize(
            [
                plugin for plugin in plugins.for_project(project, version=None)
                if plugin.has_project_conf()
                and hasattr(plugin, "get_custom_contexts")
                and plugin.get_custom_contexts()
            ],
            request.user,
            PluginSerializer(project),
        )

    def get(self, request: Request, group) -> Response:
        """
        Retrieve an Issue
        `````````````````

        Return details on an individual issue. This returns the basic stats for
        the issue (title, last seen, first seen), some overall numbers (number
        of comments, user reports) as well as the summarized event data.

        :pparam string issue_id: the ID of the issue to retrieve.
        :auth: required
        """
        from sentry.utils import snuba

        try:
            # TODO(dcramer): handle unauthenticated/public response

            organization = group.project.organization
            environments = get_environments(request, organization)
            environment_ids = [e.id for e in environments]
            expand = request.GET.getlist("expand", [])
            collapse = request.GET.getlist("collapse", [])

            # WARNING: the rest of this endpoint relies on this serializer
            # populating the cache SO don't move this :)
            data = serialize(
                group, request.user,
                GroupSerializerSnuba(environment_ids=environment_ids))

            # TODO: these probably should be another endpoint
            activity = self._get_activity(request, group, num=100)
            seen_by = self._get_seen_by(request, group)

            if "release" not in collapse:
                first_release, last_release = get_first_last_release(
                    request, group)
                data.update({
                    "firstRelease": first_release,
                    "lastRelease": last_release,
                })

            get_range = functools.partial(tsdb.get_range,
                                          environment_ids=environment_ids)

            tags = tagstore.get_group_tag_keys(group.project_id,
                                               group.id,
                                               environment_ids,
                                               limit=100)
            if not environment_ids:
                user_reports = UserReport.objects.filter(group_id=group.id)
            else:
                user_reports = UserReport.objects.filter(
                    group_id=group.id, environment_id__in=environment_ids)

            now = timezone.now()
            hourly_stats = tsdb.rollup(
                get_range(model=tsdb.models.group,
                          keys=[group.id],
                          end=now,
                          start=now - timedelta(days=1)),
                3600,
            )[group.id]
            daily_stats = tsdb.rollup(
                get_range(
                    model=tsdb.models.group,
                    keys=[group.id],
                    end=now,
                    start=now - timedelta(days=30),
                ),
                3600 * 24,
            )[group.id]

            participants = GroupSubscriptionManager.get_participating_users(
                group)

            if "inbox" in expand:
                inbox_map = get_inbox_details([group])
                inbox_reason = inbox_map.get(group.id)
                data.update({"inbox": inbox_reason})

            action_list = self._get_actions(request, group)
            data.update(
                {
                    "activity": serialize(activity, request.user),
                    "seenBy": seen_by,
                    "participants": serialize(participants, request.user),
                    "pluginActions": action_list,
                    "pluginIssues": self._get_available_issue_plugins(request, group),
                    "pluginContexts": self._get_context_plugins(request, group),
                    "userReportCount": user_reports.count(),
                    "tags": sorted(serialize(tags, request.user), key=lambda x: x["name"]),
                    "stats": {"24h": hourly_stats, "30d": daily_stats},
                }
            )

            metrics.incr(
                "group.update.http_response",
                sample_rate=1.0,
                tags={
                    "status": 200,
                    "detail": "group_details:get:response"
                },
            )
            return Response(data)
        except snuba.RateLimitExceeded:
            metrics.incr(
                "group.update.http_response",
                sample_rate=1.0,
                tags={
                    "status": 429,
                    "detail": "group_details:get:snuba.RateLimitExceeded"
                },
            )
            raise
        except Exception:
            metrics.incr(
                "group.update.http_response",
                sample_rate=1.0,
                tags={
                    "status": 500,
                    "detail": "group_details:get:Exception"
                },
            )
            raise

    def put(self, request: Request, group) -> Response:
        """
        Update an Issue
        ```````````````

        Updates an individual issue's attributes. Only the attributes submitted
        are modified.

        :pparam string issue_id: the ID of the group to retrieve.
        :param string status: the new status for the issue.  Valid values
                              are ``"resolved"``, ``"resolvedInNextRelease"``,
                              ``"unresolved"``, and ``"ignored"``.
        :param string assignedTo: the user or team that should be assigned to
                                  this issue. Can be of the form ``"<user_id>"``,
                                  ``"user:<user_id>"``, ``"<username>"``,
                                  ``"<user_primary_email>"``, or ``"team:<team_id>"``.
        :param string assignedBy: ``"suggested_assignee"`` | ``"assignee_selector"``
        :param boolean hasSeen: in case this API call is invoked with a user
                                context this allows changing of the flag
                                that indicates if the user has seen the
                                event.
        :param boolean isBookmarked: in case this API call is invoked with a
                                     user context this allows changing of
                                     the bookmark flag.
        :param boolean isSubscribed:
        :param boolean isPublic: sets the issue to public or private.
        :auth: required
        """
        try:
            discard = request.data.get("discard")
            project = group.project
            search_fn = functools.partial(prep_search, self, request, project)
            response = update_groups(request, [group.id], [project],
                                     project.organization_id, search_fn)
            # if action was discard, there isn't a group to serialize anymore
            # if response isn't 200, return the response update_groups gave us (i.e. helpful error)
            # instead of serializing the updated group
            if discard or response.status_code != 200:
                return response

            # we need to fetch the object again as the bulk mutation endpoint
            # only returns a delta, and object mutation returns a complete updated
            # entity.
            # TODO(dcramer): we should update the API and have this be an explicit
            # flag (or remove it entirely) so that deltas are the primary response
            # for mutation.
            group = Group.objects.get(id=group.id)

            serialized = serialize(
                group,
                request.user,
                GroupSerializer(environment_func=self._get_environment_func(
                    request, group.project.organization_id)),
            )
            return Response(serialized, status=response.status_code)
        except client.ApiError as e:
            logging.error(
                "group_details:put client.ApiError",
                exc_info=True,
            )
            return Response(e.body, status=e.status_code)
        except Exception:
            raise

    def delete(self, request: Request, group) -> Response:
        """
        Remove an Issue
        ```````````````

        Removes an individual issue.

        :pparam string issue_id: the ID of the issue to delete.
        :auth: required
        """
        from sentry.utils import snuba

        try:
            delete_group_list(request, group.project, [group], "delete")

            metrics.incr(
                "group.update.http_response",
                sample_rate=1.0,
                tags={
                    "status": 200,
                    "detail": "group_details:delete:Response"
                },
            )
            return Response(status=202)
        except snuba.RateLimitExceeded:
            metrics.incr(
                "group.update.http_response",
                sample_rate=1.0,
                tags={
                    "status": 429,
                    "detail": "group_details:delete:snuba.RateLimitExceeded"
                },
            )
            raise
        except Exception:
            metrics.incr(
                "group.update.http_response",
                sample_rate=1.0,
                tags={
                    "status": 500,
                    "detail": "group_details:delete:Exception"
                },
            )
            raise
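In GroupDetailsEndpoint.get, the hourly and daily series come from tsdb.rollup(...)[group.id] with bucket sizes of 3600 and 3600 * 24 seconds. As a rough illustration of what a rollup does (not Sentry's tsdb code), finer-grained points are summed into coarser buckets keyed by the bucket start:

# Illustrative rollup sketch with invented numbers -- not sentry.tsdb.
from collections import defaultdict


def rollup(points, bucket_seconds):
    buckets = defaultdict(int)
    for ts, count in points:
        buckets[ts - (ts % bucket_seconds)] += count
    return sorted(buckets.items())


ten_minute_points = [(0, 1), (600, 2), (1200, 3), (3600, 4), (4200, 5)]
assert rollup(ten_minute_points, 3600) == [(0, 6), (3600, 9)]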
Example #29
class DiscoverQueryEndpoint(OrganizationEndpoint):
    permission_classes = (DiscoverQueryPermission,)
    enforce_rate_limit = True

    rate_limits = {
        "POST": {
            RateLimitCategory.IP: RateLimit(4, 1),
            RateLimitCategory.USER: RateLimit(4, 1),
            RateLimitCategory.ORGANIZATION: RateLimit(4, 1),
        }
    }

    def has_feature(self, request: Request, organization):
        return features.has(
            "organizations:discover", organization, actor=request.user
        ) or features.has("organizations:discover-basic", organization, actor=request.user)

    def handle_results(self, snuba_results, requested_query, projects):
        if "project.name" in requested_query["selected_columns"]:
            project_name_index = requested_query["selected_columns"].index("project.name")
            snuba_results["meta"].insert(
                project_name_index, {"name": "project.name", "type": "String"}
            )
            if "project.id" not in requested_query["selected_columns"]:
                snuba_results["meta"] = [
                    field for field in snuba_results["meta"] if field["name"] != "project.id"
                ]

            for result in snuba_results["data"]:
                if "project.id" in result:
                    result["project.name"] = projects[result["project.id"]]
                    if "project.id" not in requested_query["selected_columns"]:
                        del result["project.id"]

        if "project.name" in requested_query["groupby"]:
            project_name_index = requested_query["groupby"].index("project.name")
            snuba_results["meta"].insert(
                project_name_index, {"name": "project.name", "type": "String"}
            )
            if "project.id" not in requested_query["groupby"]:
                snuba_results["meta"] = [
                    field for field in snuba_results["meta"] if field["name"] != "project.id"
                ]

            for result in snuba_results["data"]:
                if "project.id" in result:
                    result["project.name"] = projects[result["project.id"]]
                    if "project.id" not in requested_query["groupby"]:
                        del result["project.id"]

        # Convert snuba types to json types
        for col in snuba_results["meta"]:
            col["type"] = snuba.get_json_type(col.get("type"))

        return snuba_results

    def do_query(self, projects, request, **kwargs):
        requested_query = deepcopy(kwargs)

        selected_columns = kwargs["selected_columns"]
        groupby_columns = kwargs["groupby"]

        if "project.name" in requested_query["selected_columns"]:
            selected_columns.remove("project.name")
            if "project.id" not in selected_columns:
                selected_columns.append("project.id")

        if "project.name" in requested_query["groupby"]:
            groupby_columns.remove("project.name")
            if "project.id" not in groupby_columns:
                groupby_columns.append("project.id")

        for aggregation in kwargs["aggregations"]:
            if aggregation[1] == "project.name":
                aggregation[1] = "project.id"

        if not kwargs["aggregations"]:

            data_fn = partial(transform_aliases_and_query, referrer="discover", **kwargs)
            return self.paginate(
                request=request,
                on_results=lambda results: self.handle_results(results, requested_query, projects),
                paginator=GenericOffsetPaginator(data_fn=data_fn),
                max_per_page=1000,
            )
        else:
            snuba_results = transform_aliases_and_query(referrer="discover", **kwargs)
            return Response(
                self.handle_results(snuba_results, requested_query, projects), status=200
            )

    def post(self, request: Request, organization) -> Response:
        if not self.has_feature(request, organization):
            return Response(status=404)
        logger.info("discover1.request", extra={"organization_id": organization.id})

        try:
            requested_projects = set(map(int, request.data.get("projects", [])))
        except (ValueError, TypeError):
            raise ResourceDoesNotExist()
        projects = self._get_projects_by_id(requested_projects, request, organization)

        serializer = DiscoverQuerySerializer(data=request.data)

        if not serializer.is_valid():
            return Response(serializer.errors, status=400)

        serialized = serializer.validated_data

        has_aggregations = len(serialized.get("aggregations")) > 0

        selected_columns = (
            serialized.get("conditionFields", []) + []
            if has_aggregations
            else serialized.get("fields", [])
        )

        projects_map = {}
        for project in projects:
            projects_map[project.id] = project.slug

        # Make sure that all selected fields are in the group by clause if there
        # are aggregations
        groupby = serialized.get("groupby") or []
        fields = serialized.get("fields") or []
        if has_aggregations:
            for field in fields:
                if field not in groupby:
                    groupby.append(field)

        return self.do_query(
            projects=projects_map,
            start=serialized.get("start"),
            end=serialized.get("end"),
            groupby=groupby,
            selected_columns=selected_columns,
            conditions=serialized.get("conditions"),
            orderby=serialized.get("orderby"),
            limit=serialized.get("limit"),
            aggregations=serialized.get("aggregations"),
            rollup=serialized.get("rollup"),
            filter_keys={"project.id": list(projects_map.keys())},
            arrayjoin=serialized.get("arrayjoin"),
            request=request,
            turbo=serialized.get("turbo"),
        )
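handle_results swaps the internal project.id column for the human-readable project.name in both the meta and the data rows when the caller asked for project.name. A compact, self-contained illustration of that substitution with made-up rows:

# Stand-alone illustration of the project.name substitution with fake data.
projects = {1: "backend", 2: "frontend"}
requested_query = {"selected_columns": ["project.name", "message"], "groupby": []}

snuba_results = {
    "meta": [{"name": "project.id", "type": "UInt64"}, {"name": "message", "type": "String"}],
    "data": [{"project.id": 1, "message": "boom"}, {"project.id": 2, "message": "crash"}],
}

# Insert the project.name column where the caller asked for it ...
index = requested_query["selected_columns"].index("project.name")
snuba_results["meta"].insert(index, {"name": "project.name", "type": "String"})
# ... drop project.id since it was not requested, and rewrite the rows.
snuba_results["meta"] = [f for f in snuba_results["meta"] if f["name"] != "project.id"]
for row in snuba_results["data"]:
    row["project.name"] = projects[row["project.id"]]
    del row["project.id"]

assert snuba_results["data"][0] == {"message": "boom", "project.name": "backend"}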
Example #30
def do_request():
    uid = uuid.uuid4().hex
    meta = above_rate_limit_check("foo", RateLimit(10, 1, 3), uid)
    sleep(0.2)
    finish_request("foo", uid)
    return meta
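Because above_rate_limit_check takes a request UID that must later be paired with finish_request to release the concurrent slot, a small context manager can make the acquire/release pairing harder to forget. This is only a sketch built around the functions used in the example above, assuming their signatures are exactly as shown:

# Sketch: pair above_rate_limit_check with finish_request via a context
# manager so the concurrent slot is always released, even on error.
# Assumes above_rate_limit_check, finish_request and RateLimit are importable
# from the same module used in the example above.
import uuid
from contextlib import contextmanager


@contextmanager
def concurrent_slot(key, rate_limit):
    uid = uuid.uuid4().hex
    meta = above_rate_limit_check(key, rate_limit, uid)
    try:
        yield meta
    finally:
        finish_request(key, uid)


# Usage, mirroring do_request() above:
# with concurrent_slot("foo", RateLimit(10, 1, 3)) as meta:
#     ...  # do the work while holding the slot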