Example #1
    def test_bind_nodes(self):
        """
        Test that bind_nodes populates _node_data
        """
        min_ago = (timezone.now() - timedelta(minutes=1)).isoformat()[:19]
        self.store_event(
            data={
                'event_id': 'a' * 32,
                'timestamp': min_ago,
                'user': {
                    'id': u'user1',
                },
            },
            project_id=self.project.id,
        )
        self.store_event(
            data={
                'event_id': 'b' * 32,
                'timestamp': min_ago,
                'user': {
                    'id': u'user2',
                },
            },
            project_id=self.project.id,
        )

        event = eventstore.get_event_by_id(self.project.id, 'a' * 32)
        event2 = eventstore.get_event_by_id(self.project.id, 'b' * 32)
        assert event.data._node_data is None
        self.eventstorage.bind_nodes([event, event2], 'data')
        assert event.data._node_data is not None
        assert event.data['user']['id'] == u'user1'
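A note on the pattern above: fetching events one at a time leaves event.data unhydrated until bind_nodes loads the nodestore payloads in a single batch. A minimal sketch of that usage, assuming the same eventstore/eventstorage interface as in the test (the project_id and event_ids names are hypothetical):

# Hypothetical batch-hydration sketch; `event_ids` stands in for the ids of
# previously stored events.
events = [eventstore.get_event_by_id(project_id, eid) for eid in event_ids]
eventstorage.bind_nodes(events, "data")  # one nodestore round trip for all events
for event in events:
    assert event.data._node_data is not None  # payloads are now populated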
Example #2
    def serialize(self, result, root, event_id=None):
        parent_map = {item["trace.parent_span"]: item for item in result}
        trace_results = [serialize_event(root, None, True)]

        snuba_event = find_event(result, lambda item: item["id"] == event_id)
        if snuba_event is None:
            sentry_sdk.set_tag("query.error_reason", "Matching event not found")
            raise ParseError("Event matching requested id not found")

        if root["id"] != event_id:
            # Get the root event and see if the current event's span is in the root event
            root_event = eventstore.get_event_by_id(root["project_id"], root["id"])
            root_span = find_event(
                root_event.data.get("spans", []),
                lambda item: item["span_id"] == snuba_event["trace.parent_span"],
            )

            # For the light response, the parent will be unknown unless it is a direct descendant of the root
            trace_results.append(
                serialize_event(snuba_event, root["id"] if root_span is not None else None)
            )

        event = eventstore.get_event_by_id(snuba_event["project_id"], event_id)
        for span in event.data.get("spans", []):
            if span["span_id"] in parent_map:
                child_event = parent_map[span["span_id"]]
                trace_results.append(serialize_event(child_event, event_id))

        return trace_results
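find_event is used throughout these examples but never defined in them. A minimal stand-in consistent with how it is called here (first match, or None when nothing matches) might be:

def find_event(items, predicate):
    # Return the first item for which predicate(item) is truthy, else None.
    return next((item for item in items if predicate(item)), None)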
Example #3
def test_max_events(
    default_project,
    reset_snuba,
    register_event_preprocessor,
    process_and_save,
    burst_task_runner,
    monkeypatch,
    remaining_events,
    max_events,
):
    @register_event_preprocessor
    def event_preprocessor(data):
        extra = data.setdefault("extra", {})
        extra.setdefault("processing_counter", 0)
        extra["processing_counter"] += 1
        return data

    event_ids = [
        process_and_save({"message": "hello world"}, seconds_ago=i + 1) for i in reversed(range(5))
    ]

    old_events = {
        event_id: eventstore.get_event_by_id(default_project.id, event_id) for event_id in event_ids
    }

    (group_id,) = {e.group_id for e in old_events.values()}

    with burst_task_runner() as burst:
        reprocess_group(
            default_project.id,
            group_id,
            max_events=max_events,
            remaining_events=remaining_events,
        )

    burst(max_jobs=100)

    for i, event_id in enumerate(event_ids):
        event = eventstore.get_event_by_id(default_project.id, event_id)
        if max_events is not None and i < (len(event_ids) - max_events):
            if remaining_events == "delete":
                assert event is None
            elif remaining_events == "keep":
                assert event.group_id != group_id
                assert dict(event.data) == dict(old_events[event_id].data)
            else:
                raise ValueError(remaining_events)
        else:
            assert event.group_id != group_id
            assert int(event.data["contexts"]["reprocessing"]["original_issue_id"]) == group_id
            assert dict(event.data) != dict(old_events[event_id].data)

    if remaining_events == "delete":
        assert event.group.times_seen == (max_events or 5)
    elif remaining_events == "keep":
        assert event.group.times_seen == 5
    else:
        raise ValueError(remaining_events)

    assert is_group_finished(group_id)
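The index arithmetic in the loop above is easy to misread: with five stored events and max_events=m, the oldest 5 - m events fall into the "remaining" bucket. A small worked check of that condition:

# Worked example of the `i < len(event_ids) - max_events` condition.
event_ids = ["e0", "e1", "e2", "e3", "e4"]  # oldest first, as in the test
max_events = 3
remaining = [eid for i, eid in enumerate(event_ids) if i < len(event_ids) - max_events]
assert remaining == ["e0", "e1"]  # the two oldest are the "remaining events"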
Example #4
    def serialize(self, parent_map, root, warning_extra, params, snuba_event, event_id=None):
        """ Because the light endpoint could potentially have gaps between root and event we return a flattened list """
        trace_results = [self.serialize_event(root, None, 0, True)]

        with sentry_sdk.start_span(op="building.trace", description="light trace"):
            if root["id"] != event_id:
                # Get the root event and see if the current event's span is in the root event
                root_event = eventstore.get_event_by_id(root["project.id"], root["id"])
                root_span = find_event(
                    root_event.data.get("spans", []),
                    lambda item: item["span_id"] == snuba_event["trace.parent_span"],
                )

                # For the light response, the parent will be unknown unless it is a direct descendant of the root
                is_root_child = root_span is not None
                trace_results.append(
                    self.serialize_event(
                        snuba_event,
                        root["id"] if is_root_child else None,
                        1 if is_root_child else None,
                    )
                )

            event = eventstore.get_event_by_id(snuba_event["project.id"], event_id)
            for span in event.data.get("spans", []):
                if span["span_id"] in parent_map:
                    child_event = parent_map[span["span_id"]]
                    trace_results.append(self.serialize_event(child_event, event_id))

        return trace_results
Example #5
def test_nodestore_missing(
    default_project, reset_snuba, process_and_save, burst_task_runner, monkeypatch, remaining_events
):
    logs = []
    monkeypatch.setattr("sentry.reprocessing2.logger.error", logs.append)

    event_id = process_and_save({"message": "hello world", "platform": "python"})
    event = eventstore.get_event_by_id(default_project.id, event_id)
    old_group = event.group

    with burst_task_runner() as burst:
        reprocess_group(
            default_project.id, event.group_id, max_events=1, remaining_events=remaining_events
        )

    burst(max_jobs=100)

    assert is_group_finished(event.group_id)

    new_event = eventstore.get_event_by_id(default_project.id, event_id)

    if remaining_events == "delete":
        assert new_event is None
    else:
        assert not new_event.data.get("errors")
        assert new_event.group_id != event.group_id

        assert new_event.group.times_seen == 1

        assert not Group.objects.filter(id=old_group.id).exists()
        assert (
            GroupRedirect.objects.get(previous_group_id=old_group.id).group_id == new_event.group_id
        )

    assert logs == ["reprocessing2.unprocessed_event.not_found"]
Example #6
    def test_logs_differences(self):
        logger = logging.getLogger("sentry.eventstore")

        with mock.patch.object(logger, "info") as mock_logger:
            # No differences to log
            filter = eventstore.Filter(project_ids=[self.project.id])
            eventstore.get_events(filter=filter)
            eventstore.get_event_by_id(self.project.id, "a" * 32)
            assert mock_logger.call_count == 0

            # Here we expect a difference since the original implementation handles type as a tag
            event = eventstore.get_event_by_id(self.project.id, "a" * 32)
            filter = eventstore.Filter(
                project_ids=[self.project.id],
                conditions=[["type", "=", "transaction"]])
            eventstore.get_next_event_id(event, filter)
            assert mock_logger.call_count == 1
            mock_logger.assert_called_with(
                "discover.result-mismatch",
                extra={
                    "snuba_result": None,
                    "snuba_discover_result": (six.text_type(self.project.id), "b" * 32),
                    "method": "get_next_event_id",
                    "event_id": event.event_id,
                    "filter_keys": filter.filter_keys,
                    "conditions": filter.conditions,
                },
            )
Example #7
    def test_bind_nodes(self):
        """
        Test that bind_nodes populates _node_data
        """
        min_ago = (timezone.now() - timedelta(minutes=1)).isoformat()[:19]
        self.store_event(
            data={
                "event_id": "a" * 32,
                "timestamp": min_ago,
                "user": {
                    "id": u"user1"
                }
            },
            project_id=self.project.id,
        )
        self.store_event(
            data={
                "event_id": "b" * 32,
                "timestamp": min_ago,
                "user": {
                    "id": u"user2"
                }
            },
            project_id=self.project.id,
        )

        event = eventstore.get_event_by_id(self.project.id, "a" * 32)
        event2 = eventstore.get_event_by_id(self.project.id, "b" * 32)
        assert event.data._node_data is None
        self.eventstorage.bind_nodes([event, event2], "data")
        assert event.data._node_data is not None
        assert event.data["user"]["id"] == u"user1"
Example #8
    def serialize(self, result, root, snuba_event, event_id=None):
        parent_map = {item["trace.parent_span"]: item for item in result}
        trace_results = [serialize_event(root, None, True)]

        if root["id"] != event_id:
            # Get the root event and see if the current event's span is in the root event
            root_event = eventstore.get_event_by_id(root["project_id"],
                                                    root["id"])
            root_span = find_event(
                root_event.data.get("spans", []),
                lambda item: item["span_id"] == snuba_event["trace.parent_span"],
            )

            # For the light response, the parent will be unknown unless it is a direct descendant of the root
            trace_results.append(
                serialize_event(snuba_event, root["id"] if root_span is not None else None)
            )

        event = eventstore.get_event_by_id(snuba_event["project_id"], event_id)
        for span in event.data.get("spans", []):
            if span["span_id"] in parent_map:
                child_event = parent_map[span["span_id"]]
                trace_results.append(serialize_event(child_event, event_id))

        return trace_results
Example #9
def test_concurrent_events_go_into_new_group(
    default_project,
    reset_snuba,
    register_event_preprocessor,
    process_and_save,
    burst_task_runner,
    default_user,
):
    """
    Assert that both unmodified and concurrently inserted events go into "the
    new group", i.e. the successor of the reprocessed (old) group that
    inherited the group hashes.
    """

    @register_event_preprocessor
    def event_preprocessor(data):
        extra = data.setdefault("extra", {})
        extra.setdefault("processing_counter", 0)
        extra["processing_counter"] += 1
        return data

    event_id = process_and_save({"message": "hello world"})

    event = eventstore.get_event_by_id(default_project.id, event_id)
    original_short_id = event.group.short_id
    assert original_short_id
    original_issue_id = event.group.id

    original_assignee = GroupAssignee.objects.create(
        group_id=original_issue_id, project=default_project, user=default_user
    )

    with burst_task_runner() as burst_reprocess:
        reprocess_group(default_project.id, event.group_id)

    assert not is_group_finished(event.group_id)

    event_id2 = process_and_save({"message": "hello world"})
    event2 = eventstore.get_event_by_id(default_project.id, event_id2)
    assert event2.event_id != event.event_id
    assert event2.group_id != event.group_id

    burst_reprocess(max_jobs=100)

    event3 = eventstore.get_event_by_id(default_project.id, event_id)
    assert event3.event_id == event.event_id
    assert event3.group_id != event.group_id

    assert is_group_finished(event.group_id)

    assert event2.group_id == event3.group_id
    assert event.get_hashes() == event2.get_hashes() == event3.get_hashes()

    group = event3.group

    assert group.short_id == original_short_id
    assert GroupAssignee.objects.get(group=group) == original_assignee
    activity = Activity.objects.get(group=group, type=Activity.REPROCESS)
    assert activity.ident == str(original_issue_id)
Example #10
def test_ingest_consumer_reads_from_topic_and_calls_celery_task(
    executor,
    task_runner,
    kafka_producer,
    kafka_admin,
    requires_kafka,
    default_project,
    get_test_message,
    random_group_id,
):
    topic_event_name = ConsumerType.get_topic_name(ConsumerType.Events)

    admin = kafka_admin(settings)
    admin.delete_topic(topic_event_name)
    producer = kafka_producer(settings)

    message, event_id = get_test_message(type="event")
    producer.produce(topic_event_name, message)

    transaction_message, transaction_event_id = get_test_message(type="transaction")
    producer.produce(topic_event_name, transaction_message)

    with override_settings(KAFKA_CONSUMER_AUTO_CREATE_TOPICS=True):
        consumer = get_ingest_consumer(
            max_batch_size=2,
            max_batch_time=5000,
            group_id=random_group_id,
            consumer_types={ConsumerType.Events},
            auto_offset_reset="earliest",
        )

    with task_runner():
        i = 0
        while i < MAX_POLL_ITERATIONS:
            transaction_message = eventstore.get_event_by_id(
                default_project.id, transaction_event_id
            )
            message = eventstore.get_event_by_id(default_project.id, event_id)

            if transaction_message and message:
                break

            consumer._run_once()
            i += 1

    # check that we got the messages
    assert message.data["event_id"] == event_id
    assert message.data["extra"]["the_id"] == event_id

    assert transaction_message.data["event_id"] == transaction_event_id
    assert transaction_message.data["spans"] == []
    assert transaction_message.data["contexts"]["trace"]
Example #11
def test_ingest_consumer_reads_from_topic_and_calls_celery_task(
    task_runner,
    kafka_producer,
    kafka_admin,
    requires_kafka,
    default_project,
    get_test_message,
    inline_transactions,
):
    group_id = "test-consumer"
    topic_event_name = ConsumerType.get_topic_name(ConsumerType.Events)

    admin = kafka_admin(settings)
    admin.delete_topic(topic_event_name)
    producer = kafka_producer(settings)

    event_ids = set()
    for _ in range(3):
        message, event_id = get_test_message()
        producer.produce(topic_event_name, message)
        event_ids.add(event_id)

    consumer = get_ingest_consumer(
        max_batch_size=2,
        max_batch_time=5000,
        group_id=group_id,
        consumer_types={ConsumerType.Events},
        auto_offset_reset="earliest",
    )

    options.set("store.transactions-celery", not inline_transactions)
    with task_runner():
        i = 0
        while i < MAX_POLL_ITERATIONS:
            if eventstore.get_event_by_id(default_project.id, event_id):
                break

            consumer._run_once()
            i += 1

    # check that we got the messages
    for event_id in event_ids:
        message = eventstore.get_event_by_id(default_project.id, event_id)
        assert message is not None
        assert message.data["event_id"] == event_id
        if message.data["type"] == "transaction":
            assert message.data["spans"] == []
            assert message.data["contexts"]["trace"]
        else:
            assert message.data["extra"]["the_id"] == event_id
Example #12
    def get_current_transaction(
            transactions: Sequence[SnubaTransaction],
            errors: Sequence[SnubaError],
            event_id: str) -> Tuple[SnubaTransaction, Event]:
        """Given an event_id return the related transaction event

        The event_id could be for an error, since we show the quick-trace
        for both event types.
        We occasionally have to get the nodestore data, so this function returns
        the nodestore event as well so that we're doing that in one location.
        """
        transaction_event = find_event(
            transactions,
            lambda item: item is not None and item["id"] == event_id)
        if transaction_event is not None:
            return transaction_event, eventstore.get_event_by_id(
                transaction_event["project.id"], transaction_event["id"])

        # The event couldn't be found, it might be an error
        error_event = find_event(
            errors, lambda item: item is not None and item["id"] == event_id)
        # Alright so we're looking at an error, time to see if we can find its transaction
        if error_event is not None:
            # Unfortunately the only association from an event back to its transaction is name & span_id
            # First maybe we got lucky and the error happened on the transaction's "span"
            error_span = error_event["trace.span"]
            transaction_event = find_event(
                transactions,
                lambda item: item is not None and item["trace.span"] == error_span,
            )
            if transaction_event is not None:
                return transaction_event, eventstore.get_event_by_id(
                    transaction_event["project.id"], transaction_event["id"])
            # We didn't get lucky, time to talk to nodestore...
            for transaction_event in transactions:
                if transaction_event["transaction"] != error_event["transaction"]:
                    continue

                nodestore_event = eventstore.get_event_by_id(
                    transaction_event["project.id"], transaction_event["id"]
                )
                transaction_spans: NodeSpans = nodestore_event.data.get("spans", [])
                for span in transaction_spans:
                    if span["span_id"] == error_event["trace.span"]:
                        return transaction_event, nodestore_event

        # The current event couldn't be found in errors or transactions
        raise Http404()
Example #13
    def inner(*args, **kwargs):
        event_id = sentry_sdk.capture_event(*args, **kwargs)
        Hub.current.client.flush()

        with push_scope():
            return wait_for_ingest_consumer(
                lambda: eventstore.get_event_by_id(settings.SENTRY_PROJECT, event_id)
            )
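wait_for_ingest_consumer is a fixture that is not shown here. Judging from its use above, a plausible stand-in polls the callable until it returns a value or a timeout expires:

import time

def wait_for_ingest_consumer_standin(get_result, timeout=10.0, interval=0.1):
    # Poll `get_result` until it returns something non-None, as the fixture
    # above appears to do; fail if the event never shows up.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        result = get_result()
        if result is not None:
            return result
        time.sleep(interval)
    raise AssertionError("ingest consumer did not produce the event in time")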
Example #14
    def test_full_minidump(self):
        self.project.update_option("sentry:store_crash_reports",
                                   STORE_CRASH_REPORTS_ALL)
        self.upload_symbols()

        with self.feature("organizations:event-attachments"):
            attachment = BytesIO(b"Hello World!")
            attachment.name = "hello.txt"
            with open(get_fixture_path("windows.dmp"), "rb") as f:
                resp = self._postMinidumpWithHeader(f, {
                    "sentry[logger]": "test-logger",
                    "some_file": attachment
                })
                assert resp.status_code == 200
                event_id = resp.content

        event = eventstore.get_event_by_id(self.project.id, event_id)
        insta_snapshot_stacktrace_data(self, event.data)
        assert event.data.get("logger") == "test-logger"
        # assert event.data.get("extra") == {"foo": "bar"}

        attachments = sorted(
            EventAttachment.objects.filter(event_id=event.event_id),
            key=lambda x: x.name)
        hello, minidump = attachments

        assert hello.name == "hello.txt"
        assert hello.file.type == "event.attachment"
        assert hello.file.checksum == "2ef7bde608ce5404e97d5f042f95f89f1c232871"

        assert minidump.name == "windows.dmp"
        assert minidump.file.type == "event.minidump"
        assert minidump.file.checksum == "74bb01c850e8d65d3ffbc5bad5cabc4668fce247"
Example #15
    def test_event_with_no_body(self):
        # remove the event from nodestore to simulate an event with no body.
        node_id = SnubaEvent.generate_node_id(self.proj1.id, self.event_id)
        nodestore.delete(node_id)
        assert nodestore.get(node_id) is None

        # Check that we can still serialize it
        event = eventstore.get_event_by_id(
            self.proj1.id,
            self.event_id,
            additional_columns=eventstore.full_columns)
        serialized = serialize(event)
        assert event.data == {}

        # Check that the regular serializer still gives us back tags
        assert serialized['tags'] == [
            {'_meta': None, 'key': 'baz', 'value': 'quux'},
            {'_meta': None, 'key': 'foo', 'value': 'bar'},
            {'_meta': None, 'key': 'release', 'value': 'release1'},
            {'_meta': None, 'key': 'user', 'query': 'user.id:user1', 'value': 'id:user1'},
        ]
Example #16
    def __handle_result(self, user, project_id, group_id, result):
        event = eventstore.get_event_by_id(project_id, result["event_id"])

        return {
            "id": result["primary_hash"],
            "latestEvent": serialize(event, user, EventSerializer()),
        }
Example #17
def test_max_events(
    default_project,
    reset_snuba,
    register_event_preprocessor,
    process_and_save,
    task_runner,
    monkeypatch,
):
    @register_event_preprocessor
    def event_preprocessor(data):
        extra = data.setdefault("extra", {})
        extra.setdefault("processing_counter", 0)
        extra["processing_counter"] += 1
        return data

    event_id = process_and_save({"message": "hello world"})

    event = eventstore.get_event_by_id(default_project.id, event_id)

    # Make sure it never gets called
    monkeypatch.setattr("sentry.tasks.reprocessing2.reprocess_event", None)

    with task_runner():
        reprocess_group(default_project.id, event.group_id, max_events=0)

    assert is_group_finished(event.group_id)
Example #18
    def get(self, request, organization, project_slug, event_id):
        if not self.has_feature(organization, request):
            return Response(status=404)

        try:
            project = Project.objects.get(slug=project_slug,
                                          organization_id=organization.id,
                                          status=ProjectStatus.VISIBLE)
        except Project.DoesNotExist:
            return Response(status=404)

        # Check access to the project as this endpoint doesn't use the membership checks done in
        # get_filter_params().
        if not request.access.has_project_access(project):
            return Response(status=404)

        # We return the requested event if we find a match regardless of whether
        # it occurred within the range specified
        with self.handle_query_errors():
            event = eventstore.get_event_by_id(project.id, event_id)

        if event is None:
            return Response({"detail": "Event not found"}, status=404)

        data = serialize(event)
        data["projectSlug"] = project_slug

        return Response(data)
Example #19
    def get(self, request, project, event_id):
        """
        Retrieve suggested owners information for an event
        ``````````````````````````````````````````````````

        :pparam string project_slug: the slug of the project the event
                                     belongs to.
        :pparam string event_id: the id of the event.
        :auth: required
        """
        event = eventstore.get_event_by_id(project.id, event_id)
        if event is None:
            return Response({"detail": "Event not found"}, status=404)

        # populate event data
        Event.objects.bind_nodes([event], "data")

        owners, rules = ProjectOwnership.get_owners(project.id, event.data)

        # For sake of the API, we don't differentiate between
        # the implicit "everyone" and no owners
        if owners == ProjectOwnership.Everyone:
            owners = []

        return Response({
            "owners": serialize(Actor.resolve_many(owners), request.user, ActorSerializer()),
            # TODO(mattrobenolt): We need to change the API here to return
            # all rules, just keeping this way currently for API compat
            "rule": rules[0].matcher if rules else None,
            "rules": rules or [],
        })
Example #20
def backfill_group_ids(model):
    query = model.objects.filter(group_id__isnull=True)

    for attachment in RangeQuerySetWrapper(query, step=1000):
        event = eventstore.get_event_by_id(attachment.project_id, attachment.event_id)
        if event:
            model.objects.filter(id=attachment.id).update(group_id=event.group_id)
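A hypothetical invocation of the backfill, assuming a model such as EventAttachment with project_id, event_id, and a nullable group_id column, as the query above expects:

# Hypothetical usage; EventAttachment is assumed to match the query above.
backfill_group_ids(EventAttachment)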
Example #21
    def get(self, request, project, event_id):
        """
        Retrieve attachments for an event
        `````````````````````````````````

        :pparam string organization_slug: the slug of the organization the
                                          issues belong to.
        :pparam string project_slug: the slug of the project the event
                                     belongs to.
        :pparam string event_id: the id of the event.
        :auth: required
        """
        if not features.has('organizations:event-attachments',
                            project.organization,
                            actor=request.user):
            return self.respond(status=404)

        event = eventstore.get_event_by_id(project.id, event_id)
        if event is None:
            return self.respond({'detail': 'Event not found'}, status=404)

        queryset = EventAttachment.objects.filter(
            project_id=project.id,
            event_id=event.event_id,
        ).select_related('file')

        return self.paginate(
            request=request,
            queryset=queryset,
            order_by='name',
            on_results=lambda x: serialize(x, request.user),
            paginator_cls=OffsetPaginator,
        )
Example #22
def unfurl_issues(
    request: HttpRequest,
    integration: Integration,
    links: List[UnfurlableUrl],
    user: Optional["User"] = None,
) -> UnfurledUrl:
    """
    Returns a map of the attachments used in the response we send to Slack
    for a particular issue by the URL of the yet-unfurled links a user included
    in their Slack message.
    """
    group_by_id = {
        g.id: g
        for g in Group.objects.filter(
            id__in={link.args["issue_id"] for link in links},
            project__in=Project.objects.filter(organization__in=integration.organizations.all()),
        )
    }
    if not group_by_id:
        return {}

    out = {}
    for link in links:
        issue_id = link.args["issue_id"]

        if issue_id in group_by_id:
            group = group_by_id[issue_id]
            # lookup the event by the id
            event_id = link.args["event_id"]
            event = eventstore.get_event_by_id(group.project_id, event_id) if event_id else None
            out[link.url] = build_group_attachment(
                group_by_id[issue_id], event=event, link_to_event=True
            )
    return out
Example #23
def unfurl_issues(integration, url_by_issue_id, event_id_by_url=None):
    """
    Returns a map of the attachments used in the response we send to Slack
    for a particular issue by the URL of the yet-unfurled links a user included
    in their Slack message.

    url_by_issue_id: a map with URL as the value and the issue ID as the key
    event_id_by_url: a map with the event ID in a URL as the value and the URL as the key
    """
    group_by_id = {
        g.id: g
        for g in Group.objects.filter(
            id__in=set(url_by_issue_id.keys()),
            project__in=Project.objects.filter(
                organization__in=integration.organizations.all()),
        )
    }
    if not group_by_id:
        return {}

    out = {}
    for issue_id, url in url_by_issue_id.items():
        if issue_id in group_by_id:
            group = group_by_id[issue_id]
            # lookup the event by the id
            event_id = event_id_by_url.get(url)
            event = eventstore.get_event_by_id(group.project_id, event_id) if event_id else None
            out[url] = build_group_attachment(
                group_by_id[issue_id], event=event, link_to_event=True
            )
    return out
Example #24
    def get(self, request: Request, project, event_id, attachment_id) -> Response:
        """
        Retrieve an Attachment
        ``````````````````````

        :pparam string organization_slug: the slug of the organization the
                                          issues belong to.
        :pparam string project_slug: the slug of the project the event
                                     belongs to.
        :pparam string event_id: the id of the event.
        :pparam string attachment_id: the id of the attachment.
        :auth: required
        """
        if not features.has("organizations:event-attachments",
                            project.organization,
                            actor=request.user):
            return self.respond(status=404)

        event = eventstore.get_event_by_id(project.id, event_id)
        if event is None:
            return self.respond({"detail": "Event not found"}, status=404)

        try:
            attachment = EventAttachment.objects.filter(
                project_id=project.id,
                event_id=event.event_id,
                id=attachment_id).get()
        except EventAttachment.DoesNotExist:
            return self.respond({"detail": "Attachment not found"}, status=404)

        if request.GET.get("download") is not None:
            return self.download(attachment)

        return self.respond(serialize(attachment, request.user))
Example #25
    def get(self, request, project, event_id):
        """
        Retrieve an Event for a Project
        ```````````````````````````````

        Return details on an individual event.

        :pparam string organization_slug: the slug of the organization the
                                          event belongs to.
        :pparam string project_slug: the slug of the project the event
                                     belongs to.
        :pparam string event_id: the id of the event to retrieve (either the
                                 numeric primary-key or the hexadecimal id as
                                 reported by the raven client)
        :auth: required
        """

        event = eventstore.get_event_by_id(project.id, event_id)

        if event is None:
            return Response({"detail": "Event not found"}, status=404)

        data = serialize(event, request.user, DetailedEventSerializer())

        # Used for paginating through events of a single issue in group details
        # Skip next/prev for issueless events
        next_event_id = None
        prev_event_id = None

        if event.group_id:
            requested_environments = set(request.GET.getlist("environment"))
            conditions = [["event.type", "!=", "transaction"]]

            if requested_environments:
                conditions.append(
                    ["environment", "IN", requested_environments])

            _filter = eventstore.Filter(conditions=conditions,
                                        project_ids=[event.project_id],
                                        group_ids=[event.group_id])

            # Ignore any time params and search entire retention period
            next_event_filter = deepcopy(_filter)
            next_event_filter.end = datetime.utcnow()
            next_event = eventstore.get_next_event_id(event,
                                                      filter=next_event_filter)

            prev_event_filter = deepcopy(_filter)
            prev_event_filter.start = datetime.utcfromtimestamp(0)
            prev_event = eventstore.get_prev_event_id(event,
                                                      filter=prev_event_filter)

            next_event_id = next_event[1] if next_event else None
            prev_event_id = prev_event[1] if prev_event else None

        data["nextEventID"] = next_event_id
        data["previousEventID"] = prev_event_id

        return Response(data)
Example #26
    def get(self, request, organization, project_slug, event_id):
        if not features.has("organizations:discover-basic",
                            organization,
                            actor=request.user):
            return Response(status=404)

        try:
            params = self.get_filter_params(request, organization)
        except NoProjects:
            return Response(status=404)

        try:
            project = Project.objects.get(slug=project_slug,
                                          organization_id=organization.id,
                                          status=ProjectStatus.VISIBLE)
        except Project.DoesNotExist:
            return Response(status=404)
        # Check access to the project as this endpoint doesn't use the membership checks done in
        # get_filter_params().
        if not request.access.has_project_access(project):
            return Response(status=404)

        # We return the requested event if we find a match regardless of whether
        # it occurred within the range specified
        event = eventstore.get_event_by_id(project.id, event_id)

        if event is None:
            return Response({"detail": "Event not found"}, status=404)

        reference = None
        fields = [field for field in request.query_params.getlist("field") if field.strip()]
        if fields:
            event_slug = u"{}:{}".format(project.slug, event_id)
            reference = discover.ReferenceEvent(organization, event_slug,
                                                fields, event.datetime,
                                                event.datetime)
        try:
            pagination = discover.get_pagination_ids(
                event=event,
                query=request.query_params.get("query"),
                params=params,
                organization=organization,
                reference_event=reference,
                referrer="api.organization-event-details",
            )
        except discover.InvalidSearchQuery as err:
            raise ParseError(detail=six.text_type(err))

        data = serialize(event)
        data["nextEventID"] = pagination.next
        data["previousEventID"] = pagination.previous
        data["oldestEventID"] = pagination.oldest
        data["latestEventID"] = pagination.latest
        data["projectSlug"] = project_slug

        return Response(data)
Example #27
    def post_and_retrieve_event(self, data):
        resp = self._postWithHeader(data)
        assert resp.status_code == 200
        event_id = json.loads(resp.content)["id"]

        event = eventstore.get_event_by_id(self.project.id, event_id)
        assert event is not None
        return event
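Typical use of this helper inside a test would be along these lines (the payload shown is illustrative):

# Hypothetical call site: post a payload, then assert on the stored event.
event = self.post_and_retrieve_event({"message": "hello"})
assert event.event_id is not None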
Example #28
    def serialize(self,
                  parent_map,
                  root,
                  warning_extra,
                  params,
                  snuba_event=None,
                  event_id=None):
        """ For the full event trace, we return the results as a graph instead of a flattened list """
        parent_events = {}
        result = parent_events[root["id"]] = self.serialize_event(
            root, None, 0, True)

        with sentry_sdk.start_span(op="building.trace",
                                   description="full trace"):
            to_check = deque([root])
            iteration = 0
            while to_check:
                current_event = to_check.popleft()

                # This is faster than doing a call to get_events, since get_event_by_id only makes a call to snuba
                # when non transaction events are included.
                with sentry_sdk.start_span(op="nodestore",
                                           description="get_event_by_id"):
                    event = eventstore.get_event_by_id(
                        current_event["project.id"], current_event["id"])

                previous_event = parent_events[current_event["id"]]
                previous_event.update({
                    event_key: event.data.get(event_key)
                    for event_key in NODESTORE_KEYS
                })

                for child in event.data.get("spans", []):
                    if child["span_id"] not in parent_map:
                        continue
                    # Avoid potential span loops by popping, so we don't traverse the same nodes twice
                    child_events = parent_map.pop(child["span_id"])

                    for child_event in child_events:
                        parent_events[child_event["id"]] = self.serialize_event(
                            child_event, current_event["id"], previous_event["generation"] + 1
                        )
                        # Add this event to its parent's children
                        previous_event["children"].append(parent_events[child_event["id"]])

                        to_check.append(child_event)
                # Limit iterations just to be safe
                iteration += 1
                if iteration > MAX_TRACE_SIZE:
                    logger.warning(
                        "discover.trace-view.surpassed-trace-limit",
                        extra=warning_extra,
                    )
                    break

        return result
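Unlike Example #2, where parent_map maps each trace.parent_span to a single row, this serializer pops a *list* of children per span id, so its parent_map presumably groups the Snuba transactions by parent span. A sketch of building such a map (the transactions variable is hypothetical):

from collections import defaultdict

# Hypothetical construction of the grouped parent_map consumed above.
parent_map = defaultdict(list)
for item in transactions:  # rows as returned by the Snuba trace query
    parent_map[item["trace.parent_span"]].append(item)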
Example #29
    def get(self, request, project, event_id):
        """
        Retrieve an Event for a Project
        ```````````````````````````````

        Return details on an individual event.

        :pparam string organization_slug: the slug of the organization the
                                          event belongs to.
        :pparam string project_slug: the slug of the project the event
                                     belongs to.
        :pparam string event_id: the id of the event to retrieve (either the
                                 numeric primary-key or the hexadecimal id as
                                 reported by the raven client)
        :auth: required
        """

        event = eventstore.get_event_by_id(project.id, event_id)

        if event is None:
            return Response({"detail": "Event not found"}, status=404)

        data = serialize(event, request.user, DetailedEventSerializer())

        # Used for paginating through events of a single issue in group details
        # Skip next/prev for issueless events
        next_event_id = None
        prev_event_id = None

        if event.group_id:
            requested_environments = set(request.GET.getlist("environment"))
            conditions = []

            if requested_environments:
                conditions.append(
                    ["environment", "IN", requested_environments])

            filter_keys = {
                "project_id": [event.project_id],
                "issue": [event.group_id]
            }

            next_event = eventstore.get_next_event_id(event,
                                                      conditions=conditions,
                                                      filter_keys=filter_keys)

            prev_event = eventstore.get_prev_event_id(event,
                                                      conditions=conditions,
                                                      filter_keys=filter_keys)

            next_event_id = next_event[1] if next_event else None
            prev_event_id = prev_event[1] if prev_event else None

        data["nextEventID"] = next_event_id
        data["previousEventID"] = prev_event_id

        return Response(data)
Example #30
    def test_merge_with_event_integrity(self):
        project = self.create_project()
        event1 = self.store_event(
            data={
                "event_id": "a" * 32,
                "timestamp": iso_format(before_now(seconds=1)),
                "fingerprint": ["group-1"],
                "extra": {
                    "foo": "bar"
                },
            },
            project_id=project.id,
        )
        group1 = event1.group
        event2 = self.store_event(
            data={
                "event_id": "b" * 32,
                "timestamp": iso_format(before_now(seconds=1)),
                "fingerprint": ["group-2"],
                "extra": {
                    "foo": "baz"
                },
            },
            project_id=project.id,
        )
        group2 = event2.group

        with self.tasks():
            eventstream_state = eventstream.start_merge(
                project.id, [group1.id], group2.id)
            merge_groups([group1.id], group2.id)
            eventstream.end_merge(eventstream_state)

        assert not Group.objects.filter(id=group1.id).exists()

        event1 = eventstore.get_event_by_id(project.id, event1.event_id)
        assert event1.group_id == group2.id
        Event.objects.bind_nodes([event1], "data")
        assert event1.data["extra"]["foo"] == "bar"

        event2 = eventstore.get_event_by_id(project.id, event2.event_id)
        assert event2.group_id == group2.id
        Event.objects.bind_nodes([event2], "data")
        assert event2.data["extra"]["foo"] == "baz"