Example #1
    def extract_common(
        self,
        output: MutableMapping[str, Any],
        event: InsertEvent,
        metadata: KafkaMessageMetadata,
    ) -> None:
        # Properties we get from the top level of the message payload
        output["platform"] = _unicodify(event["platform"])

        # Properties we get from the "data" dict, which is the actual event body.
        data = event.get("data", {})
        received = _collapse_uint32(int(data["received"]))
        output["received"] = (datetime.utcfromtimestamp(received)
                              if received is not None else None)
        output["version"] = _unicodify(data.get("version", None))
        output["location"] = _unicodify(data.get("location", None))

        module_names = []
        module_versions = []
        modules = data.get("modules", {})
        if isinstance(modules, dict):
            for name, version in modules.items():
                module_names.append(_unicodify(name))
                # Being extra careful about a stray (incorrect by spec) `null`
                # value blowing up the write.
                module_versions.append(_unicodify(version) or "")

        output["modules.name"] = module_names
        output["modules.version"] = module_versions
Example #2
def extract_http(output: MutableMapping[str, Any],
                 request: Mapping[str, Any]) -> None:
    http_headers: Mapping[str,
                          Any] = _as_dict_safe(request.get("headers", None))
    output["http_method"] = _unicodify(request.get("method", None))
    output["http_referer"] = _unicodify(http_headers.get("Referer", None))
    output["http_url"] = _unicodify(request.get("url", None))
Example #3
    def extract_custom(
        self,
        output: MutableMapping[str, Any],
        event: InsertEvent,
        metadata: KafkaMessageMetadata,
    ) -> None:
        data = event.get("data", {})

        output["message"] = _unicodify(event["message"])

        # USER REQUEST GEO
        user = data.get("user", data.get("sentry.interfaces.User", None)) or {}
        extract_user(output, user)

        geo = user.get("geo", None) or {}
        self.extract_geo(output, geo)

        request = data.get("request", data.get("sentry.interfaces.Http",
                                               None)) or {}
        http_data: MutableMapping[str, Any] = {}
        extract_http(http_data, request)
        output["http_method"] = http_data["http_method"]
        output["http_referer"] = http_data["http_referer"]

        output["primary_hash"] = _hashify(event["primary_hash"])
        output["hierarchical_hashes"] = list(
            _hashify(x) for x in data.get("hierarchical_hashes") or ())

        output["culprit"] = _unicodify(data.get("culprit", None))
        output["type"] = _unicodify(data.get("type", None))
        output["title"] = _unicodify(data.get("title", None))
Example #4
def extract_user(output: MutableMapping[str, Any], user: Mapping[str,
                                                                 Any]) -> None:
    output["user_id"] = _unicodify(user.get("id", None))
    output["username"] = _unicodify(user.get("username", None))
    output["email"] = _unicodify(user.get("email", None))
    ip_addr = _ensure_valid_ip(user.get("ip_address", None))
    output["ip_address"] = str(ip_addr) if ip_addr is not None else None
Example #5
    def extract_sdk(self, output, sdk):
        output['sdk_name'] = _unicodify(sdk.get('name', None))
        output['sdk_version'] = _unicodify(sdk.get('version', None))

        sdk_integrations = []
        for i in sdk.get('integrations', None) or ():
            i = _unicodify(i)
            if i:
                sdk_integrations.append(i)
        output['sdk_integrations'] = sdk_integrations
Example #6
def extract_extra_tags(tags) -> Tuple[Sequence[str], Sequence[str]]:
    tag_keys = []
    tag_values = []
    for tag_key, tag_value in sorted(tags.items()):
        value = _unicodify(tag_value)
        if value:
            tag_keys.append(_unicodify(tag_key))
            tag_values.append(value)

    return (tag_keys, tag_values)
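A quick usage sketch for the tuple-returning variant above, with made-up tag data; entries whose value collapses to something falsy are dropped and keys come back sorted:

tags = {"sentry:release": "1.2.3", "environment": "prod", "empty": None}
tag_keys, tag_values = extract_extra_tags(tags)
# tag_keys   == ["environment", "sentry:release"]
# tag_values == ["prod", "1.2.3"]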
Example #7
    def process_message(self, message, metadata) -> Optional[ProcessedMessage]:
        if not (isinstance(message, (list, tuple)) and len(message) >= 2):
            return None
        version = message[0]
        if version not in (0, 1, 2):
            return None
        type_, event = message[1:3]
        if type_ != "insert":
            return None

        data = event["data"]
        event_type = data.get("type")
        if event_type != "transaction":
            return None

        ret: List[MutableMapping[str, Any]] = []

        # Add the transaction span
        transaction_ctx = data["contexts"].get("trace")
        if not transaction_ctx:
            metrics.increment("missing_trace_ctx")
            return None

        # Add the transaction root span
        processed = self.__init_span(event)
        processed["span_id"] = int(transaction_ctx["span_id"], 16)
        processed["transaction_name"] = _unicodify(
            data.get("transaction") or "")
        processed["parent_span_id"] = (int(transaction_ctx["parent_span_id"],
                                           16) if "parent_span_id"
                                       in transaction_ctx else None)
        processed["description"] = _unicodify(data.get("transaction") or "")
        processed["op"] = _unicodify(transaction_ctx.get("op") or "")
        status = transaction_ctx.get("status", None)
        self.__fill_status(processed, status)
        self.__fill_common(processed, event["data"])
        ret.append(processed)

        spans = data.get("spans", [])
        for span in spans:
            processed = self.__init_span(event)
            processed["span_id"] = int(span["span_id"], 16)
            processed["parent_span_id"] = int(span["parent_span_id"], 16)
            processed["description"] = span.get("description", "") or ""
            processed["op"] = span["op"]

            status = span.get("status", None)
            self.__fill_status(processed, status)
            self.__fill_common(processed, span)
            ret.append(processed)

        if ret:
            return InsertBatch(ret)
        else:
            return None
Example #8
    def extract_sdk(self, output: MutableMapping[str, Any],
                    sdk: Mapping[str, Any]) -> None:
        output["sdk_name"] = _unicodify(sdk.get("name", None))
        output["sdk_version"] = _unicodify(sdk.get("version", None))

        sdk_integrations = []
        for i in sdk.get("integrations", None) or ():
            i = _unicodify(i)
            if i:
                sdk_integrations.append(i)
        output["sdk_integrations"] = sdk_integrations
Example #9
def extract_extra_tags(output, tags):
    tag_keys = []
    tag_values = []
    for tag_key, tag_value in sorted(tags.items()):
        value = _unicodify(tag_value)
        if value:
            tag_keys.append(_unicodify(tag_key))
            tag_values.append(value)

    output['tags.key'] = tag_keys
    output['tags.value'] = tag_values
Example #10
    def _process_sdk_data(
        self,
        processed: MutableMapping[str, Any],
        event_dict: EventDict,
    ) -> None:
        sdk = event_dict["data"].get("sdk", None) or {}
        processed["sdk_name"] = _unicodify(sdk.get("name") or "")
        processed["sdk_version"] = _unicodify(sdk.get("version") or "")

        if processed["sdk_name"] == "":
            metrics.increment("missing_sdk_name")
        if processed["sdk_version"] == "":
            metrics.increment("missing_sdk_version")
Example #11
    def extract_custom(
        self,
        output: MutableMapping[str, Any],
        event: InsertEvent,
        metadata: KafkaMessageMetadata,
    ) -> None:
        data = event.get("data", {})
        user_dict = data.get("user", data.get("sentry.interfaces.User",
                                              None)) or {}

        user_data: MutableMapping[str, Any] = {}
        extract_user(user_data, user_dict)
        output["user_name"] = user_data["username"]
        output["user_id"] = user_data["user_id"]
        output["user_email"] = user_data["email"]

        ip_address = _ensure_valid_ip(user_data["ip_address"])
        if ip_address:
            if ip_address.version == 4:
                output["ip_address_v4"] = str(ip_address)
            elif ip_address.version == 6:
                output["ip_address_v6"] = str(ip_address)

        contexts: MutableMapping[str, Any] = _as_dict_safe(
            data.get("contexts", None))
        geo = user_dict.get("geo", {})
        if "geo" not in contexts and isinstance(geo, dict):
            contexts["geo"] = geo

        request = data.get("request", data.get("sentry.interfaces.Http",
                                               None)) or {}
        http_data: MutableMapping[str, Any] = {}
        extract_http(http_data, request)
        output["http_method"] = http_data["http_method"]
        output["http_referer"] = http_data["http_referer"]

        # _as_dict_safe may not return a reference to the entry in the data
        # dictionary in some cases.
        data["contexts"] = contexts

        output["message"] = _unicodify(event["message"])

        output["primary_hash"] = str(uuid.UUID(_hashify(
            event["primary_hash"])))
        output["hierarchical_hashes"] = list(
            str(uuid.UUID(_hashify(x)))
            for x in data.get("hierarchical_hashes") or ())

        output["culprit"] = _unicodify(data.get("culprit", ""))
        output["type"] = _unicodify(data.get("type", ""))
        output["title"] = _unicodify(data.get("title", ""))
Example #12
    def process_message(
        self, value: Mapping[str, Any], metadata: KafkaMessageMetadata
    ) -> Optional[ProcessedMessage]:
        assert isinstance(value, dict)

        # Only record outcomes from traditional error tracking events, which
        # excludes transactions, attachments and sessions. Once TSDB defines
        # models for these, we can start recording again.
        category = value.get("category")
        if category is not None and category not in DataCategory.error_categories():
            return None

        v_uuid = value.get("event_id")
        message = {
            "org_id": value.get("org_id", 0),
            "project_id": value.get("project_id", 0),
            "key_id": value.get("key_id"),
            "timestamp": _ensure_valid_date(
                datetime.strptime(value["timestamp"], settings.PAYLOAD_DATETIME_FORMAT),
            ),
            "outcome": value["outcome"],
            "reason": _unicodify(value.get("reason")),
            "event_id": str(uuid.UUID(v_uuid)) if v_uuid is not None else None,
        }

        return InsertBatch([message], None)
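The outcome processors above and below lean on _ensure_valid_date. A hedged sketch mirroring _collapse_uint32, i.e. assuming it drops datetimes whose Unix timestamp does not fit the storage range (the real helper may differ):

from datetime import datetime, timezone
from typing import Optional

UINT32_MAX = 2 ** 32 - 1


def _ensure_valid_date(dt: Optional[datetime]) -> Optional[datetime]:
    # Assumed behavior: return the datetime unchanged when its timestamp
    # fits into a uint32, otherwise None.
    if dt is None:
        return None
    seconds = int(dt.replace(tzinfo=timezone.utc).timestamp())
    return dt if 0 <= seconds <= UINT32_MAX else None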
Example #13
def extract_extra_contexts(contexts) -> Tuple[Sequence[str], Sequence[str]]:
    context_keys = []
    context_values = []
    valid_types = (int, float, str)
    for ctx_name, ctx_obj in contexts.items():
        if isinstance(ctx_obj, dict):
            ctx_obj.pop("type", None)  # ignore type alias
            for inner_ctx_name, ctx_value in ctx_obj.items():
                if isinstance(ctx_value, valid_types):
                    value = _unicodify(ctx_value)
                    if value:
                        ctx_key = f"{ctx_name}.{inner_ctx_name}"
                        context_keys.append(_unicodify(ctx_key))
                        context_values.append(_unicodify(ctx_value))

    return (context_keys, context_values)
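A usage sketch for extract_extra_contexts with an invented contexts payload; only scalar values inside dict-shaped contexts survive, flattened into dotted keys:

contexts = {
    "browser": {"type": "browser", "name": "Firefox", "version": "88.0"},
    "trace": "not-a-dict",  # non-dict contexts are skipped entirely
}
context_keys, context_values = extract_extra_contexts(contexts)
# context_keys   == ["browser.name", "browser.version"]
# context_values == ["Firefox", "88.0"]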
Example #14
    def process_message(self,
                        value,
                        metadata=None) -> Optional[ProcessedMessage]:
        assert isinstance(value, dict)
        v_uuid = value.get("event_id")
        message = {
            "org_id": value.get("org_id", 0),
            "project_id": value.get("project_id", 0),
            "key_id": value.get("key_id"),
            "timestamp": _ensure_valid_date(
                datetime.strptime(value["timestamp"],
                                  settings.PAYLOAD_DATETIME_FORMAT),
            ),
            "outcome": value["outcome"],
            "reason": _unicodify(value.get("reason")),
            "event_id": str(uuid.UUID(v_uuid)) if v_uuid is not None else None,
        }

        return ProcessedMessage(
            action=ProcessorAction.INSERT,
            data=[message],
        )
Example #15
    def __init_span(self, event: Mapping[str,
                                         Any]) -> MutableMapping[str, Any]:
        """
        Initializes the fields that are the same for all spans within a transaction.
        """
        data = event["data"]
        transaction_ctx = data["contexts"]["trace"]

        return {
            "deleted": 0,
            "project_id": event["project_id"],
            "transaction_id": str(uuid.UUID(event["event_id"])),
            "retention_days": enforce_retention(
                event, datetime.fromtimestamp(data["timestamp"])),
            "transaction_span_id": int(transaction_ctx["span_id"], 16),
            "trace_id": str(uuid.UUID(transaction_ctx["trace_id"])),
            "transaction_name": _unicodify(data.get("transaction") or ""),
        }
Example #16
def extract_extra_contexts(output, contexts):
    context_keys = []
    context_values = []
    valid_types = (int, float, str)
    for ctx_name, ctx_obj in contexts.items():
        if isinstance(ctx_obj, dict):
            ctx_obj.pop('type', None)  # ignore type alias
            for inner_ctx_name, ctx_value in ctx_obj.items():
                if isinstance(ctx_value, valid_types):
                    value = _unicodify(ctx_value)
                    if value:
                        context_keys.append("%s.%s" %
                                            (ctx_name, inner_ctx_name))
                        context_values.append(_unicodify(ctx_value))

    output['contexts.key'] = context_keys
    output['contexts.value'] = context_values
Example #17
    def extract_promoted_tags(
        self, output: MutableMapping[str, Any], tags: Mapping[str, Any],
    ) -> None:
        output.update(
            {
                col_name: _unicodify(tags.get(tag_name, None))
                for tag_name, col_name in self._promoted_tag_columns.items()
            }
        )
Example #18
    def extract_promoted_tags(
        self,
        output: MutableMapping[str, Any],
        tags: Mapping[str, Any],
    ) -> None:
        output.update({
            col.name: _unicodify(tags.get(col.name, None))
            for col in self._promoted_tag_columns
        })
Example #19
    def extract_custom(
        self,
        output: MutableMapping[str, Any],
        event: Mapping[str, Any],
        metadata: Optional[KafkaMessageMetadata] = None,
    ) -> None:
        data = event.get("data", {})
        user_dict = data.get("user", data.get("sentry.interfaces.User",
                                              None)) or {}

        user_data: MutableMapping[str, Any] = {}
        extract_user(user_data, user_dict)
        output["user_name"] = user_data["username"]
        output["user_id"] = user_data["user_id"]
        output["user_email"] = user_data["email"]

        ip_address = _ensure_valid_ip(user_data["ip_address"])
        if ip_address:
            if ip_address.version == 4:
                output["ip_address_v4"] = str(ip_address)
            elif ip_address.version == 6:
                output["ip_address_v6"] = str(ip_address)

        contexts = _as_dict_safe(data.get("contexts", None))
        geo = user_dict.get("geo", {})
        if "geo" not in contexts and isinstance(geo, dict):
            contexts["geo"] = geo

        request = data.get("request", data.get("sentry.interfaces.Http",
                                               None)) or {}
        if "request" not in contexts and isinstance(request, dict):
            http = {}
            http["http_method"] = _unicodify(request.get("method", None))
            http_headers = _as_dict_safe(request.get("headers", None))
            http["http_referer"] = _unicodify(http_headers.get(
                "Referer", None))
            contexts["request"] = http

        # _as_dict_safe may not return a reference to the entry in the data
        # dictionary in some cases.
        data["contexts"] = contexts

        output["message"] = _unicodify(event["message"])
        output["org_id"] = event["organization_id"]
Example #20
    def extract_custom(
        self,
        output: MutableMapping[str, Any],
        event: Mapping[str, Any],
        metadata: Optional[KafkaMessageMetadata] = None,
    ) -> None:
        data = event.get("data", {})
        # The following concerns the change to message/search_message
        # There are 2 Scenarios:
        #   Pre-rename:
        #        - Payload contains:
        #             "message": "a long search message"
        #        - Does NOT contain a `search_message` property
        #        - "message" value saved in `message` column
        #        - `search_message` column nonexistent or Null
        #   Post-rename:
        #        - Payload contains:
        #             "search_message": "a long search message"
        #        - Optionally the payload's "data" dict (event body) contains:
        #             "message": "short message"
        #        - "search_message" value stored in `search_message` column
        #        - "message" value stored in `message` column
        #
        output["search_message"] = _unicodify(event.get(
            "search_message", None))
        if output["search_message"] is None:
            # Pre-rename scenario, we expect to find "message" at the top level
            output["message"] = _unicodify(event["message"])
        else:
            # Post-rename scenario, we check in case we have the optional
            # "message" in the event body.
            output["message"] = _unicodify(data.get("message", None))

        # USER REQUEST GEO
        user = data.get("user", data.get("sentry.interfaces.User", None)) or {}
        extract_user(output, user)

        geo = user.get("geo", None) or {}
        self.extract_geo(output, geo)

        http = data.get("request", data.get("sentry.interfaces.Http",
                                            None)) or {}
        self.extract_http(output, http)
Example #21
    def _process_base_event_values(
            self, processed: MutableMapping[str, Any],
            event_dict: EventDict) -> MutableMapping[str, Any]:

        extract_base(processed, event_dict)

        transaction_ctx = event_dict["data"]["contexts"]["trace"]
        trace_id = transaction_ctx["trace_id"]
        processed["event_id"] = str(uuid.UUID(processed["event_id"]))
        processed["trace_id"] = str(uuid.UUID(trace_id))
        processed["span_id"] = int(transaction_ctx["span_id"], 16)
        processed["transaction_op"] = _unicodify(
            transaction_ctx.get("op") or "")
        processed["transaction_name"] = _unicodify(
            event_dict["data"].get("transaction") or "")
        processed["start_ts"], processed[
            "start_ms"] = self.__extract_timestamp(
                event_dict["data"]["start_timestamp"], )
        status = transaction_ctx.get("status", None)
        if status:
            int_status = SPAN_STATUS_NAME_TO_CODE.get(status,
                                                      UNKNOWN_SPAN_STATUS)
        else:
            int_status = UNKNOWN_SPAN_STATUS

        processed["transaction_status"] = int_status
        if event_dict["data"]["timestamp"] - event_dict["data"][
                "start_timestamp"] < 0:
            # Seems we have some negative durations in the DB
            metrics.increment("negative_duration")

        processed["finish_ts"], processed[
            "finish_ms"] = self.__extract_timestamp(
                event_dict["data"]["timestamp"], )

        duration_secs = (processed["finish_ts"] -
                         processed["start_ts"]).total_seconds()
        processed["duration"] = max(int(duration_secs * 1000), 0)

        processed["platform"] = _unicodify(event_dict["platform"])
        return processed
Example #22
def extract_nested(
    nested_col: Mapping[str, Any],
    val_processor: Callable[[Any],
                            TVal]) -> Tuple[Sequence[str], Sequence[TVal]]:
    keys = []
    values = []
    for key, value in sorted(nested_col.items()):
        value = val_processor(value)
        if value is not None:
            keys.append(_unicodify(key))
            values.append(value)

    return (keys, values)
Example #23
def extract_nested(
    nested_col: Mapping[str, Any], val_processor: Callable[[Any], Optional[TVal]]
) -> Tuple[Sequence[str], Sequence[TVal]]:
    keys = []
    values = []
    for key, value in sorted(nested_col.items()):
        value = val_processor(value)
        if value is not None:
            unicode_key = _unicodify(key)
            assert isinstance(unicode_key, str)
            keys.append(unicode_key)
            values.append(value)

    return (keys, values)
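The transaction processor later on this page (Example #30) calls extract_nested with a float-extracting processor for measurements; a small usage sketch with made-up values:

measurements = {"lcp": {"value": 787.5}, "fp": {"value": 234.0}}
keys, values = extract_nested(measurements, lambda value: float(value["value"]))
# keys   == ["fp", "lcp"]      (sorted by key)
# values == [234.0, 787.5]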
Example #24
    def process_message(
            self, value: Mapping[str, Any],
            metadata: KafkaMessageMetadata) -> Optional[ProcessedMessage]:
        assert isinstance(value, dict)
        v_uuid = value.get("event_id")
        reason = value.get("reason")

        # Relays let arbitrary outcome reasons through to the topic. We
        # reject undesired values only in the processor so that we can
        # add new ones without having to update relays through the entire
        # chain.
        if value["outcome"] == OUTCOME_CLIENT_DISCARD:
            if reason is not None and reason not in CLIENT_DISCARD_REASONS:
                reason = None

        # We don't care about abuse outcomes for these metrics.
        if value["outcome"] != OUTCOME_ABUSE:
            if "category" not in value:
                metrics.increment("missing_category")
            if "quantity" not in value:
                metrics.increment("missing_quantity")

        message = None
        try:
            timestamp = _ensure_valid_date(
                datetime.strptime(value["timestamp"],
                                  settings.PAYLOAD_DATETIME_FORMAT), )
        except Exception:
            metrics.increment("bad_outcome_timestamp")
            timestamp = _ensure_valid_date(datetime.utcnow())

        try:
            message = {
                "org_id": value.get("org_id", 0),
                "project_id": value.get("project_id", 0),
                "key_id": value.get("key_id"),
                "timestamp": timestamp,
                "outcome": value["outcome"],
                "category": value.get("category", DataCategory.ERROR),
                "quantity": value.get("quantity", 1),
                "reason": _unicodify(reason),
                "event_id":
                str(uuid.UUID(v_uuid)) if v_uuid is not None else None,
            }
        except Exception:
            metrics.increment("bad_outcome")
            return None

        return InsertBatch([message], None)
Example #25
    def _process_base_event_values(
            self, processed: MutableMapping[str, Any],
            event_dict: EventDict) -> MutableMapping[str, Any]:

        processed["replay_id"] = str(uuid.UUID(event_dict["replay_id"]))
        processed["project_id"] = event_dict["project_id"]

        processed["sequence_id"] = event_dict["sequence_id"]
        processed["trace_ids"] = event_dict["trace_ids"]

        processed["timestamp"] = self.__extract_timestamp(
            event_dict["data"]["timestamp"], )

        processed["platform"] = _unicodify(event_dict["platform"])
        return processed
Example #26
    def _process_tags(
        self,
        processed: MutableMapping[str, Any],
        event_dict: EventDict,
    ) -> None:

        tags: Mapping[str, Any] = _as_dict_safe(event_dict["data"].get(
            "tags", None))
        processed["tags.key"], processed["tags.value"] = extract_extra_tags(
            tags)
        promoted_tags = {
            col: tags[col]
            for col in self.PROMOTED_TAGS if col in tags
        }
        processed["release"] = promoted_tags.get(
            "sentry:release",
            event_dict.get("release"),
        )
        processed["environment"] = promoted_tags.get("environment")
        processed["user"] = promoted_tags.get("sentry:user", "")
        processed["dist"] = _unicodify(
            promoted_tags.get("sentry:dist", event_dict["data"].get("dist")), )
Example #27
    def extract_custom(
        self,
        output: MutableMapping[str, Any],
        event: InsertEvent,
        metadata: KafkaMessageMetadata,
    ) -> None:
        data = event.get("data", {})

        output["message"] = _unicodify(event["message"])

        # USER REQUEST GEO
        user = data.get("user", data.get("sentry.interfaces.User", None)) or {}
        extract_user(output, user)

        geo = user.get("geo", None) or {}
        self.extract_geo(output, geo)

        request = data.get("request", data.get("sentry.interfaces.Http",
                                               None)) or {}
        http_data: MutableMapping[str, Any] = {}
        extract_http(http_data, request)
        output["http_method"] = http_data["http_method"]
        output["http_referer"] = http_data["http_referer"]
Example #28
    def process_message(self, value, metadata):
        assert isinstance(value, dict)
        v_uuid = value.get('event_id')
        message = {
            'org_id': value.get('org_id', 0),
            'project_id': value.get('project_id', 0),
            'key_id': value.get('key_id'),
            'timestamp': _ensure_valid_date(
                datetime.strptime(value['timestamp'],
                                  settings.PAYLOAD_DATETIME_FORMAT),
            ),
            'outcome': value['outcome'],
            'reason': _unicodify(value.get('reason')),
            'event_id': str(uuid.UUID(v_uuid)) if v_uuid is not None else None,
        }

        return (self.INSERT, message)
Example #29
    def process_message(
            self, value: Mapping[str, Any],
            metadata: KafkaMessageMetadata) -> Optional[ProcessedMessage]:
        assert isinstance(value, dict)
        v_uuid = value.get("event_id")

        if value["outcome"] != 4:  # we don't care about abuse outcomes for these metrics
            if "category" not in value:
                metrics.increment("missing_category")
            if "quantity" not in value:
                metrics.increment("missing_quantity")

        message = {
            "org_id": value.get("org_id", 0),
            "project_id": value.get("project_id", 0),
            "key_id": value.get("key_id"),
            "timestamp": _ensure_valid_date(
                datetime.strptime(value["timestamp"],
                                  settings.PAYLOAD_DATETIME_FORMAT),
            ),
            "outcome": value["outcome"],
            "category": value.get("category", DataCategory.ERROR),
            "quantity": value.get("quantity", 1),
            "reason": _unicodify(value.get("reason")),
            "event_id": str(uuid.UUID(v_uuid)) if v_uuid is not None else None,
        }

        return InsertBatch([message], None)
Example #30
    def process_message(self, message, metadata) -> Optional[ProcessedMessage]:
        processed = {"deleted": 0}
        if not (isinstance(message, (list, tuple)) and len(message) >= 2):
            return None
        version = message[0]
        if version not in (0, 1, 2):
            return None
        type_, event = message[1:3]
        if type_ != "insert":
            return None

        data = event["data"]
        event_type = data.get("type")
        if event_type != "transaction":
            return None
        extract_base(processed, event)
        processed["retention_days"] = enforce_retention(
            event,
            datetime.fromtimestamp(data["timestamp"]),
        )
        if not data.get("contexts", {}).get("trace"):
            return None

        transaction_ctx = data["contexts"]["trace"]
        trace_id = transaction_ctx["trace_id"]
        try:
            processed["event_id"] = str(uuid.UUID(processed["event_id"]))
            processed["trace_id"] = str(uuid.UUID(trace_id))
            processed["span_id"] = int(transaction_ctx["span_id"], 16)
            processed["transaction_op"] = _unicodify(
                transaction_ctx.get("op") or "")
            processed["transaction_name"] = _unicodify(
                data.get("transaction") or "")
            processed[
                "start_ts"], processed["start_ms"] = self.__extract_timestamp(
                    data["start_timestamp"], )

            status = transaction_ctx.get("status", None)
            if status:
                int_status = SPAN_STATUS_NAME_TO_CODE.get(
                    status, UNKNOWN_SPAN_STATUS)
            else:
                int_status = UNKNOWN_SPAN_STATUS

            processed["transaction_status"] = int_status

            if data["timestamp"] - data["start_timestamp"] < 0:
                # Seems we have some negative durations in the DB
                metrics.increment("negative_duration")
        except Exception:
            # all these fields are required but we saw some events go through here
            # in the past.  For now bail.
            return
        processed["finish_ts"], processed[
            "finish_ms"] = self.__extract_timestamp(data["timestamp"], )

        duration_secs = (processed["finish_ts"] -
                         processed["start_ts"]).total_seconds()
        processed["duration"] = max(int(duration_secs * 1000), 0)

        processed["platform"] = _unicodify(event["platform"])

        tags = _as_dict_safe(data.get("tags", None))
        processed["tags.key"], processed["tags.value"] = extract_extra_tags(
            tags)
        processed["_tags_flattened"] = flatten_nested_field(
            processed["tags.key"], processed["tags.value"])

        promoted_tags = {
            col: tags[col]
            for col in self.PROMOTED_TAGS if col in tags
        }
        processed["release"] = promoted_tags.get(
            "sentry:release",
            event.get("release"),
        )
        processed["environment"] = promoted_tags.get("environment")

        contexts = _as_dict_safe(data.get("contexts", None))

        user_dict = data.get("user", data.get("sentry.interfaces.User",
                                              None)) or {}
        geo = user_dict.get("geo", None) or {}
        if "geo" not in contexts and isinstance(geo, dict):
            contexts["geo"] = geo

        measurements = data.get("measurements")
        if measurements is not None:
            try:
                (
                    processed["measurements.key"],
                    processed["measurements.value"],
                ) = extract_nested(measurements,
                                   lambda value: float(value["value"]))
            except Exception:
                # Not failing the event in this case just yet, because we are still
                # developing this feature.
                logger.error(
                    "Invalid measurements field.",
                    extra={"measurements": measurements},
                    exc_info=True,
                )
        request = data.get("request", data.get("sentry.interfaces.Http",
                                               None)) or {}
        http_data: MutableMapping[str, Any] = {}
        extract_http(http_data, request)
        processed["http_method"] = http_data["http_method"]
        processed["http_referer"] = http_data["http_referer"]

        processed["contexts.key"], processed[
            "contexts.value"] = extract_extra_contexts(contexts)
        processed["_contexts_flattened"] = flatten_nested_field(
            processed["contexts.key"], processed["contexts.value"])

        processed["dist"] = _unicodify(
            promoted_tags.get("sentry:dist", data.get("dist")), )

        user_data = {}
        extract_user(user_data, user_dict)
        processed["user"] = promoted_tags.get("sentry:user", "")
        processed["user_name"] = user_data["username"]
        processed["user_id"] = user_data["user_id"]
        processed["user_email"] = user_data["email"]
        ip_address = _ensure_valid_ip(user_data["ip_address"])

        if ip_address:
            if ip_address.version == 4:
                processed["ip_address_v4"] = str(ip_address)
            elif ip_address.version == 6:
                processed["ip_address_v6"] = str(ip_address)

        processed["partition"] = metadata.partition
        processed["offset"] = metadata.offset

        sdk = data.get("sdk", None) or {}
        processed["sdk_name"] = _unicodify(sdk.get("name") or "")
        processed["sdk_version"] = _unicodify(sdk.get("version") or "")

        if processed["sdk_name"] == "":
            metrics.increment("missing_sdk_name")
        if processed["sdk_version"] == "":
            metrics.increment("missing_sdk_version")

        return InsertBatch([processed])