    def get(self, request, organization, project, group_id, key):
        try:
            # TODO(tkaemming): This should *actually* redirect, see similar
            # comment in ``GroupEndpoint.convert_args``.
            group, _ = get_group_with_redirect(
                group_id,
                queryset=Group.objects.filter(project=project),
            )
        except Group.DoesNotExist:
            raise Http404

        if tagstore.is_reserved_key(key):
            lookup_key = 'sentry:{0}'.format(key)
        else:
            lookup_key = key

        try:
            environment_id = self._get_environment_id_from_request(
                request, project.organization_id)
        except Environment.DoesNotExist:
            # if the environment doesn't exist then the tag can't possibly exist
            raise Http404

        # validate existence, as the tag key may have been deleted
        try:
            tagstore.get_tag_key(project.id, environment_id, lookup_key)
        except tagstore.TagKeyNotFound:
            raise Http404

        if key == 'user':
            callbacks = [attach_eventuser(project.id)]
        else:
            callbacks = []

        gtv_iter = tagstore.get_group_tag_value_iter(group.project_id,
                                                     group.id,
                                                     environment_id,
                                                     lookup_key,
                                                     callbacks=callbacks)

        filename = '{}-{}'.format(
            group.qualified_short_id or group.id,
            key,
        )

        return self.to_csv_response(gtv_iter, filename, key=key)
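
A note on the callbacks parameter: attach_eventuser(project.id) above is a factory whose return value is applied to each fetched chunk of tag values. A minimal sketch of such a factory, assuming it mirrors the inline attach_eventuser closures shown in Examples 6 and 8 below (the factory form shown here is an assumption):

from sentry.models import EventUser  # assumed import path

def attach_eventuser(project_id):
    # Hypothetical factory form of the callback used above; Examples 6 and 8
    # below perform the same EventUser lookup with an inline closure.
    def callback(items):
        # Resolve the EventUser for each tag value and attach it to the item,
        # so the CSV writer can emit id/email/username/ip_address columns.
        users = EventUser.for_tags(project_id, [i.value for i in items])
        for item in items:
            item._eventuser = users.get(item.value)
    return callback
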
Example #2
    def get(self, request, organization, project, group_id, key):
        try:
            # TODO(tkaemming): This should *actually* redirect, see similar
            # comment in ``GroupEndpoint.convert_args``.
            group, _ = get_group_with_redirect(
                group_id,
                queryset=Group.objects.filter(project=project),
            )
        except Group.DoesNotExist:
            raise Http404

        if tagstore.is_reserved_key(key):
            lookup_key = 'sentry:{0}'.format(key)
        else:
            lookup_key = key

        try:
            environment_id = self._get_environment_id_from_request(request, project.organization_id)
        except Environment.DoesNotExist:
            # if the environment doesn't exist then the tag can't possibly exist
            raise Http404

        # validate existence, as the tag key may have been deleted
        try:
            tagstore.get_tag_key(project.id, environment_id, lookup_key)
        except tagstore.TagKeyNotFound:
            raise Http404

        if key == 'user':
            callbacks = [attach_eventuser(project.id)]
        else:
            callbacks = []

        queryset = RangeQuerySetWrapper(
            tagstore.get_group_tag_value_qs(group.project_id, group.id, environment_id, lookup_key),
            callbacks=callbacks,
        )

        filename = '{}-{}'.format(
            group.qualified_short_id or group.id,
            key,
        )

        return self.to_csv_response(queryset, filename, key=key)
Example #3
    def get(self, request, organization, project, team, group_id, key):
        try:
            # TODO(tkaemming): This should *actually* redirect, see similar
            # comment in ``GroupEndpoint.convert_args``.
            group, _ = get_group_with_redirect(
                group_id,
                queryset=Group.objects.filter(project=project),
            )
        except Group.DoesNotExist:
            raise Http404

        if tagstore.is_reserved_key(key):
            lookup_key = 'sentry:{0}'.format(key)
        else:
            lookup_key = key

        # validate existence, as the tag key may have been deleted
        try:
            tagstore.get_tag_key(group.project_id, lookup_key)
        except tagstore.TagKeyNotFound:
            raise Http404

        if key == 'user':
            callbacks = [attach_eventuser(project.id)]
        else:
            callbacks = []

        queryset = RangeQuerySetWrapper(
            GroupTagValue.objects.filter(
                group_id=group.id,
                key=lookup_key,
            ),
            callbacks=callbacks,
        )

        filename = '{}-{}'.format(
            group.qualified_short_id or group.id,
            key,
        )

        return self.to_csv_response(queryset, filename, key=key)
Example #4
    def validate_data(self, project, data):
        # TODO(dcramer): move project out of the data packet
        data['project'] = project.id

        data['errors'] = []

        if data.get('culprit'):
            if not isinstance(data['culprit'], six.string_types):
                raise APIForbidden('Invalid value for culprit')

        if not data.get('event_id'):
            data['event_id'] = uuid.uuid4().hex
        elif not isinstance(data['event_id'], six.string_types):
            raise APIForbidden('Invalid value for event_id')

        if len(data['event_id']) > 32:
            self.log.debug(
                'Discarded value for event_id due to length (%d chars)',
                len(data['event_id']))
            data['errors'].append({
                'type': EventError.VALUE_TOO_LONG,
                'name': 'event_id',
                'value': data['event_id'],
            })
            data['event_id'] = uuid.uuid4().hex
        elif not is_event_id(data['event_id']):
            self.log.debug('Discarded invalid value for event_id: %r',
                           data['event_id'],
                           exc_info=True)
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'event_id',
                'value': data['event_id'],
            })
            data['event_id'] = uuid.uuid4().hex

        if 'timestamp' in data:
            try:
                self._process_data_timestamp(data)
            except InvalidTimestamp as e:
                self.log.debug('Discarded invalid value for timestamp: %r',
                               data['timestamp'],
                               exc_info=True)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'timestamp',
                    'value': data['timestamp'],
                })
                del data['timestamp']

        if 'fingerprint' in data:
            try:
                self._process_fingerprint(data)
            except InvalidFingerprint as e:
                self.log.debug('Discarded invalid value for fingerprint: %r',
                               data['fingerprint'],
                               exc_info=True)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'fingerprint',
                    'value': data['fingerprint'],
                })
                del data['fingerprint']

        if 'platform' not in data or data['platform'] not in VALID_PLATFORMS:
            data['platform'] = 'other'

        if data.get('modules') and type(data['modules']) != dict:
            self.log.debug('Discarded invalid type for modules: %s',
                           type(data['modules']))
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'modules',
                'value': data['modules'],
            })
            del data['modules']

        if data.get('extra') is not None and type(data['extra']) != dict:
            self.log.debug('Discarded invalid type for extra: %s',
                           type(data['extra']))
            data['errors'].append({
                'type': EventError.INVALID_DATA,
                'name': 'extra',
                'value': data['extra'],
            })
            del data['extra']

        if data.get('tags') is not None:
            if type(data['tags']) == dict:
                data['tags'] = list(data['tags'].items())
            elif not isinstance(data['tags'], (list, tuple)):
                self.log.debug('Discarded invalid type for tags: %s',
                               type(data['tags']))
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'tags',
                    'value': data['tags'],
                })
                del data['tags']

        if data.get('tags'):
            # drop any keys/values which exceed the maximum allowed length
            tags = []
            for pair in data['tags']:
                try:
                    k, v = pair
                except ValueError:
                    self.log.debug('Discarded invalid tag value: %r', pair)
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

                if not isinstance(k, six.string_types):
                    try:
                        k = six.text_type(k)
                    except Exception:
                        self.log.debug('Discarded invalid tag key: %r',
                                       type(k))
                        data['errors'].append({
                            'type': EventError.INVALID_DATA,
                            'name': 'tags',
                            'value': pair,
                        })
                        continue

                if not isinstance(v, six.string_types):
                    try:
                        v = six.text_type(v)
                    except Exception:
                        self.log.debug('Discarded invalid tag value: %s=%r', k,
                                       type(v))
                        data['errors'].append({
                            'type': EventError.INVALID_DATA,
                            'name': 'tags',
                            'value': pair,
                        })
                        continue

                if len(k) > MAX_TAG_KEY_LENGTH or len(v) > MAX_TAG_VALUE_LENGTH:
                    self.log.debug('Discarded invalid tag: %s=%s', k, v)
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

                # support tags with spaces by converting them to dashes
                k = k.replace(' ', '-')

                if tagstore.is_reserved_key(k):
                    self.log.debug('Discarding reserved tag key: %s', k)
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

                if not tagstore.is_valid_key(k):
                    self.log.debug('Discarded invalid tag key: %s', k)
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

                if not tagstore.is_valid_value(v):
                    self.log.debug('Discarded invalid tag value: %s', v)
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': pair,
                    })
                    continue

                tags.append((k, v))
            data['tags'] = tags

        for k in list(iter(data)):
            if k in CLIENT_RESERVED_ATTRS:
                continue

            value = data.pop(k)

            if not value:
                self.log.debug('Ignored empty interface value: %s', k)
                continue

            try:
                interface = get_interface(k)
            except ValueError:
                self.log.debug('Ignored unknown attribute: %s', k)
                data['errors'].append({
                    'type': EventError.INVALID_ATTRIBUTE,
                    'name': k,
                })
                continue

            if type(value) != dict:
                # HACK(dcramer): the exception/breadcrumbs interface supports a
                # list as the value. We should change this in a new protocol
                # version.
                if type(value) in (list, tuple):
                    value = {'values': value}
                else:
                    self.log.debug('Invalid parameter for value: %s (%r)', k,
                                   type(value))
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': k,
                        'value': value,
                    })
                    continue

            try:
                inst = interface.to_python(value)
                data[inst.get_path()] = inst.to_json()
            except Exception as e:
                if isinstance(e, InterfaceValidationError):
                    log = self.log.debug
                else:
                    log = self.log.error
                log('Discarded invalid value for interface: %s (%r)',
                    k,
                    value,
                    exc_info=True)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': k,
                    'value': value,
                })

        # TODO(dcramer): ideally this logic would happen in normalize, but today
        # we don't do "validation" there (create errors)

        # message is coerced to an interface, as it's used purely as an
        # index of searchable strings
        # See GH-3248
        message = data.pop('message', None)
        if message:
            if 'sentry.interfaces.Message' not in data:
                value = {
                    'message': message,
                }
            elif not data['sentry.interfaces.Message'].get('formatted'):
                value = data['sentry.interfaces.Message']
                value['formatted'] = message
            else:
                value = None

            if value is not None:
                k = 'sentry.interfaces.Message'
                interface = get_interface(k)
                try:
                    inst = interface.to_python(value)
                    data[inst.get_path()] = inst.to_json()
                except Exception as e:
                    if isinstance(e, InterfaceValidationError):
                        log = self.log.debug
                    else:
                        log = self.log.error
                    log('Discarded invalid value for interface: %s (%r)',
                        k,
                        value,
                        exc_info=True)
                    data['errors'].append({
                        'type': EventError.INVALID_DATA,
                        'name': k,
                        'value': value,
                    })

        level = data.get('level') or DEFAULT_LOG_LEVEL
        if isinstance(level, six.string_types) and not level.isdigit():
            # assume it's something like 'warning'
            try:
                data['level'] = LOG_LEVELS_MAP[level]
            except KeyError as e:
                self.log.debug('Discarded invalid level value: %s', level)
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'level',
                    'value': level,
                })
                data['level'] = LOG_LEVELS_MAP.get(DEFAULT_LOG_LEVEL,
                                                   DEFAULT_LOG_LEVEL)

        if data.get('release'):
            data['release'] = six.text_type(data['release'])
            if len(data['release']) > 64:
                data['errors'].append({
                    'type': EventError.VALUE_TOO_LONG,
                    'name': 'release',
                    'value': data['release'],
                })
                del data['release']

        if data.get('dist'):
            data['dist'] = six.text_type(data['dist']).strip()
            if not data.get('release'):
                data['dist'] = None
            elif len(data['dist']) > 64:
                data['errors'].append({
                    'type': EventError.VALUE_TOO_LONG,
                    'name': 'dist',
                    'value': data['dist'],
                })
                del data['dist']
            elif _dist_re.match(data['dist']) is None:
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'dist',
                    'value': data['dist'],
                })
                del data['dist']

        if data.get('environment'):
            data['environment'] = six.text_type(data['environment'])
            if len(data['environment']) > 64:
                data['errors'].append({
                    'type': EventError.VALUE_TOO_LONG,
                    'name': 'environment',
                    'value': data['environment'],
                })
                del data['environment']

        if data.get('time_spent'):
            try:
                data['time_spent'] = int(data['time_spent'])
            except (ValueError, TypeError):
                data['errors'].append({
                    'type': EventError.INVALID_DATA,
                    'name': 'time_spent',
                    'value': data['time_spent'],
                })
                del data['time_spent']
            else:
                if data['time_spent'] > BoundedIntegerField.MAX_VALUE:
                    data['errors'].append({
                        'type': EventError.VALUE_TOO_LONG,
                        'name': 'time_spent',
                        'value': data['time_spent'],
                    })
                    del data['time_spent']

        return data
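
The tag loop above unpacks each (key, value) pair, coerces both sides to text, enforces length limits, converts spaces in keys to dashes, and drops reserved or otherwise invalid keys. A condensed, self-contained sketch of that path with placeholder limits and a hypothetical reserved-key set (the real checks come from MAX_TAG_KEY_LENGTH, MAX_TAG_VALUE_LENGTH and the tagstore validators):

MAX_KEY_LEN = 32      # placeholder for MAX_TAG_KEY_LENGTH
MAX_VALUE_LEN = 200   # placeholder for MAX_TAG_VALUE_LENGTH
RESERVED = frozenset(["release", "dist", "user", "environment"])  # assumption

def sanitize_tags(pairs):
    # Mirrors the happy path of the tag handling in validate_data above,
    # minus the data['errors'] bookkeeping.
    tags = []
    for pair in pairs:
        try:
            k, v = pair
        except ValueError:
            continue  # malformed pair
        k, v = str(k), str(v)
        if len(k) > MAX_KEY_LEN or len(v) > MAX_VALUE_LEN:
            continue  # oversized key or value
        k = k.replace(" ", "-")  # tags with spaces are converted to dashes
        if k in RESERVED:
            continue  # reserved keys are dropped
        tags.append((k, v))
    return tags

assert sanitize_tags([("my tag", "x"), ("release", "1.0"), ("ok", "y")]) == [("my-tag", "x"), ("ok", "y")]
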
Example #5
def get_lookup_key(key):
    return six.text_type("sentry:{}".format(key)) if tagstore.is_reserved_key(key) else key
Example #6
def process_issue_by_tag(data_export, file, limit=None):
    """
    Convert the tag query to a CSV, writing it to the provided file.
    Returns the suggested file name.
    (Adapted from 'src/sentry/web/frontend/group_tag_export.py')
    """
    # Get the relevant project
    try:
        payload = data_export.query_info
        project = Project.objects.get(id=payload["project_id"])
    except Project.DoesNotExist as error:
        metrics.incr("dataexport.error", instance=six.text_type(error))
        logger.error("dataexport.error: {}".format(six.text_type(error)))
        raise DataExportError("Requested project does not exist")

    # Get the relevant issue
    try:
        group, _ = get_group_with_redirect(
            payload["group_id"], queryset=Group.objects.filter(project=project)
        )
    except Group.DoesNotExist as error:
        metrics.incr("dataexport.error", instance=six.text_type(error))
        logger.error("dataexport.error: {}".format(six.text_type(error)))
        raise DataExportError("Requested issue does not exist")

    # Get the relevant key
    key = payload["key"]
    lookup_key = six.text_type("sentry:{}").format(key) if tagstore.is_reserved_key(key) else key

    # If the key is the 'user' tag, attach the event user
    def attach_eventuser(items):
        users = EventUser.for_tags(group.project_id, [i.value for i in items])
        for item in items:
            item._eventuser = users.get(item.value)

    # Create the fields/callback lists
    if key == "user":
        callbacks = [attach_eventuser]
        fields = [
            "value",
            "id",
            "email",
            "username",
            "ip_address",
            "times_seen",
            "last_seen",
            "first_seen",
        ]
    else:
        callbacks = []
        fields = ["value", "times_seen", "last_seen", "first_seen"]

    # Example file name: ISSUE_BY_TAG-project10-user__721.csv
    file_details = six.text_type("{}-{}__{}").format(project.slug, key, data_export.id)
    file_name = get_file_name(ExportQueryType.ISSUE_BY_TAG_STR, file_details)

    # Iterate through all the GroupTagValues
    writer = create_writer(file, fields)
    iteration = 0
    with snuba_error_handler():
        while True:
            offset = SNUBA_MAX_RESULTS * iteration
            next_offset = SNUBA_MAX_RESULTS * (iteration + 1)
            gtv_list = tagstore.get_group_tag_value_iter(
                project_id=group.project_id,
                group_id=group.id,
                environment_id=None,
                key=lookup_key,
                callbacks=callbacks,
                offset=offset,
            )
            if len(gtv_list) == 0:
                break
            gtv_list_raw = [serialize_issue_by_tag(key, item) for item in gtv_list]
            if limit and limit < next_offset:
                # Since the next offset will pass the limit, write the remainder and quit
                writer.writerows(gtv_list_raw[: limit % SNUBA_MAX_RESULTS])
                break
            else:
                writer.writerows(gtv_list_raw)
                iteration += 1
    return file_name
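
The loop above pages through results in fixed-size chunks and trims the final chunk when a row limit is set. A standalone sketch of just that pagination arithmetic, with a stub page fetcher standing in for tagstore.get_group_tag_value_iter, a plain list standing in for the CSV writer, and PAGE_SIZE standing in for SNUBA_MAX_RESULTS (all names below are illustrative):

PAGE_SIZE = 1000  # stand-in for SNUBA_MAX_RESULTS

def collect_rows(fetch_page, limit=None):
    # Same chunking/limit logic as the export loop above, without Snuba,
    # serialization, or the error-handling context manager.
    rows = []
    iteration = 0
    while True:
        offset = PAGE_SIZE * iteration
        next_offset = PAGE_SIZE * (iteration + 1)
        page = fetch_page(offset=offset)
        if len(page) == 0:
            break
        if limit and limit < next_offset:
            # The next offset would pass the limit, so keep only the remainder.
            rows.extend(page[: limit % PAGE_SIZE])
            break
        rows.extend(page)
        iteration += 1
    return rows

# Example: 2500 available rows, capped at 1500.
data = list(range(2500))
assert len(collect_rows(lambda offset: data[offset:offset + PAGE_SIZE], limit=1500)) == 1500
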
Example #7
def get_lookup_key(key):
    return str(f"sentry:{key}") if tagstore.is_reserved_key(key) else key
Example #8
def process_issue_by_tag(data_export, file):
    """
    Convert the tag query to a CSV, writing it to the provided file.
    Returns the suggested file name.
    (Adapted from 'src/sentry/web/frontend/group_tag_export.py')
    """
    # Get the relevant project
    payload = data_export.query_info
    project = Project.objects.get(id=payload["project_id"])

    # Get the relevant issue
    group, _ = get_group_with_redirect(
        payload["group_id"], queryset=Group.objects.filter(project=project)
    )

    # Get the relevant key
    key = payload["key"]
    lookup_key = u"sentry:{0}".format(key) if tagstore.is_reserved_key(key) else key

    # If the key is the 'user' tag, attach the event user
    def attach_eventuser(items):
        users = EventUser.for_tags(group.project_id, [i.value for i in items])
        for item in items:
            item._eventuser = users.get(item.value)

    # Create the fields/callback lists
    if key == "user":
        callbacks = [attach_eventuser]
        fields = [
            "value",
            "id",
            "email",
            "username",
            "ip_address",
            "times_seen",
            "last_seen",
            "first_seen",
        ]
    else:
        callbacks = []
        fields = ["value", "times_seen", "last_seen", "first_seen"]

    # Example file name: ISSUE_BY_TAG-project10-user__721.csv
    file_details = u"{}-{}__{}".format(project.slug, key, data_export.id)
    file_name = get_file_name(ExportQueryType.ISSUE_BY_TAG_STR, file_details)

    # Iterate through all the GroupTagValues
    writer = create_writer(file, fields)
    iteration = 0
    while True:
        gtv_list = tagstore.get_group_tag_value_iter(
            project_id=group.project_id,
            group_id=group.id,
            environment_id=None,
            key=lookup_key,
            callbacks=callbacks,
            offset=SNUBA_MAX_RESULTS * iteration,
        )
        gtv_list_raw = [serialize_issue_by_tag(key, item) for item in gtv_list]
        if len(gtv_list_raw) == 0:
            break
        writer.writerows(gtv_list_raw)
        iteration += 1
    return file_name
Example #9
    def validate_data(self, data):
        data['errors'] = []

        if data.get('culprit'):
            if not isinstance(data['culprit'], six.string_types):
                raise APIForbidden('Invalid value for culprit')

        if not data.get('event_id'):
            data['event_id'] = uuid.uuid4().hex
        elif not isinstance(data['event_id'], six.string_types):
            raise APIForbidden('Invalid value for event_id')

        if len(data['event_id']) > 32:
            self.log.debug(
                'Discarded value for event_id due to length (%d chars)',
                len(data['event_id'])
            )
            data['errors'].append(
                {
                    'type': EventError.VALUE_TOO_LONG,
                    'name': 'event_id',
                    'value': data['event_id'],
                }
            )
            data['event_id'] = uuid.uuid4().hex
        elif not is_event_id(data['event_id']):
            self.log.debug(
                'Discarded invalid value for event_id: %r', data['event_id'], exc_info=True
            )
            data['errors'].append(
                {
                    'type': EventError.INVALID_DATA,
                    'name': 'event_id',
                    'value': data['event_id'],
                }
            )
            data['event_id'] = uuid.uuid4().hex

        if 'timestamp' in data:
            try:
                self._process_data_timestamp(data)
            except InvalidTimestamp as e:
                self.log.debug(
                    'Discarded invalid value for timestamp: %r', data['timestamp'], exc_info=True
                )
                data['errors'].append(
                    {
                        'type': EventError.INVALID_DATA,
                        'name': 'timestamp',
                        'value': data['timestamp'],
                    }
                )
                del data['timestamp']

        if 'fingerprint' in data:
            try:
                data['fingerprint'] = self._process_fingerprint(data)
            except InvalidFingerprint as e:
                self.log.debug(
                    'Discarded invalid value for fingerprint: %r',
                    data['fingerprint'],
                    exc_info=True
                )
                data['errors'].append(
                    {
                        'type': EventError.INVALID_DATA,
                        'name': 'fingerprint',
                        'value': data['fingerprint'],
                    }
                )
                del data['fingerprint']

        if 'platform' not in data or data['platform'] not in VALID_PLATFORMS:
            data['platform'] = 'other'

        if data.get('modules') and type(data['modules']) != dict:
            self.log.debug(
                'Discarded invalid type for modules: %s', type(data['modules']))
            data['errors'].append(
                {
                    'type': EventError.INVALID_DATA,
                    'name': 'modules',
                    'value': data['modules'],
                }
            )
            del data['modules']

        if data.get('extra') is not None and type(data['extra']) != dict:
            self.log.debug('Discarded invalid type for extra: %s',
                           type(data['extra']))
            data['errors'].append(
                {
                    'type': EventError.INVALID_DATA,
                    'name': 'extra',
                    'value': data['extra'],
                }
            )
            del data['extra']

        if data.get('tags') is not None:
            if type(data['tags']) == dict:
                data['tags'] = list(data['tags'].items())
            elif not isinstance(data['tags'], (list, tuple)):
                self.log.debug(
                    'Discarded invalid type for tags: %s', type(data['tags']))
                data['errors'].append(
                    {
                        'type': EventError.INVALID_DATA,
                        'name': 'tags',
                        'value': data['tags'],
                    }
                )
                del data['tags']

        if data.get('tags'):
            # drop any keys/values which exceed the maximum allowed length
            tags = []
            for pair in data['tags']:
                try:
                    k, v = pair
                except ValueError:
                    self.log.debug('Discarded invalid tag value: %r', pair)
                    data['errors'].append(
                        {
                            'type': EventError.INVALID_DATA,
                            'name': 'tags',
                            'value': pair,
                        }
                    )
                    continue

                if not isinstance(k, six.string_types):
                    try:
                        k = six.text_type(k)
                    except Exception:
                        self.log.debug(
                            'Discarded invalid tag key: %r', type(k))
                        data['errors'].append(
                            {
                                'type': EventError.INVALID_DATA,
                                'name': 'tags',
                                'value': pair,
                            }
                        )
                        continue

                if not isinstance(v, six.string_types):
                    try:
                        v = six.text_type(v)
                    except Exception:
                        self.log.debug(
                            'Discarded invalid tag value: %s=%r', k, type(v))
                        data['errors'].append(
                            {
                                'type': EventError.INVALID_DATA,
                                'name': 'tags',
                                'value': pair,
                            }
                        )
                        continue

                if len(k) > MAX_TAG_KEY_LENGTH or len(v) > MAX_TAG_VALUE_LENGTH:
                    self.log.debug('Discarded invalid tag: %s=%s', k, v)
                    data['errors'].append(
                        {
                            'type': EventError.INVALID_DATA,
                            'name': 'tags',
                            'value': pair,
                        }
                    )
                    continue

                # support tags with spaces by converting them to dashes
                k = k.replace(' ', '-')

                if tagstore.is_reserved_key(k):
                    self.log.debug('Discarding reserved tag key: %s', k)
                    data['errors'].append(
                        {
                            'type': EventError.INVALID_DATA,
                            'name': 'tags',
                            'value': pair,
                        }
                    )
                    continue

                if not tagstore.is_valid_key(k):
                    self.log.debug('Discarded invalid tag key: %s', k)
                    data['errors'].append(
                        {
                            'type': EventError.INVALID_DATA,
                            'name': 'tags',
                            'value': pair,
                        }
                    )
                    continue

                if not tagstore.is_valid_value(v):
                    self.log.debug('Discarded invalid tag value: %s', v)
                    data['errors'].append(
                        {
                            'type': EventError.INVALID_DATA,
                            'name': 'tags',
                            'value': pair,
                        }
                    )
                    continue

                tags.append((k, v))
            data['tags'] = tags

        for k in list(iter(data)):
            if k in CLIENT_RESERVED_ATTRS:
                continue

            value = data.pop(k)

            if not value:
                self.log.debug('Ignored empty interface value: %s', k)
                continue

            try:
                interface = get_interface(k)
            except ValueError:
                self.log.debug('Ignored unknown attribute: %s', k)
                data['errors'].append({
                    'type': EventError.INVALID_ATTRIBUTE,
                    'name': k,
                })
                continue

            if type(value) != dict:
                # HACK(dcramer): the exception/breadcrumbs interface supports a
                # list as the value. We should change this in a new protocol
                # version.
                if type(value) in (list, tuple):
                    value = {'values': value}
                else:
                    self.log.debug(
                        'Invalid parameter for value: %s (%r)', k, type(value))
                    data['errors'].append(
                        {
                            'type': EventError.INVALID_DATA,
                            'name': k,
                            'value': value,
                        }
                    )
                    continue

            try:
                inst = interface.to_python(value)
                data[inst.get_path()] = inst.to_json()
            except Exception as e:
                if isinstance(e, InterfaceValidationError):
                    log = self.log.debug
                else:
                    log = self.log.error
                log('Discarded invalid value for interface: %s (%r)',
                    k, value, exc_info=True)
                data['errors'].append(
                    {
                        'type': EventError.INVALID_DATA,
                        'name': k,
                        'value': value,
                    }
                )

        # TODO(dcramer): ideally this logic would happen in normalize, but today
        # we don't do "validation" there (create errors)

        # message is coerced to an interface, as it's used purely as an
        # index of searchable strings
        # See GH-3248
        message = data.pop('message', None)
        if message:
            if 'sentry.interfaces.Message' not in data:
                value = {
                    'message': message,
                }
            elif not data['sentry.interfaces.Message'].get('formatted'):
                value = data['sentry.interfaces.Message']
                value['formatted'] = message
            else:
                value = None

            if value is not None:
                k = 'sentry.interfaces.Message'
                interface = get_interface(k)
                try:
                    inst = interface.to_python(value)
                    data[inst.get_path()] = inst.to_json()
                except Exception as e:
                    if isinstance(e, InterfaceValidationError):
                        log = self.log.debug
                    else:
                        log = self.log.error
                    log('Discarded invalid value for interface: %s (%r)',
                        k, value, exc_info=True)
                    data['errors'].append(
                        {
                            'type': EventError.INVALID_DATA,
                            'name': k,
                            'value': value,
                        }
                    )

        level = data.get('level') or DEFAULT_LOG_LEVEL
        if isinstance(level, six.string_types) and not level.isdigit():
            # assume it's something like 'warning'
            try:
                data['level'] = LOG_LEVELS_MAP[level]
            except KeyError as e:
                self.log.debug('Discarded invalid level value: %s', level)
                data['errors'].append(
                    {
                        'type': EventError.INVALID_DATA,
                        'name': 'level',
                        'value': level,
                    }
                )
                data['level'] = LOG_LEVELS_MAP.get(
                    DEFAULT_LOG_LEVEL, DEFAULT_LOG_LEVEL)

        if data.get('release'):
            data['release'] = six.text_type(data['release'])
            if len(data['release']) > 64:
                data['errors'].append(
                    {
                        'type': EventError.VALUE_TOO_LONG,
                        'name': 'release',
                        'value': data['release'],
                    }
                )
                del data['release']

        if data.get('dist'):
            data['dist'] = six.text_type(data['dist']).strip()
            if not data.get('release'):
                data['dist'] = None
            elif len(data['dist']) > 64:
                data['errors'].append(
                    {
                        'type': EventError.VALUE_TOO_LONG,
                        'name': 'dist',
                        'value': data['dist'],
                    }
                )
                del data['dist']
            elif _dist_re.match(data['dist']) is None:
                data['errors'].append(
                    {
                        'type': EventError.INVALID_DATA,
                        'name': 'dist',
                        'value': data['dist'],
                    }
                )
                del data['dist']

        if data.get('environment'):
            data['environment'] = six.text_type(data['environment'])
            if len(data['environment']) > 64:
                data['errors'].append(
                    {
                        'type': EventError.VALUE_TOO_LONG,
                        'name': 'environment',
                        'value': data['environment'],
                    }
                )
                del data['environment']

        if data.get('time_spent'):
            try:
                data['time_spent'] = int(data['time_spent'])
            except (ValueError, TypeError):
                data['errors'].append(
                    {
                        'type': EventError.INVALID_DATA,
                        'name': 'time_spent',
                        'value': data['time_spent'],
                    }
                )
                del data['time_spent']
            else:
                if data['time_spent'] > BoundedIntegerField.MAX_VALUE:
                    data['errors'].append(
                        {
                            'type': EventError.VALUE_TOO_LONG,
                            'name': 'time_spent',
                            'value': data['time_spent'],
                        }
                    )
                    del data['time_spent']

        return data