Example #1
def test_get_sdk_from_event():
    sdk_info = get_sdk_from_event(
        {
            'debug_meta': {
                'sdk_info': {
                    'sdk_name': 'iOS',
                    'version_major': 9,
                    'version_minor': 3,
                    'version_patchlevel': 0,
                }
            }
        }
    )
    assert sdk_info['sdk_name'] == 'iOS'
    assert sdk_info['version_major'] == 9
    assert sdk_info['version_minor'] == 3
    assert sdk_info['version_patchlevel'] == 0

    sdk_info = get_sdk_from_event(
        {
            'contexts': {
                'os': {
                    'type': 'os',
                    'name': 'iOS',
                    'version': '9.3.1.1234',
                }
            }
        }
    )

    assert sdk_info['sdk_name'] == 'iOS'
    assert sdk_info['version_major'] == 9
    assert sdk_info['version_minor'] == 3
    assert sdk_info['version_patchlevel'] == 1
Example #2
def test_get_sdk_from_event():
    sdk_info = get_sdk_from_event({
        "debug_meta": {
            "sdk_info": {
                "sdk_name": "iOS",
                "version_major": 9,
                "version_minor": 3,
                "version_patchlevel": 0,
            }
        }
    })
    assert sdk_info["sdk_name"] == "iOS"
    assert sdk_info["version_major"] == 9
    assert sdk_info["version_minor"] == 3
    assert sdk_info["version_patchlevel"] == 0

    sdk_info = get_sdk_from_event({
        "contexts": {
            "os": {
                "type": "os",
                "name": "iOS",
                "version": "9.3.1.1234"
            }
        }
    })

    assert sdk_info["sdk_name"] == "iOS"
    assert sdk_info["version_major"] == 9
    assert sdk_info["version_minor"] == 3
    assert sdk_info["version_patchlevel"] == 1
Example #3
    def __init__(self, *args, **kwargs):
        StacktraceProcessor.__init__(self, *args, **kwargs)

        # If true, the project has been opted into using the symbolicator
        # service for native symbolication, which also means symbolic is not
        # used at all anymore.
        # The (iOS) symbolserver is still used regardless of this value.
        self.use_symbolicator = _is_symbolicator_enabled(
            self.project, self.data)

        metrics.incr('native.use_symbolicator',
                     tags={'value': self.use_symbolicator})

        self.arch = cpu_name_from_data(self.data)
        self.signal = signal_from_data(self.data)

        self.sym = None
        self.difs_referenced = set()

        images = get_path(self.data,
                          'debug_meta',
                          'images',
                          default=(),
                          filter=self._is_valid_image)

        if images:
            self.available = True
            self.sdk_info = get_sdk_from_event(self.data)
            self.object_lookup = ObjectLookup(images)
            self.images = images
        else:
            self.available = False
Example #4
    def __init__(self, *args, **kwargs):
        StacktraceProcessor.__init__(self, *args, **kwargs)

        # If true, the project has been opted into using the symbolicator
        # service for native symbolication, which also means symbolic is not
        # used at all anymore.
        # The (iOS) symbolserver is still used regardless of this value.
        self.use_symbolicator = _is_symbolicator_enabled(self.project, self.data)

        metrics.incr('native.use_symbolicator', tags={'value': self.use_symbolicator})

        self.arch = cpu_name_from_data(self.data)
        self.signal = signal_from_data(self.data)

        self.sym = None
        self.difs_referenced = set()

        images = get_path(self.data, 'debug_meta', 'images', default=(),
                          filter=self._is_valid_image)

        if images:
            self.available = True
            self.sdk_info = get_sdk_from_event(self.data)
            self.object_lookup = ObjectLookup(images)
            self.images = images
        else:
            self.available = False
Example #5
def _merge_minidump_response(data, response):
    data["platform"] = "native"
    if response.get("crashed") is not None:
        data["level"] = "fatal" if response["crashed"] else "info"

    validate_and_set_timestamp(data, response.get("timestamp"))

    if response.get("system_info"):
        _merge_system_info(data, response["system_info"])

    sdk_info = get_sdk_from_event(data)

    images = []
    set_path(data, "debug_meta", "images", value=images)

    for complete_image in response["modules"]:
        image = {}
        _merge_image(image, complete_image, sdk_info,
                     lambda e: write_error(e, data))
        images.append(image)

    # Extract the crash reason and info
    data_exception = get_path(data, "exception", "values", 0)
    exc_value = ("Assertion Error: %s" % response.get("assertion")
                 if response.get("assertion") else "Fatal Error: %s" %
                 response.get("crash_reason"))
    data_exception["value"] = exc_value
    data_exception["type"] = response.get("crash_reason")

    data_threads = []
    if response["stacktraces"]:
        data["threads"] = {"values": data_threads}
    else:
        error = SymbolicationFailed(message="minidump has no thread list",
                                    type=EventError.NATIVE_SYMBOLICATOR_FAILED)
        write_error(error, data)

    for complete_stacktrace in response["stacktraces"]:
        is_requesting = complete_stacktrace.get("is_requesting")
        thread_id = complete_stacktrace.get("thread_id")

        data_thread = {"id": thread_id, "crashed": is_requesting}
        data_threads.append(data_thread)

        if is_requesting:
            data_exception["thread_id"] = thread_id
            data_stacktrace = data_exception.setdefault("stacktrace", {})
            data_stacktrace["frames"] = []
        else:
            data_thread["stacktrace"] = data_stacktrace = {"frames": []}

        if complete_stacktrace.get("registers"):
            data_stacktrace["registers"] = complete_stacktrace["registers"]

        for complete_frame in reversed(complete_stacktrace["frames"]):
            new_frame = {}
            _merge_frame(new_frame, complete_frame)
            data_stacktrace["frames"].append(new_frame)
Example #6
    def __init__(self, *args, **kwargs):
        StacktraceProcessor.__init__(self, *args, **kwargs)
        debug_meta = self.data.get('debug_meta')
        self.sym = None
        if debug_meta:
            self.available = True
            self.debug_meta = debug_meta
            self.sdk_info = get_sdk_from_event(self.data)
        else:
            self.available = False
Example #7
def merge_symbolicator_minidump_response(data, response):
    sdk_info = get_sdk_from_event(data)

    # TODO(markus): Add OS context here when `merge_process_state_event` is no
    # longer called for symbolicator projects

    images = []
    set_path(data, 'debug_meta', 'images', value=images)

    for complete_image in response['modules']:
        image = {}
        merge_symbolicator_image(
            image, complete_image, sdk_info,
            lambda e: handle_symbolication_failed(e, data=data))
        images.append(image)

    data_threads = []
    data['threads'] = {'values': data_threads}

    data_exception = get_path(data, 'exception', 'values', 0)

    for complete_stacktrace in response['stacktraces']:
        is_requesting = complete_stacktrace.get('is_requesting')
        thread_id = complete_stacktrace.get('thread_id')

        data_thread = {
            'id': thread_id,
            'crashed': is_requesting,
        }
        data_threads.append(data_thread)

        if is_requesting:
            data_stacktrace = get_path(data_exception, 'stacktrace')
            assert isinstance(data_stacktrace, dict), data_stacktrace
            # Make an exemption specifically for Unreal portable callstacks
            # TODO(markus): Allow overriding stacktrace more generically
            # (without looking into unreal context) once we no longer parse
            # minidump in the endpoint (right now we can't distinguish that
            # from user json).
            if data_stacktrace['frames'] and is_unreal_exception_stacktrace(
                    data):
                continue
            data_stacktrace['frames'] = []
        else:
            data_thread['stacktrace'] = data_stacktrace = {'frames': []}

        if complete_stacktrace.get('registers'):
            data_stacktrace['registers'] = complete_stacktrace['registers']

        for complete_frame in reversed(complete_stacktrace['frames']):
            new_frame = {}
            merge_symbolicated_frame(new_frame, complete_frame)
            data_stacktrace['frames'].append(new_frame)
Example #8
    def __init__(self, *args, **kwargs):
        StacktraceProcessor.__init__(self, *args, **kwargs)
        debug_meta = self.data.get('debug_meta')
        self.cpu_name = cpu_name_from_data(self.data)
        self.sym = None
        if debug_meta:
            self.available = True
            self.debug_meta = debug_meta
            self.sdk_info = get_sdk_from_event(self.data)
            self.image_lookup = ImageLookup(self.debug_meta['images'])
        else:
            self.available = False
Example #9
def merge_symbolicator_minidump_response(data, response):
    sdk_info = get_sdk_from_event(data)

    # TODO(markus): Add OS context here when `merge_process_state_event` is no
    # longer called for symbolicator projects

    images = []
    set_path(data, 'debug_meta', 'images', value=images)

    for complete_image in response['modules']:
        image = {}
        merge_symbolicator_image(
            image, complete_image, sdk_info,
            lambda e: handle_symbolication_failed(e, data=data)
        )
        images.append(image)

    data_threads = []
    data['threads'] = {'values': data_threads}

    data_exception = get_path(data, 'exception', 'values', 0)

    for complete_stacktrace in response['stacktraces']:
        is_requesting = complete_stacktrace.get('is_requesting')
        thread_id = complete_stacktrace.get('thread_id')

        data_thread = {
            'id': thread_id,
            'crashed': is_requesting,
        }
        data_threads.append(data_thread)

        if is_requesting:
            data_stacktrace = get_path(data_exception, 'stacktrace')
            assert isinstance(data_stacktrace, dict), data_stacktrace
            # Make an exemption specifically for Unreal portable callstacks
            # TODO(markus): Allow overriding stacktrace more generically
            # (without looking into unreal context) once we no longer parse
            # minidump in the endpoint (right now we can't distinguish that
            # from user json).
            if data_stacktrace['frames'] and is_unreal_exception_stacktrace(data):
                continue
            del data_stacktrace['frames'][:]
        else:
            data_thread['stacktrace'] = data_stacktrace = {'frames': []}

        if complete_stacktrace.get('registers'):
            data_stacktrace['registers'] = complete_stacktrace['registers']

        for complete_frame in reversed(complete_stacktrace['frames']):
            new_frame = {}
            merge_symbolicated_frame(new_frame, complete_frame)
            data_stacktrace['frames'].append(new_frame)
Example #10
    def __init__(self, *args, **kwargs):
        StacktraceProcessor.__init__(self, *args, **kwargs)
        debug_meta = self.data.get('debug_meta')
        self.arch = cpu_name_from_data(self.data)
        self.sym = None
        self.difs_referenced = set()
        if debug_meta:
            self.available = True
            self.debug_meta = debug_meta
            self.sdk_info = get_sdk_from_event(self.data)
            self.object_lookup = ObjectLookup(
                [img for img in self.debug_meta['images'] if img['type'] in self.supported_images]
            )
        else:
            self.available = False
Example #11
    def __init__(self, *args, **kwargs):
        StacktraceProcessor.__init__(self, *args, **kwargs)
        debug_meta = self.data.get('debug_meta')
        self.arch = cpu_name_from_data(self.data)
        self.sym = None
        self.dsyms_referenced = set()
        if debug_meta:
            self.available = True
            self.debug_meta = debug_meta
            self.sdk_info = get_sdk_from_event(self.data)
            self.object_lookup = ObjectLookup(
                [img for img in self.debug_meta['images'] if img['type'] in self.supported_images]
            )
        else:
            self.available = False
Example #12
    def __init__(self, *args, **kwargs):
        StacktraceProcessor.__init__(self, *args, **kwargs)

        self.arch = cpu_name_from_data(self.data)
        self.sym = None
        self.difs_referenced = set()

        images = get_path(self.data, 'debug_meta', 'images', default=(),
                          filter=self._is_valid_image)

        if images:
            self.available = True
            self.sdk_info = get_sdk_from_event(self.data)
            self.object_lookup = ObjectLookup(images)
        else:
            self.available = False
Example #13
    def __init__(self, *args, **kwargs):
        StacktraceProcessor.__init__(self, *args, **kwargs)

        self.arch = cpu_name_from_data(self.data)
        self.sym = None
        self.difs_referenced = set()

        images = get_path(self.data, 'debug_meta', 'images', default=(),
                          filter=(lambda img: img and img.get('type') in self.supported_images))

        if images:
            self.available = True
            self.sdk_info = get_sdk_from_event(self.data)
            self.object_lookup = ObjectLookup(images)
        else:
            self.available = False
Example #14
    def __init__(self, *args, **kwargs):
        StacktraceProcessor.__init__(self, *args, **kwargs)
        debug_meta = self.data.get('debug_meta')
        self.cpu_name = cpu_name_from_data(self.data)
        self.sym = None
        self.dsyms_referenced = set()
        if debug_meta:
            self.available = True
            self.debug_meta = debug_meta
            self.sdk_info = get_sdk_from_event(self.data)
            self.image_lookup = ImageLookup([
                img for img in self.debug_meta['images']
                if img['type'] == 'apple'
            ])
        else:
            self.available = False
Example #15
    def normalize(self, request_env=None):
        request_env = request_env or {}
        data = self.data
        errors = data['errors'] = []

        # Ignore event meta data for now.
        data.pop('_meta', None)

        # Before validating with a schema, attempt to cast values to their desired types
        # so that the schema doesn't have to take every type variation into account.
        text = six.text_type
        fp_types = six.string_types + six.integer_types + (float, )

        def to_values(v):
            return {'values': v} if v and isinstance(v, (tuple, list)) else v

        def stringify(f):
            if isinstance(f, float):
                return text(int(f)) if abs(f) < (1 << 53) else None
            return text(f)

        casts = {
            'environment': lambda v: text(v) if v is not None else v,
            'fingerprint': lambda v: list(x for x in map(stringify, v) if x is not None) if isinstance(v, list) and all(isinstance(f, fp_types) for f in v) else v,
            'release': lambda v: text(v) if v is not None else v,
            'dist': lambda v: text(v).strip() if v is not None else v,
            'time_spent': lambda v: int(v) if v is not None else v,
            'tags': lambda v: [(text(v_k).replace(' ', '-').strip(), text(v_v).strip()) for (v_k, v_v) in dict(v).items()],
            'timestamp': lambda v: process_timestamp(v),
            'platform': lambda v: v if v in VALID_PLATFORMS else 'other',
            'logentry': lambda v: v if isinstance(v, dict) else {'message': v},

            # These can be sent as lists and need to be converted to {'values': [...]}
            'exception': to_values,
            'breadcrumbs': to_values,
            'threads': to_values,
        }

        for c in casts:
            if c in data:
                try:
                    data[c] = casts[c](data[c])
                except InvalidTimestamp as it:
                    errors.append({'type': it.args[0], 'name': c, 'value': data[c]})
                    del data[c]
                except Exception as e:
                    errors.append({'type': EventError.INVALID_DATA, 'name': c, 'value': data[c]})
                    del data[c]

        # raw 'message' is coerced to the Message interface, as it's used as a pure index of
        # searchable strings. If both a raw 'message' and a Message interface exist, try to
        # add the former as the 'formatted' attribute of the latter.
        # See GH-3248
        msg_str = data.pop('message', None)
        if msg_str:
            msg_if = data.get('logentry')
            msg_meta = data.get('_meta', {}).get('message')

            if not msg_if:
                msg_if = data['logentry'] = {'message': msg_str}
                if msg_meta:
                    data.setdefault('_meta', {}).setdefault('logentry', {})['message'] = msg_meta

            if msg_if.get('message') != msg_str:
                if not msg_if.get('formatted'):
                    msg_if['formatted'] = msg_str
                    if msg_meta:
                        data.setdefault('_meta', {}).setdefault(
                            'logentry', {})['formatted'] = msg_meta

        # Fill in ip addresses marked as {{auto}}
        client_ip = request_env.get('client_ip')
        if client_ip:
            if get_path(data, ['sentry.interfaces.Http', 'env', 'REMOTE_ADDR']) == '{{auto}}':
                data['sentry.interfaces.Http']['env']['REMOTE_ADDR'] = client_ip

            if get_path(data, ['request', 'env', 'REMOTE_ADDR']) == '{{auto}}':
                data['request']['env']['REMOTE_ADDR'] = client_ip

            if get_path(data, ['sentry.interfaces.User', 'ip_address']) == '{{auto}}':
                data['sentry.interfaces.User']['ip_address'] = client_ip

            if get_path(data, ['user', 'ip_address']) == '{{auto}}':
                data['user']['ip_address'] = client_ip

        # Validate main event body and tags against schema.
        # XXX(ja): jsonschema does not like CanonicalKeyDict, so we need to pass
        #          in the inner data dict.
        is_valid, event_errors = validate_and_default_interface(data.data, 'event')
        errors.extend(event_errors)
        if 'tags' in data:
            is_valid, tag_errors = validate_and_default_interface(data['tags'], 'tags', name='tags')
            errors.extend(tag_errors)

        # Validate interfaces
        for k in list(iter(data)):
            if k in CLIENT_RESERVED_ATTRS:
                continue

            value = data.pop(k)

            if not value:
                self.logger.debug('Ignored empty interface value: %s', k)
                continue

            try:
                interface = get_interface(k)
            except ValueError:
                self.logger.debug('Ignored unknown attribute: %s', k)
                errors.append({'type': EventError.INVALID_ATTRIBUTE, 'name': k})
                continue

            try:
                inst = interface.to_python(value)
                data[inst.get_path()] = inst.to_json()
            except Exception as e:
                log = self.logger.debug if isinstance(
                    e, InterfaceValidationError) else self.logger.error
                log('Discarded invalid value for interface: %s (%r)', k, value, exc_info=True)
                errors.append({'type': EventError.INVALID_DATA, 'name': k, 'value': value})

        # Additional data coercion and defaulting
        level = data.get('level') or DEFAULT_LOG_LEVEL
        if isinstance(level, int) or (isinstance(level, six.string_types) and level.isdigit()):
            level = LOG_LEVELS.get(int(level), DEFAULT_LOG_LEVEL)
        data['level'] = LOG_LEVELS_MAP.get(level, LOG_LEVELS_MAP[DEFAULT_LOG_LEVEL])

        if data.get('dist') and not data.get('release'):
            data['dist'] = None

        timestamp = data.get('timestamp')
        if not timestamp:
            timestamp = timezone.now()

        # TODO (alex) can this all be replaced by utcnow?
        # it looks like the only time that this would even be hit is when timestamp
        # is not defined, as the earlier process_timestamp already converts existing
        # timestamps to floats.
        if isinstance(timestamp, datetime):
            # We must convert date to local time so Django doesn't mess it up
            # based on TIME_ZONE
            if settings.TIME_ZONE:
                if not timezone.is_aware(timestamp):
                    timestamp = timestamp.replace(tzinfo=timezone.utc)
            elif timezone.is_aware(timestamp):
                timestamp = timestamp.replace(tzinfo=None)
            timestamp = float(timestamp.strftime('%s'))

        data['timestamp'] = timestamp
        data['received'] = float(timezone.now().strftime('%s'))

        data.setdefault('checksum', None)
        data.setdefault('culprit', None)
        data.setdefault('dist', None)
        data.setdefault('environment', None)
        data.setdefault('extra', {})
        data.setdefault('fingerprint', None)
        data.setdefault('logger', DEFAULT_LOGGER_NAME)
        data.setdefault('platform', None)
        data.setdefault('server_name', None)
        data.setdefault('site', None)
        data.setdefault('tags', [])
        data.setdefault('transaction', None)

        # Fix case where legacy apps pass 'environment' as a tag
        # instead of a top level key.
        # TODO (alex) save() just reinserts the environment into the tags
        if not data.get('environment'):
            tagsdict = dict(data['tags'])
            if 'environment' in tagsdict:
                data['environment'] = tagsdict['environment']
                del tagsdict['environment']
                data['tags'] = tagsdict.items()

        # the SDKs currently do not describe event types, and we must infer
        # them from available attributes
        data['type'] = eventtypes.infer(data).key
        data['version'] = self.version

        exception = data.get('sentry.interfaces.Exception')
        stacktrace = data.get('sentry.interfaces.Stacktrace')
        if exception and len(exception['values']) == 1 and stacktrace:
            exception['values'][0]['stacktrace'] = stacktrace
            del data['sentry.interfaces.Stacktrace']

        # Exception mechanism needs SDK information to resolve proper names in
        # exception meta (such as signal names). "SDK Information" really means
        # the operating system version the event was generated on. Some
        # normalization still works without sdk_info, such as mach_exception
        # names (they can only occur on macOS).
        if exception:
            sdk_info = get_sdk_from_event(data)
            for ex in exception['values']:
                if 'mechanism' in ex:
                    normalize_mechanism_meta(ex['mechanism'], sdk_info)

        # If there is no User ip_address, update it either from the Http interface
        # or the client_ip of the request.
        auth = request_env.get('auth')
        is_public = auth and auth.is_public
        add_ip_platforms = ('javascript', 'cocoa', 'objc')

        http_ip = data.get('sentry.interfaces.Http', {}).get('env', {}).get('REMOTE_ADDR')
        if http_ip:
            data.setdefault('sentry.interfaces.User', {}).setdefault('ip_address', http_ip)
        elif client_ip and (is_public or data.get('platform') in add_ip_platforms):
            data.setdefault('sentry.interfaces.User', {}).setdefault('ip_address', client_ip)

        # Trim values
        data['logger'] = trim(data['logger'].strip(), 64)
        trim_dict(data['extra'], max_size=settings.SENTRY_MAX_EXTRA_VARIABLE_SIZE)

        if data['culprit']:
            data['culprit'] = trim(data['culprit'], MAX_CULPRIT_LENGTH)

        if data['transaction']:
            data['transaction'] = trim(data['transaction'], MAX_CULPRIT_LENGTH)

        return data
Example #16
    def normalize(self, request_env=None):
        request_env = request_env or {}
        data = self.data
        errors = data['errors'] = []

        # Before validating with a schema, attempt to cast values to their desired types
        # so that the schema doesn't have to take every type variation into account.
        text = six.text_type
        fp_types = six.string_types + six.integer_types + (float, )

        def to_values(v):
            return {'values': v} if v and isinstance(v, (tuple, list)) else v

        def convert_fingerprint(values):
            rv = values[:]
            bad_float = False
            for idx, item in enumerate(rv):
                if isinstance(item, float) and \
                   (abs(item) >= (1 << 53) or int(item) != item):
                    bad_float = True
                rv[idx] = text(item)
            if bad_float:
                metrics.incr(
                    'events.bad_float_fingerprint',
                    skip_internal=True,
                    tags={
                        'project_id': data.get('project'),
                    },
                )
            return rv

        casts = {
            'environment': lambda v: text(v) if v is not None else v,
            'fingerprint': lambda v: convert_fingerprint(v)
                if isinstance(v, list) and all(isinstance(f, fp_types) for f in v) else v,
            'release': lambda v: text(v) if v is not None else v,
            'dist': lambda v: text(v).strip() if v is not None else v,
            'time_spent': lambda v: int(v) if v is not None else v,
            'tags': lambda v: [(text(v_k).replace(' ', '-').strip(), text(v_v).strip())
                               for (v_k, v_v) in dict(v).items()],
            'timestamp': lambda v: process_timestamp(v),
            'platform': lambda v: v if v in VALID_PLATFORMS else 'other',
            'sentry.interfaces.Message': lambda v: v if isinstance(v, dict) else {'message': v},

            # These can be sent as lists and need to be converted to {'values': [...]}
            'exception': to_values,
            'sentry.interfaces.Exception': to_values,
            'breadcrumbs': to_values,
            'sentry.interfaces.Breadcrumbs': to_values,
            'threads': to_values,
            'sentry.interfaces.Threads': to_values,
        }

        for c in casts:
            if c in data:
                try:
                    data[c] = casts[c](data[c])
                except InvalidTimestamp as it:
                    errors.append({
                        'type': it.args[0],
                        'name': c,
                        'value': data[c]
                    })
                    del data[c]
                except Exception as e:
                    errors.append({
                        'type': EventError.INVALID_DATA,
                        'name': c,
                        'value': data[c]
                    })
                    del data[c]

        # raw 'message' is coerced to the Message interface, as it's used as a pure index of
        # searchable strings. If both a raw 'message' and a Message interface exist, try to
        # add the former as the 'formatted' attribute of the latter.
        # See GH-3248
        msg_str = data.pop('message', None)
        if msg_str:
            msg_if = data.setdefault('sentry.interfaces.Message',
                                     {'message': msg_str})
            if msg_if.get('message') != msg_str:
                msg_if.setdefault('formatted', msg_str)

        # Fill in ip addresses marked as {{auto}}
        client_ip = request_env.get('client_ip')
        if client_ip:
            if get_path(data, ['sentry.interfaces.Http', 'env', 'REMOTE_ADDR']) == '{{auto}}':
                data['sentry.interfaces.Http']['env']['REMOTE_ADDR'] = client_ip

            if get_path(data, ['request', 'env', 'REMOTE_ADDR']) == '{{auto}}':
                data['request']['env']['REMOTE_ADDR'] = client_ip

            if get_path(data, ['sentry.interfaces.User', 'ip_address']) == '{{auto}}':
                data['sentry.interfaces.User']['ip_address'] = client_ip

            if get_path(data, ['user', 'ip_address']) == '{{auto}}':
                data['user']['ip_address'] = client_ip

        # Validate main event body and tags against schema
        is_valid, event_errors = validate_and_default_interface(data, 'event')
        errors.extend(event_errors)
        if 'tags' in data:
            is_valid, tag_errors = validate_and_default_interface(data['tags'],
                                                                  'tags',
                                                                  name='tags')
            errors.extend(tag_errors)

        # Validate interfaces
        for k in list(iter(data)):
            if k in CLIENT_RESERVED_ATTRS:
                continue

            value = data.pop(k)

            if not value:
                self.logger.debug('Ignored empty interface value: %s', k)
                continue

            try:
                interface = get_interface(k)
            except ValueError:
                self.logger.debug('Ignored unknown attribute: %s', k)
                errors.append({
                    'type': EventError.INVALID_ATTRIBUTE,
                    'name': k
                })
                continue

            try:
                inst = interface.to_python(value)
                data[inst.get_path()] = inst.to_json()
            except Exception as e:
                log = self.logger.debug if isinstance(
                    e, InterfaceValidationError) else self.logger.error
                log('Discarded invalid value for interface: %s (%r)',
                    k,
                    value,
                    exc_info=True)
                errors.append({
                    'type': EventError.INVALID_DATA,
                    'name': k,
                    'value': value
                })

        # Additional data coercion and defaulting
        level = data.get('level') or DEFAULT_LOG_LEVEL
        if isinstance(level, int) or (isinstance(level, six.string_types)
                                      and level.isdigit()):
            level = LOG_LEVELS.get(int(level), DEFAULT_LOG_LEVEL)
        data['level'] = LOG_LEVELS_MAP.get(level,
                                           LOG_LEVELS_MAP[DEFAULT_LOG_LEVEL])

        if data.get('dist') and not data.get('release'):
            data['dist'] = None

        timestamp = data.get('timestamp')
        if not timestamp:
            timestamp = timezone.now()

        # TODO (alex) can this all be replaced by utcnow?
        # it looks like the only time that this would even be hit is when timestamp
        # is not defined, as the earlier process_timestamp already converts existing
        # timestamps to floats.
        if isinstance(timestamp, datetime):
            # We must convert date to local time so Django doesn't mess it up
            # based on TIME_ZONE
            if settings.TIME_ZONE:
                if not timezone.is_aware(timestamp):
                    timestamp = timestamp.replace(tzinfo=timezone.utc)
            elif timezone.is_aware(timestamp):
                timestamp = timestamp.replace(tzinfo=None)
            timestamp = float(timestamp.strftime('%s'))

        data['timestamp'] = timestamp
        data['received'] = float(timezone.now().strftime('%s'))

        data.setdefault('checksum', None)
        data.setdefault('culprit', None)
        data.setdefault('dist', None)
        data.setdefault('environment', None)
        data.setdefault('extra', {})
        data.setdefault('fingerprint', None)
        data.setdefault('logger', DEFAULT_LOGGER_NAME)
        data.setdefault('platform', None)
        data.setdefault('server_name', None)
        data.setdefault('site', None)
        data.setdefault('tags', [])
        data.setdefault('transaction', None)

        # Fix case where legacy apps pass 'environment' as a tag
        # instead of a top level key.
        # TODO (alex) save() just reinserts the environment into the tags
        if not data.get('environment'):
            tagsdict = dict(data['tags'])
            if 'environment' in tagsdict:
                data['environment'] = tagsdict['environment']
                del tagsdict['environment']
                data['tags'] = tagsdict.items()

        # the SDKs currently do not describe event types, and we must infer
        # them from available attributes
        data['type'] = eventtypes.infer(data).key
        data['version'] = self.version

        exception = data.get('sentry.interfaces.Exception')
        stacktrace = data.get('sentry.interfaces.Stacktrace')
        if exception and len(exception['values']) == 1 and stacktrace:
            exception['values'][0]['stacktrace'] = stacktrace
            del data['sentry.interfaces.Stacktrace']

        # Exception mechanism needs SDK information to resolve proper names in
        # exception meta (such as signal names). "SDK Information" really means
        # the operating system version the event was generated on. Some
        # normalization still works without sdk_info, such as mach_exception
        # names (they can only occur on macOS).
        if exception:
            sdk_info = get_sdk_from_event(data)
            for ex in exception['values']:
                if 'mechanism' in ex:
                    normalize_mechanism_meta(ex['mechanism'], sdk_info)

        # If there is no User ip_address, update it either from the Http interface
        # or the client_ip of the request.
        auth = request_env.get('auth')
        is_public = auth and auth.is_public
        add_ip_platforms = ('javascript', 'cocoa', 'objc')

        http_ip = data.get('sentry.interfaces.Http',
                           {}).get('env', {}).get('REMOTE_ADDR')
        if http_ip:
            data.setdefault('sentry.interfaces.User',
                            {}).setdefault('ip_address', http_ip)
        elif client_ip and (is_public
                            or data.get('platform') in add_ip_platforms):
            data.setdefault('sentry.interfaces.User',
                            {}).setdefault('ip_address', client_ip)

        # Trim values
        data['logger'] = trim(data['logger'].strip(), 64)
        trim_dict(data['extra'],
                  max_size=settings.SENTRY_MAX_EXTRA_VARIABLE_SIZE)

        if data['culprit']:
            data['culprit'] = trim(data['culprit'], MAX_CULPRIT_LENGTH)

        if data['transaction']:
            data['transaction'] = trim(data['transaction'], MAX_CULPRIT_LENGTH)

        return data
Example #17
def resolve_frame_symbols(data):
    debug_meta = data['debug_meta']
    debug_images = debug_meta['images']
    sdk_info = get_sdk_from_event(data)

    stacktraces = find_all_stacktraces(data)
    if not stacktraces:
        return

    project = Project.objects.get_from_cache(
        id=data['project'],
    )

    errors = []
    referenced_images = find_stacktrace_referenced_images(
        debug_images, [x[0] for x in stacktraces])
    sym = Symbolizer(project, debug_images,
                     referenced_images=referenced_images)

    frame = None
    idx = -1

    def report_error(exc_type, exc_value, tb):
        if exc_value.is_user_fixable or exc_value.is_sdk_failure:
            errors.append({
                'type': EventError.NATIVE_INTERNAL_FAILURE,
                'frame': frame,
                'error': u'frame #%d: %s' % (idx, exc_value)
            })
        if not exc_value.is_user_fixable:
            logger.debug('Failed to symbolicate',
                         exc_info=(exc_type, exc_value, tb))

    with sym:
        for stacktrace, container in stacktraces:
            store_raw = False

            new_frames = list(stacktrace['frames'])
            for idx, frame in enumerate(stacktrace['frames']):
                if 'image_addr' not in frame or \
                   'instruction_addr' not in frame or \
                   'symbol_addr' not in frame:
                    continue
                try:
                    # Construct a raw frame that is used by the symbolizer
                    # backend.
                    raw_frame = {
                        'object_name': frame.get('package'),
                        'object_addr': frame['image_addr'],
                        'instruction_addr': frame['instruction_addr'],
                        'symbol_addr': frame['symbol_addr'],
                    }
                    new_frame = dict(frame)

                    try:
                        sfrm = sym.symbolize_frame(raw_frame, sdk_info)
                    except SymbolicationFailed:
                        report_error(*sys.exc_info())
                    else:
                        symbol = sfrm.get('symbol_name') or \
                            new_frame.get('function') or '<unknown>'
                        function = demangle_symbol(symbol, simplified=True)

                        new_frame['function'] = function

                        # If we demangled something, store the original in the
                        # symbol portion of the frame
                        if function != symbol:
                            new_frame['symbol'] = symbol

                        new_frame['abs_path'] = sfrm.get('filename') or None
                        if new_frame['abs_path']:
                            new_frame['filename'] = posixpath.basename(
                                new_frame['abs_path'])
                        if sfrm.get('line') is not None:
                            new_frame['lineno'] = sfrm['line']
                        else:
                            new_frame['instruction_offset'] = \
                                parse_addr(sfrm['instruction_addr']) - \
                                parse_addr(sfrm['symbol_addr'])
                        if sfrm.get('column') is not None:
                            new_frame['colno'] = sfrm['column']
                        new_frame['package'] = sfrm['object_name'] \
                            or new_frame.get('package')
                        new_frame['symbol_addr'] = '0x%x' % \
                            parse_addr(sfrm['symbol_addr'])
                        new_frame['instruction_addr'] = '0x%x' % parse_addr(
                            sfrm['instruction_addr'])

                    new_frame['in_app'] = sym.is_in_app(raw_frame)
                    if new_frame != frame:
                        new_frames[idx] = new_frame
                        store_raw = True
                except Exception:
                    logger.exception('Failed to symbolicate')
                    errors.append({
                        'type': EventError.NATIVE_INTERNAL_FAILURE,
                        'error': 'The symbolicator encountered an internal failure',
                    })

            # Remember the raw stacktrace.
            if store_raw and container is not None:
                container['raw_stacktrace'] = {
                    'frames': stacktrace['frames'],
                }

            # Put the new frames in
            stacktrace['frames'] = new_frames

    if errors:
        data.setdefault('errors', []).extend(errors)

    return data
Example #18
def process_payload(data):
    project = Project.objects.get_from_cache(id=data["project"])

    symbolicator = Symbolicator(project=project, event_id=data["event_id"])

    stacktrace_infos = [
        stacktrace for stacktrace in find_stacktraces_in_data(data) if any(
            is_native_platform(x) for x in stacktrace.platforms)
    ]

    modules = native_images_from_data(data)

    stacktraces = [{
        "registers": sinfo.stacktrace.get("registers") or {},
        "frames": get_frames_for_symbolication(
            sinfo.stacktrace.get("frames") or (), data, modules),
    } for sinfo in stacktrace_infos]

    if not any(stacktrace["frames"] for stacktrace in stacktraces):
        return

    signal = signal_from_data(data)

    response = symbolicator.process_payload(stacktraces=stacktraces,
                                            modules=modules,
                                            signal=signal)

    if not _handle_response_status(data, response):
        return data

    assert len(modules) == len(response["modules"]), (modules, response)

    sdk_info = get_sdk_from_event(data)

    for raw_image, complete_image in zip(modules, response["modules"]):
        _merge_image(raw_image, complete_image, sdk_info, data)

    assert len(stacktraces) == len(response["stacktraces"]), (stacktraces,
                                                              response)

    for sinfo, complete_stacktrace in zip(stacktrace_infos,
                                          response["stacktraces"]):
        complete_frames_by_idx = {}
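        # A single raw frame may expand into several symbolicated frames (for
        # example inlined functions), so group the response frames by the index
        # of the raw frame they came from.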
        for complete_frame in complete_stacktrace.get("frames") or ():
            complete_frames_by_idx.setdefault(complete_frame["original_index"],
                                              []).append(complete_frame)

        new_frames = []
        native_frames_idx = 0

        for raw_frame in reversed(sinfo.stacktrace["frames"]):
            if not _handles_frame(data, raw_frame):
                new_frames.append(raw_frame)
                continue

            for complete_frame in complete_frames_by_idx.get(
                    native_frames_idx) or ():
                merged_frame = dict(raw_frame)
                _merge_frame(merged_frame, complete_frame)
                if merged_frame.get("package"):
                    raw_frame["package"] = merged_frame["package"]
                new_frames.append(merged_frame)

            native_frames_idx += 1

        if sinfo.container is not None and native_frames_idx > 0:
            sinfo.container["raw_stacktrace"] = {
                "frames": list(sinfo.stacktrace["frames"]),
                "registers": sinfo.stacktrace.get("registers"),
            }

        new_frames.reverse()
        sinfo.stacktrace["frames"] = new_frames

    return data
Example #19
def _merge_full_response(data, response):
    data["platform"] = "native"
    if response.get("crashed") is not None:
        data["level"] = "fatal" if response["crashed"] else "info"

    if response.get("system_info"):
        _merge_system_info(data, response["system_info"])

    sdk_info = get_sdk_from_event(data)

    images = []
    set_path(data, "debug_meta", "images", value=images)

    for complete_image in response["modules"]:
        image = {}
        _merge_image(image, complete_image, sdk_info, data)
        images.append(image)

    # Extract the crash reason and info
    data_exception = get_path(data, "exception", "values", 0)
    if response.get("assertion"):
        data_exception["value"] = "Assertion Error: {}".format(
            response["assertion"])
    elif response.get("crash_details"):
        data_exception["value"] = response["crash_details"]
    elif response.get("crash_reason"):
        data_exception["value"] = "Fatal Error: {}".format(
            response["crash_reason"])
    else:
        # We're merging a full response, so there was no initial payload
        # submitted. Assuming that this still contains the placeholder, remove
        # it rather than showing a default value.
        data_exception.pop("value", None)

    if response.get("crash_reason"):
        data_exception["type"] = response["crash_reason"]

    data_threads = []
    if response["stacktraces"]:
        data["threads"] = {"values": data_threads}
    else:
        error = SymbolicationFailed(message="minidump has no thread list",
                                    type=EventError.NATIVE_SYMBOLICATOR_FAILED)
        write_error(error, data)

    for complete_stacktrace in response["stacktraces"]:
        is_requesting = complete_stacktrace.get("is_requesting")
        thread_id = complete_stacktrace.get("thread_id")

        data_thread = {"id": thread_id, "crashed": is_requesting}
        data_threads.append(data_thread)

        if is_requesting:
            data_exception["thread_id"] = thread_id
            data_stacktrace = data_exception.setdefault("stacktrace", {})
            data_stacktrace["frames"] = []
        else:
            data_thread["stacktrace"] = data_stacktrace = {"frames": []}

        if complete_stacktrace.get("registers"):
            data_stacktrace["registers"] = complete_stacktrace["registers"]

        for complete_frame in reversed(complete_stacktrace["frames"]):
            new_frame = {}
            _merge_frame(new_frame, complete_frame)
            data_stacktrace["frames"].append(new_frame)
Example #20
def _merge_minidump_response(data, response):
    data['platform'] = 'native'
    if response.get('crashed') is not None:
        data['level'] = 'fatal' if response['crashed'] else 'info'

    validate_and_set_timestamp(data, response.get('timestamp'))

    if response.get('system_info'):
        _merge_system_info(data, response['system_info'])

    sdk_info = get_sdk_from_event(data)

    images = []
    set_path(data, 'debug_meta', 'images', value=images)

    for complete_image in response['modules']:
        image = {}
        _merge_image(image, complete_image, sdk_info,
                     lambda e: write_error(e, data))
        images.append(image)

    # Extract the crash reason and info
    data_exception = get_path(data, 'exception', 'values', 0)
    exc_value = (
        'Assertion Error: %s' % response.get('assertion')
        if response.get('assertion')
        else 'Fatal Error: %s' % response.get('crash_reason')
    )
    data_exception['value'] = exc_value
    data_exception['type'] = response.get('crash_reason')

    data_threads = []
    if response['stacktraces']:
        data['threads'] = {'values': data_threads}
    else:
        error = SymbolicationFailed(message='minidump has no thread list',
                                    type=EventError.NATIVE_SYMBOLICATOR_FAILED)
        write_error(error, data)

    for complete_stacktrace in response['stacktraces']:
        is_requesting = complete_stacktrace.get('is_requesting')
        thread_id = complete_stacktrace.get('thread_id')

        data_thread = {
            'id': thread_id,
            'crashed': is_requesting,
        }
        data_threads.append(data_thread)

        if is_requesting:
            data_exception['thread_id'] = thread_id
            data_stacktrace = data_exception.setdefault('stacktrace', {})
            data_stacktrace['frames'] = []
        else:
            data_thread['stacktrace'] = data_stacktrace = {'frames': []}

        if complete_stacktrace.get('registers'):
            data_stacktrace['registers'] = complete_stacktrace['registers']

        for complete_frame in reversed(complete_stacktrace['frames']):
            new_frame = {}
            _merge_frame(new_frame, complete_frame)
            data_stacktrace['frames'].append(new_frame)
Example #21
def resolve_frame_symbols(data):
    debug_meta = data.get('debug_meta')
    if not debug_meta:
        return

    debug_images = debug_meta['images']
    sdk_info = get_sdk_from_event(data)

    stacktraces = find_all_stacktraces(data)
    if not stacktraces:
        return

    project = Project.objects.get_from_cache(
        id=data['project'],
    )

    errors = []
    referenced_images = find_stacktrace_referenced_images(
        debug_images, stacktraces)
    sym = Symbolizer(project, debug_images,
                     referenced_images=referenced_images)

    frame = None
    idx = -1

    def report_error(e):
        errors.append({
            'type': EventError.NATIVE_INTERNAL_FAILURE,
            'frame': frame,
            'error': 'frame #%d: %s: %s' % (
                idx,
                e.__class__.__name__,
                six.text_type(e),
            )
        })

    longest_addr = 0
    processed_frames = []
    with sym:
        for stacktrace in stacktraces:
            for idx, frame in enumerate(stacktrace['frames']):
                if 'image_addr' not in frame or \
                   'instruction_addr' not in frame or \
                   'symbol_addr' not in frame:
                    continue
                try:
                    sfrm = sym.symbolize_frame({
                        'object_name': frame.get('package'),
                        'object_addr': frame['image_addr'],
                        'instruction_addr': frame['instruction_addr'],
                        'symbol_addr': frame['symbol_addr'],
                    }, sdk_info, report_error=report_error)
                    if not sfrm:
                        continue
                    # XXX: log here if symbol could not be found?
                    frame['function'] = sfrm.get('symbol_name') or \
                        frame.get('function') or '<unknown>'
                    frame['abs_path'] = sfrm.get('filename') or None
                    if frame['abs_path']:
                        frame['filename'] = posixpath.basename(frame['abs_path'])
                    if sfrm.get('line') is not None:
                        frame['lineno'] = sfrm['line']
                    else:
                        frame['instruction_offset'] = \
                            parse_addr(sfrm['instruction_addr']) - \
                            parse_addr(sfrm['symbol_addr'])
                    if sfrm.get('column') is not None:
                        frame['colno'] = sfrm['column']
                    frame['package'] = sfrm['object_name'] or frame.get('package')
                    frame['symbol_addr'] = '0x%x' % parse_addr(sfrm['symbol_addr'])
                    frame['instruction_addr'] = '0x%x' % parse_addr(
                        sfrm['instruction_addr'])
                    frame['in_app'] = is_in_app(frame)
                    longest_addr = max(longest_addr, len(frame['symbol_addr']),
                                       len(frame['instruction_addr']))
                    processed_frames.append(frame)
                except Exception:
                    logger.exception('Failed to symbolicate')
                    errors.append({
                        'type': EventError.NATIVE_INTERNAL_FAILURE,
                        'error': 'The symbolicator encountered an internal failure',
                    })

    # Pad out addresses to be of the same length
    for frame in processed_frames:
        for key in 'symbol_addr', 'instruction_addr':
            frame[key] = '0x' + frame[key][2:].rjust(longest_addr - 2, '0')

    if errors:
        data.setdefault('errors', []).extend(errors)

    return data
Example #22
def process_payload(data):
    project = Project.objects.get_from_cache(id=data['project'])

    symbolicator = Symbolicator(project=project, event_id=data['event_id'])

    stacktrace_infos = [
        stacktrace for stacktrace in find_stacktraces_in_data(data) if any(
            is_native_platform(x) for x in stacktrace.platforms)
    ]

    stacktraces = [{
        'registers': sinfo.stacktrace.get('registers') or {},
        'frames': [
            f for f in reversed(sinfo.stacktrace.get('frames') or ())
            if _handles_frame(data, f)
        ]
    } for sinfo in stacktrace_infos]

    if not any(stacktrace['frames'] for stacktrace in stacktraces):
        return

    modules = native_images_from_data(data)
    signal = signal_from_data(data)

    response = symbolicator.process_payload(
        stacktraces=stacktraces,
        modules=modules,
        signal=signal,
    )

    if not _handle_response_status(data, response):
        return data

    assert len(modules) == len(response['modules']), (modules, response)

    sdk_info = get_sdk_from_event(data)

    for raw_image, complete_image in zip(modules, response['modules']):
        _merge_image(raw_image, complete_image, sdk_info,
                     lambda e: write_error(e, data))

    assert len(stacktraces) == len(response['stacktraces']), (stacktraces,
                                                              response)

    for sinfo, complete_stacktrace in zip(stacktrace_infos,
                                          response['stacktraces']):
        complete_frames_by_idx = {}
        for complete_frame in complete_stacktrace.get('frames') or ():
            complete_frames_by_idx \
                .setdefault(complete_frame['original_index'], []) \
                .append(complete_frame)

        new_frames = []
        native_frames_idx = 0

        for raw_frame in reversed(sinfo.stacktrace['frames']):
            if not _handles_frame(data, raw_frame):
                new_frames.append(raw_frame)
                continue

            for complete_frame in complete_frames_by_idx.get(
                    native_frames_idx) or ():
                merged_frame = dict(raw_frame)
                _merge_frame(merged_frame, complete_frame)
                if merged_frame.get('package'):
                    raw_frame['package'] = merged_frame['package']
                new_frames.append(merged_frame)

            native_frames_idx += 1

        if sinfo.container is not None and native_frames_idx > 0:
            sinfo.container['raw_stacktrace'] = {
                'frames': list(sinfo.stacktrace['frames']),
                'registers': sinfo.stacktrace.get('registers')
            }

        new_frames.reverse()
        sinfo.stacktrace['frames'] = new_frames

    return data
Example #23
def resolve_frame_symbols(data):
    debug_meta = data['debug_meta']
    debug_images = debug_meta['images']
    sdk_info = get_sdk_from_event(data)

    stacktraces = find_all_stacktraces(data)
    if not stacktraces:
        return

    project = Project.objects.get_from_cache(
        id=data['project'],
    )

    errors = []
    referenced_images = find_stacktrace_referenced_images(
        debug_images, [x[0] for x in stacktraces])
    sym = Symbolizer(project, debug_images,
                     referenced_images=referenced_images)

    frame = None
    idx = -1

    def report_error(e):
        errors.append({
            'type': EventError.NATIVE_INTERNAL_FAILURE,
            'frame': frame,
            'error': 'frame #%d: %s: %s' % (
                idx,
                e.__class__.__name__,
                six.text_type(e),
            )
        })

    with sym:
        for stacktrace, container in stacktraces:
            store_raw = False

            new_frames = list(stacktrace['frames'])
            for idx, frame in enumerate(stacktrace['frames']):
                if 'image_addr' not in frame or \
                   'instruction_addr' not in frame or \
                   'symbol_addr' not in frame:
                    continue
                try:
                    sfrm = sym.symbolize_frame({
                        'object_name': frame.get('package'),
                        'object_addr': frame['image_addr'],
                        'instruction_addr': frame['instruction_addr'],
                        'symbol_addr': frame['symbol_addr'],
                    }, sdk_info, report_error=report_error)
                    if not sfrm:
                        continue
                    new_frame = dict(frame)
                    # XXX: log here if symbol could not be found?
                    symbol = sfrm.get('symbol_name') or \
                        new_frame.get('function') or '<unknown>'
                    function = demangle_symbol(symbol, simplified=True)

                    new_frame['function'] = function

                    # If we demangled something, store the original in the
                    # symbol portion of the frame
                    if function != symbol:
                        new_frame['symbol'] = symbol

                    new_frame['abs_path'] = sfrm.get('filename') or None
                    if new_frame['abs_path']:
                        new_frame['filename'] = posixpath.basename(new_frame['abs_path'])
                    if sfrm.get('line') is not None:
                        new_frame['lineno'] = sfrm['line']
                    else:
                        new_frame['instruction_offset'] = \
                            parse_addr(sfrm['instruction_addr']) - \
                            parse_addr(sfrm['symbol_addr'])
                    if sfrm.get('column') is not None:
                        new_frame['colno'] = sfrm['column']
                    new_frame['package'] = sfrm['object_name'] or new_frame.get('package')
                    new_frame['symbol_addr'] = '0x%x' % parse_addr(sfrm['symbol_addr'])
                    new_frame['instruction_addr'] = '0x%x' % parse_addr(
                        sfrm['instruction_addr'])
                    new_frame['in_app'] = is_in_app(new_frame)

                    if new_frame != frame:
                        new_frames[idx] = new_frame
                        store_raw = True
                except Exception:
                    logger.exception('Failed to symbolicate')
                    errors.append({
                        'type': EventError.NATIVE_INTERNAL_FAILURE,
                        'error': 'The symbolicator encountered an internal failure',
                    })

            # Remember the raw stacktrace.
            if store_raw and container is not None:
                container['raw_stacktrace'] = {
                    'frames': stacktrace['frames'],
                }

            # Put the new frames in
            stacktrace['frames'] = new_frames

    if errors:
        data.setdefault('errors', []).extend(errors)

    return data
Example #24
def merge_symbolicator_minidump_response(data, response):
    sdk_info = get_sdk_from_event(data)

    data['platform'] = 'native'
    if response.get('crashed') is not None:
        data['level'] = 'fatal' if response['crashed'] else 'info'

    if response.get('timestamp'):
        data['timestamp'] = float(response['timestamp'])

    if response.get('system_info'):
        merge_symbolicator_minidump_system_info(data, response['system_info'])

    images = []
    set_path(data, 'debug_meta', 'images', value=images)

    for complete_image in response['modules']:
        image = {}
        merge_symbolicator_image(
            image, complete_image, sdk_info,
            lambda e: handle_symbolication_failed(e, data=data))
        images.append(image)

    # Extract the crash reason and info
    data_exception = get_path(data, 'exception', 'values', 0)
    exc_value = (
        'Assertion Error: %s' % response.get('assertion')
        if response.get('assertion')
        else 'Fatal Error: %s' % response.get('crash_reason')
    )
    data_exception['value'] = exc_value
    data_exception['type'] = response.get('crash_reason')

    data_threads = []
    if response['stacktraces']:
        data['threads'] = {'values': data_threads}
    else:
        error = SymbolicationFailed(message='minidump has no thread list',
                                    type=EventError.NATIVE_SYMBOLICATOR_FAILED)
        handle_symbolication_failed(error, data=data)

    for complete_stacktrace in response['stacktraces']:
        is_requesting = complete_stacktrace.get('is_requesting')
        thread_id = complete_stacktrace.get('thread_id')

        data_thread = {
            'id': thread_id,
            'crashed': is_requesting,
        }
        data_threads.append(data_thread)

        if is_requesting:
            data_exception['thread_id'] = thread_id
            data_stacktrace = data_exception.setdefault('stacktrace', {})
            # Make an exemption specifically for Unreal portable callstacks
            # TODO(markus): Allow overriding stacktrace more generically
            # (without looking into unreal context) once we no longer parse
            # minidump in the endpoint (right now we can't distinguish that
            # from user json).
            if data_stacktrace.get(
                    'frames') and is_unreal_exception_stacktrace(data):
                continue
            data_stacktrace['frames'] = []
        else:
            data_thread['stacktrace'] = data_stacktrace = {'frames': []}

        if complete_stacktrace.get('registers'):
            data_stacktrace['registers'] = complete_stacktrace['registers']

        for complete_frame in reversed(complete_stacktrace['frames']):
            new_frame = {}
            merge_symbolicated_frame(new_frame, complete_frame)
            data_stacktrace['frames'].append(new_frame)