Example #1
0
def test_context_with_only_app_frames(make_exception_snapshot):
    """Snapshot an exception whose stacktraces contain only in-app frames."""
    def make_value():
        # Fresh dict per call so the two exception values do not share state.
        return {
            'type': 'ValueError',
            'value': 'hello world',
            'module': 'foo.bar',
            'stacktrace': {
                'frames': [{
                    'filename': 'foo/baz.py',
                    'lineno': 1,
                    'in_app': True,
                }]
            },
        }

    exc = {'values': [make_value(), make_value()]}
    normalize_stacktraces_for_grouping({'exception': exc})
    make_exception_snapshot(exc)
Example #2
0
    def test_normalize_with_system_frames(self):
        """A frame already marked in_app stays in_app; unmarked ones become system."""
        base_frame = {
            'abs_path': 'http://example.com/foo.js',
            'filename': 'foo.js',
            'colno': 0,
        }
        app_frame = dict(base_frame, lineno=4, in_app=True)
        system_frame = dict(base_frame, lineno=1)
        # A leading None frame must be tolerated by normalization.
        data = {'stacktrace': {'frames': [None, app_frame, system_frame]}}

        normalize_stacktraces_for_grouping(data)

        frames = data['stacktrace']['frames']
        assert frames[1]['in_app'] is True
        assert frames[2]['in_app'] is False
Example #3
0
    def test_macos_package_in_app_detection(self):
        """macOS frames under the app bundle are in_app; system dylibs are not.

        NOTE: the original method was named ``tes_macos_package_in_app_detection``,
        so pytest never collected it; renamed with the ``test_`` prefix so it
        actually runs.
        """
        data = {
            "platform": "cocoa",
            "debug_meta": {
                "images": []  # omitted
            },
            "exception": {
                "values": [{
                    "stacktrace": {
                        "frames": [
                            {
                                # Package path inside the user's app bundle.
                                "function": "-[CRLCrashAsyncSafeThread crash]",
                                "package":
                                "/Users/haza/Library/Developer/Xcode/Archives/2017-06-19/CrashProbe 19-06-2017, 08.53.xcarchive/Products/Applications/CrashProbe.app/Contents/Frameworks/CrashLib.framework/Versions/A/CrashLib",
                                "instruction_addr": 4295098388
                            },
                            {
                                # System library under /usr/lib.
                                "function": "[KSCrash ]",
                                "package": "/usr/lib/system/libdyld.dylib",
                                "instruction_addr": 4295098388,
                            },
                        ]
                    },
                    "type": "NSRangeException",
                }]
            },
            "contexts": {
                "os": {
                    "version": "10.12.5",
                    "type": "os",
                    "name": "macOS"
                }
            },
        }

        config = load_grouping_config(get_default_grouping_config_dict())
        normalize_stacktraces_for_grouping(data, grouping_config=config)

        frames = data['exception']['values'][0]['stacktrace']['frames']
        assert frames[0]['in_app'] is True
        assert frames[1]['in_app'] is False
Example #4
0
def test_event_hash_variant(insta_snapshot, config_name, test_name, log):
    """Snapshot-test grouping variants and hashes for a fixture event.

    Loads ``<test_name>.json`` from the fixture directory, applies any
    grouping enhancements declared under its ``_grouping`` key, normalizes
    the event, and snapshots a dump of every grouping variant.
    """
    with open(os.path.join(_fixture_path, test_name + '.json')) as f:
        # Renamed from ``input`` to avoid shadowing the builtin.
        input_data = json.load(f)

    # Customize grouping config from the optional "_grouping" section of
    # the fixture (enhancement base and/or custom enhancement rules).
    grouping_config = get_default_grouping_config_dict(config_name)
    grouping_info = input_data.pop('_grouping', None) or {}
    enhancement_base = grouping_info.get('enhancement_base')
    enhancements = grouping_info.get('enhancements')
    if enhancement_base or enhancements:
        enhancement_bases = [enhancement_base] if enhancement_base else []
        e = Enhancements.from_config_string(
            enhancements or '', bases=enhancement_bases)
        grouping_config['enhancements'] = e.dumps()

    # Normalize the event
    mgr = EventManager(data=input_data, grouping_config=grouping_config)
    mgr.normalize()
    data = mgr.get_data()

    # Normalize the stacktrace for grouping.  This normally happens in
    # save()
    normalize_stacktraces_for_grouping(data, load_grouping_config(grouping_config))
    evt = Event(data=data, platform=data['platform'])

    # Make sure we don't need to touch the DB here because this would
    # break stuff later on.
    evt.project = None

    rv = []
    for (key, value) in sorted(evt.get_grouping_variants().items()):
        if rv:
            rv.append('-' * 74)
        rv.append('%s:' % key)
        dump_variant(value, rv, 1)
    output = '\n'.join(rv)
    log(repr(evt.get_hashes()))

    assert evt.get_grouping_config() == grouping_config

    insta_snapshot(output)
Example #5
0
    def test_ios_package_in_app_detection(self):
        """iOS in_app detection by package path: app bundle and its
        frameworks are in_app, while libswiftCore and system dylibs are not."""
        data = {
            'platform': 'native',
            'stacktrace': {
                'frames': [
                    {
                        # Main binary inside the app bundle.
                        'package':
                        '/var/containers/Bundle/Application/B33C37A8-F933-4B6B-9FFA-152282BFDF13/SentryTest.app/SentryTest',
                        'instruction_addr': '0x1000'
                    },
                    {
                        # App-bundled framework.
                        'package':
                        '/var/containers/Bundle/Application/B33C37A8-F933-4B6B-9FFA-152282BFDF13/SentryTest.app/Frameworks/foo.dylib',
                        'instruction_addr': '0x2000'
                    },
                    {
                        # Swift runtime shipped inside the bundle.
                        'package':
                        '/var/containers/Bundle/Application/B33C37A8-F933-4B6B-9FFA-152282BFDF13/SentryTest.app/Frameworks/libswiftCore.dylib',
                        'instruction_addr': '0x3000'
                    },
                    {
                        # Plain system library.
                        'package': '/usr/lib/whatever.dylib',
                        'instruction_addr': '0x4000'
                    },
                ]
            }
        }

        config = load_grouping_config(get_default_grouping_config_dict())
        normalize_stacktraces_for_grouping(data, grouping_config=config)

        # The app's own binary should be in_app
        assert data['stacktrace']['frames'][0]['in_app'] is True
        # A framework bundled with the app should be in_app
        assert data['stacktrace']['frames'][1]['in_app'] is True
        # libswiftCore is treated as a system library, so not in_app
        assert data['stacktrace']['frames'][2]['in_app'] is False
        # Unknown object should default to not in_app
        assert data['stacktrace']['frames'][3]['in_app'] is False
Example #6
0
    def save(self, project_id, raw=False, assume_normalized=False):
        """Normalize (if needed) and persist the event for *project_id*.

        Creates or updates the Group aggregate, tags, release/dist records,
        environments and time-series counters, publishes the event to the
        event stream, and returns the saved ``Event``.  If an event with the
        same ``event_id`` already exists for the project, that duplicate is
        returned instead and no work is done.

        :param project_id: id of the project the event belongs to.
        :param raw: when True, skip the first-event signal and flag the
            event stream insert with ``skip_consume``.
        :param assume_normalized: when True, trust that the payload has
            already been normalized and skip ``self.normalize()``.
        """
        # Normalize if needed
        if not self._normalized:
            if not assume_normalized:
                self.normalize()
            self._normalized = True

        data = self._data

        project = Project.objects.get_from_cache(id=project_id)
        project._organization_cache = Organization.objects.get_from_cache(
            id=project.organization_id)

        # Check to make sure we're not about to do a bunch of work that's
        # already been done if we've processed an event with this ID. (This
        # isn't a perfect solution -- this doesn't handle ``EventMapping`` and
        # there's a race condition between here and when the event is actually
        # saved, but it's an improvement. See GH-7677.)
        try:
            event = Event.objects.get(
                project_id=project.id,
                event_id=data['event_id'],
            )
        except Event.DoesNotExist:
            pass
        else:
            # Make sure we cache on the project before returning
            event._project_cache = project
            logger.info('duplicate.found',
                        exc_info=True,
                        extra={
                            'event_uuid': data['event_id'],
                            'project_id': project.id,
                            'model': Event.__name__,
                        })
            return event

        # Pull out the culprit
        culprit = self.get_culprit()

        # Pull the toplevel data we're interested in
        level = data.get('level')

        # TODO(mitsuhiko): this code path should be gone by July 2018.
        # This is going to be fine because no code actually still depends
        # on integers here.  When we need an integer it will be converted
        # into one later.  Old workers used to send integers here.
        if level is not None and isinstance(level, six.integer_types):
            level = LOG_LEVELS[level]

        transaction_name = data.get('transaction')
        logger_name = data.get('logger')
        release = data.get('release')
        dist = data.get('dist')
        environment = data.get('environment')
        recorded_timestamp = data.get('timestamp')

        # We need to swap out the data with the one internal to the newly
        # created event object
        event = self._get_event_instance(project_id=project_id)
        self._data = data = event.data.data

        event._project_cache = project

        date = event.datetime
        platform = event.platform
        event_id = event.event_id

        if transaction_name:
            transaction_name = force_text(transaction_name)

        # Some of the data that are toplevel attributes are duplicated
        # into tags (logger, level, environment, transaction).  These are
        # different from legacy attributes which are normalized into tags
        # ahead of time (site, server_name).
        setdefault_path(data, 'tags', value=[])
        set_tag(data, 'level', level)
        if logger_name:
            set_tag(data, 'logger', logger_name)
        if environment:
            set_tag(data, 'environment', environment)
        if transaction_name:
            set_tag(data, 'transaction', transaction_name)

        if release:
            # don't allow a conflicting 'release' tag
            pop_tag(data, 'release')
            release = Release.get_or_create(
                project=project,
                version=release,
                date_added=date,
            )
            set_tag(data, 'sentry:release', release.version)

        if dist and release:
            dist = release.add_dist(dist, date)
            # don't allow a conflicting 'dist' tag
            pop_tag(data, 'dist')
            set_tag(data, 'sentry:dist', dist.name)
        else:
            dist = None

        event_user = self._get_event_user(project, data)
        if event_user:
            # don't allow a conflicting 'user' tag
            pop_tag(data, 'user')
            set_tag(data, 'sentry:user', event_user.tag_value)

        # At this point we want to normalize the in_app values in case the
        # clients did not set this appropriately so far.
        grouping_config = load_grouping_config(
            get_grouping_config_dict_for_event_data(data, project))
        normalize_stacktraces_for_grouping(data, grouping_config)

        for plugin in plugins.for_project(project, version=None):
            added_tags = safe_execute(plugin.get_tags,
                                      event,
                                      _with_transaction=False)
            if added_tags:
                # plugins should not override user provided tags
                for key, value in added_tags:
                    if get_tag(data, key) is None:
                        set_tag(data, key, value)

        for path, iface in six.iteritems(event.interfaces):
            for k, v in iface.iter_tags():
                set_tag(data, k, v)
            # Get rid of ephemeral interface data
            if iface.ephemeral:
                data.pop(iface.path, None)

        # The active grouping config was put into the event in the
        # normalize step before.  We now also make sure that the
        # fingerprint was set to `'{{ default }}'` just in case someone
        # removed it from the payload.  The call to get_hashes will then
        # look at `grouping_config` to pick the right parameters.
        data['fingerprint'] = data.get('fingerprint') or ['{{ default }}']
        apply_server_fingerprinting(
            data, get_fingerprinting_config_for_project(project))
        hashes = event.get_hashes()
        data['hashes'] = hashes

        # we want to freeze not just the metadata and type, but also the
        # derived attributes.  The reason for this is that we push this
        # data into kafka for snuba processing and our postprocessing
        # picks up the data right from the snuba topic.  For most usage
        # however the data is dynamically overridden by Event.title and
        # Event.location (See Event.as_dict)
        materialized_metadata = self.materialize_metadata()
        event_metadata = materialized_metadata['metadata']
        data.update(materialized_metadata)
        data['culprit'] = culprit

        # index components into ``Event.message``
        # See GH-3248
        event.message = self.get_search_message(event_metadata, culprit)
        received_timestamp = event.data.get('received') or float(
            event.datetime.strftime('%s'))

        # The group gets the same metadata as the event when it's flushed but
        # additionally the `last_received` key is set.  This key is used by
        # _save_aggregate.
        group_metadata = dict(materialized_metadata)
        group_metadata['last_received'] = received_timestamp
        kwargs = {
            'platform': platform,
            'message': event.message,
            'culprit': culprit,
            'logger': logger_name,
            'level': LOG_LEVELS_MAP.get(level),
            'last_seen': date,
            'first_seen': date,
            'active_at': date,
            'data': group_metadata,
        }

        if release:
            kwargs['first_release'] = release

        try:
            group, is_new, is_regression, is_sample = self._save_aggregate(
                event=event, hashes=hashes, release=release, **kwargs)
        except HashDiscarded:
            event_discarded.send_robust(
                project=project,
                sender=EventManager,
            )

            metrics.incr(
                'events.discarded',
                skip_internal=True,
                tags={
                    'organization_id': project.organization_id,
                    'platform': platform,
                },
            )
            raise
        else:
            event_saved.send_robust(
                project=project,
                event_size=event.size,
                sender=EventManager,
            )

        event.group = group
        # store a reference to the group id to guarantee validation of isolation
        event.data.bind_ref(event)

        # When an event was sampled, the canonical source of truth
        # is the EventMapping table since we aren't going to be writing out an actual
        # Event row. Otherwise, if the Event isn't being sampled, we can safely
        # rely on the Event table itself as the source of truth and ignore
        # EventMapping since it's redundant information.
        if is_sample:
            try:
                with transaction.atomic(
                        using=router.db_for_write(EventMapping)):
                    EventMapping.objects.create(project=project,
                                                group=group,
                                                event_id=event_id)
            except IntegrityError:
                logger.info('duplicate.found',
                            exc_info=True,
                            extra={
                                'event_uuid': event_id,
                                'project_id': project.id,
                                'group_id': group.id,
                                'model': EventMapping.__name__,
                            })
                return event

        environment = Environment.get_or_create(
            project=project,
            name=environment,
        )

        group_environment, is_new_group_environment = GroupEnvironment.get_or_create(
            group_id=group.id,
            environment_id=environment.id,
            defaults={
                'first_release': release if release else None,
            },
        )

        if release:
            ReleaseEnvironment.get_or_create(
                project=project,
                release=release,
                environment=environment,
                datetime=date,
            )

            ReleaseProjectEnvironment.get_or_create(
                project=project,
                release=release,
                environment=environment,
                datetime=date,
            )

            grouprelease = GroupRelease.get_or_create(
                group=group,
                release=release,
                environment=environment,
                datetime=date,
            )

        counters = [
            (tsdb.models.group, group.id),
            (tsdb.models.project, project.id),
        ]

        if release:
            counters.append((tsdb.models.release, release.id))

        tsdb.incr_multi(counters,
                        timestamp=event.datetime,
                        environment_id=environment.id)

        frequencies = [
            # (tsdb.models.frequent_projects_by_organization, {
            #     project.organization_id: {
            #         project.id: 1,
            #     },
            # }),
            # (tsdb.models.frequent_issues_by_project, {
            #     project.id: {
            #         group.id: 1,
            #     },
            # })
            (tsdb.models.frequent_environments_by_group, {
                group.id: {
                    environment.id: 1,
                },
            })
        ]

        if release:
            frequencies.append((tsdb.models.frequent_releases_by_group, {
                group.id: {
                    grouprelease.id: 1,
                },
            }))

        tsdb.record_frequency_multi(frequencies, timestamp=event.datetime)

        UserReport.objects.filter(
            project=project,
            event_id=event_id,
        ).update(
            group=group,
            environment=environment,
        )

        # save the event unless it's been sampled
        if not is_sample:
            try:
                with transaction.atomic(using=router.db_for_write(Event)):
                    event.save()
            except IntegrityError:
                logger.info('duplicate.found',
                            exc_info=True,
                            extra={
                                'event_uuid': event_id,
                                'project_id': project.id,
                                'group_id': group.id,
                                'model': Event.__name__,
                            })
                return event

            tagstore.delay_index_event_tags(
                organization_id=project.organization_id,
                project_id=project.id,
                group_id=group.id,
                environment_id=environment.id,
                event_id=event.id,
                tags=event.tags,
                date_added=event.datetime,
            )

        if event_user:
            tsdb.record_multi(
                (
                    (tsdb.models.users_affected_by_group, group.id,
                     (event_user.tag_value, )),
                    (tsdb.models.users_affected_by_project, project.id,
                     (event_user.tag_value, )),
                ),
                timestamp=event.datetime,
                environment_id=environment.id,
            )
        if release:
            if is_new:
                buffer.incr(ReleaseProject, {'new_groups': 1}, {
                    'release_id': release.id,
                    'project_id': project.id,
                })
            if is_new_group_environment:
                buffer.incr(ReleaseProjectEnvironment, {'new_issues_count': 1},
                            {
                                'project_id': project.id,
                                'release_id': release.id,
                                'environment_id': environment.id,
                            })

        safe_execute(Group.objects.add_tags,
                     group,
                     environment,
                     event.get_tags(),
                     _with_transaction=False)

        if not raw:
            if not project.first_event:
                project.update(first_event=date)
                first_event_received.send_robust(project=project,
                                                 group=group,
                                                 sender=Project)

        eventstream.insert(
            group=group,
            event=event,
            is_new=is_new,
            is_sample=is_sample,
            is_regression=is_regression,
            is_new_group_environment=is_new_group_environment,
            primary_hash=hashes[0],
            # We are choosing to skip consuming the event back
            # in the eventstream if it's flagged as raw.
            # This means that we want to publish the event
            # through the event stream, but we don't care
            # about post processing and handling the commit.
            skip_consume=raw,
        )

        metrics.timing(
            'events.latency',
            received_timestamp - recorded_timestamp,
            tags={
                'project_id': project.id,
            },
        )

        metrics.timing('events.size.data.post_save',
                       event.size,
                       tags={'project_id': project.id})

        return event
Example #7
0
    def test_ios_function_name_in_app_detection(self):
        """Well-known SDK/crash-handler function names are demoted to
        system frames even when the package is inside the app bundle."""
        app_package = (
            "/var/containers/Bundle/Application/"
            "B33C37A8-F933-4B6B-9FFA-152282BFDF13/SentryTest.app/SentryTest"
        )
        # (function name, expected in_app after normalization)
        cases = [
            ("+[RNSentry ]", False),
            ("+[SentryClient ]", False),
            ("kscrash_foobar", False),
            ("kscm_foobar", False),
            ("+[KSCrash ]", False),
            ("+[KSCrash]", True),
            ("+[KSCrashy]", True),
        ]
        frames = [
            {
                "function": name,
                "package": app_package,
                "instruction_addr": 4295098388,
            }
            for name, _ in cases
        ]
        data = {
            "platform": "cocoa",
            "debug_meta": {
                "images": []  # omitted
            },
            "exception": {
                "values": [{
                    "stacktrace": {"frames": frames},
                    "type": "NSRangeException",
                }]
            },
            "contexts": {
                "os": {
                    "version": "9.3.2",
                    "type": "os",
                    "name": "iOS"
                }
            }
        }

        config = load_grouping_config(get_default_grouping_config_dict())
        normalize_stacktraces_for_grouping(data, grouping_config=config)

        result = data['exception']['values'][0]['stacktrace']['frames']
        for frame, (_, expected) in zip(result, cases):
            assert frame['in_app'] is expected