示例#1
0
    def test_disable_plugin_when_fully_migrated(self):
        # Installing the GitHub Integration should disable the legacy
        # 'github' plugin once every plugin-linked repo is reachable
        # through the new Integration.
        self._stub_github()

        project = Project.objects.create(
            organization_id=self.organization.id,
        )

        plugin = plugins.get('github')
        plugin.enable(project)

        # Accessible to new Integration - mocked in _stub_github
        Repository.objects.create(
            organization_id=self.organization.id,
            name='Test-Organization/foo',
            url='https://github.com/Test-Organization/foo',
            provider='github',
            external_id='123',
            config={
                'name': 'Test-Organization/foo',
            },
        )

        # Enabled before
        assert 'github' in [p.slug for p in plugins.for_project(project)]

        # Run the setup flow with the task queue active so the migration
        # task executes inline.
        with self.tasks():
            self.assert_setup_flow()

        # Disabled after Integration installed
        assert 'github' not in [p.slug for p in plugins.for_project(project)]
示例#2
0
文件: group.py 项目: jonashaag/sentry
    def get_attrs(self, item_list, user):
        """Bulk-load serializer attributes for the given groups.

        Batches the per-user (bookmark, seen), assignee, user-count and
        snooze lookups into single queries, then gathers plugin
        annotations per group.
        """
        from sentry.plugins import plugins

        GroupMeta.objects.populate_cache(item_list)

        attach_foreignkey(item_list, Group.project, ['team'])

        # Per-user state is only meaningful for signed-in users.
        if user.is_authenticated() and item_list:
            bookmarks = set(GroupBookmark.objects.filter(
                user=user,
                group__in=item_list,
            ).values_list('group_id', flat=True))
            seen_groups = dict(GroupSeen.objects.filter(
                user=user,
                group__in=item_list,
            ).values_list('group_id', 'last_seen'))
        else:
            bookmarks = set()
            seen_groups = {}

        # group_id -> assigned User
        assignees = dict(
            (a.group_id, a.user)
            for a in GroupAssignee.objects.filter(
                group__in=item_list,
            ).select_related('user')
        )

        # group_id -> distinct affected-user count (sentry:user tag)
        user_counts = dict(
            GroupTagKey.objects.filter(
                group__in=item_list,
                key='sentry:user',
            ).values_list('group', 'values_seen')
        )

        # group_id -> snoozed-until timestamp
        snoozes = dict(
            GroupSnooze.objects.filter(
                group__in=item_list,
            ).values_list('group', 'until')
        )

        result = {}
        for item in item_list:
            active_date = item.active_at or item.last_seen

            # v1 plugins append into the list in place; v2 plugins
            # return an iterable of annotations.
            annotations = []
            for plugin in plugins.for_project(project=item.project, version=1):
                safe_execute(plugin.tags, None, item, annotations)
            for plugin in plugins.for_project(project=item.project, version=2):
                annotations.extend(safe_execute(plugin.get_annotations, group=item) or ())

            result[item] = {
                'assigned_to': serialize(assignees.get(item.id)),
                'is_bookmarked': item.id in bookmarks,
                # Seen only counts if it postdates the latest activity.
                'has_seen': seen_groups.get(item.id, active_date) > active_date,
                'annotations': annotations,
                'user_count': user_counts.get(item.id, 0),
                'snooze': snoozes.get(item.id),
            }
        return result
示例#3
0
    def test_disable_for_all_projects(self):
        # Exercises Migrator.disable_for_all_projects: an enabled plugin
        # must no longer appear in the project's plugin set afterwards.
        plugin = plugins.get('example')
        plugin.enable(self.project)

        assert plugin in plugins.for_project(self.project)

        self.migrator.disable_for_all_projects(plugin)

        assert plugin not in plugins.for_project(self.project)
示例#4
0
文件: group.py 项目: Qwiz/sentry
    def get_attrs(self, item_list, user):
        """Bulk-load serializer attributes for *item_list* groups.

        Prefetches bookmarks, seen timestamps, tag counts, assignees and
        plugin annotations in batched queries, then assembles one attrs
        dict per group.
        """
        from sentry.plugins import plugins

        GroupMeta.objects.populate_cache(item_list)

        attach_foreignkey(item_list, Group.project, ["team"])

        # Per-user state only exists for signed-in users.
        if user.is_authenticated() and item_list:
            bookmarks = set(
                GroupBookmark.objects.filter(user=user, group__in=item_list).values_list("group_id", flat=True)
            )
            seen_groups = dict(
                GroupSeen.objects.filter(user=user, group__in=item_list).values_list("group_id", "last_seen")
            )
        else:
            bookmarks = set()
            seen_groups = {}

        # tag key -> {group_id -> distinct value count}
        tag_counts = defaultdict(dict)
        tag_results = GroupTagKey.objects.filter(group__in=item_list).values_list("key", "group", "values_seen")
        for key, group_id, values_seen in tag_results:
            tag_counts[key][group_id] = values_seen

        assignees = dict(
            (a.group_id, a.user) for a in GroupAssignee.objects.filter(group__in=item_list).select_related("user")
        )

        result = {}
        for item in item_list:
            active_date = item.active_at or item.last_seen

            tags = {}
            for key in tag_counts.iterkeys():
                label = TAG_LABELS.get(key, key.replace("_", " ")).lower()
                # dict.get never raises KeyError, so the old try/except
                # wrapped around this lookup was dead code; the default
                # of 0 already covers missing groups.
                value = tag_counts[key].get(item.id, 0)
                tags[key] = {"label": label, "count": value}

            # v1 plugins mutate the list in place; v2 plugins return an
            # iterable of annotations.
            annotations = []
            for plugin in plugins.for_project(project=item.project, version=1):
                safe_execute(plugin.tags, None, item, annotations)
            for plugin in plugins.for_project(project=item.project, version=2):
                annotations.extend(safe_execute(plugin.get_annotations, item) or ())

            result[item] = {
                "assigned_to": serialize(assignees.get(item.id)),
                "is_bookmarked": item.id in bookmarks,
                # Seen only counts if it postdates the latest activity.
                "has_seen": seen_groups.get(item.id, active_date) > active_date,
                "tags": tags,
                "annotations": annotations,
            }
        return result
    def get_plugins(self):
        """Return notification handlers active for this project.

        Keeps v1 plugins that subclass NotificationPlugin, plus every
        notifier object a v2 plugin exposes via ``get_notifiers``.
        """
        from sentry.plugins.bases.notify import NotificationPlugin

        found = [
            p for p in plugins.for_project(self.project, version=1)
            if isinstance(p, NotificationPlugin)
        ]

        for plugin in plugins.for_project(self.project, version=2):
            notifiers = safe_execute(plugin.get_notifiers, _with_transaction=False) or ()
            found.extend(notifiers)

        return found
示例#6
0
def get_activity_notifiers(project):
    """Collect every notification handler enabled for *project*.

    v1 plugins qualify when they subclass NotificationPlugin; v2 plugins
    contribute whatever their ``get_notifiers`` hook yields.
    """
    from sentry.plugins.bases.notify import NotificationPlugin
    from sentry.plugins import plugins

    notifiers = [
        p for p in plugins.for_project(project, version=1)
        if isinstance(p, NotificationPlugin)
    ]

    for plugin in plugins.for_project(project, version=2):
        found = safe_execute(plugin.get_notifiers, _with_transaction=False) or ()
        notifiers.extend(found)

    return notifiers
示例#7
0
def plugin_is_regression(group, event):
    """Ask each project plugin whether *event* regresses *group*.

    The first plugin returning a non-None verdict wins; without any
    opinion the default is True (treat as a regression).
    """
    for plugin in plugins.for_project(event.project):
        verdict = safe_execute(
            plugin.is_regression, group, event, version=1, _with_transaction=False
        )
        if verdict is not None:
            return verdict
    return True
示例#8
0
    def test_doesnt_disable_plugin_when_partially_migrated(self):
        # The 'vsts' plugin must stay enabled while at least one of its
        # repos is NOT reachable through the new Integration.
        self.setupPluginTest()

        # Repo accessible by new Integration
        Repository.objects.create(
            organization_id=self.organization.id,
            name=self.project_a['name'],
            url=u'https://{}.visualstudio.com/_git/{}'.format(
                self.vsts_account_name,
                self.repo_name,
            ),
            provider='visualstudio',
            external_id=self.repo_id,
        )

        # Inaccessible Repo - causes plugin to stay enabled
        Repository.objects.create(
            organization_id=self.organization.id,
            name='NotReachable',
            url='https://randoaccount.visualstudio.com/Product/_git/NotReachable',
            provider='visualstudio',
            external_id='123456789',
        )

        self.assert_installation()

        # Still enabled
        assert 'vsts' in [p.slug for p in plugins.for_project(self.project)]
示例#9
0
def post_process_group(event, is_new, is_regression, is_sample, **kwargs):
    """
    Fires post processing hooks for a group.
    """
    from sentry.models import Project
    from sentry.rules.processor import RuleProcessor

    project_id = event.group.project_id
    # Tag the error-reporting context so failures here are attributable.
    Raven.tags_context({
        'project': project_id,
    })

    project = Project.objects.get_from_cache(id=project_id)

    _capture_stats(event, is_new)

    rp = RuleProcessor(event, is_new, is_regression, is_sample)
    # TODO(dcramer): ideally this would fanout, but serializing giant
    # objects back and forth isn't super efficient
    for callback, futures in rp.apply():
        safe_execute(callback, event, futures)

    # Run each plugin's post-process hook synchronously.
    for plugin in plugins.for_project(project):
        plugin_post_process_group(
            plugin_slug=plugin.slug,
            event=event,
            is_new=is_new,
            # NOTE(review): 'is_regresion' is misspelled but presumably
            # mirrors the callee's parameter name — confirm before fixing.
            is_regresion=is_regression,
            is_sample=is_sample,
        )
示例#10
0
def post_process_group(event, is_new, is_regression, is_sample, **kwargs):
    """
    Fires post processing hooks for a group.
    """
    from sentry.models import Project
    from sentry.rules.processor import RuleProcessor

    project = Project.objects.get_from_cache(id=event.group.project_id)

    _capture_stats(event, is_new)

    # Optional explore-code recorder behind a settings flag.
    if settings.SENTRY_ENABLE_EXPLORE_CODE:
        record_affected_code.delay(event=event)

    record_affected_user.delay(event=event)

    record_additional_tags(event=event)

    rp = RuleProcessor(event, is_new, is_regression, is_sample)
    # TODO(dcramer): ideally this would fanout, but serializing giant
    # objects back and forth isn't super efficient
    for callback, futures in rp.apply():
        safe_execute(callback, event, futures)

    # Run each plugin's post-process hook synchronously.
    for plugin in plugins.for_project(project):
        plugin_post_process_group(
            plugin_slug=plugin.slug,
            event=event,
            is_new=is_new,
            # NOTE(review): 'is_regresion' is misspelled but presumably
            # mirrors the callee's parameter name — confirm before fixing.
            is_regresion=is_regression,
            is_sample=is_sample,
        )
示例#11
0
def plugin_is_regression(group, event):
    """Poll project plugins for a regression verdict; default is True."""
    for plugin in plugins.for_project(event.project):
        answer = safe_execute(plugin.is_regression, group, event)
        if answer is not None:
            return answer
    return True
示例#12
0
def post_process_group(event, is_new, is_regression, is_sample, **kwargs):
    """
    Fires post processing hooks for a group.
    """
    from sentry.models import Project
    from sentry.rules.processor import RuleProcessor

    project = Project.objects.get_from_cache(id=event.group.project_id)

    # Optional explore recorders behind settings flags.
    if settings.SENTRY_ENABLE_EXPLORE_CODE:
        record_affected_code.delay(event=event)

    if settings.SENTRY_ENABLE_EXPLORE_USERS:
        record_affected_user.delay(event=event)

    # Fan each plugin hook out as an async task; expire stale work after
    # five minutes so backlogs don't replay old events.
    for plugin in plugins.for_project(project):
        plugin_post_process_group.apply_async(
            kwargs={
                'plugin_slug': plugin.slug,
                'event': event,
                'is_new': is_new,
                # NOTE(review): 'is_regresion' is misspelled but presumably
                # mirrors the task's parameter name — confirm before fixing.
                'is_regresion': is_regression,
                'is_sample': is_sample,
            },
            expires=300,
        )

    rp = RuleProcessor(event, is_new, is_regression, is_sample)
    # TODO(dcramer): ideally this would fanout, but serializing giant
    # objects back and forth isn't super efficient
    for callback, futures in rp.apply():
        safe_execute(callback, event, futures)
示例#13
0
    def test_disable_plugin_when_fully_migrated(self):
        # Installing the Bitbucket Integration should disable the legacy
        # 'bitbucket' plugin once every plugin repo is reachable through it.
        project = Project.objects.create(
            organization_id=self.organization.id,
        )

        plugin = plugins.get('bitbucket')
        plugin.enable(project)

        # Accessible to new Integration
        Repository.objects.create(
            organization_id=self.organization.id,
            name='sentryuser/repo',
            url='https://bitbucket.org/sentryuser/repo',
            provider='bitbucket',
            external_id='123456',
            config={'name': 'sentryuser/repo'},
        )

        # Simulate the installation callback from Bitbucket.
        self.client.post(
            self.path,
            data=self.data_from_bitbucket,
        )

        integration = Integration.objects.get(
            provider=self.provider,
            external_id=self.client_key,
        )

        # Mock the webhook listing so the repo counts as migrated.
        responses.add(
            responses.GET,
            u'https://api.bitbucket.org/2.0/repositories/sentryuser/repo/hooks',
            json={
                'values': [{
                    'description': 'sentry-bitbucket-repo-hook',
                }],
            },
        )

        assert 'bitbucket' in [p.slug for p in plugins.for_project(project)]

        # post_install runs the migration task inline under self.tasks().
        with self.tasks():
            BitbucketIntegrationProvider().post_install(
                integration,
                self.organization,
            )

            assert 'bitbucket' not in [p.slug for p in plugins.for_project(project)]
示例#14
0
def get_actions(group, request):
    """Build (label, url, is_current) action tuples for *group*'s page."""
    project = group.project

    action_list = []
    # v1 plugins receive the running list and may return a replacement.
    for plugin in plugins.for_project(project, version=1):
        updated = safe_execute(plugin.actions, request, group, action_list)
        if updated:
            action_list = updated

    # v2 plugins yield individual actions.
    for plugin in plugins.for_project(project, version=2):
        action_list.extend(safe_execute(plugin.get_actions, request, group) or ())

    # Flag the action whose URL matches the current request path.
    return [(item[0], item[1], item[1] == request.path) for item in action_list]
示例#15
0
def get_widgets(group, request):
    """Yield rendered widget markup from every plugin that supplies one."""
    for plugin in plugins.for_project(group.project):
        response = safe_execute(plugin.widget, request, group)
        # Plugins without a widget (or failing ones) return a falsy value.
        if response:
            yield response.render(request)
示例#16
0
 def _get_context_plugins(self, request, group):
     """Serialize project-configured plugins that provide custom contexts."""
     project = group.project
     return serialize([
         plugin
         for plugin in plugins.for_project(project, version=None)
         if plugin.has_project_conf() and hasattr(plugin, 'get_custom_contexts')
         and plugin.get_custom_contexts()
     ], request.user, PluginSerializer(project))
示例#17
0
    def _get_actions(self, request, group):
        """Collect action tuples contributed by the project's plugins."""
        project = group.project

        action_list = []
        # v1 plugins may return a replacement for the whole list.
        for plugin in plugins.for_project(project, version=1):
            replacement = safe_execute(plugin.actions, request, group, action_list)
            if replacement:
                action_list = replacement

        # v2 plugins contribute actions individually.
        for plugin in plugins.for_project(project, version=2):
            action_list.extend(
                safe_execute(plugin.get_actions, request, group) or ()
            )

        return action_list
示例#18
0
def record_additional_tags(event):
    """Attach any plugin-provided tags to the event's group."""
    from sentry.models import Group

    extra = []
    for plugin in plugins.for_project(event.project, version=2):
        extra.extend(safe_execute(plugin.get_tags, event) or ())
    if extra:
        Group.objects.add_tags(event.group, extra)
示例#19
0
    def _get_available_issue_plugins(self, request, group):
        """Return issue-tracker entries available for *group*.

        Asks each v1 IssueTrackingPlugin2 to extend the running list.
        Previously a failing ``safe_execute`` (which returns None) would
        clobber the list — and ultimately return None; now a failed hook
        is skipped and earlier results survive.
        """
        project = group.project

        plugin_issues = []
        for plugin in plugins.for_project(project, version=1):
            if isinstance(plugin, IssueTrackingPlugin2):
                result = safe_execute(plugin.plugin_issues, request, group, plugin_issues,
                                      _with_transaction=False)
                # safe_execute returns None on failure; keep prior results.
                if result is not None:
                    plugin_issues = result
        return plugin_issues
示例#20
0
    def get_plugins(self):
        """Return the project's enabled notification plugins."""
        from sentry.plugins.bases.notify import NotificationPlugin

        return [
            plugin
            for plugin in plugins.for_project(self.project)
            if isinstance(plugin, NotificationPlugin)
        ]
示例#21
0
def post_process_group(group, event, **kwargs):
    """
    Fires post processing hooks for a group.
    """
    # Fan each plugin hook out as its own async task.
    for plugin in plugins.for_project(group.project):
        plugin_post_process_group.delay(
            plugin.slug, group=group, event=event, **kwargs)

    # Kick off the explore-code / explore-user recorders.
    record_affected_code.delay(group=group, event=event)
    record_affected_user.delay(group=group, event=event)
示例#22
0
def get_annotations(group, request=None):
    """Return Annotation objects gathered from v2 plugins for *group*."""
    collected = []
    for plugin in plugins.for_project(group.project, version=2):
        for payload in (safe_execute(plugin.get_annotations, group=group) or ()):
            # Build the Annotation defensively; drop entries that fail.
            built = safe_execute(Annotation, **payload)
            if built:
                collected.append(built)

    return collected
示例#23
0
文件: migrate.py 项目: yodi-n/sentry
    def call(self):
        """Disable the matching legacy plugin once all its repos migrated."""
        provider = self.integration.provider
        for project in self.projects:
            for plugin in plugins.for_project(project):
                if plugin.slug == provider:
                    # Since repos are Org-level, if they're all migrated, we
                    # can disable the Plugin for all Projects. There'd be no
                    # Repos left, associated with the Plugin.
                    if self.all_repos_migrated(plugin.slug):
                        self.disable_for_all_projects(plugin)
示例#24
0
    def call(self):
        # Walk every project and disable the legacy plugin matching this
        # integration's provider once all of its repos have migrated.
        for project in self.projects:
            for plugin in plugins.for_project(project):
                if plugin.slug != self.integration.provider:
                    continue

                if self.all_repos_migrated(plugin.slug):
                    # Since repos are Org-level, if they're all migrated, we
                    # can disable the Plugin for all Projects. There'd be no
                    # Repos left, associated with the Plugin.
                    self.disable_for_all_projects(plugin)
示例#25
0
def plugin_is_regression(group, event):
    # First plugin with a non-None verdict decides; without any opinion
    # the default is True (treat the event as a regression).
    project = event.project
    for plugin in plugins.for_project(project):
        result = safe_execute(plugin.is_regression,
                              group,
                              event,
                              version=1,
                              _with_transaction=False)
        if result is not None:
            return result
    return True
示例#26
0
def get_annotations(group, request=None):
    # Collect annotation payloads from v2 plugins and wrap each in an
    # Annotation object; failures in either step are skipped silently.
    project = group.project

    annotation_list = []
    for plugin in plugins.for_project(project, version=2):
        for value in (safe_execute(plugin.get_annotations, group=group) or ()):
            annotation = safe_execute(Annotation, **value)
            if annotation:
                annotation_list.append(annotation)

    return annotation_list
示例#27
0
 def _get_context_plugins(self, request, group):
     """Serialize project-configured plugins that provide custom contexts."""
     project = group.project
     return serialize(
         [
             plugin for plugin in plugins.for_project(project, version=None)
             if plugin.has_project_conf()
             and hasattr(plugin, "get_custom_contexts")
             and plugin.get_custom_contexts()
         ],
         request.user,
         PluginSerializer(project),
     )
示例#28
0
    def test_disabled_plugin_when_fully_migrated(self):
        # Installing the VSTS Integration should disable the legacy
        # 'vsts' plugin when its only repo is reachable through it.
        self.setupPluginTest()

        Repository.objects.create(
            organization_id=self.organization.id,
            name=self.project_a['name'],
            url='https://{}.visualstudio.com/DefaultCollection/_git/{}'.format(
                self.vsts_account_name,
                self.repo_name,
            ),
            provider='visualstudio',
            external_id=self.repo_id,
        )

        # Enabled before Integration installation
        assert 'vsts' in [p.slug for p in plugins.for_project(self.project)]

        self.assert_installation()

        # Disabled
        assert 'vsts' not in [p.slug for p in plugins.for_project(self.project)]
示例#29
0
def post_process_group(group, event, **kwargs):
    """
    Fires post processing hooks for a group.
    """
    # Fan each plugin hook out as its own async task.
    for plugin in plugins.for_project(group.project):
        plugin_post_process_group.delay(plugin.slug,
                                        group=group,
                                        event=event,
                                        **kwargs)

    # Kick off the explore-code / explore-user recorders.
    record_affected_code.delay(group=group, event=event)
    record_affected_user.delay(group=group, event=event)
示例#30
0
def post_process_group(group, event, is_new, is_regression, is_sample,
                       **kwargs):
    """
    Fires post processing hooks for a group.
    """
    from sentry.models import Project

    # Re-fetch the Project via cache; the pickled group may be stale.
    project = Project.objects.get_from_cache(id=group.project_id)

    # Shared kwargs forwarded to plugin tasks, rule conditions and rules.
    child_kwargs = {
        'event': event,
        'is_new': is_new,
        'is_regression': is_regression,
        'is_sample': is_sample,
    }

    if settings.SENTRY_ENABLE_EXPLORE_CODE:
        record_affected_code.delay(group=group, event=event)

    if settings.SENTRY_ENABLE_EXPLORE_USERS:
        record_affected_user.delay(group=group, event=event)

    for plugin in plugins.for_project(project):
        plugin_post_process_group.delay(plugin.slug,
                                        group=group,
                                        **child_kwargs)

    # Evaluate each rule under its action_match policy: 'all' -> every
    # condition must hold, 'any' -> at least one, 'none' -> none may
    # hold; an empty condition list always fires the rule.
    for rule in get_rules(project):
        match = rule.data.get('action_match', 'all')
        condition_list = rule.data.get('conditions', ())
        if not condition_list:
            pass
        elif match == 'all':
            if not all(
                    condition_matches(project, c, **child_kwargs)
                    for c in condition_list):
                continue
        elif match == 'any':
            if not any(
                    condition_matches(project, c, **child_kwargs)
                    for c in condition_list):
                continue
        elif match == 'none':
            if any(
                    condition_matches(project, c, **child_kwargs)
                    for c in condition_list):
                continue
        else:
            rules_logger.error('Unsupported action_match %r for rule %d',
                               match, rule.id)
            continue

        execute_rule.delay(rule_id=rule.id, **child_kwargs)
示例#31
0
    def _get_actions(self, request, group):
        # Collect action tuples from v1 plugins (which may replace the
        # running list) and v2 plugins (which contribute individually).
        project = group.project

        action_list = []
        for plugin in plugins.for_project(project, version=1):
            results = safe_execute(
                plugin.actions, request, group, action_list, _with_transaction=False
            )

            if not results:
                continue

            action_list = results

        for plugin in plugins.for_project(project, version=2):
            for action in (
                safe_execute(plugin.get_actions, request, group, _with_transaction=False) or ()
            ):
                action_list.append(action)

        return action_list
示例#32
0
    def _get_available_issue_plugins(self, request, group):
        # Ask each v1 issue-tracking plugin to extend the running list.
        # NOTE(review): safe_execute returns None on failure, which would
        # clobber plugin_issues here — confirm whether that's intended.
        project = group.project

        plugin_issues = []
        for plugin in plugins.for_project(project, version=1):
            if isinstance(plugin, IssueTrackingPlugin2):
                plugin_issues = safe_execute(plugin.plugin_issues,
                                             request,
                                             group,
                                             plugin_issues,
                                             _with_transaction=False)
        return plugin_issues
示例#33
0
def post_process_group(group, event, **kwargs):
    """
    Fires post processing hooks for a group.
    """
    # Fan each plugin hook out as its own async task.
    for plugin in plugins.for_project(group.project):
        plugin_post_process_group.delay(
            plugin.slug, group=group, event=event, **kwargs)

    # Explore recorders are optional and gated by settings flags.
    if settings.SENTRY_ENABLE_EXPLORE_CODE:
        record_affected_code.delay(group=group, event=event)

    if settings.SENTRY_ENABLE_EXPLORE_USERS:
        record_affected_user.delay(group=group, event=event)
示例#34
0
def get_actions(group, request):
    """Return (label, url, is_current) tuples for the group action menu."""
    project = group.project

    collected = []
    # Each plugin may return a replacement for the running list.
    for plugin in plugins.for_project(project):
        outcome = safe_execute(plugin.actions, request, group, collected)
        if outcome:
            collected = outcome

    # Flag the entry whose URL matches the current request path.
    return [(entry[0], entry[1], entry[1] == request.path) for entry in collected]
示例#35
0
    def test_disabled_plugin_when_fully_migrated(self):
        # Installing the VSTS Integration should disable the legacy
        # 'vsts' plugin when its only repo is reachable through it.
        self.setup_plugin_test()

        Repository.objects.create(
            organization_id=self.organization.id,
            name=self.project_a["name"],
            url=u"https://{}.visualstudio.com/_git/{}".format(
                self.vsts_account_name, self.repo_name
            ),
            provider="visualstudio",
            external_id=self.repo_id,
            config={"name": self.project_a["name"], "project": self.project_a["name"]},
        )

        # Enabled before Integration installation
        assert "vsts" in [p.slug for p in plugins.for_project(self.project)]

        # Run installation with the task queue active so the migration
        # task executes inline.
        with self.tasks():
            self.assert_installation()

        # Disabled
        assert "vsts" not in [p.slug for p in plugins.for_project(self.project)]
示例#36
0
def get_legacy_annotations(group, request=None):
    """Gather annotation tags from v1 plugins (legacy ``tags`` hook)."""
    annotations = []
    # Each plugin may return a replacement for the running list.
    for plugin in plugins.for_project(group.project, version=1):
        outcome = safe_execute(plugin.tags, request, group, annotations)
        if outcome:
            annotations = outcome

    return annotations
示例#37
0
    def test_disables_plugin_when_fully_migrated(self):
        # Running the GitHub Integration setup flow should disable the
        # legacy 'github' plugin once its repo is reachable through it.
        project = Project.objects.create(
            organization_id=self.organization.id,
        )

        plugin = plugins.get('github')
        plugin.enable(project)

        # Accessible to new Integration
        Repository.objects.create(
            organization_id=self.organization.id,
            name='Test-Organization/foo',
            url='https://github.com/Test-Organization/foo',
            provider='github',
            external_id=123,
        )

        assert 'github' in [p.slug for p in plugins.for_project(project)]

        self.assert_setup_flow()

        assert 'github' not in [p.slug for p in plugins.for_project(project)]
示例#38
0
def get_panels(group, request):
    """Return (label, url, is_current) panel tuples for the group page."""
    panels = []
    # Each plugin may return a replacement for the running list.
    for plugin in plugins.for_project(group.project):
        outcome = safe_execute(plugin.panels, request, group, panels)
        if outcome:
            panels = outcome

    # Flag the panel whose URL matches the current request path.
    return [(entry[0], entry[1], entry[1] == request.path) for entry in panels]
示例#39
0
def get_legacy_annotations(group, request=None):
    # Collect annotation tags from v1 plugins; each plugin may return a
    # replacement for the running list (falsy results are ignored).
    project = group.project

    annotation_list = []
    for plugin in plugins.for_project(project, version=1):
        results = safe_execute(plugin.tags, request, group, annotation_list)

        if not results:
            continue

        annotation_list = results

    return annotation_list
示例#40
0
    def after(self, event, **kwargs):
        """Notify users via every notification plugin that wants this event."""
        from sentry.plugins.bases.notify import NotificationPlugin

        group = event.group

        for plugin in plugins.for_project(event.project):
            # Only notification plugins that opt in via should_notify fire.
            wants_it = (
                isinstance(plugin, NotificationPlugin)
                and safe_execute(plugin.should_notify, group, event)
            )
            if wants_it:
                safe_execute(plugin.notify_users, group=group, event=event)
示例#41
0
def get_panels(group, request):
    # Build (label, url, is_current) panel tuples; each plugin may return
    # a replacement for the running list (falsy results are ignored).
    project = group.project

    panel_list = []
    for plugin in plugins.for_project(project):
        results = safe_execute(plugin.panels, request, group, panel_list)

        if not results:
            continue

        panel_list = results

    return [(p[0], p[1], request.path == p[1]) for p in panel_list]
示例#42
0
def get_actions(group, request):
    # Build (label, url, is_current) action tuples; each plugin may
    # return a replacement for the running list (falsy results ignored).
    project = group.project

    action_list = []
    for plugin in plugins.for_project(project):
        results = safe_execute(plugin.actions, request, group, action_list)

        if not results:
            continue

        action_list = results

    return [(a[0], a[1], request.path == a[1]) for a in action_list]
示例#43
0
def post_process_group(event, is_new, is_regression, is_sample, **kwargs):
    """
    Fires post processing hooks for a group.
    """
    # NOTE: we must pass through the full Event object, and not an
    # event_id since the Event object may not actually have been stored
    # in the database due to sampling.
    from sentry.models import Project
    from sentry.models.group import get_group_with_redirect
    from sentry.rules.processor import RuleProcessor

    # Re-bind Group since we're pickling the whole Event object
    # which may contain a stale Group.
    event.group, _ = get_group_with_redirect(event.group_id)
    event.group_id = event.group.id

    project_id = event.group.project_id
    # Tag the error-reporting context so failures here are attributable.
    Raven.tags_context({
        'project': project_id,
    })

    # Re-bind Project since we're pickling the whole Event object
    # which may contain a stale Project.
    event.project = Project.objects.get_from_cache(id=project_id)

    _capture_stats(event, is_new)

    # we process snoozes before rules as it might create a regression
    process_snoozes(event.group)

    rp = RuleProcessor(event, is_new, is_regression, is_sample)
    # TODO(dcramer): ideally this would fanout, but serializing giant
    # objects back and forth isn't super efficient
    for callback, futures in rp.apply():
        safe_execute(callback, event, futures)

    # Run each plugin's post-process hook synchronously.
    for plugin in plugins.for_project(event.project):
        plugin_post_process_group(
            plugin_slug=plugin.slug,
            event=event,
            is_new=is_new,
            # NOTE(review): 'is_regresion' is misspelled but presumably
            # mirrors the callee's parameter name — confirm before fixing.
            is_regresion=is_regression,
            is_sample=is_sample,
        )

    # Broadcast completion; send_robust keeps receiver errors contained.
    event_processed.send_robust(
        sender=post_process_group,
        project=event.project,
        group=event.group,
        event=event,
    )
示例#44
0
def post_process_group(event, is_new, is_regression, is_sample, **kwargs):
    """
    Fires post processing hooks for a group.
    """
    # NOTE: we must pass through the full Event object, and not an
    # event_id since the Event object may not actually have been stored
    # in the database due to sampling.
    from sentry.models import Project
    from sentry.models.group import get_group_with_redirect
    from sentry.rules.processor import RuleProcessor

    # Re-bind Group since we're pickling the whole Event object
    # which may contain a stale Group.
    event.group, _ = get_group_with_redirect(event.group_id)
    event.group_id = event.group.id

    project_id = event.group.project_id
    # Tag the error-reporting context so failures here are attributable.
    Raven.tags_context({
        'project': project_id,
    })

    # Re-bind Project since we're pickling the whole Event object
    # which may contain a stale Project.
    event.project = Project.objects.get_from_cache(id=project_id)

    _capture_stats(event, is_new)

    # we process snoozes before rules as it might create a regression
    process_snoozes(event.group)

    rp = RuleProcessor(event, is_new, is_regression, is_sample)
    # TODO(dcramer): ideally this would fanout, but serializing giant
    # objects back and forth isn't super efficient
    for callback, futures in rp.apply():
        safe_execute(callback, event, futures)

    # Run each plugin's post-process hook synchronously.
    for plugin in plugins.for_project(event.project):
        plugin_post_process_group(
            plugin_slug=plugin.slug,
            event=event,
            is_new=is_new,
            # NOTE(review): 'is_regresion' is misspelled but presumably
            # mirrors the callee's parameter name — confirm before fixing.
            is_regresion=is_regression,
            is_sample=is_sample,
        )

    # Broadcast completion; send_robust keeps receiver errors contained.
    event_processed.send_robust(
        sender=post_process_group,
        project=event.project,
        group=event.group,
        event=event,
    )
示例#45
0
    def test_disable_plugin_when_fully_migrated(self):
        # Installing the Bitbucket Integration should disable the legacy
        # 'bitbucket' plugin once every plugin repo is reachable through it.
        project = Project.objects.create(organization_id=self.organization.id)

        plugin = plugins.get("bitbucket")
        plugin.enable(project)

        # Accessible to new Integration
        Repository.objects.create(
            organization_id=self.organization.id,
            name="sentryuser/repo",
            url="https://bitbucket.org/sentryuser/repo",
            provider="bitbucket",
            external_id="123456",
            config={"name": "sentryuser/repo"},
        )

        # Simulate the installation callback from Bitbucket.
        self.client.post(self.path, data=self.data_from_bitbucket)

        integration = Integration.objects.get(provider=self.provider,
                                              external_id=self.client_key)

        # Mock the webhook listing so the repo counts as migrated.
        responses.add(
            responses.GET,
            u"https://api.bitbucket.org/2.0/repositories/sentryuser/repo/hooks",
            json={"values": [{
                "description": "sentry-bitbucket-repo-hook"
            }]},
        )

        assert "bitbucket" in [p.slug for p in plugins.for_project(project)]

        # post_install runs the migration task inline under self.tasks().
        with self.tasks():
            BitbucketIntegrationProvider().post_install(
                integration, self.organization)

            assert "bitbucket" not in [
                p.slug for p in plugins.for_project(project)
            ]
示例#46
0
def get_tags(group, request=None):
    """Yield tags for *group*, letting each project plugin transform them.

    Every plugin's ``tags`` hook receives the list built so far and may
    return a replacement list; a falsy return leaves the list unchanged.
    """
    tags = []
    for plugin in plugins.for_project(group.project):
        updated = safe_execute(plugin.tags, request, group, tags)
        if updated:
            tags = updated

    for tag in tags:
        yield tag
示例#47
0
def get_tags(group, request=None):
    """Generator of tags for ``group``.

    Each plugin registered for the group's project may replace the running
    tag list via its ``tags`` hook; ``None``/empty results are ignored.
    """
    current = []
    for plugin in plugins.for_project(group.project):
        result = safe_execute(plugin.tags, request, group, current)
        # Keep the previous list when the plugin returns nothing.
        current = result or current

    for item in current:
        yield item
示例#48
0
def post_process_group(group, event, **kwargs):
    """Fire asynchronous post-processing hooks for *group*.

    Queues one ``plugin_post_process_group`` task per enabled plugin,
    then optionally records affected code/users when the corresponding
    explore features are enabled in settings.
    """
    enabled = plugins.for_project(group.project)
    for plugin in enabled:
        plugin_post_process_group.delay(
            plugin.slug, group=group, event=event, **kwargs)

    if settings.SENTRY_ENABLE_EXPLORE_CODE:
        record_affected_code.delay(group=group, event=event)
    if settings.SENTRY_ENABLE_EXPLORE_USERS:
        record_affected_user.delay(group=group, event=event)
示例#49
0
def handle_before_events(request, event_list):
    """Run each plugin's ``before_events`` hook over the given event(s).

    Accepts either a single event or an iterable of events.  When the
    events span multiple projects, plugins are looked up with project
    ``None``.  Always returns an empty string (template-tag convention).
    """
    if not event_list:
        return ''

    if hasattr(event_list, '__iter__'):
        distinct = set(e.project for e in event_list)
        project = distinct.pop() if len(distinct) == 1 else None
    else:
        project = event_list.project
        event_list = [event_list]

    for plugin in plugins.for_project(project):
        safe_execute(plugin.before_events, request, event_list)

    return ''
示例#50
0
def get_filters(model=None, project=None):
    """Yield filter classes applicable to ``model`` for ``project``.

    Filters come from three sources, in order: the ``FILTERS`` setting
    (builtins), per-project tag filters (classes generated on the fly),
    and plugin-provided filters.  Imports and generated classes are
    memoized in the module-level ``FILTER_CACHE`` / ``TAG_FILTER_CACHE``.
    """
    filter_list = []

    # Add builtins (specified with the FILTERS setting)
    for class_path in settings.FILTERS:
        if class_path not in FILTER_CACHE:
            module_name, class_name = class_path.rsplit('.', 1)
            try:
                module = __import__(module_name, {}, {}, class_name)
                cls = getattr(module, class_name)
            except Exception:
                # One broken filter path must not take down the whole list.
                logger = logging.getLogger('sentry.errors.filters')
                logger.exception('Unable to import %s', class_path)
                continue
            FILTER_CACHE[class_path] = cls
        filter_list.append(FILTER_CACHE[class_path])

    if project:
        for tag in project.get_tags():
            if tag not in TAG_FILTER_CACHE:
                # Generate a new filter class because we are lazy and do
                # not want to rewrite code
                class new(TagFilter):
                    label = _(tag.replace('_', ' ').title())
                    column = tag

                new.__name__ = '__%sGeneratedFilter' % str(tag)
                TAG_FILTER_CACHE[tag] = new
            filter_list.append(TAG_FILTER_CACHE[tag])

    # Add plugin-provided filters
    for plugin in plugins.for_project(project):
        results = safe_execute(plugin.get_filters, project)
        if results:
            for filter_cls in results:
                if filter_cls not in filter_list:
                    filter_list.append(filter_cls)

    # yield all filters which support ``model``
    for filter_cls in filter_list:
        if model and model not in filter_cls.types:
            continue
        yield filter_cls
示例#51
0
def post_process_group(event, is_new, is_regression, is_sample, **kwargs):
    """Fire post-processing hooks for an event's group.

    Applies alert rules, invokes each enabled plugin's post-process hook
    synchronously, and finally emits the ``event_processed`` signal.
    """
    from sentry.models import Project
    from sentry.rules.processor import RuleProcessor

    project_id = event.group.project_id
    Raven.tags_context({'project': project_id})

    project = Project.objects.get_from_cache(id=project_id)

    _capture_stats(event, is_new)

    processor = RuleProcessor(event, is_new, is_regression, is_sample)
    # TODO(dcramer): ideally this would fanout, but serializing giant
    # objects back and forth isn't super efficient
    for callback, futures in processor.apply():
        safe_execute(callback, event, futures)

    for plugin in plugins.for_project(project):
        # NOTE: 'is_regresion' (sic) mirrors the receiving task's
        # parameter spelling elsewhere in this codebase; do not "fix"
        # it here in isolation.
        plugin_post_process_group(
            plugin_slug=plugin.slug,
            event=event,
            is_new=is_new,
            is_regresion=is_regression,
            is_sample=is_sample,
        )

    event_processed.send_robust(
        sender=post_process_group,
        project=project,
        group=event.group,
        event=event,
    )
示例#52
0
    def test_does_not_disable_any_plugin(self):
        """A plugin unrelated to the migration must remain enabled."""
        webhooks = plugins.get('webhooks')
        webhooks.enable(self.project)

        self.migrator.call()

        assert webhooks in plugins.for_project(self.project)
示例#53
0
    def test_call(self):
        """Running the migrator disables the enabled example plugin."""
        example = plugins.get('example')
        example.enable(self.project)

        self.migrator.call()

        assert example not in plugins.for_project(self.project)
示例#54
0
def post_process_group(event, is_new, is_regression, is_sample,
                       is_new_group_environment, **kwargs):
    """
    Fires post processing hooks for a group.

    Runs snooze processing, owner assignment, alert rules, service hooks,
    resource-change hooks and plugin post-process hooks for ``event``,
    then emits the ``event_processed`` signal.  All reads run under a
    consistent Snuba override, and already-processed events are skipped.
    """
    with snuba.options_override({'consistent': True}):
        # Dedupe: bail out if this event was already post-processed.
        if check_event_already_post_processed(event):
            logger.info('post_process.skipped',
                        extra={
                            'project_id': event.project_id,
                            'event_id': event.event_id,
                            'reason': 'duplicate',
                        })
            return

        # NOTE: we must pass through the full Event object, and not an
        # event_id since the Event object may not actually have been stored
        # in the database due to sampling.
        from sentry.models import Project
        from sentry.models.group import get_group_with_redirect
        from sentry.rules.processor import RuleProcessor
        from sentry.tasks.servicehooks import process_service_hook

        # Re-bind node data to avoid renormalization. We only want to
        # renormalize when loading old data from the database.
        event.data = EventDict(event.data, skip_renormalization=True)

        # Re-bind Group since we're pickling the whole Event object
        # which may contain a stale Group.
        event.group, _ = get_group_with_redirect(event.group_id)
        event.group_id = event.group.id

        project_id = event.group.project_id
        with configure_scope() as scope:
            scope.set_tag("project", project_id)

        # Re-bind Project since we're pickling the whole Event object
        # which may contain a stale Project.
        event.project = Project.objects.get_from_cache(id=project_id)

        _capture_stats(event, is_new)

        # we process snoozes before rules as it might create a regression
        has_reappeared = process_snoozes(event.group)

        handle_owner_assignment(event.project, event.group, event)

        rp = RuleProcessor(event, is_new, is_regression,
                           is_new_group_environment, has_reappeared)
        has_alert = False
        # TODO(dcramer): ideally this would fanout, but serializing giant
        # objects back and forth isn't super efficient
        for callback, futures in rp.apply():
            has_alert = True
            safe_execute(callback, event, futures)

        # Dispatch service hooks only when the project has the feature.
        if features.has(
                'projects:servicehooks',
                project=event.project,
        ):
            allowed_events = set(['event.created'])
            if has_alert:
                allowed_events.add('event.alert')

            if allowed_events:
                for servicehook_id, events in _get_service_hooks(
                        project_id=event.project_id):
                    if any(e in allowed_events for e in events):
                        process_service_hook.delay(
                            servicehook_id=servicehook_id,
                            event=event,
                        )

        # Resource-change hooks: error-created and (first-seen) group-created.
        if event.get_event_type(
        ) == 'error' and _should_send_error_created_hooks(event.project):
            process_resource_change_bound.delay(
                action='created',
                sender='Error',
                instance_id=event.event_id,
                instance=event,
            )
        if is_new:
            process_resource_change_bound.delay(
                action='created',
                sender='Group',
                instance_id=event.group_id,
            )

        for plugin in plugins.for_project(event.project):
            # NOTE(review): 'is_regresion' (sic) appears to match the
            # receiving hook's parameter spelling -- confirm before fixing.
            plugin_post_process_group(
                plugin_slug=plugin.slug,
                event=event,
                is_new=is_new,
                is_regresion=is_regression,
                is_sample=is_sample,
            )

        event_processed.send_robust(
            sender=post_process_group,
            project=event.project,
            event=event,
            primary_hash=kwargs.get('primary_hash'),
        )
示例#55
0
    def save(self, project, raw=False):
        """Normalize ``self.data`` into an Event, aggregate it into a Group,
        persist tags/counters, and queue post-processing.

        :param project: a project id (resolved via the project cache).
        :param raw: when True, skip post-processing and the first_event
                    signal; only store the event.
        :returns: the saved Event (or the pre-existing one on a duplicate).
        """
        from sentry.tasks.post_process import index_event_tags

        project = Project.objects.get_from_cache(id=project)

        data = self.data.copy()

        # First we pull out our top-level (non-data attr) kwargs
        event_id = data.pop('event_id')
        level = data.pop('level')

        culprit = data.pop('culprit', None)
        logger_name = data.pop('logger', None)
        server_name = data.pop('server_name', None)
        site = data.pop('site', None)
        checksum = data.pop('checksum', None)
        fingerprint = data.pop('fingerprint', None)
        platform = data.pop('platform', None)
        release = data.pop('release', None)
        environment = data.pop('environment', None)

        # unused
        time_spent = data.pop('time_spent', None)
        message = data.pop('message', '')

        if not culprit:
            # if we generate an implicit culprit, lets not call it a
            # transaction
            transaction_name = None
            culprit = generate_culprit(data, platform=platform)
        else:
            transaction_name = culprit

        # Timestamps arrive as epoch seconds; normalize to aware UTC.
        date = datetime.fromtimestamp(data.pop('timestamp'))
        date = date.replace(tzinfo=timezone.utc)

        kwargs = {
            'platform': platform,
        }

        event = Event(
            project_id=project.id,
            event_id=event_id,
            data=data,
            time_spent=time_spent,
            datetime=date,
            **kwargs
        )

        # convert this to a dict to ensure we're only storing one value per key
        # as most parts of Sentry dont currently play well with multiple values
        tags = dict(data.get('tags') or [])
        tags['level'] = LOG_LEVELS[level]
        if logger_name:
            tags['logger'] = logger_name
        if server_name:
            tags['server_name'] = server_name
        if site:
            tags['site'] = site
        if environment:
            tags['environment'] = environment
        if transaction_name:
            tags['transaction'] = transaction_name

        if release:
            # dont allow a conflicting 'release' tag
            if 'release' in tags:
                del tags['release']
            tags['sentry:release'] = release

        event_user = self._get_event_user(project, data)
        if event_user:
            # dont allow a conflicting 'user' tag
            if 'user' in tags:
                del tags['user']
            tags['sentry:user'] = event_user.tag_value

        # Let plugins contribute tags, but never override user-provided ones.
        for plugin in plugins.for_project(project, version=None):
            added_tags = safe_execute(plugin.get_tags, event,
                                      _with_transaction=False)
            if added_tags:
                # plugins should not override user provided tags
                for key, value in added_tags:
                    tags.setdefault(key, value)

        # tags are stored as a tuple
        tags = tags.items()

        # XXX(dcramer): we're relying on mutation of the data object to ensure
        # this propagates into Event
        data['tags'] = tags

        data['fingerprint'] = fingerprint or ['{{ default }}']

        for path, iface in six.iteritems(event.interfaces):
            data['tags'].extend(iface.iter_tags())
            # Get rid of ephemeral interface data
            if iface.ephemeral:
                data.pop(iface.get_path(), None)

        # prioritize fingerprint over checksum as its likely the client defaulted
        # a checksum whereas the fingerprint was explicit
        if fingerprint:
            hashes = [
                md5_from_hash(h)
                for h in get_hashes_from_fingerprint(event, fingerprint)
            ]
        elif checksum:
            hashes = [checksum]
            data['checksum'] = checksum
        else:
            hashes = [
                md5_from_hash(h)
                for h in get_hashes_for_event(event)
            ]

        # TODO(dcramer): temp workaround for complexity
        data['message'] = message
        event_type = eventtypes.get(data.get('type', 'default'))(data)
        event_metadata = event_type.get_metadata()
        # TODO(dcramer): temp workaround for complexity
        del data['message']

        data['type'] = event_type.key
        data['metadata'] = event_metadata

        # index components into ``Event.message``
        # See GH-3248
        if event_type.key != 'default':
            if 'sentry.interfaces.Message' in data and \
                    data['sentry.interfaces.Message']['message'] != message:
                message = u'{} {}'.format(
                    message,
                    data['sentry.interfaces.Message']['message'],
                )

        if not message:
            message = ''
        elif not isinstance(message, six.string_types):
            message = force_text(message)

        # Fold metadata values and the culprit into the searchable message.
        for value in six.itervalues(event_metadata):
            value_u = force_text(value, errors='replace')
            if value_u not in message:
                message = u'{} {}'.format(message, value_u)

        if culprit and culprit not in message:
            culprit_u = force_text(culprit, errors='replace')
            message = u'{} {}'.format(message, culprit_u)

        message = trim(message.strip(), settings.SENTRY_MAX_MESSAGE_LENGTH)

        event.message = message
        kwargs['message'] = message

        group_kwargs = kwargs.copy()
        group_kwargs.update({
            'culprit': culprit,
            'logger': logger_name,
            'level': level,
            'last_seen': date,
            'first_seen': date,
            'active_at': date,
            'data': {
                'last_received': event.data.get('received') or float(event.datetime.strftime('%s')),
                'type': event_type.key,
                # we cache the events metadata on the group to ensure its
                # accessible in the stream
                'metadata': event_metadata,
            },
        })

        if release:
            release = Release.get_or_create(
                project=project,
                version=release,
                date_added=date,
            )

            group_kwargs['first_release'] = release

        # Aggregate the event into a (possibly new) group.
        group, is_new, is_regression, is_sample = self._save_aggregate(
            event=event,
            hashes=hashes,
            release=release,
            **group_kwargs
        )

        event.group = group
        # store a reference to the group id to guarantee validation of isolation
        event.data.bind_ref(event)

        # EventMapping doubles as a dedupe guard: an IntegrityError here
        # means this event_id was already processed for the project.
        try:
            with transaction.atomic(using=router.db_for_write(EventMapping)):
                EventMapping.objects.create(
                    project=project, group=group, event_id=event_id)
        except IntegrityError:
            self.logger.info('duplicate.found', extra={'event_id': event.id}, exc_info=True)
            return event

        environment = Environment.get_or_create(
            project=project,
            name=environment,
        )

        if release:
            ReleaseEnvironment.get_or_create(
                project=project,
                release=release,
                environment=environment,
                datetime=date,
            )

            grouprelease = GroupRelease.get_or_create(
                group=group,
                release=release,
                environment=environment,
                datetime=date,
            )

        # Increment time-series counters for the group/project (and release).
        counters = [
            (tsdb.models.group, group.id),
            (tsdb.models.project, project.id),
        ]

        if release:
            counters.append((tsdb.models.release, release.id))

        tsdb.incr_multi(counters, timestamp=event.datetime)

        frequencies = [
            # (tsdb.models.frequent_projects_by_organization, {
            #     project.organization_id: {
            #         project.id: 1,
            #     },
            # }),
            # (tsdb.models.frequent_issues_by_project, {
            #     project.id: {
            #         group.id: 1,
            #     },
            # })
            (tsdb.models.frequent_environments_by_group, {
                group.id: {
                    environment.id: 1,
                },
            })
        ]

        if release:
            frequencies.append(
                (tsdb.models.frequent_releases_by_group, {
                    group.id: {
                        grouprelease.id: 1,
                    },
                })
            )

        tsdb.record_frequency_multi(frequencies, timestamp=event.datetime)

        # Attach any user reports that arrived before the event itself.
        UserReport.objects.filter(
            project=project, event_id=event_id,
        ).update(group=group)

        # save the event unless its been sampled
        if not is_sample:
            try:
                with transaction.atomic(using=router.db_for_write(Event)):
                    event.save()
            except IntegrityError:
                self.logger.info('duplicate.found', extra={'event_id': event.id}, exc_info=True)
                return event

            index_event_tags.delay(
                project_id=project.id,
                group_id=group.id,
                event_id=event.id,
                tags=tags,
            )

        if event_user:
            tsdb.record_multi((
                (tsdb.models.users_affected_by_group, group.id, (event_user.tag_value,)),
                (tsdb.models.users_affected_by_project, project.id, (event_user.tag_value,)),
            ), timestamp=event.datetime)

        if is_new and release:
            buffer.incr(Release, {'new_groups': 1}, {
                'id': release.id,
            })

        safe_execute(Group.objects.add_tags, group, tags,
                     _with_transaction=False)

        if not raw:
            if not project.first_event:
                project.update(first_event=date)
                first_event_received.send(project=project, group=group, sender=Project)

            post_process_group.delay(
                group=group,
                event=event,
                is_new=is_new,
                is_sample=is_sample,
                is_regression=is_regression,
            )
        else:
            self.logger.info('post_process.skip.raw_event', extra={'event_id': event.id})

        # TODO: move this to the queue
        if is_regression and not raw:
            regression_signal.send_robust(sender=Group, instance=group)

        return event
示例#56
0
def post_process_group(event, is_new, is_regression, is_sample, **kwargs):
    """
    Fires post processing hooks for a group.

    Queues per-plugin post-process tasks, then evaluates each project
    rule, flipping a per-(rule, group) GroupRuleStatus row so a rule
    only fires on a transition from inactive to active.
    """
    from sentry.models import GroupRuleStatus, Project

    project = Project.objects.get_from_cache(id=event.group.project_id)

    if settings.SENTRY_ENABLE_EXPLORE_CODE:
        record_affected_code.delay(event=event)

    if settings.SENTRY_ENABLE_EXPLORE_USERS:
        record_affected_user.delay(event=event)

    for plugin in plugins.for_project(project):
        # NOTE(review): 'is_regresion' (sic) appears to match the task's
        # expected kwarg spelling -- confirm before fixing.
        plugin_post_process_group.apply_async(
            kwargs={
                'plugin_slug': plugin.slug,
                'event': event,
                'is_new': is_new,
                'is_regresion': is_regression,
                'is_sample': is_sample,
            },
            expires=120,
        )

    for rule in get_rules(project):
        match = rule.data.get('action_match', 'all')
        condition_list = rule.data.get('conditions', ())

        if not condition_list:
            continue

        # TODO(dcramer): this might not make sense for other rule actions
        # so we should find a way to abstract this into actions
        # TODO(dcramer): this isnt the most efficient query pattern for this
        rule_status, _ = GroupRuleStatus.objects.get_or_create(
            rule=rule,
            group=event.group,
            defaults={
                'project': project,
                'status': STATUS_INACTIVE,
            },
        )

        state = EventState(
            is_new=is_new,
            is_regression=is_regression,
            is_sample=is_sample,
            rule_is_active=rule_status.status == STATUS_ACTIVE,
        )

        # Lazily evaluate conditions; all/any/none short-circuit below.
        condition_iter = (condition_matches(project, c, event, state)
                          for c in condition_list)

        if match == 'all':
            passed = all(condition_iter)
        elif match == 'any':
            passed = any(condition_iter)
        elif match == 'none':
            passed = not any(condition_iter)
        else:
            rules_logger.error('Unsupported action_match %r for rule %d',
                               match, rule.id)
            continue

        if passed and rule_status.status == STATUS_INACTIVE:
            # we only fire if we're able to say that the state has changed
            GroupRuleStatus.objects.filter(
                id=rule_status.id,
                status=STATUS_INACTIVE,
            ).update(status=STATUS_ACTIVE)
        elif not passed and rule_status.status == STATUS_ACTIVE:
            # update the state to suggest this rule can fire again
            GroupRuleStatus.objects.filter(
                id=rule_status.id,
                status=STATUS_ACTIVE,
            ).update(status=STATUS_INACTIVE)

        if passed:
            execute_rule.apply_async(
                kwargs={
                    'rule_id': rule.id,
                    'event': event,
                    'state': state,
                },
                expires=120,
            )
示例#57
0
文件: group.py 项目: webZW/sentry
    def get_attrs(self, item_list, user):
        """Batch-compute serializer attributes for a list of Groups.

        Collects, per group: bookmarks/seen/subscriptions for ``user``,
        assignees, ignore (snooze) state, release/commit resolutions,
        plugin and integration annotations, share ids and seen stats.
        Returns a dict keyed by group instance.
        """
        from sentry.plugins import plugins

        GroupMeta.objects.populate_cache(item_list)

        # Prefetch projects to avoid N+1 queries below.
        attach_foreignkey(item_list, Group.project)

        if user.is_authenticated() and item_list:
            bookmarks = set(
                GroupBookmark.objects.filter(
                    user=user,
                    group__in=item_list,
                ).values_list('group_id', flat=True))
            seen_groups = dict(
                GroupSeen.objects.filter(
                    user=user,
                    group__in=item_list,
                ).values_list('group_id', 'last_seen'))
            subscriptions = self._get_subscriptions(item_list, user)
        else:
            bookmarks = set()
            seen_groups = {}
            subscriptions = defaultdict(lambda: (False, None))

        assignees = {
            a.group_id: a.assigned_actor()
            for a in GroupAssignee.objects.filter(group__in=item_list, )
        }
        resolved_assignees = Actor.resolve_dict(assignees)

        ignore_items = {
            g.group_id: g
            for g in GroupSnooze.objects.filter(group__in=item_list, )
        }

        # Resolution lookups only apply to groups in the RESOLVED state.
        resolved_item_list = [
            i for i in item_list if i.status == GroupStatus.RESOLVED
        ]
        if resolved_item_list:
            release_resolutions = {
                i[0]: i[1:]
                for i in GroupResolution.objects.filter(
                    group__in=resolved_item_list, ).values_list(
                        'group',
                        'type',
                        'release__version',
                        'actor_id',
                    )
            }

            # due to our laziness, and django's inability to do a reasonable join here
            # we end up with two queries
            commit_results = list(
                Commit.objects.extra(
                    select={
                        'group_id': 'sentry_grouplink.group_id',
                    },
                    tables=['sentry_grouplink'],
                    where=[
                        'sentry_grouplink.linked_id = sentry_commit.id',
                        'sentry_grouplink.group_id IN ({})'.format(', '.join(
                            six.text_type(i.id) for i in resolved_item_list)),
                        'sentry_grouplink.linked_type = %s',
                        'sentry_grouplink.relationship = %s',
                    ],
                    params=[
                        int(GroupLink.LinkedType.commit),
                        int(GroupLink.Relationship.resolves),
                    ]))
            commit_resolutions = {
                i.group_id: d
                for i, d in itertools.izip(commit_results,
                                           serialize(commit_results, user))
            }
        else:
            release_resolutions = {}
            commit_resolutions = {}

        # Resolve the users behind resolutions and snoozes in one query.
        actor_ids = set(r[-1] for r in six.itervalues(release_resolutions))
        actor_ids.update(r.actor_id for r in six.itervalues(ignore_items))
        if actor_ids:
            users = list(
                User.objects.filter(
                    id__in=actor_ids,
                    is_active=True,
                ))
            actors = {
                u.id: d
                for u, d in itertools.izip(users, serialize(users, user))
            }
        else:
            actors = {}

        share_ids = dict(
            GroupShare.objects.filter(group__in=item_list, ).values_list(
                'group_id', 'uuid'))

        result = {}

        seen_stats = self._get_seen_stats(item_list, user)

        for item in item_list:
            active_date = item.active_at or item.first_seen

            # Annotations from v1 plugins (tags hook), v2 plugins,
            # issue-capable integrations, and platform external issues.
            annotations = []
            for plugin in plugins.for_project(project=item.project, version=1):
                safe_execute(plugin.tags,
                             None,
                             item,
                             annotations,
                             _with_transaction=False)
            for plugin in plugins.for_project(project=item.project, version=2):
                annotations.extend(
                    safe_execute(plugin.get_annotations,
                                 group=item,
                                 _with_transaction=False) or ())

            from sentry.integrations import IntegrationFeatures
            for integration in Integration.objects.filter(
                    organizations=item.project.organization_id):
                if not (integration.has_feature(
                        IntegrationFeatures.ISSUE_BASIC)
                        or integration.has_feature(
                            IntegrationFeatures.ISSUE_SYNC)):
                    continue

                install = integration.get_installation(
                    item.project.organization_id)
                annotations.extend(
                    safe_execute(install.get_annotations,
                                 group=item,
                                 _with_transaction=False) or ())

            from sentry.models import PlatformExternalIssue
            annotations.extend(
                safe_execute(PlatformExternalIssue.get_annotations,
                             group=item,
                             _with_transaction=False) or ())

            # Release resolutions win over commit resolutions.
            resolution_actor = None
            resolution_type = None
            resolution = release_resolutions.get(item.id)
            if resolution:
                resolution_type = 'release'
                resolution_actor = actors.get(resolution[-1])
            if not resolution:
                resolution = commit_resolutions.get(item.id)
                if resolution:
                    resolution_type = 'commit'

            ignore_item = ignore_items.get(item.id)
            if ignore_item:
                ignore_actor = actors.get(ignore_item.actor_id)
            else:
                ignore_actor = None

            result[item] = {
                'assigned_to': resolved_assignees.get(item.id),
                'is_bookmarked': item.id in bookmarks,
                'subscription': subscriptions[item.id],
                'has_seen':
                seen_groups.get(item.id, active_date) > active_date,
                'annotations': annotations,
                'ignore_until': ignore_item,
                'ignore_actor': ignore_actor,
                'resolution': resolution,
                'resolution_type': resolution_type,
                'resolution_actor': resolution_actor,
                'share_id': share_ids.get(item.id),
            }

            result[item].update(seen_stats.get(item, {}))
        return result
示例#58
0
def post_process_group(group, event, is_new, is_regression, is_sample,
                       **kwargs):
    """
    Fires post processing hooks for a group.

    Dispatches the optional "explore" tasks, one per-plugin post-process
    task, and then evaluates each of the project's rules, firing
    ``execute_rule`` for any rule whose conditions newly pass. Rule
    firing state is tracked per (rule, group) in ``GroupRuleStatus`` so a
    rule only fires again after its conditions have stopped passing.
    """
    from sentry.models import GroupRuleStatus, Project

    project = Project.objects.get_from_cache(id=group.project_id)

    # Shared kwargs forwarded to every child task and condition check.
    child_kwargs = {
        'event': event,
        'is_new': is_new,
        'is_regression': is_regression,
        'is_sample': is_sample,
    }

    if settings.SENTRY_ENABLE_EXPLORE_CODE:
        record_affected_code.delay(group=group, event=event)

    if settings.SENTRY_ENABLE_EXPLORE_USERS:
        record_affected_user.delay(group=group, event=event)

    for plugin in plugins.for_project(project):
        plugin_post_process_group.delay(plugin.slug,
                                        group=group,
                                        **child_kwargs)

    for rule in get_rules(project):
        match = rule.data.get('action_match', 'all')
        condition_list = rule.data.get('conditions', ())

        # A rule with no conditions can never fire.
        if not condition_list:
            continue

        # Lazily evaluate conditions; each matcher below short-circuits.
        results = (
            condition_matches(project, c, **child_kwargs)
            for c in condition_list
        )
        if match == 'all':
            passed = all(results)
        elif match == 'any':
            passed = any(results)
        elif match == 'none':
            passed = not any(results)
        else:
            rules_logger.error('Unsupported action_match %r for rule %d',
                               match, rule.id)
            continue

        # TODO(dcramer): this might not make sense for other rule actions
        # so we should find a way to abstract this into actions
        # TODO(dcramer): this isnt the most efficient query pattern for this
        rule_status, _ = GroupRuleStatus.objects.get_or_create(
            rule=rule,
            group=group,
            defaults={
                'project': group.project,
                'status': STATUS_ACTIVE if passed else STATUS_INACTIVE,
            },
        )

        if passed:
            if rule_status.status == STATUS_INACTIVE:
                # we only fire if we're able to say that the state has
                # changed; the conditional UPDATE acts as an atomic
                # compare-and-swap so concurrent workers can't both fire
                should_fire = GroupRuleStatus.objects.filter(
                    status=STATUS_INACTIVE,
                    id=rule_status.id,
                ).update(status=STATUS_ACTIVE)
            else:
                should_fire = False

        else:
            should_fire = False
            if rule_status.status == STATUS_ACTIVE:
                # update the state to suggest this rule can fire again
                GroupRuleStatus.objects.filter(
                    status=STATUS_ACTIVE,
                    id=rule_status.id,
                ).update(status=STATUS_INACTIVE)

        if should_fire:
            execute_rule.delay(rule_id=rule.id, **child_kwargs)
Example #59
0
    def save(self, project, raw=False):
        """
        Normalize the event payload in ``self.data``, aggregate it into a
        Group, persist it, and queue downstream processing.

        :param project: a project id; resolved to a cached Project below.
        :param raw: when True, skip first-event signaling and post-process
            task dispatch at the end of the pipeline.
        :returns: the saved ``Event`` (or the pre-existing one if this
            event id was already processed for the project).
        """
        from sentry.tasks.post_process import index_event_tags
        data = self.data

        project = Project.objects.get_from_cache(id=project)

        # Check to make sure we're not about to do a bunch of work that's
        # already been done if we've processed an event with this ID. (This
        # isn't a perfect solution -- this doesn't handle ``EventMapping`` and
        # there's a race condition between here and when the event is actually
        # saved, but it's an improvement. See GH-7677.)
        try:
            event = Event.objects.get(
                project_id=project.id,
                event_id=data['event_id'],
            )
        except Event.DoesNotExist:
            pass
        else:
            self.logger.info('duplicate.found',
                             exc_info=True,
                             extra={
                                 'event_uuid': data['event_id'],
                                 'project_id': project.id,
                                 'model': Event.__name__,
                             })
            return event

        # First we pull out our top-level (non-data attr) kwargs
        event_id = data.pop('event_id')
        level = data.pop('level')
        # 'transaction' takes precedence over the legacy 'culprit' key.
        culprit = data.pop('transaction', None)
        if not culprit:
            culprit = data.pop('culprit', None)
        logger_name = data.pop('logger', None)
        server_name = data.pop('server_name', None)
        site = data.pop('site', None)
        checksum = data.pop('checksum', None)
        fingerprint = data.pop('fingerprint', None)
        platform = data.pop('platform', None)
        release = data.pop('release', None)
        dist = data.pop('dist', None)
        environment = data.pop('environment', None)

        # unused
        time_spent = data.pop('time_spent', None)
        message = data.pop('message', '')

        if not culprit:
            # if we generate an implicit culprit, lets not call it a
            # transaction
            transaction_name = None
            culprit = generate_culprit(data, platform=platform)
        else:
            transaction_name = culprit

        culprit = force_text(culprit)

        # Client-recorded epoch timestamp, normalized to an aware UTC datetime.
        recorded_timestamp = data.pop('timestamp')
        date = datetime.fromtimestamp(recorded_timestamp)
        date = date.replace(tzinfo=timezone.utc)

        kwargs = {
            'platform': platform,
        }

        event = Event(project_id=project.id,
                      event_id=event_id,
                      data=data,
                      time_spent=time_spent,
                      datetime=date,
                      **kwargs)
        event._project_cache = project

        # convert this to a dict to ensure we're only storing one value per key
        # as most parts of Sentry dont currently play well with multiple values
        tags = dict(data.get('tags') or [])
        tags['level'] = LOG_LEVELS[level]
        if logger_name:
            tags['logger'] = logger_name
        if server_name:
            tags['server_name'] = server_name
        if site:
            tags['site'] = site
        if environment:
            tags['environment'] = environment
        if transaction_name:
            tags['transaction'] = transaction_name

        if release:
            # dont allow a conflicting 'release' tag
            if 'release' in tags:
                del tags['release']
            # Replace the raw version string with the Release model instance.
            release = Release.get_or_create(
                project=project,
                version=release,
                date_added=date,
            )

            tags['sentry:release'] = release.version

        # A dist only makes sense when attached to a release.
        if dist and release:
            dist = release.add_dist(dist, date)
            tags['sentry:dist'] = dist.name
        else:
            dist = None

        event_user = self._get_event_user(project, data)
        if event_user:
            # dont allow a conflicting 'user' tag
            if 'user' in tags:
                del tags['user']
            tags['sentry:user'] = event_user.tag_value

        # At this point we want to normalize the in_app values in case the
        # clients did not set this appropriately so far.
        normalize_in_app(data)

        # Let plugins contribute extra tags; user-provided tags win on conflict.
        for plugin in plugins.for_project(project, version=None):
            added_tags = safe_execute(plugin.get_tags,
                                      event,
                                      _with_transaction=False)
            if added_tags:
                # plugins should not override user provided tags
                for key, value in added_tags:
                    tags.setdefault(key, value)

        # Interface-derived tags override anything set so far.
        for path, iface in six.iteritems(event.interfaces):
            for k, v in iface.iter_tags():
                tags[k] = v
            # Get rid of ephemeral interface data
            if iface.ephemeral:
                data.pop(iface.get_path(), None)

        # tags are stored as a tuple
        tags = tags.items()

        data['tags'] = tags
        data['fingerprint'] = fingerprint or ['{{ default }}']

        # prioritize fingerprint over checksum as its likely the client defaulted
        # a checksum whereas the fingerprint was explicit
        if fingerprint:
            hashes = [
                md5_from_hash(h)
                for h in get_hashes_from_fingerprint(event, fingerprint)
            ]
        elif checksum:
            if HASH_RE.match(checksum):
                hashes = [checksum]
            else:
                hashes = [md5_from_hash([checksum]), checksum]
            data['checksum'] = checksum
        else:
            hashes = [md5_from_hash(h) for h in get_hashes_for_event(event)]

        # TODO(dcramer): temp workaround for complexity
        data['message'] = message
        event_type = eventtypes.get(data.get('type', 'default'))(data)
        event_metadata = event_type.get_metadata()
        # TODO(dcramer): temp workaround for complexity
        del data['message']

        data['type'] = event_type.key
        data['metadata'] = event_metadata

        # index components into ``Event.message``
        # See GH-3248
        if event_type.key != 'default':
            if 'sentry.interfaces.Message' in data and \
                    data['sentry.interfaces.Message']['message'] != message:
                message = u'{} {}'.format(
                    message,
                    data['sentry.interfaces.Message']['message'],
                )

        if not message:
            message = ''
        elif not isinstance(message, six.string_types):
            message = force_text(message)

        # Append metadata values not already present so they're searchable.
        for value in six.itervalues(event_metadata):
            value_u = force_text(value, errors='replace')
            if value_u not in message:
                message = u'{} {}'.format(message, value_u)

        if culprit and culprit not in message:
            culprit_u = force_text(culprit, errors='replace')
            message = u'{} {}'.format(message, culprit_u)

        message = trim(message.strip(), settings.SENTRY_MAX_MESSAGE_LENGTH)

        event.message = message
        kwargs['message'] = message

        # NOTE(review): strftime('%s') is platform-dependent (non-standard C
        # directive) — presumably a local-epoch fallback; confirm intent.
        received_timestamp = event.data.get('received') or float(
            event.datetime.strftime('%s'))
        group_kwargs = kwargs.copy()
        group_kwargs.update({
            'culprit': culprit,
            'logger': logger_name,
            'level': level,
            'last_seen': date,
            'first_seen': date,
            'active_at': date,
            'data': {
                'last_received': received_timestamp,
                'type': event_type.key,
                # we cache the events metadata on the group to ensure its
                # accessible in the stream
                'metadata': event_metadata,
            },
        })

        if release:
            group_kwargs['first_release'] = release

        # Aggregate into a Group; HashDiscarded means the hash was filtered
        # out, in which case we record the discard and re-raise.
        try:
            group, is_new, is_regression, is_sample = self._save_aggregate(
                event=event, hashes=hashes, release=release, **group_kwargs)
        except HashDiscarded:
            event_discarded.send_robust(
                project=project,
                sender=EventManager,
            )

            metrics.incr(
                'events.discarded',
                skip_internal=True,
                tags={
                    'organization_id': project.organization_id,
                    'platform': platform,
                },
            )
            raise
        else:
            event_saved.send_robust(
                project=project,
                sender=EventManager,
            )

        event.group = group
        # store a reference to the group id to guarantee validation of isolation
        event.data.bind_ref(event)

        # When an event was sampled, the canonical source of truth
        # is the EventMapping table since we aren't going to be writing out an actual
        # Event row. Otherwise, if the Event isn't being sampled, we can safely
        # rely on the Event table itself as the source of truth and ignore
        # EventMapping since it's redundant information.
        if is_sample:
            try:
                with transaction.atomic(
                        using=router.db_for_write(EventMapping)):
                    EventMapping.objects.create(project=project,
                                                group=group,
                                                event_id=event_id)
            except IntegrityError:
                # Another worker already recorded this sampled event.
                self.logger.info('duplicate.found',
                                 exc_info=True,
                                 extra={
                                     'event_uuid': event_id,
                                     'project_id': project.id,
                                     'group_id': group.id,
                                     'model': EventMapping.__name__,
                                 })
                return event

        # Resolve the environment name to a model (handles None/empty names
        # per Environment.get_or_create semantics).
        environment = Environment.get_or_create(
            project=project,
            name=environment,
        )

        group_environment, is_new_group_environment = GroupEnvironment.get_or_create(
            group_id=group.id,
            environment_id=environment.id,
            defaults={
                'first_release_id': release.id if release else None,
            },
        )

        if release:
            ReleaseEnvironment.get_or_create(
                project=project,
                release=release,
                environment=environment,
                datetime=date,
            )

            ReleaseProjectEnvironment.get_or_create(
                project=project,
                release=release,
                environment=environment,
                datetime=date,
            )

            grouprelease = GroupRelease.get_or_create(
                group=group,
                release=release,
                environment=environment,
                datetime=date,
            )

        # Time-series counters for the group/project (and release if any).
        counters = [
            (tsdb.models.group, group.id),
            (tsdb.models.project, project.id),
        ]

        if release:
            counters.append((tsdb.models.release, release.id))

        tsdb.incr_multi(counters,
                        timestamp=event.datetime,
                        environment_id=environment.id)

        frequencies = [
            # (tsdb.models.frequent_projects_by_organization, {
            #     project.organization_id: {
            #         project.id: 1,
            #     },
            # }),
            # (tsdb.models.frequent_issues_by_project, {
            #     project.id: {
            #         group.id: 1,
            #     },
            # })
            (tsdb.models.frequent_environments_by_group, {
                group.id: {
                    environment.id: 1,
                },
            })
        ]

        if release:
            frequencies.append((tsdb.models.frequent_releases_by_group, {
                group.id: {
                    grouprelease.id: 1,
                },
            }))

        tsdb.record_frequency_multi(frequencies, timestamp=event.datetime)

        # Attach any user feedback submitted for this event id to the group.
        UserReport.objects.filter(
            project=project,
            event_id=event_id,
        ).update(
            group=group,
            environment=environment,
        )

        # save the event unless its been sampled
        if not is_sample:
            try:
                with transaction.atomic(using=router.db_for_write(Event)):
                    event.save()
            except IntegrityError:
                # Lost the race against another worker saving the same event.
                self.logger.info('duplicate.found',
                                 exc_info=True,
                                 extra={
                                     'event_uuid': event_id,
                                     'project_id': project.id,
                                     'group_id': group.id,
                                     'model': Event.__name__,
                                 })
                return event

            index_event_tags.delay(
                organization_id=project.organization_id,
                project_id=project.id,
                group_id=group.id,
                environment_id=environment.id,
                event_id=event.id,
                tags=tags,
                date_added=event.datetime,
            )

        if event_user:
            tsdb.record_multi(
                (
                    (tsdb.models.users_affected_by_group, group.id,
                     (event_user.tag_value, )),
                    (tsdb.models.users_affected_by_project, project.id,
                     (event_user.tag_value, )),
                ),
                timestamp=event.datetime,
                environment_id=environment.id,
            )
        if release:
            if is_new:
                buffer.incr(ReleaseProject, {'new_groups': 1}, {
                    'release_id': release.id,
                    'project_id': project.id,
                })
            if is_new_group_environment:
                buffer.incr(ReleaseProjectEnvironment, {'new_issues_count': 1},
                            {
                                'project_id': project.id,
                                'release_id': release.id,
                                'environment_id': environment.id,
                            })

        safe_execute(Group.objects.add_tags,
                     group,
                     environment,
                     tags,
                     _with_transaction=False)

        if not raw:
            if not project.first_event:
                project.update(first_event=date)
                first_event_received.send(project=project,
                                          group=group,
                                          sender=Project)

            post_process_group.delay(
                group=group,
                event=event,
                is_new=is_new,
                is_sample=is_sample,
                is_regression=is_regression,
                is_new_group_environment=is_new_group_environment,
                primary_hash=hashes[0],
            )
        else:
            self.logger.info('post_process.skip.raw_event',
                             extra={'event_id': event.id})

        # End-to-end ingest latency: client send time -> receive time.
        metrics.timing(
            'events.latency',
            received_timestamp - recorded_timestamp,
            tags={
                'project_id': project.id,
            },
        )

        return event