def test_notify_digest_subject_prefix(self):
    """Digest emails must honor the project's ``mail:subject_prefix`` option."""
    # Dropped the redundant u"" prefix: every str is unicode on Python 3
    # (the file already uses f-strings elsewhere).
    ProjectOption.objects.set_value(
        project=self.project, key="mail:subject_prefix", value="[Example prefix] "
    )
    event = self.store_event(
        data={"timestamp": iso_format(before_now(minutes=1)), "fingerprint": ["group-1"]},
        project_id=self.project.id,
    )
    event2 = self.store_event(
        data={"timestamp": iso_format(before_now(minutes=1)), "fingerprint": ["group-2"]},
        project_id=self.project.id,
    )
    rule = self.project.rule_set.all()[0]
    digest = build_digest(
        self.project,
        (event_to_record(event, (rule,)), event_to_record(event2, (rule,))),
    )
    with self.tasks():
        self.adapter.notify_digest(self.project, digest, ActionTargetType.ISSUE_OWNERS)
    assert len(mail.outbox) == 1
    msg = mail.outbox[0]
    assert msg.subject.startswith("[Example prefix]")
def run_test(self, key, digests):
    """ Simple integration test to make sure that digests are firing as expected. """
    backend = RedisBackend()
    rule = Rule.objects.create(project=self.project, label="Test Rule", data={})
    event = self.store_event(
        data={
            "timestamp": iso_format(before_now(days=1)),
            "fingerprint": ["group-1"]
        },
        project_id=self.project.id,
    )
    event_2 = self.store_event(
        data={
            "timestamp": iso_format(before_now(days=1)),
            "fingerprint": ["group-2"]
        },
        project_id=self.project.id,
    )
    # NOTE(review): this reassignment discards the caller-supplied ``key``
    # argument entirely — confirm whether that parameter is vestigial.
    key = f"mail:p:{self.project.id}"
    # Zero delays presumably make the digest immediately deliverable —
    # confirm against the digest backend's scheduling semantics.
    backend.add(key, event_to_record(event, [rule]), increment_delay=0, maximum_delay=0)
    backend.add(key, event_to_record(event_2, [rule]), increment_delay=0, maximum_delay=0)
    digests.digest = backend.digest
    with self.tasks():
        deliver_digest(key)
    assert "2 new alerts since" in mail.outbox[0].subject
def test_notify_digest(self, notify):
    """A two-event digest sends exactly one mail and skips per-event notify()."""
    project = self.project
    events = [
        self.store_event(
            data={"timestamp": iso_format(before_now(minutes=1)), "fingerprint": [fp]},
            project_id=project.id,
        )
        for fp in ("group-1", "group-2")
    ]
    rule = project.rule_set.all()[0]
    records = tuple(event_to_record(e, (rule,)) for e in events)
    digest = build_digest(project, records)
    with self.tasks():
        self.adapter.notify_digest(project, digest, ActionTargetType.ISSUE_OWNERS)
    assert notify.call_count == 0
    assert len(mail.outbox) == 1
    message = mail.outbox[0]
    assert "List-ID" in message.message()
def test_notify_digest(self, send_async, notify):
    """A multi-record digest goes through async send, not per-event notify."""
    project = self.event.project
    rule = project.rule_set.all()[0]
    digest = build_digest(
        project,
        (
            event_to_record(self.create_event(group=self.create_group()), (rule,)),
            event_to_record(self.event, (rule,)),
        ),
    )
    self.plugin.notify_digest(project, digest)
    # Fixed: ``is 1`` / ``is 0`` compared call counts by identity, which only
    # works via CPython's small-int cache and is a SyntaxWarning on 3.8+.
    assert send_async.call_count == 1
    assert notify.call_count == 0
def test_notify_digest(self, send, notify):
    """A multi-record digest is delivered via send(), not per-event notify()."""
    project = self.event.project
    rule = project.rule_set.all()[0]
    digest = build_digest(
        project,
        (
            event_to_record(self.create_event(group=self.create_group()), (rule,)),
            event_to_record(self.event, (rule,)),
        ),
    )
    self.plugin.notify_digest(project, digest)
    # Fixed: identity comparison (``is``) on integers replaced with ``==``.
    assert send.call_count == 1
    assert notify.call_count == 0
def rule_notify(self, event, futures):
    """Dispatch a rule-triggered notification for *event*.

    Collects the rules from *futures* (per-future kwargs are unsupported and
    raise ``NotImplementedError``), then either records the event into the
    project's digest backend — scheduling delivery when ``digests.add``
    reports immediate delivery — or calls ``self.notify`` directly.
    """
    rules = []
    for future in futures:
        rules.append(future.rule)
        if not future.kwargs:
            continue
        raise NotImplementedError(
            'The default behavior for notification de-duplication does not support args'
        )

    project = event.group.project
    if hasattr(self, 'notify_digest') and digests.enabled(project):
        # PEP 8 (E731): use a def instead of binding a lambda to a name.
        def get_digest_option(key):
            return ProjectOption.objects.get_value(
                project,
                get_digest_option_key(self.get_conf_key(), key),
            )

        digest_key = unsplit_key(self, event.group.project)
        immediate_delivery = digests.add(
            digest_key,
            event_to_record(event, rules),
            increment_delay=get_digest_option('increment_delay'),
            maximum_delay=get_digest_option('maximum_delay'),
        )
        if immediate_delivery:
            deliver_digest.delay(digest_key)
    else:
        notification = Notification(event=event, rules=rules)
        self.notify(notification)
def rule_notify(self, event, futures):
    """Dispatch a rule-triggered notification for *event*.

    Gathers rules from *futures* (kwargs are unsupported), then records the
    event into the digest backend when digests are enabled for the project,
    otherwise notifies immediately.
    """
    rules = []
    for future in futures:
        rules.append(future.rule)
        if not future.kwargs:
            continue
        raise NotImplementedError('The default behavior for notification de-duplication does not support args')

    project = event.group.project
    if hasattr(self, 'notify_digest') and digests.enabled(project):
        # PEP 8 (E731): a named def replaces the lambda assignment.
        def get_digest_option(key):
            return ProjectOption.objects.get_value(
                project,
                get_digest_option_key(self.get_conf_key(), key),
            )

        digest_key = unsplit_key(self, event.group.project)
        immediate_delivery = digests.add(
            digest_key,
            event_to_record(event, rules),
            increment_delay=get_digest_option('increment_delay'),
            maximum_delay=get_digest_option('maximum_delay'),
        )
        if immediate_delivery:
            deliver_digest.delay(digest_key)
    else:
        notification = Notification(
            event=event,
            rules=rules,
        )
        self.notify(notification)
def rule_notify(self, event, futures):
    """Dispatch a rule notification, feature-flag-gated between immediate
    delivery and digest storage.

    When the plugin supports digests: if the ``deliver`` flag is off the
    notification is sent immediately (after a rate-limit check), and if the
    ``store`` flag is on the event is also recorded into the digest backend.
    Plugins without digest support always notify immediately.
    """
    rules = []
    for future in futures:
        rules.append(future.rule)
        if not future.kwargs:
            continue
        raise NotImplementedError('The default behavior for notification de-duplication does not support args')

    if hasattr(self, 'notify_digest'):
        project = event.group.project

        # If digest delivery is disabled, we still need to send a
        # notification -- we also need to check rate limits, since
        # ``should_notify`` skips this step if the plugin supports digests.
        if not features.has('projects:digests:deliver', project):
            if self.__is_rate_limited(event.group, event):
                logger = logging.getLogger('sentry.plugins.{0}'.format(self.get_conf_key()))
                logger.info('Notification for project %r dropped due to rate limiting', project)
                return

            notification = Notification(event=event, rules=rules)
            self.notify(notification)

        # Store into the digest regardless of whether immediate delivery ran.
        if features.has('projects:digests:store', project):
            key = unsplit_key(self, event.group.project)
            # ``add`` returning truthy schedules an asynchronous delivery.
            if digests.add(key, event_to_record(event, rules)):
                deliver_digest.delay(key)
    else:
        notification = Notification(event=event, rules=rules)
        self.notify(notification)
def test_notify_digest_single_record(self, send_async, notify):
    """A digest that collapses to one record falls back to the notify path."""
    single_event = self.store_event(data={}, project_id=self.project.id)
    rule = self.project.rule_set.all()[0]
    record = event_to_record(single_event, (rule,))
    digest = build_digest(self.project, (record,))
    self.plugin.notify_digest(self.project, digest)
    assert send_async.call_count == 1
    assert notify.call_count == 1
def test_digest_enabled(self, digests, mock_func):
    """With digests enabled but only Slack notification settings configured
    (no email settings), delivery goes out as a Slack notification."""
    backend = RedisBackend()
    digests.digest = backend.digest
    digests.enabled.return_value = True

    triggered_rule = Rule.objects.create(project=self.project, label="my rule")
    slack_event = self.store_event(
        data={"message": "Hello world", "level": "error"}, project_id=self.project.id
    )
    digest_key = f"mail:p:{self.project.id}"
    backend.add(
        digest_key,
        event_to_record(slack_event, [triggered_rule]),
        increment_delay=0,
        maximum_delay=0,
    )

    with self.tasks():
        deliver_digest(digest_key)

    assert digests.call_count == 0
    attachment, text = get_attachment()
    assert attachment["title"] == "Hello world"
    assert attachment["text"] == ""
def test_notify_digest_single_record(self, send_async, notify):
    """A one-record digest is routed through the plain notify path."""
    lone_event = self.store_event(data={}, project_id=self.project.id)
    rule = self.project.rule_set.all()[0]
    digest = build_digest(self.project, (event_to_record(lone_event, (rule,)),))
    self.adapter.notify_digest(self.project, digest, ActionTargetType.ISSUE_OWNERS)
    assert send_async.call_count == 1
    assert notify.call_count == 1
def test_get_event_from_groups_in_digest(self):
    """Only the newest event per group is surfaced from a digest."""
    project = self.create_project(fire_project_created=True)
    rule = project.rule_set.all()[0]
    # (fingerprint, age in minutes) — group1 appears twice; the older one
    # should be superseded.
    specs = [
        ("group1", 2),
        ("group1", 1),
        ("group2", 1),
        ("group3", 1),
        ("group4", 1),
        ("group5", 1),
    ]
    events = [
        self.store_event(
            data={"fingerprint": [fp], "timestamp": iso_format(before_now(minutes=age))},
            project_id=project.id,
        )
        for fp, age in specs
    ]
    digest = build_digest(
        project, sort_records([event_to_record(e, (rule,)) for e in events])
    )
    expected = events[1:]  # drop the older duplicate of group1
    digest_ids = {e.event_id for e in get_event_from_groups_in_digest(digest)}
    assert digest_ids == {e.event_id for e in expected}
def test_notify_digest(self, has, send, notify):
    """With the deliver feature flag on, digests go through send(), not notify()."""
    has.side_effect = lambda label, *a, **k: {
        'projects:digests:deliver': True,
    }.get(label, False)
    project = self.event.project
    rule = project.rule_set.all()[0]
    digest = build_digest(
        project,
        (
            event_to_record(self.create_event(group=self.create_group()), (rule,)),
            event_to_record(self.event, (rule,)),
        ),
    )
    self.plugin.notify_digest(project, digest)
    # Fixed: ``is`` identity checks on ints (SyntaxWarning on 3.8+) → ``==``.
    assert send.call_count == 1
    assert notify.call_count == 0
def test_notify_digest_subject_prefix(self):
    """The digest email subject carries the configured prefix."""
    project = self.event.project
    rule = project.rule_set.all()[0]
    extra_event = self.create_event(group=self.create_group())
    digest = build_digest(
        project,
        (event_to_record(extra_event, (rule,)), event_to_record(self.event, (rule,))),
    )
    self.plugin.notify_digest(project, digest)
    assert len(mail.outbox) == 1
    assert mail.outbox[0].subject.startswith('[Example prefix] [foo Bar]')
def test_notify_digest_single_record(self, send_async, notify):
    """A one-record digest is delivered via the plain notify path."""
    project = self.event.project
    rule = project.rule_set.all()[0]
    only_record = event_to_record(self.event, (rule,))
    digest = build_digest(project, (only_record,))
    self.plugin.notify_digest(project, digest)
    assert send_async.call_count == 1
    assert notify.call_count == 1
def add_event(self, fingerprint: str, backend: Backend) -> None:
    """Store a day-old event with *fingerprint* and push it into *backend*."""
    stored = self.store_event(
        data={"timestamp": iso_format(before_now(days=1)), "fingerprint": [fingerprint]},
        project_id=self.project.id,
    )
    record = event_to_record(stored, [self.rule])
    backend.add(self.key, record, increment_delay=0, maximum_delay=0)
def test_notify_digest_user_does_not_exist(self, notify):
    """Test that in the event a rule has been created with an action to send to a
    user who no longer exists, we don't blow up when getting users in get_send_to
    """
    project = self.project
    first, second = [
        self.store_event(
            data={"timestamp": iso_format(before_now(minutes=1)), "fingerprint": [fp]},
            project_id=project.id,
        )
        for fp in ("group-1", "group-2")
    ]
    missing_user_action = {
        "id": "sentry.mail.actions.NotifyEmailAction",
        "targetType": "Member",
        "targetIdentifier": str(444),
    }
    rule = Rule.objects.create(
        project=self.project,
        label="a rule",
        data={"match": "all", "actions": [missing_user_action]},
    )
    digest = build_digest(
        project,
        (event_to_record(first, (rule,)), event_to_record(second, (rule,))),
    )
    with self.tasks():
        self.adapter.notify_digest(project, digest, ActionTargetType.MEMBER, 444)
    assert notify.call_count == 0
    assert len(mail.outbox) == 0
def test_notify_digest(self, has, send, notify):
    """Digest delivery uses send() (not notify()) when the deliver flag is on."""
    has.side_effect = lambda label, *a, **k: {
        'projects:digests:deliver': True,
    }.get(label, False)
    project = self.event.project
    rule = project.rule_set.all()[0]
    digest = build_digest(
        project,
        (
            event_to_record(self.create_event(group=self.create_group()), (rule,)),
            event_to_record(self.event, (rule,)),
        ),
    )
    self.plugin.notify_digest(project, digest)
    # Fixed: identity comparison (``is``) on ints replaced with equality.
    assert send.call_count == 1
    assert notify.call_count == 0
def test_notify_digest_subject_prefix(self):
    """Digest mail subject begins with the configured project prefix."""
    project = self.event.project
    rule = project.rule_set.all()[0]
    other_event = self.create_event(group=self.create_group())
    records = (
        event_to_record(other_event, (rule,)),
        event_to_record(self.event, (rule,)),
    )
    digest = build_digest(project, records)
    self.plugin.notify_digest(project, digest)
    assert len(mail.outbox) == 1
    message = mail.outbox[0]
    assert message.subject.startswith('[Example prefix] [foo Bar]')
def test_direct_email(self):
    """With fallthrough disabled, only the targeted member gets the digest."""
    self.project_ownership.update(fallthrough=False)
    rule = self.project.rule_set.all()[0]
    sorted_records = sort_records(
        [event_to_record(e, (rule,)) for e in self.team1_events]
    )
    digest = build_digest(self.project, sorted_records)
    self.assert_get_personalized_digests(
        self.project,
        digest,
        [self.user1.id],
        {self.user1.id: set(self.team1_events)},
        ActionTargetType.MEMBER,
    )
def test_notify_digest(self, notify):
    """A multi-record digest sends one mail without going through notify()."""
    project = self.event.project
    rule = project.rule_set.all()[0]
    digest = build_digest(
        project,
        (
            event_to_record(self.create_event(group=self.create_group()), (rule,)),
            event_to_record(self.event, (rule,)),
        ),
    )
    with self.tasks():
        self.plugin.notify_digest(project, digest)
    # Fixed: ``is 0`` compared the call count by identity (relies on
    # CPython's small-int cache; SyntaxWarning on 3.8+) — use ``==``.
    assert notify.call_count == 0
    assert len(mail.outbox) == 1
    message = mail.outbox[0]
    assert 'List-ID' in message.message()
def test_notify_digest_single_record(self, send_async, notify):
    """A digest reduced to a single record uses the normal notify path."""
    project = self.event.project
    rule = project.rule_set.all()[0]
    digest = build_digest(
        project,
        (event_to_record(self.event, (rule,)),),
    )
    self.plugin.notify_digest(project, digest)
    # Fixed: identity comparison (``is``) on ints replaced with ``==``.
    assert send_async.call_count == 1
    assert notify.call_count == 1
def test_direct_email(self):
    """When the action type is not Issue Owners, then the target actor gets a digest."""
    self.project_ownership.update(fallthrough=False)
    rule = self.project.rule_set.all()[0]
    team1_records = [event_to_record(e, (rule,)) for e in self.team1_events]
    digest = build_digest(self.project, sort_records(team1_records))[0]
    want = {self.user1.actor_id: set(self.team1_events)}
    assert_get_personalized_digests(
        self.project, digest, want, ActionTargetType.MEMBER, self.user1.id
    )
def rule_notify(
    self,
    event: Any,
    futures: Sequence[Any],
    target_type: ActionTargetType,
    target_identifier: Optional[int] = None,
) -> None:
    """Dispatch a mail notification for a rule-triggered *event*.

    Collects rules from *futures* (per-future kwargs are unsupported and
    raise ``NotImplementedError``), then either records the event into the
    project digest keyed by target, or — when digests are disabled — sends
    the notification immediately. Emits a metrics counter and a structured
    log line describing the outcome ("dispatched" vs "digested").
    """
    metrics.incr("mail_adapter.rule_notify")
    rules = []
    extra = {
        "event_id": event.event_id,
        "group_id": event.group_id,
        "is_from_mail_action_adapter": True,
        "target_type": target_type.value,
        "target_identifier": target_identifier,
    }
    log_event = "dispatched"
    for future in futures:
        rules.append(future.rule)
        extra["rule_id"] = future.rule.id
        if not future.kwargs:
            continue
        raise NotImplementedError(
            "The default behavior for notification de-duplication does not support args"
        )

    project = event.group.project
    extra["project_id"] = project.id
    if digests.enabled(project):

        def get_digest_option(key):
            # Digest tuning (delays) is stored as per-project options.
            return ProjectOption.objects.get_value(
                project, get_digest_option_key("mail", key))

        digest_key = unsplit_key(event.group.project, target_type, target_identifier)
        extra["digest_key"] = digest_key
        # Truthy return from ``add`` means this digest should be sent now.
        immediate_delivery = digests.add(
            digest_key,
            event_to_record(event, rules),
            increment_delay=get_digest_option("increment_delay"),
            maximum_delay=get_digest_option("maximum_delay"),
        )
        if immediate_delivery:
            deliver_digest.delay(digest_key)
        else:
            log_event = "digested"
    else:
        notification = Notification(event=event, rules=rules)
        self.notify(notification, target_type, target_identifier)

    logger.info("mail.adapter.notification.%s" % log_event, extra=extra)
def test_simple(self):
    """Each user's personalized digest contains exactly the events they own."""
    rule = self.project.rule_set.all()[0]
    all_events = self.team1_events + self.team2_events + self.user4_events
    digest = build_digest(
        self.project, sort_records([event_to_record(e, (rule,)) for e in all_events])
    )
    expected = {
        self.user1.id: set(self.team1_events),
        self.user2.id: set(self.team2_events),
        self.user3.id: set(self.team1_events + self.team2_events),
        self.user4.id: set(self.user4_events),
    }
    self.assert_get_personalized_digests(self.project, digest, self.user_ids, expected)
def rule_notify(self, event, futures): from sentry.models import ProjectOption # Django 1.9 setup issue rules = [] extra = { 'event_id': event.id, 'group_id': event.group_id, 'plugin': self.slug, } log_event = 'dispatched' for future in futures: rules.append(future.rule) extra['rule_id'] = future.rule.id if not future.kwargs: continue raise NotImplementedError( 'The default behavior for notification de-duplication does not support args' ) project = event.group.project extra['project_id'] = project.id if hasattr(self, 'notify_digest') and digests.enabled(project): def get_digest_option(key): return ProjectOption.objects.get_value( project, get_digest_option_key(self.get_conf_key(), key), ) digest_key = unsplit_key(self, event.group.project) extra['digest_key'] = digest_key immediate_delivery = digests.add( digest_key, event_to_record(event, rules), increment_delay=get_digest_option('increment_delay'), maximum_delay=get_digest_option('maximum_delay'), ) if immediate_delivery: deliver_digest.delay(digest_key) else: log_event = 'digested' else: notification = Notification( event=event, rules=rules, ) self.notify(notification) self.logger.info('notification.%s' % log_event, extra=extra)
def test_simple(self):
    """Personalized digests map each user to the events their teams own."""
    rule = self.project.rule_set.all()[0]
    combined_events = self.team1_events + self.team2_events + self.user4_events
    records = sort_records([event_to_record(e, (rule,)) for e in combined_events])
    digest = build_digest(self.project, records)
    want = {
        self.user1.id: set(self.team1_events),
        self.user2.id: set(self.team2_events),
        self.user3.id: set(self.team1_events + self.team2_events),
        self.user4.id: set(self.user4_events),
    }
    self.assert_get_personalized_digests(self.project, digest, self.user_ids, want)
def rule_notify(self, event, futures):
    """Queue the event into a digest when possible, else notify immediately."""
    rules = []
    for future in futures:
        rules.append(future.rule)
        if future.kwargs:
            raise NotImplementedError('The default behavior for notification de-duplication does not support args')
    if self.__can_be_digested(event):
        digest_key = unsplit_key(self, event.group.project)
        first_record = digests.add(digest_key, event_to_record(event, rules))
        if first_record:
            deliver_digest.delay(digest_key)
    else:
        self.notify(Notification(event=event, rules=rules))
def test_everyone_with_owners(self):
    """Ownership-matched events go to everyone; team events only to members."""
    rule = self.project.rule_set.all()[0]
    moz_files = ['hello.moz', 'goodbye.moz', 'hola.moz', 'adios.moz']
    events = self.create_events(timezone.now(), self.project, moz_files)
    combined = events + self.team1_events
    digest = build_digest(
        self.project, sort_records([event_to_record(e, (rule,)) for e in combined])
    )
    expected = {
        self.user1.id: set(combined),
        self.user2.id: set(events),
        self.user3.id: set(combined),
        self.user4.id: set(events),
        self.user5.id: set(events),
    }
    self.assert_get_personalized_digests(self.project, digest, self.user_ids, expected)
def test_only_everyone(self):
    """Events matching the catch-all ownership rule reach every user."""
    rule = self.project.rule_set.all()[0]
    events = self.create_events_from_filenames(
        self.project, ["hello.moz", "goodbye.moz", "hola.moz", "adios.moz"]
    )
    digest = build_digest(
        self.project, sort_records([event_to_record(e, (rule,)) for e in events])
    )[0]
    everyone = set(events)
    expected = {
        user.actor_id: everyone
        for user in (self.user1, self.user2, self.user3, self.user4, self.user5)
    }
    assert_get_personalized_digests(self.project, digest, expected)
def rule_notify(self, event, futures):
    """Dispatch a rule-triggered notification for *event*.

    If the plugin supports digests and digests are enabled for the project,
    the event is recorded into the digest backend (scheduling delivery when
    ``digests.add`` reports immediate delivery); otherwise ``self.notify``
    is invoked directly. Per-future kwargs are unsupported. The outcome
    ("dispatched" vs "digested") is logged with structured context.
    """
    rules = []
    extra = {
        'event_id': event.id,
        'group_id': event.group_id,
        'plugin': self.slug,
    }
    log_event = 'dispatched'
    for future in futures:
        rules.append(future.rule)
        extra['rule_id'] = future.rule.id
        if not future.kwargs:
            continue
        raise NotImplementedError(
            'The default behavior for notification de-duplication does not support args'
        )

    project = event.group.project
    extra['project_id'] = project.id
    if hasattr(self, 'notify_digest') and digests.enabled(project):
        def get_digest_option(key):
            # Per-plugin digest tuning stored as project options.
            return ProjectOption.objects.get_value(
                project,
                get_digest_option_key(self.get_conf_key(), key),
            )
        digest_key = unsplit_key(self, event.group.project)
        extra['digest_key'] = digest_key
        # Truthy return from ``add`` means the digest should be sent now.
        immediate_delivery = digests.add(
            digest_key,
            event_to_record(event, rules),
            increment_delay=get_digest_option('increment_delay'),
            maximum_delay=get_digest_option('maximum_delay'),
        )
        if immediate_delivery:
            deliver_digest.delay(digest_key)
        else:
            log_event = 'digested'
    else:
        notification = Notification(
            event=event,
            rules=rules,
        )
        self.notify(notification)
    self.logger.info('notification.%s' % log_event, extra=extra)
def test_only_everyone(self):
    """All users receive events matched only by the catch-all ownership rule."""
    rule = self.project.rule_set.all()[0]
    events = self.create_events(
        timezone.now(), self.project, ["hello.moz", "goodbye.moz", "hola.moz", "adios.moz"]
    )
    records = sort_records([event_to_record(e, (rule,)) for e in events])
    digest = build_digest(self.project, records)
    shared = set(events)
    expected = {
        user.id: shared
        for user in (self.user1, self.user2, self.user3, self.user4, self.user5)
    }
    self.assert_get_personalized_digests(self.project, digest, self.user_ids, expected)
def rule_notify(self, event, futures):
    """Record digestible events into the digest backend; notify directly otherwise."""
    rules = []
    for future in futures:
        rules.append(future.rule)
        if future.kwargs:
            raise NotImplementedError('The default behavior for notification de-duplication does not support args')
    if self.__can_be_digested(event):
        # TODO: Improve this abstraction.
        digests.add(
            unsplit_key(self, event.group.project),
            event_to_record(event, rules),
        )
    else:
        self.notify(Notification(event=event, rules=rules))
def test_everyone_with_owners(self):
    """Ownership-rule events reach all users; team events reach team members."""
    rule = self.project.rule_set.all()[0]
    events = self.create_events_from_filenames(
        self.project, ["hello.moz", "goodbye.moz", "hola.moz", "adios.moz"]
    )
    combined = events + self.team1_events
    digest = build_digest(
        self.project, sort_records([event_to_record(e, (rule,)) for e in combined])
    )[0]
    expected = {
        self.user1.actor_id: set(combined),
        self.user2.actor_id: set(events),
        self.user3.actor_id: set(combined),
        self.user4.actor_id: set(events),
        self.user5.actor_id: set(events),
    }
    with self.feature("organizations:notification-all-recipients"):
        assert_get_personalized_digests(self.project, digest, expected)
def rule_notify(self, event, futures):
    """Send a rule notification, feature-flag-gated between immediate delivery
    and digest storage.

    When digest delivery is disabled the notification is rate-limit checked
    and sent immediately; when digest storage is enabled the event is also
    recorded and a delivery task scheduled if ``digests.add`` reports
    immediate delivery. Plugins without digest support always notify directly.
    """
    rules = []
    for future in futures:
        rules.append(future.rule)
        if not future.kwargs:
            continue
        raise NotImplementedError(
            'The default behavior for notification de-duplication does not support args'
        )
    if hasattr(self, 'notify_digest'):
        project = event.group.project

        # If digest delivery is disabled, we still need to send a
        # notification -- we also need to check rate limits, since
        # ``should_notify`` skips this step if the plugin supports digests.
        if not features.has('projects:digests:deliver', project):
            if self.__is_rate_limited(event.group, event):
                logger = logging.getLogger('sentry.plugins.{0}'.format(
                    self.get_conf_key()))
                logger.info(
                    'Notification for project %r dropped due to rate limiting',
                    project)
                return
            notification = Notification(event=event, rules=rules)
            self.notify(notification)

        if features.has('projects:digests:store', project):
            # PEP 8 (E731): a def instead of a lambda bound to a name.
            def get_digest_option(key):
                return ProjectOption.objects.get_value(
                    project,
                    '{0}:digests:{1}'.format(self.get_conf_key(), key),
                )

            digest_key = unsplit_key(self, event.group.project)
            immediate_delivery = digests.add(
                digest_key,
                event_to_record(event, rules),
                increment_delay=get_digest_option('increment_delay'),
                maximum_delay=get_digest_option('maximum_delay'),
            )
            if immediate_delivery:
                deliver_digest.delay(digest_key)
    else:
        notification = Notification(event=event, rules=rules)
        self.notify(notification)
def test_simple(self):
    """Each actor's digest holds exactly the events owned by their teams."""
    rule = self.project.rule_set.all()[0]
    combined = self.team1_events + self.team2_events + self.user4_events
    records = sort_records([event_to_record(e, (rule,)) for e in combined])
    digest = build_digest(self.project, records)[0]
    want = {
        self.user1.actor_id: set(self.team1_events),
        self.user2.actor_id: set(self.team2_events),
        self.user3.actor_id: set(self.team1_events + self.team2_events),
        self.user4.actor_id: set(self.user4_events),
    }
    with self.feature("organizations:notification-all-recipients"):
        assert_get_personalized_digests(self.project, digest, want)
def rule_notify(self, event, futures):
    """Add the event to a digest when possible; otherwise notify right away."""
    rules = []
    for future in futures:
        rules.append(future.rule)
        if future.kwargs:
            raise NotImplementedError(
                'The default behavior for notification de-duplication does not support args'
            )
    if self.__can_be_digested(event):
        digest_key = unsplit_key(self, event.group.project)
        should_deliver = digests.add(digest_key, event_to_record(event, rules))
        if should_deliver:
            deliver_digest.delay(digest_key)
    else:
        self.notify(Notification(event=event, rules=rules))
def rule_notify(self, event, futures):
    """Dispatch a mail notification for a rule-triggered *event*.

    Collects rules from *futures* (per-future kwargs are unsupported and
    raise ``NotImplementedError``), then either records the event into the
    project's digest backend or — when digests are disabled — notifies
    immediately. Logs the outcome ("dispatched" vs "digested").
    """
    rules = []
    extra = {
        "event_id": event.event_id,
        "group_id": event.group_id,
        "plugin": "mail"
    }
    log_event = "dispatched"
    for future in futures:
        rules.append(future.rule)
        extra["rule_id"] = future.rule.id
        if not future.kwargs:
            continue
        raise NotImplementedError(
            "The default behavior for notification de-duplication does not support args"
        )

    project = event.group.project
    extra["project_id"] = project.id
    if digests.enabled(project):

        def get_digest_option(key):
            # Digest tuning (delays) is stored as per-project options.
            return ProjectOption.objects.get_value(
                project, get_digest_option_key("mail", key))

        digest_key = unsplit_key(self, event.group.project)
        extra["digest_key"] = digest_key
        # Truthy return from ``add`` means this digest should be sent now.
        immediate_delivery = digests.add(
            digest_key,
            event_to_record(event, rules),
            increment_delay=get_digest_option("increment_delay"),
            maximum_delay=get_digest_option("maximum_delay"),
        )
        if immediate_delivery:
            deliver_digest.delay(digest_key)
        else:
            log_event = "digested"
    else:
        notification = Notification(event=event, rules=rules)
        self.notify(notification)

    logger.info("mail.notification.%s" % log_event, extra=extra)
def test_get_event_from_groups_in_digest(self):
    """Only one event per group is surfaced from the digest."""
    project = self.create_project()
    rule = project.rule_set.all()[0]
    duplicated_group = self.create_group(project=project)
    events = [self.create_event(group=duplicated_group)]
    events.append(self.create_event(group=duplicated_group))
    for _ in range(4):
        events.append(self.create_event(group=self.create_group(project=project)))
    digest = build_digest(
        project, sort_records([event_to_record(e, (rule,)) for e in events])
    )
    expected = set(events[1:])  # the older duplicate of the shared group drops out
    assert get_event_from_groups_in_digest(digest) == expected
def rule_notify(self, event, futures):
    """Store digestible events in the digest backend; notify directly otherwise."""
    rules = []
    for future in futures:
        rules.append(future.rule)
        if future.kwargs:
            raise NotImplementedError(
                'The default behavior for notification de-duplication does not support args'
            )
    if self.__can_be_digested(event):
        # TODO: Improve this abstraction.
        digest_key = unsplit_key(self, event.group.project)
        digests.add(digest_key, event_to_record(event, rules))
    else:
        self.notify(Notification(event=event, rules=rules))
def test_get_event_from_groups_in_digest(self):
    """The digest exposes a single event per group, dropping older duplicates."""
    project = self.create_project()
    rule = project.rule_set.all()[0]
    shared_group = self.create_group(project=project)
    events = [
        self.create_event(group=shared_group),
        self.create_event(group=shared_group),
    ] + [
        self.create_event(group=self.create_group(project=project))
        for _ in range(4)
    ]
    records = sort_records([event_to_record(e, (rule,)) for e in events])
    digest = build_digest(project, records)
    surviving = set(events[1:])  # first event of the shared group is superseded
    assert get_event_from_groups_in_digest(digest) == surviving
def test_team_without_members(self):
    """A team with no members yields no personalized digests at all."""
    empty_team = self.create_team()
    project = self.create_project(teams=[empty_team], fire_project_created=True)
    ProjectOwnership.objects.create(
        project_id=project.id,
        schema=dump_schema(
            [Rule(Matcher("path", "*.cpp"), [Owner("team", empty_team.slug)])]
        ),
        fallthrough=True,
    )
    rule = project.rule_set.all()[0]
    events = self.create_events_from_filenames(
        project, ["hello.py", "goodbye.py", "hola.py", "adios.py"]
    )
    digest = build_digest(
        project, sort_records([event_to_record(e, (rule,)) for e in events])
    )
    member_ids = [member.user_id for member in empty_team.member_set]
    assert not member_ids
    # No users in this team, so no digests should be processed.
    produced = list(get_personalized_digests(project.id, digest, member_ids))
    assert produced == []
def test_team_without_members(self):
    """No personalized digests are produced for a memberless team."""
    empty_team = self.create_team()
    project = self.create_project(teams=[empty_team])
    ProjectOwnership.objects.create(
        project_id=project.id,
        schema=dump_schema(
            [Rule(Matcher('path', '*.cpp'), [Owner('team', empty_team.slug)])]
        ),
        fallthrough=True,
    )
    rule = project.rule_set.all()[0]
    events = self.create_events(
        timezone.now(), project, ['hello.py', 'goodbye.py', 'hola.py', 'adios.py']
    )
    digest = build_digest(
        project, sort_records([event_to_record(e, (rule,)) for e in events])
    )
    member_ids = [member.user_id for member in empty_team.member_set]
    assert not member_ids
    # No users in this team, so no digests should be processed.
    produced = list(get_personalized_digests(project.id, digest, member_ids))
    assert produced == []
def record(self):
    """Build a digest record pairing ``self.event`` with ``self.rule``."""
    rule_tuple = (self.rule,)
    return event_to_record(self.event, rule_tuple)