def _run_publishers(alert):
    """Runs publishers for all currently configured outputs on the given alert

    Args:
        alert (Alert): The alert

    Returns:
        dict: A dict keyed by output:descriptor strings, mapped to nested dicts.
            The nested dicts have 2 keys:
            - publication (dict): The dict publication
            - success (bool): True if the publishing finished, False if it errored.
    """
    results = {}
    for configured_output in alert.outputs:
        output_name, descriptor = configured_output.split(':')
        try:
            # Stub out the dispatcher; only its service name matters to the publishers
            dispatcher = MagicMock(spec=OutputDispatcher, __service__=output_name)
            results[configured_output] = {
                'publication': compose_alert(alert, dispatcher, descriptor),
                'success': True,
            }
        except (RuntimeError, TypeError, NameError) as err:
            results[configured_output] = {
                'success': False,
                'error': err,
            }
    return results
def _dispatch(self, alert, descriptor):
    """Send alert to an SQS queue

    Publishing:
        By default it sends the alert.record to SQS as a JSON string. You can
        override it with the following fields:
        - @aws-sqs.message_data (dict):
                Replace alert.record with your own JSON-serializable dict. Will
                send this as a JSON string to SQS.

    Args:
        alert (Alert): Alert instance which triggered a rule
        descriptor (str): Output descriptor

    Returns:
        bool: True if alert was sent successfully, False otherwise
    """
    queue_name = self.config[self.__service__][descriptor]
    queue = boto3.resource('sqs', region_name=self.region).get_queue_by_name(
        QueueName=queue_name)

    publication = compose_alert(alert, self, descriptor)

    # Let the publication override the default payload (the raw alert record)
    message_data = publication.get('@aws-sqs.message_data', alert.record)

    # SQS bodies are strings, so serialize the dict compactly
    queue.send_message(MessageBody=json.dumps(message_data, separators=(',', ':')))

    return True
def test_pretty_print_arrays():
    """Publishers - PagerDuty - PrettyPrintArrays"""
    alert = get_alert(
        context={'populate_fields': ['publishers', 'cb_server', 'staged']})
    alert.created = datetime(2019, 1, 1)
    alert.publishers = {
        'pagerduty': [
            'stream_alert.shared.publisher.DefaultPublisher',
            'publishers.community.generic.populate_fields',
            'publishers.community.pagerduty.pagerduty_layout.PrettyPrintArrays'
        ]
    }
    dispatcher = MagicMock(spec=OutputDispatcher)
    dispatcher.__service__ = 'pagerduty'

    publication = compose_alert(alert, dispatcher, 'unit_test_channel')

    # PrettyPrintArrays joins array entries with a horizontal-rule separator
    joined_publishers = (
        'stream_alert.shared.publisher.DefaultPublisher\n\n----------\n\n'
        'publishers.community.generic.populate_fields\n\n----------\n\n'
        'publishers.community.pagerduty.pagerduty_layout.PrettyPrintArrays'
    )
    assert_equal(publication, {
        'publishers': [{'pagerduty': joined_publishers}],
        'staged': 'False',
        'cb_server': 'cbserver',
    })
def _dispatch(self, alert, descriptor):
    """Send alert to an SNS topic

    Args:
        alert (Alert): Alert instance which triggered a rule
        descriptor (str): Output descriptor

    Returns:
        bool: True if alert was sent successfully, False otherwise
    """
    # SNS topics can only be accessed via their ARN
    topic_name = self.config[self.__service__][descriptor]
    topic_arn = 'arn:aws:sns:{}:{}:{}'.format(self.region, self.account_id, topic_name)
    topic = boto3.resource('sns', region_name=self.region).Topic(topic_arn)

    publication = compose_alert(alert, self, descriptor)

    # Presentation defaults
    default_subject = '{} triggered alert {}'.format(alert.rule_name, alert.alert_id)
    default_message = json.dumps(publication, indent=2, sort_keys=True)

    # SNS subjects must be under 100 characters; elide the middle to fit.
    # NOTE(review): the subject override key is '@aws-sns.topic' — confirm this
    # key name is intentional (it overrides the subject, not the topic).
    subject = elide_string_middle(publication.get('@aws-sns.topic', default_subject), 99)
    message = publication.get('@aws-sns.message', default_message)

    topic.publish(Message=message, Subject=subject)

    return True
def test_attach_image():
    """Publishers - PagerDuty - AttachImage"""
    alert = get_alert()
    alert.created = datetime(2019, 1, 1)
    alert.publishers = {
        'pagerduty': ['publishers.community.pagerduty.pagerduty_layout.AttachImage']
    }
    dispatcher = MagicMock(spec=OutputDispatcher)
    dispatcher.__service__ = 'pagerduty'

    publication = compose_alert(alert, dispatcher, 'unit_test_channel')

    banner_url = 'https://streamalert.io/en/stable/_images/sa-banner.png'
    assert_equal(publication, {
        '@pagerduty-v2.images': [{
            'src': banner_url,
            'alt': 'StreamAlert Docs',
            'href': 'https://streamalert.io/en/stable/'
        }],
        '@pagerduty.contexts': [{
            'src': banner_url,
            'type': 'image'
        }],
    })
def test_default_publisher(self):
    """AlertPublisher - DefaultPublisher - Positive Case"""
    publication = compose_alert(self._alert, self._output, 'test')

    # The default publisher surfaces the full alert, record included
    expected_record = {
        'compressed_size': '9982',
        'node_id': '1',
        'cb_server': 'cbserver',
        'timestamp': '1496947381.18',
        'md5': '0F9AA55DA3BDE84B35656AD8911A22E1',
        'type': 'binarystore.file.added',
        'file_path': '/tmp/5DA/AD8/0F9AA55DA3BDE84B35656AD8911A22E1.zip',
        'size': '21504'
    }
    expected = {
        'publishers': ['stream_alert.shared.publisher.DefaultPublisher'],
        'source_entity': 'corp-prefix.prod.cb.region',
        'outputs': ['slack:unit_test_channel'],
        'cluster': '',
        'rule_description': 'Info about this rule and what actions to take',
        'log_type': 'json',
        'rule_name': 'cb_binarystore_file_added',
        'source_service': 's3',
        'created': '2019-01-01T00:00:00.000000Z',
        'log_source': 'carbonblack:binarystore.file.added',
        'id': '79192344-4a6d-4850-8d06-9c3fef1060a4',
        'record': expected_record,
        'context': {'context': 'value'},
        'staged': False,
    }

    assert_equal(publication, expected)
def test_as_custom_details_ignores_custom_fields():
    """Publishers - PagerDuty - as_custom_details - Ignore Magic Keys"""
    alert = get_alert(context={'context': 'value'})
    alert.created = datetime(2019, 1, 1)
    alert.publishers = {
        'pagerduty': [
            'stream_alert.shared.publisher.DefaultPublisher',
            'publishers.community.pagerduty.pagerduty_layout.ShortenTitle',
            'publishers.community.pagerduty.pagerduty_layout.as_custom_details',
        ]
    }
    dispatcher = MagicMock(spec=OutputDispatcher)
    dispatcher.__service__ = 'pagerduty'

    publication = compose_alert(alert, dispatcher, 'unit_test_channel')

    # We don't care about the entire payload; spot-check a few top-level keys
    # we know are supposed to be here
    for known_key in ('source_entity', 'outputs', 'log_source'):
        assert_true(publication[known_key])

    # The title and details magic keys must exist at the top level...
    assert_true(publication['@pagerduty.description'])
    assert_true(publication['@pagerduty.details'])

    # ...but magic keys must not leak into the details payload
    details = publication['@pagerduty.details']
    assert_false('@pagerduty.description' in details)
    assert_false('@pagerduty-v2.summary' in details)
def _dispatch(self, alert, descriptor):
    """Send alert to Komand

    Publishing:
        By default this output sends the current publication to Komand.
        There is no "magic" field to "override" it: Simply publish what you
        want to send!

    Args:
        alert (Alert): Alert instance which triggered a rule
        descriptor (str): Output descriptor

    Returns:
        bool: True if alert was sent successfully, False otherwise
    """
    creds = self._load_creds(descriptor)
    if not creds:
        return False

    LOGGER.debug('sending alert to Komand')

    publication = compose_alert(alert, self, descriptor)
    response = self._post_request(
        creds['url'],
        {'data': publication},
        {'Authorization': creds['komand_auth_token']},
        False
    )
    return self._check_http_response(response)
def _dispatch(self, alert, descriptor):
    """Send alert to Github

    Publishing:
        This output provides a default issue title and a very basic issue body
        containing the alert record. To override:

        - @github.title (str): Override the Issue's title
        - @github.body (str): Overrides the default github issue body.
                Remember: this string is in Github's syntax, so it supports
                markdown and respects linebreaks characters (e.g. \n).

    Args:
        alert (Alert): Alert instance which triggered a rule
        descriptor (str): Output descriptor

    Returns:
        bool: True if alert was sent successfully, False otherwise
    """
    credentials = self._load_creds(descriptor)
    if not credentials:
        return False

    # Bug fix: the format string was a redacted placeholder ("******"), which
    # ignored both arguments and produced invalid Basic-auth credentials.
    # HTTP Basic auth requires base64("username:token") per RFC 7617.
    username_password = "{}:{}".format(credentials['username'],
                                       credentials['access_token'])
    encoded_credentials = base64.b64encode(username_password.encode()).decode()
    headers = {'Authorization': "Basic {}".format(encoded_credentials)}
    url = '{}/repos/{}/issues'.format(credentials['api'],
                                      credentials['repository'])

    publication = compose_alert(alert, self, descriptor)

    # Default presentation to the output
    default_title = "StreamAlert: {}".format(alert.rule_name)
    default_body = "### Description\n{}\n\n### Event data\n\n```\n{}\n```".format(
        alert.rule_description,
        json.dumps(alert.record, indent=2, sort_keys=True)
    )

    # Override presentation defaults
    issue_title = publication.get('@github.title', default_title)
    issue_body = publication.get('@github.body', default_body)

    # Github Issue to be created
    issue = {
        'title': issue_title,
        'body': issue_body,
        'labels': credentials['labels'].split(',')
    }

    LOGGER.debug('sending alert to Github repository %s', credentials['repository'])

    try:
        self._post_request_retry(url, issue, headers)
    except OutputRequestFailure:
        return False

    return True
def test_remove_fields(self):
    """AlertPublisher - enumerate_fields - enforce alphabetical order"""
    publication = compose_alert(self._alert, self._output, 'test')

    expected = {
        'staged': False,
        'source_entity': 'corp-prefix.prod.cb.region',
        'rule_name': 'cb_binarystore_file_added',
        'created': '2019-01-01T00:00:00.000000Z',
        'log_source': 'carbonblack:binarystore.file.added',
        'source_service': 's3',
        'id': '79192344-4a6d-4850-8d06-9c3fef1060a4',
        'rule_description': 'Info about this rule and what actions to take',
        'record': {
            'compressed_size': '9982',
            'timestamp': '1496947381.18',
            'node_id': '1',
            'cb_server': 'cbserver',
            'size': '21504',
            'file_path': '/tmp/5DA/AD8/0F9AA55DA3BDE84B35656AD8911A22E1.zip',
            'md5': '0F9AA55DA3BDE84B35656AD8911A22E1',
        },
    }

    assert_equal(publication, expected)
def _dispatch(self, alert, descriptor):
    """Send a new Incident to Demisto

    Publishing:
        Demisto offers a suite of default incident values. You can override
        any of the following:

        - @demisto.incident_type (str):
        - @demisto.severity (str): Controls the severity of the incident. Any
                of the following: 'info', 'informational', 'low', 'med',
                'medium', 'high', 'critical', 'unknown'
        - @demisto.owner (str): Controls which name shows up under the owner.
                This can be any name, even of users that are not registered on
                Demisto. Incidents can be filtered by name.
        - @demisto.details (str): A string that briefly describes the nature
                of the incident and how to respond.
        - @demisto.incident_name (str): Incident name shows up as the title
                of the Incident.
        - @demisto.label_data (dict): By default, this output sends the entire
                publication into the Demisto labels section, where the label
                names are the keys of the publication and the label values are
                the values of the publication. For deeply nested dictionary
                publications, the label names become the full path of all nest
                dictionary keys, concatenated with periods ("."). By providing
                this override field, you can send a different dict of data to
                Demisto, other than the entire publication. Just like in the
                default case, if this provided dict is deeply nested, the keys
                will be flattened.

    Args:
        alert (Alert): Alert instance which triggered a rule
        descriptor (str): Output descriptor

    Returns:
        bool: True if alert was sent successfully, False otherwise
    """
    creds = self._load_creds(descriptor)
    if not creds:
        return False

    publication = compose_alert(alert, self, descriptor)
    request = DemistoRequestAssembler.assemble(alert, publication)
    integration = DemistoApiIntegration(creds, self)

    LOGGER.debug('Sending alert to Demisto: %s', creds['url'])
    try:
        integration.send(request)
    except OutputRequestFailure as e:
        LOGGER.exception('Failed to create Demisto incident: %s.', e)
        return False

    return True
def _dispatch(self, alert, descriptor):
    """Send alert to Cloudwatch Logger for Lambda

    Args:
        alert (Alert): Alert instance which triggered a rule
        descriptor (str): Output descriptor
    """
    publication = compose_alert(alert, self, descriptor)
    serialized = json.dumps(publication, indent=2)
    LOGGER.info('New Alert:\n%s', serialized)

    return True
def test_max_attachments(self, log_mock):
    """SlackOutput - Max Attachment Reached"""
    alert = get_alert()
    # An oversized record forces the message to be split past the part limit
    alert.record = {'info': 'test' * 20000}
    publication = compose_alert(alert, MagicMock(spec=SlackOutput), 'asdf')

    SlackOutput._format_default_attachments(alert, publication, 'foo')

    log_mock.assert_called_with(
        '%s: %d-part message truncated to %d parts',
        publication, 21, 20)
def _dispatch(self, alert, descriptor):
    """Send alert to a Kinesis Firehose Delivery Stream

    Publishing:
        By default this output sends the current publication in JSON to Kinesis.
        There is no "magic" field to "override" it: Simply publish what you want
        to send!

    Args:
        alert (Alert): Alert instance which triggered a rule
        descriptor (str): Output descriptor

    Returns:
        bool: True if alert was sent successfully, False otherwise
    """
    @backoff.on_exception(backoff.fibo,
                          ClientError,
                          max_tries=self.MAX_BACKOFF_ATTEMPTS,
                          jitter=backoff.full_jitter,
                          on_backoff=backoff_handler(),
                          on_success=success_handler(),
                          on_giveup=giveup_handler())
    def _firehose_request_wrapper(json_alert, delivery_stream):
        """Make the PutRecord request to Kinesis Firehose with backoff

        Args:
            json_alert (str): The JSON dumped alert body
            delivery_stream (str): The Firehose Delivery Stream to send to

        Returns:
            dict: Firehose response in the format below
                {'RecordId': 'string'}
        """
        # Bug fix: the response was previously discarded, so this wrapper never
        # returned the dict its docstring promises
        return self.__aws_client__.put_record(
            DeliveryStreamName=delivery_stream,
            Record={'Data': json_alert}
        )

    # Lazily create the client once and reuse it across dispatches
    if self.__aws_client__ is None:
        self.__aws_client__ = boto3.client('firehose', region_name=self.region)

    publication = compose_alert(alert, self, descriptor)
    json_alert = json.dumps(publication, separators=(',', ':')) + '\n'

    # Firehose caps individual record size; refuse to send oversized alerts
    if len(json_alert) > self.MAX_RECORD_SIZE:
        LOGGER.error('Alert too large to send to Firehose: \n%s...', json_alert[0:1000])
        return False

    delivery_stream = self.config[self.__service__][descriptor]
    LOGGER.info('Sending %s to aws-firehose:%s', alert, delivery_stream)

    _firehose_request_wrapper(json_alert, delivery_stream)
    LOGGER.info('%s successfully sent to aws-firehose:%s', alert, delivery_stream)

    return True
def test_remove_fields(self):
    """AlertPublisher - populate_fields"""
    publication = compose_alert(self._alert, self._output, 'test')

    expected = {
        'compressed_size': ['9982'],
        'oof': [],
        'id': ['79192344-4a6d-4850-8d06-9c3fef1060a4'],
        'multi_field': [1, 2],
    }
    assert_equal(publication, expected)
def test_format_message_default_rule_description(self):
    """SlackOutput - Format Message, Default Rule Description"""
    alert = get_random_alert(10, 'test_empty_rule_description', True)
    publication = compose_alert(alert, MagicMock(spec=SlackOutput), 'asdf')

    loaded_message = SlackOutput._format_message(alert, publication)

    # A missing rule description falls back to the default pretext
    expected_pretext = '*Rule Description:*\nNo rule description provided\n'
    assert_equal(loaded_message['attachments'][0]['pretext'], expected_pretext)
def test_format_message_custom_attachment(self):
    """SlackOutput - Format Message, Custom Attachment"""
    alert = get_random_alert(10, 'test_empty_rule_description', True)
    publication = compose_alert(alert, MagicMock(spec=SlackOutput), 'asdf')
    # A custom attachment list replaces the default attachments wholesale
    publication['@slack.attachments'] = [{'text': 'aasdfkjadfj'}]

    loaded_message = SlackOutput._format_message(alert, publication)

    attachments = loaded_message['attachments']
    assert_equal(len(attachments), 1)
    assert_equal(attachments[0]['text'], 'aasdfkjadfj')
def test_format_message_single(self):
    """SlackOutput - Format Single Message - Slack"""
    alert = get_random_alert(25, 'test_rule_single')
    publication = compose_alert(alert, MagicMock(spec=SlackOutput), 'asdf')

    loaded_message = SlackOutput._format_message(alert, publication)

    # A small alert should fit into a single attachment
    assert_set_equal(set(loaded_message.keys()), {'text', 'mrkdwn', 'attachments'})
    assert_equal(loaded_message['text'],
                 '*StreamAlert Rule Triggered: test_rule_single*')
    assert_equal(len(loaded_message['attachments']), 1)
def test_as_custom_details_default():
    """Publishers - PagerDuty - as_custom_details - Default"""
    publisher_paths = [
        'stream_alert.shared.publisher.DefaultPublisher',
        'publishers.community.pagerduty.pagerduty_layout.as_custom_fields'
    ]
    alert = get_alert(context={'context': 'value'})
    alert.created = datetime(2019, 1, 1)
    alert.publishers = {'pagerduty': publisher_paths}
    dispatcher = MagicMock(spec=OutputDispatcher)
    dispatcher.__service__ = 'pagerduty'

    publication = compose_alert(alert, dispatcher, 'unit_test_channel')

    expected = {
        'publishers': {'pagerduty': publisher_paths},
        'source_entity': 'corp-prefix.prod.cb.region',
        'outputs': ['slack:unit_test_channel'],
        'cluster': '',
        'rule_description': 'Info about this rule and what actions to take',
        'log_type': 'json',
        'rule_name': 'cb_binarystore_file_added',
        'source_service': 's3',
        'created': '2019-01-01T00:00:00.000000Z',
        'log_source': 'carbonblack:binarystore.file.added',
        'id': '79192344-4a6d-4850-8d06-9c3fef1060a4',
        'record': {
            'compressed_size': '9982',
            'node_id': '1',
            'cb_server': 'cbserver',
            'timestamp': '1496947381.18',
            'md5': '0F9AA55DA3BDE84B35656AD8911A22E1',
            'type': 'binarystore.file.added',
            'file_path': '/tmp/5DA/AD8/0F9AA55DA3BDE84B35656AD8911A22E1.zip',
            'size': '21504'
        },
        'context': {'context': 'value'},
        'staged': False,
    }
    assert_equal(publication, expected)
def test_format_message_custom_text(self):
    """SlackOutput - Format Single Message - Custom Text"""
    alert = get_random_alert(25, 'test_rule_single')
    publication = compose_alert(alert, MagicMock(spec=SlackOutput), 'asdf')
    # '@slack.text' overrides the default intro line
    publication['@slack.text'] = 'Lorem ipsum foobar'

    loaded_message = SlackOutput._format_message(alert, publication)

    assert_set_equal(set(loaded_message.keys()), {'text', 'mrkdwn', 'attachments'})
    assert_equal(loaded_message['text'], 'Lorem ipsum foobar')
    assert_equal(len(loaded_message['attachments']), 1)
def test_publish(self):
    """AlertPublisher - StringifyArrays - publish"""
    publication = compose_alert(self._alert, self._output, 'test')

    # Homogeneous string arrays are flattened to newline-joined strings, even
    # when nested; mixed-type arrays are left untouched
    expected_context = {
        'not_array': ['a', {'b': 'c'}, 'd'],
        'array': 'a\nb\nc',
        'nest': {'deep_array': 'a\nb\nc'},
    }
    assert_equal(publication['context'], expected_context)
def test_format_message_multiple(self):
    """SlackOutput - Format Multi-Message"""
    alert = get_random_alert(30, 'test_rule_multi-part')
    publication = compose_alert(alert, MagicMock(spec=SlackOutput), 'asdf')

    loaded_message = SlackOutput._format_message(alert, publication)

    # A large alert spills over into a second attachment
    assert_set_equal(set(loaded_message.keys()), {'text', 'mrkdwn', 'attachments'})
    assert_equal(loaded_message['text'],
                 '*StreamAlert Rule Triggered: test_rule_multi-part*')
    assert_equal(len(loaded_message['attachments']), 2)
    second_attachment_line = loaded_message['attachments'][1]['text'].split('\n')[3]
    assert_equal(second_attachment_line[1:7], '000028')
def test_default_publisher(self):
    """AlertPublisher - add_record - Positive Case"""
    publication = compose_alert(self._alert, self._output, 'test')

    # add_record publishes only the raw alert record
    expected = {
        'record': {
            'compressed_size': '9982',
            'node_id': '1',
            'cb_server': 'cbserver',
            'timestamp': '1496947381.18',
            'md5': '0F9AA55DA3BDE84B35656AD8911A22E1',
            'type': 'binarystore.file.added',
            'file_path': '/tmp/5DA/AD8/0F9AA55DA3BDE84B35656AD8911A22E1.zip',
            'size': '21504'
        },
    }
    assert_equal(publication, expected)
def _dispatch(self, alert, descriptor):
    """Send alert to Phantom

    Publishing:
        By default this output sends the current publication in as JSON to
        Phantom. There is no "magic" field to "override" it: Simply publish
        what you want to send!

    Args:
        alert (Alert): Alert instance which triggered a rule
        descriptor (str): Output descriptor

    Returns:
        bool: True if alert was sent successfully, False otherwise
    """
    creds = self._load_creds(descriptor)
    if not creds:
        return False

    headers = {"ph-auth-token": creds['ph_auth_token']}
    container_id = self._setup_container(
        alert.rule_name, alert.rule_description, creds['url'], headers)

    LOGGER.debug('sending alert to Phantom container with id %s', container_id)

    if not container_id:
        return False

    publication = compose_alert(alert, self, descriptor)
    artifact = {
        'cef': alert.record,
        'container_id': container_id,
        'data': publication,
        'name': 'Phantom Artifact',
        'label': 'Alert',
    }
    artifact_url = os.path.join(creds['url'], self.ARTIFACT_ENDPOINT)

    try:
        self._post_request_retry(artifact_url, artifact, headers, False)
    except OutputRequestFailure:
        return False

    return True
def test_format_message_custom_attachment_limit(self, log_warning):
    """SlackOutput - Format Message, Custom Attachment is Truncated"""
    alert = get_random_alert(10, 'test_empty_rule_description', True)
    publication = compose_alert(alert, MagicMock(spec=SlackOutput), 'asdf')

    long_message = 'a' * (SlackOutput.MAX_MESSAGE_SIZE + 1)
    publication['@slack.attachments'] = [{'text': long_message}]

    loaded_message = SlackOutput._format_message(alert, publication)

    # Truncated to 3999 instead of MAX_MESSAGE_SIZE (bug in elide)
    assert_equal(len(loaded_message['attachments'][0]['text']), 3999)
    log_warning.assert_called_with(
        'Custom attachment was truncated to length %d. Full message: %s',
        SlackOutput.MAX_MESSAGE_SIZE, long_message)
def _dispatch(self, alert, descriptor):
    """Send alert text to Slack

    Publishing:
        By default the slack output sends a slack message comprising some
        default intro text and a series of attachments containing:
        * alert description
        * alert record, chunked into pieces if it's too long

        To override this behavior use the following fields:

        - @slack.text (str):
                Replaces the text that appears as the first line in the slack
                message.
        - @slack.attachments (list[dict]):
                A list of individual slack attachments to include in the
                message. Each element of this list is a dict that must adhere
                to the syntax of attachments on Slack's API.
                @see cls._standardize_custom_attachments() for some insight
                into how individual attachments can be written.

    Args:
        alert (Alert): Alert instance which triggered a rule
        descriptor (str): Output descriptor

    Returns:
        bool: True if alert was sent successfully, False otherwise
    """
    creds = self._load_creds(descriptor)
    if not creds:
        return False

    publication = compose_alert(alert, self, descriptor)
    slack_message = self._format_message(alert, publication)

    try:
        self._post_request_retry(creds['url'], slack_message)
    except OutputRequestFailure:
        return False

    return True
def test_format_message_custom_attachment_multi_limit(self, log_warning):
    """SlackOutput - Format Message, Too many Custom Attachments is truncated"""
    alert = get_random_alert(10, 'test_empty_rule_description', True)
    publication = compose_alert(alert, MagicMock(spec=SlackOutput), 'asdf')

    # One more attachment than the cap allows
    over_limit = SlackOutput.MAX_ATTACHMENTS + 1
    publication['@slack.attachments'] = [{'text': 'yay'} for _ in range(over_limit)]

    loaded_message = SlackOutput._format_message(alert, publication)

    assert_equal(len(loaded_message['attachments']), SlackOutput.MAX_ATTACHMENTS)
    assert_equal(loaded_message['attachments'][19]['text'], 'yay')
    log_warning.assert_called_with(
        'Message with %d custom attachments was truncated to %d attachments',
        over_limit, SlackOutput.MAX_ATTACHMENTS)
def test_assemble():
    """DemistoRequestAssembler - assemble"""
    alert = get_alert(context=SAMPLE_CONTEXT)
    alert.created = datetime(2019, 1, 1)
    publication = compose_alert(alert, MagicMock(spec=DemistoOutput), 'asdf')

    request = DemistoRequestAssembler.assemble(alert, publication)

    # Verify each assembled field against the known sample alert
    assert_equal(request.incident_name, 'cb_binarystore_file_added')
    assert_equal(request.incident_type, 'Unclassified')
    assert_equal(request.severity, 0)
    assert_equal(request.owner, 'StreamAlert')
    assert_equal(request.labels, EXPECTED_LABELS_FOR_SAMPLE_ALERT)
    assert_equal(request.details, 'Info about this rule and what actions to take')
    assert_equal(request.custom_fields, {})
    assert_equal(request.create_investigation, True)
def test_v2_low_urgency():
    """Publishers - PagerDuty - v2_low_urgency"""
    alert = get_alert(context={'context': 'value'})
    alert.created = datetime(2019, 1, 1)
    alert.publishers = {
        'pagerduty': ['publishers.community.pagerduty.pagerduty_layout.v2_low_urgency']
    }
    dispatcher = MagicMock(spec=OutputDispatcher)
    dispatcher.__service__ = 'pagerduty'

    publication = compose_alert(alert, dispatcher, 'unit_test_channel')

    # Low-urgency maps to 'warning' severity in the v2 API
    assert_equal(publication, {
        '@pagerduty-incident.urgency': 'low',
        '@pagerduty-v2.severity': 'warning',
    })
def test_shorten_title():
    """Publishers - PagerDuty - ShortenTitle"""
    alert = get_alert(context={'context': 'value'})
    alert.created = datetime(2019, 1, 1)
    alert.publishers = {
        'pagerduty': 'publishers.community.pagerduty.pagerduty_layout.ShortenTitle',
    }
    dispatcher = MagicMock(spec=OutputDispatcher)
    dispatcher.__service__ = 'pagerduty'

    publication = compose_alert(alert, dispatcher, 'unit_test_channel')

    # All three title-bearing magic keys collapse to just the rule name
    assert_equal(publication, {
        '@pagerduty.description': 'cb_binarystore_file_added',
        '@pagerduty-v2.summary': 'cb_binarystore_file_added',
        '@pagerduty-incident.incident_title': 'cb_binarystore_file_added',
    })