Beispiel #1
0
 def test_can_merge_merge_keys_absent(self):
     """Alert Class - Can Merge - True if Merge Keys Do Not Exist in Either Record"""
     # Neither record contains 'key'; the absence is symmetric, so the
     # alerts are still considered mergeable.
     window = timedelta(minutes=10)
     first = Alert('', {}, set(), merge_by_keys=['key'], merge_window=window)
     second = Alert('', {}, set(), merge_by_keys=['key'], merge_window=window)
     for left, right in ((first, second), (second, first)):
         assert_true(left.can_merge(right))
Beispiel #2
0
 def test_can_merge_different_values(self):
     """Alert Class - Can Merge - False if Merge Key has Different Values"""
     # Both records have 'key', but the values conflict (True vs False).
     window = timedelta(minutes=10)
     truthy = Alert('', {'key': True}, set(),
                    merge_by_keys=['key'], merge_window=window)
     falsy = Alert('', {'key': False}, set(),
                   merge_by_keys=['key'], merge_window=window)
     for left, right in ((truthy, falsy), (falsy, truthy)):
         assert_false(left.can_merge(right))
Beispiel #3
0
 def test_can_merge_key_not_common(self):
     """Alert Class - Can Merge - False if Merge Key Not Present in Both Records"""
     # Only one of the two records carries the configured merge key.
     window = timedelta(minutes=10)
     with_key = Alert('', {'key': True}, set(),
                      merge_by_keys=['key'], merge_window=window)
     without_key = Alert('', {'other': True}, set(),
                         merge_by_keys=['key'], merge_window=window)
     for left, right in ((with_key, without_key), (without_key, with_key)):
         assert_false(left.can_merge(right))
Beispiel #4
0
 def test_can_merge_different_merge_keys(self):
     """Alert Class - Can Merge - False if Different Merge Keys Defined"""
     # Identical records, but each alert was configured with a different
     # merge key, which rules out merging.
     window = timedelta(minutes=10)
     by_key = Alert('', {'key': True}, set(),
                    merge_by_keys=['key'], merge_window=window)
     by_other = Alert('', {'key': True}, set(),
                      merge_by_keys=['other'], merge_window=window)
     for left, right in ((by_key, by_other), (by_other, by_key)):
         assert_false(left.can_merge(right))
Beispiel #5
0
 def test_add_not_mergeable(self):
     """Alert Merger - Merge Group - Did Not Add Alert to Group"""
     window = timedelta(minutes=5)
     seed = Alert('', {'key': True}, set(),
                  merge_by_keys=['key'], merge_window=window)
     incompatible = Alert('', {'key': True}, set(),
                          merge_by_keys=['other'], merge_window=window)
     group = main.AlertMergeGroup(seed)
     # Mismatched merge keys: the second alert is rejected and the group
     # still contains only the seed alert.
     assert_false(group.add(incompatible))
     assert_equal([seed], group.alerts)
Beispiel #6
0
 def test_can_merge_too_far_apart(self):
     """Alert Class - Can Merge - False if Outside Merge Window"""
     # Created 11 minutes apart - one minute beyond the 10-minute window.
     earlier = Alert('', {'key': True}, set(),
                     created=datetime(year=2000, month=1, day=1, minute=0),
                     merge_by_keys=['key'],
                     merge_window=timedelta(minutes=10))
     later = Alert('', {'key': True}, set(),
                   created=datetime(year=2000, month=1, day=1, minute=11),
                   merge_by_keys=['key'],
                   merge_window=timedelta(minutes=10))
     for left, right in ((earlier, later), (later, earlier)):
         assert_false(left.can_merge(right))
Beispiel #7
0
    def _rule_analysis(self, payload, rule):
        """Analyze a record with the rule, adding a new alert if applicable

        Args:
            payload (dict): Representation of event to perform rule analysis against
            rule (rule.Rule): Attributes for the rule which triggered the alert

        Returns:
            Alert: Newly created alert when the rule triggered on the record,
                otherwise None (implicit) when the rule did not match
        """
        # Run the rule, using the statistic tracker in case stats are being tracked
        rule_result = self._rule_stat_tracker.run_rule(rule, payload['record'])
        if not rule_result:
            return

        # Define the outputs
        outputs = self._configure_outputs(payload['record'], rule)

        # Build the alert from the rule's merge/publisher configuration plus
        # the payload's source metadata (cluster, log schema, service, etc.)
        alert = Alert(
            rule.name, payload['record'], outputs,
            cluster=payload['cluster'],
            context=rule.context,
            log_source=payload['log_schema_type'],
            log_type=payload['data_type'],
            merge_by_keys=rule.merge_by_keys,
            merge_window=timedelta(minutes=rule.merge_window_mins),
            publishers=self._configure_publishers(rule, outputs),
            rule_description=rule.description,
            source_entity=payload['resource'],
            source_service=payload['service'],
            staged=rule.is_staged(self._rule_table)
        )

        LOGGER.info('Rule \'%s\' triggered alert \'%s\' on log type \'%s\' from resource \'%s\' '
                    'in service \'%s\'', rule.name, alert.alert_id, payload['log_schema_type'],
                    payload['resource'], payload['service'])

        return alert
Beispiel #8
0
def get_random_alert(key_count, rule_name, omit_rule_desc=False):
    """Build an Alert whose record holds `key_count` random key/value pairs.

    Each key is 6 characters and each value is 148 hex characters, so a
    formatted line consumes 160 characters (including newline and the
    asterisks used for bold). For example:
    '*000001:* 6D829150B0154BF9BAC733FD25C61FA3D8CD3868AC2A92F19EEE119B
    9CE8D6094966AA7592CE371002F1F7D82617673FCC9A9DB2A8F432AA791D74AB80BBCAD9\n'

    Therefore, 25*160 = 4000 character message size (exactly the 4000 limit)
    Anything over 4000 characters will result in multi-part slack messages:
    55*160 = 8800 & 8800/4000 = ceil(2.2) = 3 messages needed
    """
    # This default value is set in the rules engine
    rule_description = ('No rule description provided'
                        if omit_rule_desc else 'rule test description')

    record = {}
    for key in range(key_count):
        record['{:06}'.format(key)] = '{:0148X}'.format(random.randrange(16**128))

    return Alert(rule_name, record, {'slack:unit_test_channel'},
                 rule_description=rule_description)
Beispiel #9
0
def get_alert(context=None):
    """This function generates a sample alert for testing purposes

    Args:
        context (dict): Optional alert context
    """
    # Static CarbonBlack binarystore record used as the sample payload
    record = {
        'compressed_size': '9982',
        'timestamp': '1496947381.18',
        'node_id': '1',
        'cb_server': 'cbserver',
        'size': '21504',
        'type': 'binarystore.file.added',
        'file_path': '/tmp/5DA/AD8/0F9AA55DA3BDE84B35656AD8911A22E1.zip',
        'md5': '0F9AA55DA3BDE84B35656AD8911A22E1'
    }
    return Alert(
        'cb_binarystore_file_added',
        record,
        {'slack:unit_test_channel'},
        alert_id='79192344-4a6d-4850-8d06-9c3fef1060a4',
        context=context,
        log_source='carbonblack:binarystore.file.added',
        log_type='json',
        rule_description='Info about this rule and what actions to take',
        source_entity='corp-prefix.prod.cb.region',
        source_service='s3')
Beispiel #10
0
 def setup(self):
     """Alert Processor - Test Setup"""
     # pylint: disable=attribute-defined-outside-init
     self.processor = AlertProcessor()
     # Record carries the normalization key so downstream publishers see it
     record = {'abc': 123, Normalizer.NORMALIZATION_KEY: {}}
     self.alert = Alert('hello_world', record, {'slack:unit-test-channel'})
Beispiel #11
0
 def test_can_merge_true(self):
     """Alert Class - Can Merge - True Result"""
     # Created exactly 10 minutes apart - right at the edge of the window,
     # with matching 'key' values (extra keys in one record are fine).
     earlier = Alert('', {'key': True}, set(),
                     created=datetime(year=2000, month=1, day=1, minute=0),
                     merge_by_keys=['key'],
                     merge_window=timedelta(minutes=10))
     later = Alert('', {'key': True, 'other': True}, set(),
                   created=datetime(year=2000, month=1, day=1, minute=10),
                   merge_by_keys=['key'],
                   merge_window=timedelta(minutes=10))
     for left, right in ((earlier, later), (later, earlier)):
         assert_true(left.can_merge(right))
Beispiel #12
0
 def test_add_mergeable(self):
     """Alert Merger - Merge Group - Add Alert to Group"""
     alert = Alert('', {'key': True}, set(),
                   merge_by_keys=['key'],
                   merge_window=timedelta(minutes=5))
     group = main.AlertMergeGroup(alert)
     # An alert can always merge with itself, so adding the group's own
     # seed alert succeeds and it appears twice.
     assert_true(group.add(alert))
     assert_equal([alert, alert], group.alerts)
Beispiel #13
0
 def test_merge_groups_too_recent(self):
     """Alert Merger - Alert Collection - All Alerts Too Recent to Merge"""
     # A just-created alert must wait out its merge window before it can
     # be grouped, so no merge groups are produced.
     recent = Alert('', {'key': True}, set(),
                    merge_by_keys=['key'],
                    merge_window=timedelta(minutes=10))
     assert_equal([], main.AlertMerger._merge_groups([recent]))
Beispiel #14
0
    def test_merge_groups_single(self):
        """Alert Merger - Alert Collection - Single Merge Group"""
        # Two alerts created at the same instant with matching 'key' values;
        # they collapse into one merge group containing both.
        creation_time = datetime(year=2000, month=1, day=1)
        window = timedelta(minutes=5)
        alerts = [
            Alert('', {'key': True}, set(),
                  created=creation_time,
                  merge_by_keys=['key'],
                  merge_window=window),
            Alert('', {'key': True, 'other': True}, set(),
                  created=creation_time,
                  merge_by_keys=['key'],
                  merge_window=window),
        ]

        groups = main.AlertMerger._merge_groups(alerts)
        assert_equal(1, len(groups))
        assert_equal(alerts, groups[0].alerts)
Beispiel #15
0
def generate_alerts_table_schema():
    """Generate the schema for alerts table in terraform by using a fake alert

    Returns:
        athena_schema (dict): Equivalent Athena schema used for generating create table statement
    """
    # A placeholder alert exposes every output key so the schema is complete
    placeholder = Alert('temp_rule_name', {}, {})
    athena_schema = logs_schema_to_athena_schema(
        record_to_schema(placeholder.output_dict()), False)
    return format_schema_tf(athena_schema)
Beispiel #16
0
 def test_dynamo_record(self):
     """Alert Class - Dynamo Record"""
     # Dynamo rejects empty strings and empty sets, so every empty-ish
     # attribute must be scrubbed from the generated record.
     empty_strings = dict(cluster='',
                          created='',
                          log_source='',
                          log_type='',
                          rule_description='',
                          source_entity='',
                          source_service='')
     alert = Alert('test_rule', {}, {'aws-sns:test-output'},
                   outputs_sent=set(), **empty_strings)
     record_values = list(alert.dynamo_record().values())
     assert_not_in('', record_values)
     assert_not_in(set(), record_values)
Beispiel #17
0
    def test_dispatch(self, mock_logger):
        """Alert Merger - Dispatch to Alert Processor Lambda

        Args:
            mock_logger (MagicMock): Patched module logger injected by the
                test decorator; used to verify the dispatch log calls
        """
        # Stub out the Lambda client so dispatch() performs no real invocations
        self.merger.lambda_client = MagicMock()

        self.merger.table.add_alerts([
            # An alert without any merge criteria
            Alert('no_merging', {}, {'output'}),

            # 2 Alerts which will be merged (and will be be too large to send the entire record)
            Alert('merge_me', {'key': True}, {'output'},
                  created=datetime(year=2000, month=1, day=1),
                  merge_by_keys=['key'],
                  merge_window=timedelta(minutes=5)),
            Alert('merge_me', {
                'key': True,
                'other': 'abc' * 50
            }, {'output'},
                  created=datetime(year=2000, month=1, day=1, minute=1),
                  merge_by_keys=['key'],
                  merge_window=timedelta(minutes=5)),

            # Alert which has already sent successfully (will be deleted)
            Alert('already_sent', {}, {'output'}, outputs_sent={'output'})
        ])

        self.merger.dispatch()
        # Expect one merge log (2 'merge_me' alerts combined) and two dispatch
        # logs: the standalone alert plus the newly merged alert.
        # NOTE (Bobby): The following assertion was modified during the py2 -> py3
        # conversion to disregard order of calls.
        mock_logger.assert_has_calls([
            call.info('Merged %d alerts into a new alert with ID %s', 2, ANY),
            call.info('Dispatching %s to %s (attempt %d)', ANY,
                      _ALERT_PROCESSOR, 1),
            call.info('Dispatching %s to %s (attempt %d)', ANY,
                      _ALERT_PROCESSOR, 1)
        ],
                                     any_order=True)
Beispiel #18
0
    def test_merge_groups_limit_reached(self):
        """Alert Merger - Alert Collection - Max Alerts Per Group"""
        # Five identical alerts; with a max group size of 2 they should be
        # partitioned into groups of 2, 2, and 1.
        alerts = [
            Alert('same_rule_name', {'key': 'A'}, set(),
                  created=datetime(year=2000, month=1, day=1),
                  merge_by_keys=['key'],
                  merge_window=timedelta(minutes=5))
        ] * 5

        groups = main.AlertMerger._merge_groups(alerts)
        assert_equal(3, len(groups))
        expected_slices = (alerts[0:2], alerts[2:4], alerts[4:])
        for group, expected in zip(groups, expected_slices):
            assert_equal(expected, group.alerts)
Beispiel #19
0
    def test_alert_generator(self, mock_logger):
        """Alert Merger - Sorted Alerts - Invalid Alerts are Logged"""
        valid_record = Alert('test_rule', {}, {'output'}).dynamo_record()
        bogus_record = {'Nonsense': 'Record'}

        with patch.object(self.merger.table,
                          'get_alert_records',
                          return_value=[valid_record, bogus_record]):
            result = list(self.merger._alert_generator('test_rule'))
            # Only the valid record yields an Alert instance
            assert_equal(1, len(result))
            assert_equal(valid_record['AlertID'], result[0].alert_id)
            # The invalid record is logged as an exception instead of raised
            mock_logger.exception.assert_called_once_with(
                'Invalid alert record %s', bogus_record)
Beispiel #20
0
 def _customized_alert():
     """Return an Alert with every optional attribute explicitly populated."""
     outputs = {
         'aws-firehose:alerts', 'aws-sns:test-output', 'aws-s3:other-output'
     }
     # Note: created and dispatched intentionally use separate utcnow() calls
     return Alert('test_rule', {'abc': 123}, outputs,
                  alert_id='abc-123',
                  attempts=1,
                  cluster='',
                  context={'rule': 'context'},
                  created=datetime.utcnow(),
                  dispatched=datetime.utcnow(),
                  log_source='source',
                  log_type='csv',
                  merge_by_keys=['abc'],
                  merge_window=timedelta(minutes=5),
                  outputs_sent={'aws-sns:test-output'},
                  rule_description='A Test Rule',
                  source_entity='entity',
                  source_service='s3',
                  staged=True)
Beispiel #21
0
def create_table(table, bucket, config, schema_override=None):
    """Create a 'streamalert' Athena table

    Args:
        table (str): The name of the table being rebuilt
        bucket (str): The s3 bucket to be used as the location for Athena data
        config (CLIConfig): Loaded StreamAlert config
        schema_override (set): An optional set of key=value pairs to be used for
            overriding the configured column_name=value_type.

    Returns:
        bool: False if errors occurred, True otherwise
    """
    enabled_logs = FirehoseClient.load_enabled_log_sources(
        config['global']['infrastructure']['firehose'], config['logs'])

    # Convert special characters in schema name to underscores
    sanitized_table_name = FirehoseClient.firehose_log_name(table)

    # Check that the log type is enabled via Firehose
    if sanitized_table_name != 'alerts' and sanitized_table_name not in enabled_logs:
        LOGGER.error(
            'Table name %s missing from configuration or '
            'is not enabled.', sanitized_table_name)
        return False

    athena_client = get_athena_client(config)

    config_data_bucket = firehose_data_bucket(config)
    if not config_data_bucket:
        LOGGER.error('The \'firehose\' module is not enabled in global.json')
        return False

    # Check if the table exists; creation is not idempotent, so bail out early
    if athena_client.check_table_exists(sanitized_table_name):
        LOGGER.info('The \'%s\' table already exists.', sanitized_table_name)
        return False

    if table == 'alerts':
        # get a fake alert so we can get the keys needed and their types
        alert = Alert('temp_rule_name', {}, {})
        output = alert.output_dict()
        schema = record_to_schema(output)
        athena_schema = helpers.logs_schema_to_athena_schema(schema)

        # Use the bucket if supplied, otherwise use the default alerts bucket
        bucket = bucket or firehose_alerts_bucket(config)

        query = _construct_create_table_statement(schema=athena_schema,
                                                  table_name=table,
                                                  bucket=bucket)

    else:  # all other tables are log types

        # Use the bucket if supplied, otherwise use the default data bucket
        bucket = bucket or config_data_bucket

        # Table names use '_' where log sources use ':' (first occurrence only)
        log_info = config['logs'][table.replace('_', ':', 1)]

        schema = dict(log_info['schema'])
        sanitized_schema = FirehoseClient.sanitize_keys(schema)

        athena_schema = helpers.logs_schema_to_athena_schema(sanitized_schema)

        # Add envelope keys to Athena Schema
        configuration_options = log_info.get('configuration')
        if configuration_options:
            envelope_keys = configuration_options.get('envelope_keys')
            if envelope_keys:
                sanitized_envelope_key_schema = FirehoseClient.sanitize_keys(
                    envelope_keys)
                # Note: this key is wrapped in backticks to be Hive compliant
                athena_schema[
                    '`streamalert:envelope_keys`'] = helpers.logs_schema_to_athena_schema(
                        sanitized_envelope_key_schema)

        # Handle Schema overrides
        #   This is useful when an Athena schema needs to differ from the normal log schema
        if schema_override:
            for override in schema_override:
                column_name, column_type = override.split('=')
                # Columns are escaped to avoid Hive issues with special characters
                column_name = '`{}`'.format(column_name)
                if column_name in athena_schema:
                    athena_schema[column_name] = column_type
                    LOGGER.info('Applied schema override: %s:%s', column_name,
                                column_type)
                else:
                    LOGGER.error(
                        'Schema override column %s not found in Athena Schema, skipping',
                        column_name)

        query = _construct_create_table_statement(
            schema=athena_schema,
            table_name=sanitized_table_name,
            bucket=bucket)

    success = athena_client.run_query(query=query)
    if not success:
        LOGGER.error('The %s table could not be created', sanitized_table_name)
        return False

    # Update the CLI config
    if table != 'alerts' and bucket != config_data_bucket:
        # Only add buckets to the config if they are not one of the default/configured buckets
        # Ensure 'buckets' exists in the config (since it is not required)
        config['lambda']['athena_partition_refresh_config']['buckets'] = (
            config['lambda']['athena_partition_refresh_config'].get(
                'buckets', {}))
        if bucket not in config['lambda']['athena_partition_refresh_config'][
                'buckets']:
            config['lambda']['athena_partition_refresh_config']['buckets'][
                bucket] = 'data'
            config.write()

    LOGGER.info('The %s table was successfully created!', sanitized_table_name)

    return True
Beispiel #22
0
 def _basic_alert():
     """Return a minimal Alert with only a rule name, record, and outputs."""
     outputs = {'aws-firehose:alerts', 'aws-sns:test-output'}
     return Alert('test_rule', {'abc': 123}, outputs)
Beispiel #23
0
    def test_merge(self):
        """Alert Class - Merge - Create Merged Alert"""
        # Example based on a CarbonBlack log
        record1 = {
            'alliance_data_virustotal': [],
            'alliance_link_virustotal': '',
            'alliance_score_virustotal': 0,
            'cmdline': 'whoami',
            'comms_ip': '1.2.3.4',
            'hostname': 'my-computer-name',
            'path': '/usr/bin/whoami',
            'streamalert:ioc': {
                'hello': 'world'
            },
            'timestamp': 1234.5678,
            'username': '******'
        }
        alert1 = Alert('RuleName',
                       record1, {'aws-sns:topic'},
                       created=datetime(year=2000, month=1, day=1),
                       merge_by_keys=['hostname', 'username'],
                       merge_window=timedelta(minutes=5))

        # Second alert has slightly different record and different outputs
        record2 = copy.deepcopy(record1)
        record2['streamalert:ioc'] = {'goodbye': 'world'}
        record2['timestamp'] = 9999
        alert2 = Alert('RuleName',
                       record2, {'slack:channel'},
                       created=datetime(year=2000, month=1, day=2),
                       merge_by_keys=['hostname', 'username'],
                       merge_window=timedelta(minutes=5))

        merged = Alert.merge([alert1, alert2])
        assert_is_instance(merged, Alert)
        assert_equal({'slack:channel'},
                     merged.outputs)  # Most recent outputs were used

        # The merged record summarizes the group: alert count, first/last
        # creation times, the merge-key values, keys common to all alerts,
        # and the per-alert differing values keyed by creation timestamp.
        expected_record = {
            'AlertCount': 2,
            'AlertTimeFirst': '2000-01-01T00:00:00.000000Z',
            'AlertTimeLast': '2000-01-02T00:00:00.000000Z',
            'MergedBy': {
                'hostname': 'my-computer-name',
                'username': '******'
            },
            'OtherCommonKeys': {
                'alliance_data_virustotal': [],
                'alliance_link_virustotal': '',
                'alliance_score_virustotal': 0,
                'cmdline': 'whoami',
                'comms_ip': '1.2.3.4',
                'path': '/usr/bin/whoami',
            },
            'ValueDiffs': {
                '2000-01-01T00:00:00.000000Z': {
                    'streamalert:ioc': {
                        'hello': 'world'
                    },
                    'timestamp': 1234.5678
                },
                '2000-01-02T00:00:00.000000Z': {
                    'streamalert:ioc': {
                        'goodbye': 'world'
                    },
                    'timestamp': 9999
                }
            }
        }
        assert_equal(expected_record, merged.record)
Beispiel #24
0
    def test_merge_groups_complex(self):
        """Alert Merger - Alert Collection - Complex Merge Groups"""

        def _make(record, created=None, keys=('key',), minutes=5):
            """Build a 'same_rule_name' alert; `created` defaults to now."""
            kwargs = {
                'merge_by_keys': list(keys),
                'merge_window': timedelta(minutes=minutes),
            }
            if created is not None:
                kwargs['created'] = created
            return Alert('same_rule_name', record, set(), **kwargs)

        alerts = [
            # Merge group 1 - key 'A' minutes 0-5
            _make({'key': 'A'},
                  created=datetime(year=2000, month=1, day=1)),
            _make({'key': 'A'},
                  created=datetime(year=2000, month=1, day=1, minute=1)),

            # Merge group 2 - Key B minutes 0-5
            _make({'key': 'B'},
                  created=datetime(year=2000, month=1, day=1, minute=2)),
            _make({'key': 'B'},
                  created=datetime(year=2000, month=1, day=1, minute=2,
                                   second=30)),
            _make({'key': 'B'},
                  created=datetime(year=2000, month=1, day=1, minute=3)),

            # Merge group 3 - Different merge keys
            _make({'key': 'A', 'other': 'B'},
                  created=datetime(year=2000, month=1, day=1, minute=4),
                  keys=('key', 'other')),
            _make({'key': 'A', 'other': 'B'},
                  created=datetime(year=2000, month=1, day=1, minute=5),
                  keys=('key', 'other')),

            # Merge group 4 - key A minutes 50-55
            _make({'key': 'A'},
                  created=datetime(year=2000, month=1, day=1, minute=50)),

            # This alert (created now) is too recent to fit in any merge group.
            _make({'key': 'A'}, minutes=10)
        ]

        groups = main.AlertMerger._merge_groups(alerts)
        assert_equal(4, len(groups))
        assert_equal(alerts[0:2], groups[0].alerts)
        assert_equal(alerts[2:5], groups[1].alerts)
        assert_equal(alerts[5:7], groups[2].alerts)
        assert_equal([alerts[7]], groups[3].alerts)
Beispiel #25
0
    def test_merge_nested(self):
        """Alert Class - Merge - Merge with Nested Keys"""
        # 'Nested' lives inside 'FileInfo' here but is still usable as a merge key
        record1 = {
            'NumMatchedRules': 1,
            'FileInfo': {
                'Deleted': None,
                'Nested': [1, 2, 'three']
            },
            'MatchedRules': {
                'Rule1': 'MatchedStrings'
            }
        }
        alert1 = Alert('RuleName',
                       record1, {'slack:channel'},
                       created=datetime(year=2000, month=1, day=1),
                       merge_by_keys=['Nested'],
                       merge_window=timedelta(minutes=5))

        record2 = {
            'NumMatchedRules': 2,
            'FileInfo': {
                'Deleted': None,
                'Nested': [1, 2, 'three']
            },
            'MatchedRules': {
                'Rule1': 'MatchedStrings'
            }
        }
        alert2 = Alert('RuleName',
                       record2, {'slack:channel'},
                       created=datetime(year=2000, month=1, day=2),
                       merge_by_keys=['Nested'],
                       merge_window=timedelta(minutes=5))

        record3 = {
            'MatchedRules': {
                'Rule1': 'MatchedStrings'
            },
            'Nested': [1, 2,
                       'three']  # This is in a different place in the record
        }
        alert3 = Alert('RuleName',
                       record3, {'slack:channel'},
                       created=datetime(year=2000, month=1, day=3),
                       merge_by_keys=['Nested'],
                       merge_window=timedelta(minutes=5))

        merged = Alert.merge([alert1, alert2, alert3])

        # Merging matches 'Nested' by key name regardless of nesting depth;
        # per-alert leftovers (e.g. NumMatchedRules) land in ValueDiffs keyed
        # by creation timestamp, and alert3 contributes no diffs at all.
        expected_record = {
            'AlertCount': 3,
            'AlertTimeFirst': '2000-01-01T00:00:00.000000Z',
            'AlertTimeLast': '2000-01-03T00:00:00.000000Z',
            'MergedBy': {
                'Nested': [1, 2, 'three']
            },
            'OtherCommonKeys': {
                'MatchedRules': {
                    'Rule1': 'MatchedStrings'
                }
            },
            'ValueDiffs': {
                '2000-01-01T00:00:00.000000Z': {
                    'NumMatchedRules': 1,
                    'FileInfo': {
                        'Deleted': None
                    }
                },
                '2000-01-02T00:00:00.000000Z': {
                    'NumMatchedRules': 2,
                    'FileInfo': {
                        'Deleted': None
                    }
                },
                '2000-01-03T00:00:00.000000Z': {}
            }
        }

        assert_equal(expected_record, merged.record)