Example #1
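These snippets are excerpted from a larger test module, so they lean on imports, factories, and sample payloads (S3_BUCKET, SECURITY_GROUP, VPC, EVENT_TOO_BIG_FLAG, TTL_EXPIRY, serialize, and the various *Factory helpers) defined elsewhere. A minimal sketch of the shared setup they appear to assume (the commented module path is a guess, not confirmed by the excerpts):

# Standard-library pieces used throughout the examples below.
import json
import math
import sys
import time
from datetime import datetime
from unittest.mock import MagicMock

import pytest

# The record factories, the serialize() helper, and the sample payloads come from the
# project's test helpers; the exact module path below is an assumption for illustration.
# from historical.tests.factories import (
#     CloudwatchEventFactory, DetailFactory, DynamoDBDataFactory, DynamoDBRecordFactory,
#     DynamoDBRecordsFactory, RecordsFactory, SnsDataFactory, SQSDataFactory,
#     UserIdentityFactory, serialize)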
def test_deserialize_current_record_to_current_model(historical_role,
                                                     current_s3_table,
                                                     buckets):
    """Tests that a current table event can be deserialized back into proper Current record object."""
    from historical.common.dynamodb import deserialize_current_record_to_current_model
    from historical.s3.models import CurrentS3Model

    # Create the event to fetch the Current data from:
    bucket = S3_BUCKET.copy()
    bucket['eventTime'] = datetime(
        year=2017, month=5, day=12, hour=10, minute=30,
        second=0).isoformat() + 'Z'
    ddb_record = json.loads(
        json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
            NewImage=bucket, Keys={'arn': bucket['arn']}),
                                         eventName='INSERT'),
                   default=serialize))

    result = deserialize_current_record_to_current_model(
        ddb_record, CurrentS3Model)
    assert result.BucketName == "testbucket1"
    assert isinstance(result, CurrentS3Model)

    # And for event_too_big:
    # Create the bucket in the current table:
    now = datetime.utcnow().replace(tzinfo=None, microsecond=0)
    create_event = json.loads(
        json.dumps(CloudwatchEventFactory(detail=DetailFactory(
            requestParameters={"bucketName": "testbucket1"},
            eventSource="aws.s3",
            eventName="CreateBucket",
            eventTime=now)),
                   default=serialize))
    process_update_records([create_event])

    del bucket['configuration']
    ddb_record = json.loads(
        json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
            NewImage=bucket, Keys={'arn': bucket['arn']}),
                                         eventName='INSERT'),
                   default=serialize))
    ddb_record[EVENT_TOO_BIG_FLAG] = True

    result = deserialize_current_record_to_current_model(
        ddb_record, CurrentS3Model)
    assert result.BucketName == "testbucket1"
    assert isinstance(result, CurrentS3Model)

    # And if the object isn't in the current table:
    ddb_record = json.loads(
        json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
            NewImage=bucket, Keys={'arn': 'arn:aws:s3:::notarealbucket'}),
                                         eventName='INSERT'),
                   default=serialize))
    ddb_record[EVENT_TOO_BIG_FLAG] = True

    result = deserialize_current_record_to_current_model(
        ddb_record, CurrentS3Model)
    assert not result
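The assertions above pin down the contract: when EVENT_TOO_BIG_FLAG is set on the stream record, the deserializer falls back to fetching the item from the Current table by ARN, and it returns nothing if the item is absent (the durable-table variant in the next example raises DurableItemIsMissingException instead). A rough sketch of that fallback, written only to mirror the tested behavior and not the library's actual code:

def deserialize_current_record_sketch(record, current_model, too_big_flag='event_too_big'):
    # Illustrative only; the real implementation and the exact flag value live in the
    # historical package (the tests reference it as EVENT_TOO_BIG_FLAG).
    if record.get(too_big_flag):
        # The full image was stripped before SNS, so re-read the item by ARN.
        # (Real stream records may wrap key values in DynamoDB type descriptors.)
        arn = record['dynamodb']['Keys']['arn']
        items = list(current_model.query(arn))
        return items[0] if items else None
    # Otherwise NewImage is complete and can be turned into the model directly.
    ...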
Example #2
def test_deserialize_durable_record_to_durable_model(historical_role,
                                                     durable_s3_table,
                                                     buckets):
    """Tests that a durable table event can be deserialized back into proper Durable record object."""
    from historical.common.dynamodb import deserialize_durable_record_to_durable_model, \
        deserialize_current_record_to_durable_model
    from historical.s3.models import CurrentS3Model, DurableS3Model

    # Create the event to fetch the Durable data from:
    bucket = S3_BUCKET.copy()
    del bucket['eventSource']
    bucket['eventTime'] = datetime(
        year=2017, month=5, day=12, hour=10, minute=30,
        second=0).isoformat() + 'Z'
    ddb_record = json.loads(
        json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
            NewImage=bucket, Keys={'arn': bucket['arn']}),
                                         eventName='INSERT'),
                   default=serialize))
    result = deserialize_durable_record_to_durable_model(
        ddb_record, DurableS3Model)
    assert result
    assert result.BucketName == "testbucket1"
    assert result.eventTime == bucket['eventTime']
    assert isinstance(result, DurableS3Model)

    # And for event_too_big:
    # Create the bucket in the durable table:
    ddb_record = json.loads(
        json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
            NewImage=bucket, Keys={'arn': bucket['arn']}),
                                         eventName='INSERT'),
                   default=serialize))
    revision = deserialize_current_record_to_durable_model(
        ddb_record, CurrentS3Model, DurableS3Model)
    revision.save()
    ddb_record[EVENT_TOO_BIG_FLAG] = True
    del bucket['configuration']

    result = deserialize_durable_record_to_durable_model(
        ddb_record, DurableS3Model)
    assert result
    assert result.BucketName == "testbucket1"
    assert result.eventTime == bucket['eventTime']
    assert isinstance(result, DurableS3Model)

    # And if the object isn't in the durable table:
    ddb_record = json.loads(
        json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
            NewImage=bucket, Keys={'arn': 'arn:aws:s3:::notarealbucket'}),
                                         eventName='INSERT'),
                   default=serialize))
    ddb_record[EVENT_TOO_BIG_FLAG] = True

    # Raises an exception:
    with pytest.raises(DurableItemIsMissingException):
        deserialize_durable_record_to_durable_model(ddb_record, DurableS3Model)
Example #3
def test_make_sns_blob():
    from historical.common.sns import shrink_sns_blob

    ttl = int(time.time() + TTL_EXPIRY)
    new_bucket = S3_BUCKET.copy()
    new_bucket['eventTime'] = datetime(
        year=2017, month=5, day=12, hour=10, minute=30,
        second=0).isoformat() + 'Z'
    new_bucket["ttl"] = ttl
    ddb_record = DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=new_bucket,
        Keys={'arn': new_bucket['arn']},
        OldImage=new_bucket),
                                       eventName='INSERT')
    new_item = DynamoDBRecordsFactory(records=[ddb_record])
    data = json.loads(json.dumps(new_item, default=serialize))['Records'][0]

    shrunken_blob = shrink_sns_blob(data)

    assert shrunken_blob['userIdentity'] == data['userIdentity']
    assert shrunken_blob['sns_too_big']
    assert shrunken_blob['eventName'] == data['eventName']
    assert shrunken_blob['dynamodb']['Keys'] == data['dynamodb']['Keys']

    assert not shrunken_blob['dynamodb']['NewImage'].get('configuration')
    assert not shrunken_blob['dynamodb']['OldImage'].get('configuration')
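Based on these assertions, shrink_sns_blob keeps the record's envelope (eventName, userIdentity, the DynamoDB Keys and images) but drops the bulky configuration from NewImage and OldImage and marks the result with sns_too_big. A hedged sketch of that shape, not the actual implementation:

def shrink_sns_blob_sketch(record):
    # Keep only what the differ needs to re-fetch the item later.
    shrunken = {
        'eventName': record['eventName'],
        'userIdentity': record.get('userIdentity'),
        'sns_too_big': True,
        'dynamodb': {'Keys': record['dynamodb']['Keys']},
    }
    # The images survive, but without their large 'configuration' blobs:
    for image in ('NewImage', 'OldImage'):
        if image in record['dynamodb']:
            slimmed = dict(record['dynamodb'][image])
            slimmed.pop('configuration', None)
            shrunken['dynamodb'][image] = slimmed
    return shrunken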
Example #4
def test_differ(durable_s3_table, mock_lambda_environment):
    from historical.s3.models import DurableS3Model
    from historical.s3.differ import handler
    from historical.models import TTL_EXPIRY

    ttl = int(time.time() + TTL_EXPIRY)
    new_bucket = S3_BUCKET.copy()
    new_bucket['eventTime'] = datetime(
        year=2017, month=5, day=12, hour=10, minute=30,
        second=0).isoformat() + 'Z'
    new_bucket["ttl"] = ttl
    new_item = DynamoDBRecordsFactory(records=[
        DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
            NewImage=new_bucket, Keys={'arn': new_bucket['arn']}),
                              eventName='INSERT')
    ])
    data = json.loads(json.dumps(new_item, default=serialize))
    handler(data, None)
    assert DurableS3Model.count() == 1

    # Test duplicates don't change anything:
    data = json.loads(json.dumps(new_item, default=serialize))
    handler(data, None)
    assert DurableS3Model.count() == 1

    # Test ephemeral changes don't add new models:
    ephemeral_changes = S3_BUCKET.copy()
    ephemeral_changes["eventTime"] = \
        datetime(year=2017, month=5, day=12, hour=11, minute=30, second=0).isoformat() + 'Z'
    ephemeral_changes["configuration"]["_version"] = 99999
    ephemeral_changes["ttl"] = ttl

    data = DynamoDBRecordsFactory(records=[
        DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
            NewImage=ephemeral_changes, Keys={'arn': ephemeral_changes['arn']
                                              }),
                              eventName='MODIFY')
    ])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, None)
    assert DurableS3Model.count() == 1

    # Add an update:
    new_changes = S3_BUCKET.copy()
    new_date = datetime(
        year=2017, month=5, day=12, hour=11, minute=30,
        second=0).isoformat() + 'Z'
    new_changes["eventTime"] = new_date
    new_changes["Tags"] = {"ANew": "Tag"}
    new_changes["configuration"]["Tags"] = {"ANew": "Tag"}
    new_changes["ttl"] = ttl
    data = DynamoDBRecordsFactory(records=[
        DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
            NewImage=new_changes, Keys={'arn': new_changes['arn']}),
                              eventName='MODIFY')
    ])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, None)
    results = list(DurableS3Model.query("arn:aws:s3:::testbucket1"))
    assert len(results) == 2
    assert results[1].Tags["ANew"] == results[
        1].configuration.attribute_values["Tags"]["ANew"] == "Tag"
    assert results[1].eventTime == new_date

    # And deletion (ensure new record -- testing TTL):
    delete_bucket = S3_BUCKET.copy()
    delete_bucket["eventTime"] = datetime(
        year=2017, month=5, day=12, hour=12, minute=30,
        second=0).isoformat() + 'Z'
    delete_bucket["ttl"] = ttl
    data = DynamoDBRecordsFactory(records=[
        DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
            OldImage=delete_bucket, Keys={'arn': delete_bucket['arn']}),
                              eventName='REMOVE',
                              userIdentity=UserIdentityFactory(
                                  type='Service',
                                  principalId='dynamodb.amazonaws.com'))
    ])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, None)
    assert DurableS3Model.count() == 3
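The "ephemeral changes" step above passes because the differ ignores fields that change on every event (eventTime, ttl, and the configuration's _version) when deciding whether a new durable revision is warranted. A simplified comparison along those lines (the function name and the exact ephemeral field list are assumptions):

def is_new_revision_sketch(latest_revision, new_image):
    # Strip the ephemeral fields from both sides, then compare what remains.
    def strip(item):
        item = json.loads(json.dumps(item, default=str))  # cheap deep copy
        item.pop('eventTime', None)
        item.pop('ttl', None)
        item.get('configuration', {}).pop('_version', None)
        return item
    return strip(latest_revision) != strip(new_image)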
Example #5
def test_differ(current_security_group_table, durable_security_group_table, mock_lambda_environment):
    from historical.security_group.models import DurableSecurityGroupModel
    from historical.security_group.differ import handler
    from historical.models import TTL_EXPIRY

    ttl = int(time.time() + TTL_EXPIRY)
    new_group = SECURITY_GROUP.copy()
    new_group.pop("eventSource")
    new_group['eventTime'] = datetime(year=2017, month=5, day=12, hour=10, minute=30, second=0).isoformat() + 'Z'
    new_group["ttl"] = ttl
    data = json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=new_group,
        Keys={
            'arn': new_group['arn']
        }
    ), eventName='INSERT'), default=serialize)
    data = RecordsFactory(records=[SQSDataFactory(body=json.dumps(SnsDataFactory(Message=data), default=serialize))])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, mock_lambda_environment)
    assert DurableSecurityGroupModel.count() == 1

    # ensure no new record for the same data
    duplicate_group = SECURITY_GROUP.copy()
    duplicate_group.pop("eventSource")
    duplicate_group['eventTime'] = datetime(year=2017, month=5, day=12, hour=11, minute=30, second=0).isoformat() + 'Z'
    duplicate_group["ttl"] = ttl
    data = json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=duplicate_group,
        Keys={
            'arn': duplicate_group['arn']
        }
    ), eventName='MODIFY'), default=serialize)
    data = RecordsFactory(records=[SQSDataFactory(body=json.dumps(SnsDataFactory(Message=data), default=serialize))])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, mock_lambda_environment)
    assert DurableSecurityGroupModel.count() == 1

    updated_group = SECURITY_GROUP.copy()
    updated_group.pop("eventSource")
    updated_group['eventTime'] = datetime(year=2017, month=5, day=12, hour=11, minute=30, second=0).isoformat() + 'Z'
    updated_group['configuration']['Description'] = 'changeme'
    updated_group["ttl"] = ttl
    data = json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=updated_group,
        Keys={
            'arn': SECURITY_GROUP['arn']
        }
    ), eventName='MODIFY'), default=serialize)
    data = RecordsFactory(records=[SQSDataFactory(body=json.dumps(SnsDataFactory(Message=data), default=serialize))])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, mock_lambda_environment)
    assert DurableSecurityGroupModel.count() == 2

    updated_group = SECURITY_GROUP.copy()
    updated_group.pop("eventSource")
    updated_group['eventTime'] = datetime(year=2017, month=5, day=12, hour=9, minute=30, second=0).isoformat() + 'Z'
    updated_group['configuration']['IpPermissions'][0]['IpRanges'][0]['CidrIp'] = 'changeme'
    updated_group["ttl"] = ttl
    data = json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=updated_group,
        Keys={
            'arn': SECURITY_GROUP['arn']
        }
    ), eventName='MODIFY'), default=serialize)
    data = RecordsFactory(records=[SQSDataFactory(body=json.dumps(SnsDataFactory(Message=data), default=serialize))])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, mock_lambda_environment)
    assert DurableSecurityGroupModel.count() == 3

    deleted_group = SECURITY_GROUP.copy()
    deleted_group.pop("eventSource")
    deleted_group['eventTime'] = datetime(year=2017, month=5, day=12, hour=12, minute=30, second=0).isoformat() + 'Z'
    deleted_group["ttl"] = ttl

    # ensure new record
    data = json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        OldImage=deleted_group,
        Keys={
            'arn': SECURITY_GROUP['arn']
        }),
        eventName='REMOVE',
        userIdentity=UserIdentityFactory(
                type='Service',
                principalId='dynamodb.amazonaws.com'
        )), default=serialize)
    data = RecordsFactory(records=[SQSDataFactory(body=json.dumps(SnsDataFactory(Message=data), default=serialize))])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, mock_lambda_environment)
    assert DurableSecurityGroupModel.count() == 4
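Note the triple serialization in this test: the DynamoDB stream record is dumped to a string, wrapped as an SNS Message, dumped again inside an SQS body, and the whole batch is serialized once more. The handler has to reverse that nesting before it can diff anything; a small sketch of the unwrapping, grounded in how the event is built above:

def unwrap_sqs_sns_records_sketch(event):
    # Reverse the SQS -> SNS -> DynamoDB-record nesting used by the test fixtures.
    for sqs_record in event['Records']:
        sns_envelope = json.loads(sqs_record['body'])     # SQS body holds the SNS payload
        ddb_record = json.loads(sns_envelope['Message'])  # SNS Message holds the stream record
        yield ddb_record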
Example #6
def test_differ(current_vpc_table, durable_vpc_table, mock_lambda_environment):
    from historical.vpc.models import DurableVPCModel
    from historical.vpc.differ import handler
    from historical.models import TTL_EXPIRY

    ttl = int(time.time() + TTL_EXPIRY)
    new_vpc = VPC.copy()
    new_vpc.pop("eventSource")
    new_vpc['eventTime'] = datetime(
        year=2017, month=5, day=12, hour=10, minute=30,
        second=0).isoformat() + 'Z'
    new_vpc['ttl'] = ttl
    data = json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=new_vpc, Keys={'arn': new_vpc['arn']}),
                                            eventName='INSERT'),
                      default=serialize)
    data = RecordsFactory(records=[
        SQSDataFactory(
            body=json.dumps(SnsDataFactory(Message=data), default=serialize))
    ])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, None)
    assert DurableVPCModel.count() == 1

    # ensure no new record for the same data
    duplicate_vpc = VPC.copy()
    duplicate_vpc.pop("eventSource")
    duplicate_vpc['eventTime'] = datetime(
        year=2017, month=5, day=12, hour=11, minute=30,
        second=0).isoformat() + 'Z'
    duplicate_vpc['ttl'] = ttl
    data = json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=duplicate_vpc, Keys={'arn': duplicate_vpc['arn']}),
                                            eventName='MODIFY'),
                      default=serialize)
    data = RecordsFactory(records=[
        SQSDataFactory(
            body=json.dumps(SnsDataFactory(Message=data), default=serialize))
    ])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, None)
    assert DurableVPCModel.count() == 1

    updated_vpc = VPC.copy()
    updated_vpc.pop("eventSource")
    updated_vpc['eventTime'] = datetime(
        year=2017, month=5, day=12, hour=11, minute=30,
        second=0).isoformat() + 'Z'
    updated_vpc['configuration']['State'] = 'changeme'
    updated_vpc['ttl'] = ttl
    data = json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=updated_vpc, Keys={'arn': VPC['arn']}),
                                            eventName='MODIFY'),
                      default=serialize)
    data = RecordsFactory(records=[
        SQSDataFactory(
            body=json.dumps(SnsDataFactory(Message=data), default=serialize))
    ])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, None)
    assert DurableVPCModel.count() == 2

    updated_vpc = VPC.copy()
    updated_vpc.pop("eventSource")
    updated_vpc['eventTime'] = datetime(
        year=2017, month=5, day=12, hour=9, minute=30,
        second=0).isoformat() + 'Z'
    updated_vpc['configuration']['CidrBlock'] = 'changeme'
    updated_vpc['ttl'] = ttl
    data = json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=updated_vpc, Keys={'arn': VPC['arn']}),
                                            eventName='MODIFY'),
                      default=serialize)
    data = RecordsFactory(records=[
        SQSDataFactory(
            body=json.dumps(SnsDataFactory(Message=data), default=serialize))
    ])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, None)
    assert DurableVPCModel.count() == 3

    updated_vpc = VPC.copy()
    updated_vpc.pop("eventSource")
    updated_vpc['eventTime'] = datetime(
        year=2017, month=5, day=12, hour=9, minute=31,
        second=0).isoformat() + 'Z'
    updated_vpc.update({'Name': 'blah'})
    updated_vpc['ttl'] = ttl
    data = json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=updated_vpc, Keys={'arn': VPC['arn']}),
                                            eventName='MODIFY'),
                      default=serialize)
    data = RecordsFactory(records=[
        SQSDataFactory(
            body=json.dumps(SnsDataFactory(Message=data), default=serialize))
    ])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, None)
    assert DurableVPCModel.count() == 4

    deleted_vpc = VPC.copy()
    deleted_vpc.pop("eventSource")
    deleted_vpc['eventTime'] = datetime(
        year=2017, month=5, day=12, hour=12, minute=30,
        second=0).isoformat() + 'Z'
    deleted_vpc['ttl'] = ttl

    # ensure new record
    data = json.dumps(DynamoDBRecordFactory(
        dynamodb=DynamoDBDataFactory(OldImage=deleted_vpc,
                                     Keys={'arn': VPC['arn']}),
        eventName='REMOVE',
        userIdentity=UserIdentityFactory(
            type='Service', principalId='dynamodb.amazonaws.com')),
                      default=serialize)
    data = RecordsFactory(records=[
        SQSDataFactory(
            body=json.dumps(SnsDataFactory(Message=data), default=serialize))
    ])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, None)
    assert DurableVPCModel.count() == 5
Example #7
def test_snsproxy_dynamodb_differ(historical_role, current_s3_table,
                                  durable_s3_table, mock_lambda_environment,
                                  buckets):
    """
    This mostly checks that the differ is able to properly load the reduced dataset from the SNSProxy.
    """
    # Create the item in the current table:
    from historical.s3.collector import handler as current_handler
    from historical.s3.differ import handler as diff_handler
    from historical.s3.models import CurrentS3Model, DurableS3Model
    from historical.common.sns import shrink_sns_blob

    # Mock out the logger:
    import historical.common.dynamodb
    old_logger = historical.common.dynamodb.log
    mocked_logger = MagicMock()
    historical.common.dynamodb.log = mocked_logger

    now = datetime.utcnow().replace(tzinfo=None, microsecond=0)
    create_event = CloudwatchEventFactory(
        detail=DetailFactory(requestParameters={"bucketName": "testbucket1"},
                             eventSource="aws.s3",
                             eventName="CreateBucket",
                             eventTime=now))
    data = json.dumps(create_event, default=serialize)
    data = RecordsFactory(records=[SQSDataFactory(body=data)])
    data = json.dumps(data, default=serialize)
    data = json.loads(data)

    current_handler(data, mock_lambda_environment)
    result = list(CurrentS3Model.query("arn:aws:s3:::testbucket1"))
    assert len(result) == 1

    # Mock out the DDB Stream for this creation and for an item that is NOT in the current table:
    ttl = int(time.time() + TTL_EXPIRY)
    new_bucket = S3_BUCKET.copy()
    new_bucket['eventTime'] = datetime(
        year=2017, month=5, day=12, hour=10, minute=30,
        second=0).isoformat() + 'Z'
    new_bucket['ttl'] = ttl
    ddb_existing_item = DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=new_bucket,
        Keys={'arn': new_bucket['arn']},
        OldImage=new_bucket),
                                              eventName='INSERT')

    missing_bucket = S3_BUCKET.copy()
    missing_bucket['eventTime'] = datetime(
        year=2017, month=5, day=12, hour=10, minute=30,
        second=0).isoformat() + 'Z'
    missing_bucket['ttl'] = ttl
    missing_bucket['BucketName'] = 'notinthecurrenttable'
    missing_bucket['arn'] = 'arn:aws:s3:::notinthecurrenttable'
    missing_bucket['configuration']['Name'] = 'notinthecurrenttable'
    ddb_missing_item = DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=missing_bucket,
        Keys={'arn': 'arn:aws:s3:::notinthecurrenttable'},
        OldImage=new_bucket),
                                             eventName='INSERT')

    # Get the shrunken blob:
    shrunken_existing = json.dumps(
        shrink_sns_blob(
            json.loads(json.dumps(ddb_existing_item, default=serialize))))
    shrunken_missing = json.dumps(
        shrink_sns_blob(
            json.loads(json.dumps(ddb_missing_item, default=serialize))))

    records = RecordsFactory(records=[
        SQSDataFactory(body=json.dumps(
            SnsDataFactory(Message=shrunken_existing), default=serialize)),
        SQSDataFactory(body=json.dumps(
            SnsDataFactory(Message=shrunken_missing), default=serialize))
    ])
    records_event = json.loads(json.dumps(records, default=serialize))

    # Run the differ:
    diff_handler(records_event, mock_lambda_environment)

    # Verify that the existing bucket in the Current table is in the Durable table with the correct configuration:
    result = list(DurableS3Model.query("arn:aws:s3:::testbucket1"))
    assert len(result) == 1
    assert result[0].configuration.attribute_values['Name'] == 'testbucket1'

    # Verify that the missing bucket is ignored -- as it will be processed presumably later:
    result = list(DurableS3Model.query("arn:aws:s3:::notinthecurrenttable"))
    assert not result

    # Verify that the proper log statements were reached:
    assert mocked_logger.debug.called
    assert mocked_logger.error.called
    debug_calls = [
        '[-->] Item with ARN: arn:aws:s3:::notinthecurrenttable was too big for SNS '
        '-- fetching it from the Current table...',
        '[+] Saving new revision to durable table.',
        '[-->] Item with ARN: arn:aws:s3:::testbucket1 was too big for SNS -- fetching it from the Current table...'
    ]
    for dc in debug_calls:
        mocked_logger.debug.assert_any_call(dc)

    mocked_logger.error.assert_called_once_with(
        '[?] Received item too big for SNS, and was not able to '
        'find the original item with ARN: arn:aws:s3:::notinthecurrenttable')

    # Unmock the logger:
    historical.common.dynamodb.log = old_logger
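The manual save-and-restore of historical.common.dynamodb.log above works, but it leaves the module patched if an assertion fails before the last line. The same check can be expressed with unittest.mock.patch (or pytest's monkeypatch fixture) so the real logger is always restored; a sketch:

from unittest.mock import patch

def run_differ_with_mocked_logger(records_event, diff_handler, mock_lambda_environment):
    # patch() undoes itself even if an assertion below raises.
    with patch('historical.common.dynamodb.log') as mocked_logger:
        diff_handler(records_event, mock_lambda_environment)
        assert mocked_logger.debug.called
        assert mocked_logger.error.called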
Example #8
def test_process_sns_forward():
    import historical.common.sns

    test_blob = {'value': None}
    old_publish_message = historical.common.sns._publish_message
    old_logger = historical.common.sns.log

    def mock_publish_message(client, blob, topic_arn):
        assert math.ceil(sys.getsizeof(blob) / 1024) < 256

        # Sort the JSON for easier comparisons later...
        test_blob['value'] = json.dumps(json.loads(blob), sort_keys=True)

    historical.common.sns._publish_message = mock_publish_message

    mock_logger = MagicMock()
    historical.common.sns.log = mock_logger

    from historical.common.sns import process_sns_forward

    # With a small item:
    ttl = int(time.time() + TTL_EXPIRY)
    new_bucket = S3_BUCKET.copy()
    new_bucket['eventTime'] = datetime(
        year=2017, month=5, day=12, hour=10, minute=30,
        second=0).isoformat() + 'Z'
    new_bucket["ttl"] = ttl
    ddb_record = DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=new_bucket,
        Keys={'arn': new_bucket['arn']},
        OldImage=new_bucket),
                                       eventName='INSERT')
    new_item = DynamoDBRecordsFactory(records=[ddb_record])
    data = json.loads(json.dumps(new_item, default=serialize))['Records'][0]

    # Nothing changed -- should be exactly the same:
    process_sns_forward(data, "sometopic", None)
    assert test_blob['value'] == json.dumps(data, sort_keys=True)
    assert not json.loads(test_blob['value']).get('sns_too_big')
    assert not mock_logger.debug.called

    # With a big item...
    new_bucket['configuration']['VeryLargeConfigItem'] = 'a' * 262144
    ddb_record = DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=new_bucket,
        Keys={'arn': new_bucket['arn']},
        OldImage=new_bucket),
                                       eventName='INSERT')
    new_item = DynamoDBRecordsFactory(records=[ddb_record])
    data = json.loads(json.dumps(new_item, default=serialize))['Records'][0]

    assert math.ceil(sys.getsizeof(json.dumps(data)) / 1024) >= 256
    process_sns_forward(data, "sometopic", None)
    assert test_blob['value'] != json.dumps(data, sort_keys=True)
    assert json.loads(test_blob['value'])['sns_too_big']
    assert not mock_logger.debug.called

    # With a region that is not in the SNSPROXY_REGIONS var:
    new_bucket['Region'] = "us-west-2"
    ddb_record = DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=new_bucket,
        Keys={'arn': new_bucket['arn']},
        OldImage=new_bucket),
                                       eventName='INSERT')
    new_item = DynamoDBRecordsFactory(records=[ddb_record])
    data = json.loads(json.dumps(new_item, default=serialize))['Records'][0]
    process_sns_forward(data, "sometopic", None)
    assert mock_logger.debug.called

    # Unmock:
    historical.common.sns._publish_message = old_publish_message
    historical.common.sns.log = old_logger
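Taken together, the three cases pin down process_sns_forward's behavior: records under the SNS limit are forwarded untouched, records at or above roughly 256 KB are shrunk with shrink_sns_blob first, and records for a region outside SNSPROXY_REGIONS are skipped with a debug log. A behavior-level sketch (the publish hook, region list, and logger parameters are stand-ins, not the library's real signature):

from historical.common.sns import shrink_sns_blob  # exercised directly in Example #3

def process_sns_forward_sketch(record, topic_arn, client, publish,
                               proxy_regions=('us-east-1',), logger=None):
    # The real function reads SNSPROXY_REGIONS from configuration and publishes via
    # historical.common.sns._publish_message; these parameters are illustrative.
    region = record['dynamodb']['NewImage'].get('Region')
    if region and region not in proxy_regions:
        if logger:
            logger.debug('Skipping record for a region outside the proxied set.')
        return
    blob = json.dumps(record)
    if math.ceil(sys.getsizeof(blob) / 1024) >= 256:  # SNS caps a message at 256 KB
        blob = json.dumps(shrink_sns_blob(record))
    publish(client, blob, topic_arn)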
Example #9
def test_differ(durable_{{cookiecutter.technology_slug}}_table, mock_lambda_environment):
    from .models import Durable{{cookiecutter.technology_slug | titlecase}}Model
    from .differ import handler
    from historical.models import TTL_EXPIRY

    ttl = int(time.time() + TTL_EXPIRY)
    new_item = ITEM.copy()
    new_item['eventTime'] = datetime(year=2017, month=5, day=12, hour=10, minute=30, second=0).isoformat() + 'Z'
    new_item['ttl'] = ttl
    data = DynamoDBRecordsFactory(
        records=[
            DynamoDBRecordFactory(
                dynamodb=DynamoDBDataFactory(
                    NewImage=new_item,
                    Keys={
                        'arn': new_item['arn']
                    }
                ),
                eventName='INSERT'
            )
        ]
    )
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, None)

    assert Durable{{cookiecutter.technology_slug | titlecase}}Model.count() == 1

    duplicate_item = ITEM.copy()
    duplicate_item['eventTime'] = datetime(year=2017, month=5, day=12, hour=11, minute=30, second=0).isoformat() + 'Z'
    duplicate_item['ttl'] = ttl