Example #1
def test_differ(durable_s3_table, mock_lambda_environment):
    from historical.s3.models import DurableS3Model
    from historical.s3.differ import handler
    from historical.models import TTL_EXPIRY

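    # A stream INSERT for a new bucket should create the first durable revision.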
    ttl = int(time.time() + TTL_EXPIRY)
    new_bucket = S3_BUCKET.copy()
    new_bucket['eventTime'] = datetime(
        year=2017, month=5, day=12, hour=10, minute=30,
        second=0).isoformat() + 'Z'
    new_bucket["ttl"] = ttl
    new_item = DynamoDBRecordsFactory(records=[
        DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
            NewImage=new_bucket, Keys={'arn': new_bucket['arn']}),
                              eventName='INSERT')
    ])
    data = json.loads(json.dumps(new_item, default=serialize))
    handler(data, None)
    assert DurableS3Model.count() == 1

    # Test duplicates don't change anything:
    data = json.loads(json.dumps(new_item, default=serialize))
    handler(data, None)
    assert DurableS3Model.count() == 1

    # Test ephemeral changes don't add new models:
    ephemeral_changes = S3_BUCKET.copy()
    ephemeral_changes["eventTime"] = \
        datetime(year=2017, month=5, day=12, hour=11, minute=30, second=0).isoformat() + 'Z'
    ephemeral_changes["configuration"]["_version"] = 99999
    ephemeral_changes["ttl"] = ttl

    data = DynamoDBRecordsFactory(records=[
        DynamoDBRecordFactory(
            dynamodb=DynamoDBDataFactory(
                NewImage=ephemeral_changes,
                Keys={'arn': ephemeral_changes['arn']}),
            eventName='MODIFY')
    ])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, None)
    assert DurableS3Model.count() == 1

    # Add an update:
    new_changes = S3_BUCKET.copy()
    new_date = datetime(
        year=2017, month=5, day=12, hour=11, minute=30,
        second=0).isoformat() + 'Z'
    new_changes["eventTime"] = new_date
    new_changes["Tags"] = {"ANew": "Tag"}
    new_changes["configuration"]["Tags"] = {"ANew": "Tag"}
    new_changes["ttl"] = ttl
    data = DynamoDBRecordsFactory(records=[
        DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
            NewImage=new_changes, Keys={'arn': new_changes['arn']}),
                              eventName='MODIFY')
    ])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, None)
    results = list(DurableS3Model.query("arn:aws:s3:::testbucket1"))
    assert len(results) == 2
    assert results[1].Tags["ANew"] == results[
        1].configuration.attribute_values["Tags"]["ANew"] == "Tag"
    assert results[1].eventTime == new_date

    # And deletion (ensure new record -- testing TTL):
    delete_bucket = S3_BUCKET.copy()
    delete_bucket["eventTime"] = datetime(
        year=2017, month=5, day=12, hour=12, minute=30,
        second=0).isoformat() + 'Z'
    delete_bucket["ttl"] = ttl
    data = DynamoDBRecordsFactory(records=[
        DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
            OldImage=delete_bucket, Keys={'arn': delete_bucket['arn']}),
                              eventName='REMOVE',
                              userIdentity=UserIdentityFactory(
                                  type='Service',
                                  principalId='dynamodb.amazonaws.com'))
    ])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, None)
    assert DurableS3Model.count() == 3
Example #2
def test_differ(current_vpc_table, durable_vpc_table, mock_lambda_environment):
    from historical.vpc.models import DurableVPCModel
    from historical.vpc.differ import handler
    from historical.models import TTL_EXPIRY

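    # An INSERT record (wrapped SNS -> SQS, as the differ receives it) should create the first durable revision.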
    ttl = int(time.time() + TTL_EXPIRY)
    new_vpc = VPC.copy()
    new_vpc.pop("eventSource")
    new_vpc['eventTime'] = datetime(
        year=2017, month=5, day=12, hour=10, minute=30,
        second=0).isoformat() + 'Z'
    new_vpc['ttl'] = ttl
    data = json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=new_vpc, Keys={'arn': new_vpc['arn']}),
                                            eventName='INSERT'),
                      default=serialize)
    data = RecordsFactory(records=[
        SQSDataFactory(
            body=json.dumps(SnsDataFactory(Message=data), default=serialize))
    ])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, None)
    assert DurableVPCModel.count() == 1

    # ensure no new record for the same data
    duplicate_vpc = VPC.copy()
    duplicate_vpc.pop("eventSource")
    duplicate_vpc['eventTime'] = datetime(
        year=2017, month=5, day=12, hour=11, minute=30,
        second=0).isoformat() + 'Z'
    duplicate_vpc['ttl'] = ttl
    data = json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=duplicate_vpc, Keys={'arn': duplicate_vpc['arn']}),
                                            eventName='MODIFY'),
                      default=serialize)
    data = RecordsFactory(records=[
        SQSDataFactory(
            body=json.dumps(SnsDataFactory(Message=data), default=serialize))
    ])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, None)
    assert DurableVPCModel.count() == 1

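    # A real configuration change (VPC State) should produce a second durable revision.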
    updated_vpc = VPC.copy()
    updated_vpc.pop("eventSource")
    updated_vpc['eventTime'] = datetime(
        year=2017, month=5, day=12, hour=11, minute=30,
        second=0).isoformat() + 'Z'
    updated_vpc['configuration']['State'] = 'changeme'
    updated_vpc['ttl'] = ttl
    data = json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=updated_vpc, Keys={'arn': VPC['arn']}),
                                            eventName='MODIFY'),
                      default=serialize)
    data = RecordsFactory(records=[
        SQSDataFactory(
            body=json.dumps(SnsDataFactory(Message=data), default=serialize))
    ])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, None)
    assert DurableVPCModel.count() == 2

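    # Another configuration change (CidrBlock), even with an earlier eventTime, still adds a revision.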
    updated_vpc = VPC.copy()
    updated_vpc.pop("eventSource")
    updated_vpc['eventTime'] = datetime(
        year=2017, month=5, day=12, hour=9, minute=30,
        second=0).isoformat() + 'Z'
    updated_vpc['configuration']['CidrBlock'] = 'changeme'
    updated_vpc['ttl'] = ttl
    data = json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=updated_vpc, Keys={'arn': VPC['arn']}),
                                            eventName='MODIFY'),
                      default=serialize)
    data = RecordsFactory(records=[
        SQSDataFactory(
            body=json.dumps(SnsDataFactory(Message=data), default=serialize))
    ])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, None)
    assert DurableVPCModel.count() == 3

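    # A change to a top-level attribute (Name) is also diffed and adds a revision.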
    updated_vpc = VPC.copy()
    updated_vpc.pop("eventSource")
    updated_vpc['eventTime'] = datetime(
        year=2017, month=5, day=12, hour=9, minute=31,
        second=0).isoformat() + 'Z'
    updated_vpc.update({'Name': 'blah'})
    updated_vpc['ttl'] = ttl
    data = json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=updated_vpc, Keys={'arn': VPC['arn']}),
                                            eventName='MODIFY'),
                      default=serialize)
    data = RecordsFactory(records=[
        SQSDataFactory(
            body=json.dumps(SnsDataFactory(Message=data), default=serialize))
    ])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, None)
    assert DurableVPCModel.count() == 4

    deleted_vpc = VPC.copy()
    deleted_vpc.pop("eventSource")
    deleted_vpc['eventTime'] = datetime(
        year=2017, month=5, day=12, hour=12, minute=30,
        second=0).isoformat() + 'Z'
    deleted_vpc['ttl'] = ttl

    # ensure new record: a REMOVE issued by the DynamoDB TTL service should still produce a new durable entry
    data = json.dumps(DynamoDBRecordFactory(
        dynamodb=DynamoDBDataFactory(OldImage=deleted_vpc,
                                     Keys={'arn': VPC['arn']}),
        eventName='REMOVE',
        userIdentity=UserIdentityFactory(
            type='Service', principalId='dynamodb.amazonaws.com')),
                      default=serialize)
    data = RecordsFactory(records=[
        SQSDataFactory(
            body=json.dumps(SnsDataFactory(Message=data), default=serialize))
    ])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, None)
    assert DurableVPCModel.count() == 5
Example #3
def test_differ(current_security_group_table, durable_security_group_table, mock_lambda_environment):
    from historical.security_group.models import DurableSecurityGroupModel
    from historical.security_group.differ import handler
    from historical.models import TTL_EXPIRY

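    # An INSERT for a new security group should create the first durable revision.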
    ttl = int(time.time() + TTL_EXPIRY)
    new_group = SECURITY_GROUP.copy()
    new_group.pop("eventSource")
    new_group['eventTime'] = datetime(year=2017, month=5, day=12, hour=10, minute=30, second=0).isoformat() + 'Z'
    new_group["ttl"] = ttl
    data = json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=new_group,
        Keys={
            'arn': new_group['arn']
        }
    ), eventName='INSERT'), default=serialize)
    data = RecordsFactory(records=[SQSDataFactory(body=json.dumps(SnsDataFactory(Message=data), default=serialize))])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, mock_lambda_environment)
    assert DurableSecurityGroupModel.count() == 1

    # ensure no new record for the same data
    duplicate_group = SECURITY_GROUP.copy()
    duplicate_group.pop("eventSource")
    duplicate_group['eventTime'] = datetime(year=2017, month=5, day=12, hour=11, minute=30, second=0).isoformat() + 'Z'
    duplicate_group["ttl"] = ttl
    data = json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=duplicate_group,
        Keys={
            'arn': duplicate_group['arn']
        }
    ), eventName='MODIFY'), default=serialize)
    data = RecordsFactory(records=[SQSDataFactory(body=json.dumps(SnsDataFactory(Message=data), default=serialize))])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, mock_lambda_environment)
    assert DurableSecurityGroupModel.count() == 1

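    # A changed Description is a real diff and should add a second durable revision.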
    updated_group = SECURITY_GROUP.copy()
    updated_group.pop("eventSource")
    updated_group['eventTime'] = datetime(year=2017, month=5, day=12, hour=11, minute=30, second=0).isoformat() + 'Z'
    updated_group['configuration']['Description'] = 'changeme'
    updated_group["ttl"] = ttl
    data = json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=updated_group,
        Keys={
            'arn': SECURITY_GROUP['arn']
        }
    ), eventName='MODIFY'), default=serialize)
    data = RecordsFactory(records=[SQSDataFactory(body=json.dumps(SnsDataFactory(Message=data), default=serialize))])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, mock_lambda_environment)
    assert DurableSecurityGroupModel.count() == 2

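    # A nested change (CidrIp inside IpPermissions) should also be detected and add a revision.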
    updated_group = SECURITY_GROUP.copy()
    updated_group.pop("eventSource")
    updated_group['eventTime'] = datetime(year=2017, month=5, day=12, hour=9, minute=30, second=0).isoformat() + 'Z'
    updated_group['configuration']['IpPermissions'][0]['IpRanges'][0]['CidrIp'] = 'changeme'
    updated_group["ttl"] = ttl
    data = json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=updated_group,
        Keys={
            'arn': SECURITY_GROUP['arn']
        }
    ), eventName='MODIFY'), default=serialize)
    data = RecordsFactory(records=[SQSDataFactory(body=json.dumps(SnsDataFactory(Message=data), default=serialize))])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, mock_lambda_environment)
    assert DurableSecurityGroupModel.count() == 3

    deleted_group = SECURITY_GROUP.copy()
    deleted_group.pop("eventSource")
    deleted_group['eventTime'] = datetime(year=2017, month=5, day=12, hour=12, minute=30, second=0).isoformat() + 'Z'
    deleted_group["ttl"] = ttl

    # ensure new record: deletion via the DynamoDB TTL service should be recorded as a new durable entry
    data = json.dumps(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        OldImage=deleted_group,
        Keys={
            'arn': SECURITY_GROUP['arn']
        }),
        eventName='REMOVE',
        userIdentity=UserIdentityFactory(
                type='Service',
                principalId='dynamodb.amazonaws.com'
        )), default=serialize)
    data = RecordsFactory(records=[SQSDataFactory(body=json.dumps(SnsDataFactory(Message=data), default=serialize))])
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, mock_lambda_environment)
    assert DurableSecurityGroupModel.count() == 4
Example #4
    deleted_item['eventTime'] = datetime(year=2017, month=5, day=12, hour=12, minute=30, second=0).isoformat() + 'Z'
    deleted_item['ttl'] = ttl

    # ensure new record
    data = DynamoDBRecordsFactory(
        records=[
            DynamoDBRecordFactory(
                dynamodb=DynamoDBDataFactory(
                    OldImage=deleted_item,
                    Keys={
                        'arn': ITEM['arn']
                    }
                ),
                eventName='REMOVE',
                userIdentity=UserIdentityFactory(
                    type='Service',
                    principalId='dynamodb.amazonaws.com'
                )
            )
        ]
    )
    data = json.loads(json.dumps(data, default=serialize))
    handler(data, None)
    assert Durable{{cookiecutter.technology_slug | titlecase}}Model.count() == 5


def test_collector(historical_role, mock_lambda_environment, {{cookiecutter.technology_slug}}s):
    from .models import Current{{cookiecutter.technology_slug | titlecase}}Model
    from .collector import handler

    # TODO modify event
    event = CloudwatchEventFactory(