def test_collector(historical_role, mock_lambda_environment, vpcs, current_vpc_table):
    """Test the VPC collector: CreateVpc adds a current record, DeleteVpc removes it."""
    from historical.vpc.models import CurrentVPCModel
    from historical.vpc.collector import handler

    def make_event(event_name):
        # Wrap a CloudWatch event for the given API call in an SQS records payload,
        # round-tripping through JSON exactly the way Lambda receives it.
        event = CloudwatchEventFactory(detail=DetailFactory(
            requestParameters={'vpcId': vpcs['VpcId']},
            eventName=event_name),
        )
        body = json.dumps(event, default=serialize)
        records = RecordsFactory(records=[SQSDataFactory(body=body)])
        return json.loads(json.dumps(records, default=serialize))

    # Creation should result in exactly one current record:
    handler(make_event('CreateVpc'), None)
    assert CurrentVPCModel.count() == 1

    # Deletion should remove it again:
    handler(make_event('DeleteVpc'), None)
    assert CurrentVPCModel.count() == 0
def test_poller_sqs(historical_sqs, historical_role, mock_lambda_environment, vpcs, swag_accounts):
    """Test the SQS-backed poller: both fixture VPCs should be queued for collection."""
    # Renamed from `test_poller`: this module defined two tests with that name, so
    # this earlier definition was shadowed and never collected by pytest.
    from historical.vpc.poller import handler
    handler({}, None)

    # Need to ensure that 2 total VPCs were added into SQS:
    sqs = boto3.client("sqs", region_name="us-east-1")
    queue_url = get_queue_url(os.environ['POLLER_QUEUE_NAME'])
    messages = sqs.receive_message(QueueUrl=queue_url, MaxNumberOfMessages=10)['Messages']
    assert len(messages) == 2
def make_poller_events():
    """A sort-of fixture to make polling events for tests.

    Runs the poller tasker, drains its SQS queue, and returns the messages with
    the key casing the downstream record parsing expects.
    """
    from historical.vpc.poller import poller_tasker_handler as handler
    handler({}, None)

    # Ensure that all of the accounts and regions were properly tasked.
    # NOTE(review): the original comment said "only 1 region for S3" — this is the
    # VPC test module, so that looked copy-pasted from the S3 tests.
    sqs = boto3.client("sqs", region_name="us-east-1")
    queue_url = get_queue_url(os.environ['POLLER_TASKER_QUEUE_NAME'])
    messages = sqs.receive_message(QueueUrl=queue_url, MaxNumberOfMessages=10)['Messages']

    # SQS returns 'Body'; rename to 'body' for proper parsing later:
    for msg in messages:
        msg['body'] = msg.pop('Body')

    return messages
def test_poller_kinesis(historical_kinesis, historical_role, mock_lambda_environment, vpcs,
                        swag_accounts):
    """Test the Kinesis-backed poller: both fixture VPCs should land on the stream."""
    # Renamed from `test_poller`: this module defined two tests with that name, and
    # this later definition silently shadowed the SQS variant in pytest collection.
    from historical.vpc.poller import handler
    handler(None, None)

    shard_id = historical_kinesis.describe_stream(
        StreamName='historicalstream')['StreamDescription']['Shards'][0]['ShardId']
    iterator = historical_kinesis.get_shard_iterator(
        StreamName='historicalstream', ShardId=shard_id,
        ShardIteratorType='AT_SEQUENCE_NUMBER', StartingSequenceNumber='0')
    records = historical_kinesis.get_records(ShardIterator=iterator['ShardIterator'])
    assert len(records['Records']) == 2
def test_poller_processor_handler(historical_sqs, historical_role, mock_lambda_environment, vpcs,
                                  swag_accounts):
    """Test the Poller's processing component that tasks the collector."""
    from historical.vpc.poller import poller_processor_handler as handler

    # Build tasker events, wrap them as SQS records, and run the processor:
    tasker_messages = make_poller_events()
    wrapped = RecordsFactory(records=tasker_messages)
    event = json.loads(json.dumps(wrapped, default=serialize))
    handler(event, mock_lambda_environment)

    # Both VPCs should now be queued for the collector:
    client = boto3.client("sqs", region_name="us-east-1")
    url = get_queue_url(os.environ['POLLER_QUEUE_NAME'])
    received = client.receive_message(QueueUrl=url, MaxNumberOfMessages=10)['Messages']
    assert len(received) == 2
def test_differ(current_vpc_table, durable_vpc_table, mock_lambda_environment):
    """Test the differ: only genuine changes create new durable revisions.

    Sequence: INSERT creates revision 1; a MODIFY with identical data is ignored;
    each real configuration/name change creates a new revision; a TTL-driven
    REMOVE (DynamoDB service identity) records a deletion revision.
    """
    from historical.vpc.models import DurableVPCModel
    from historical.vpc.differ import handler
    from historical.models import TTL_EXPIRY

    ttl = int(time.time() + TTL_EXPIRY)

    def make_vpc(hour, minute):
        # A fresh (shallow) copy of the fixture VPC stamped with an event time.
        # NOTE: shallow copy means 'configuration' is shared with VPC, matching
        # the original test's mutation semantics.
        vpc = VPC.copy()
        vpc.pop("eventSource")
        vpc['eventTime'] = datetime(
            year=2017, month=5, day=12, hour=hour, minute=minute, second=0).isoformat() + 'Z'
        vpc['ttl'] = ttl
        return vpc

    def dispatch(record):
        # Serialize a DynamoDB stream record the way the differ receives it
        # (wrapped in SNS, then SQS) and invoke the handler.
        body = json.dumps(record, default=serialize)
        data = RecordsFactory(records=[
            SQSDataFactory(
                body=json.dumps(SnsDataFactory(Message=body), default=serialize))
        ])
        handler(json.loads(json.dumps(data, default=serialize)), None)

    # INSERT creates the first durable revision:
    new_vpc = make_vpc(10, 30)
    dispatch(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=new_vpc, Keys={'arn': new_vpc['arn']}), eventName='INSERT'))
    assert DurableVPCModel.count() == 1

    # Ensure no new record for the same data (later time, identical content):
    duplicate_vpc = make_vpc(11, 30)
    dispatch(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=duplicate_vpc, Keys={'arn': duplicate_vpc['arn']}), eventName='MODIFY'))
    assert DurableVPCModel.count() == 1

    # A real configuration change creates a new revision:
    updated_vpc = make_vpc(11, 30)
    updated_vpc['configuration']['State'] = 'changeme'
    dispatch(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=updated_vpc, Keys={'arn': VPC['arn']}), eventName='MODIFY'))
    assert DurableVPCModel.count() == 2

    # Another configuration change (earlier event time) also creates a revision:
    updated_vpc = make_vpc(9, 30)
    updated_vpc['configuration']['CidrBlock'] = 'changeme'
    dispatch(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=updated_vpc, Keys={'arn': VPC['arn']}), eventName='MODIFY'))
    assert DurableVPCModel.count() == 3

    # A top-level attribute change (Name) creates a revision too:
    updated_vpc = make_vpc(9, 31)
    updated_vpc.update({'Name': 'blah'})
    dispatch(DynamoDBRecordFactory(dynamodb=DynamoDBDataFactory(
        NewImage=updated_vpc, Keys={'arn': VPC['arn']}), eventName='MODIFY'))
    assert DurableVPCModel.count() == 4

    # A REMOVE driven by DynamoDB's TTL service identity records a deletion:
    deleted_vpc = make_vpc(12, 30)
    dispatch(DynamoDBRecordFactory(
        dynamodb=DynamoDBDataFactory(OldImage=deleted_vpc, Keys={'arn': VPC['arn']}),
        eventName='REMOVE',
        userIdentity=UserIdentityFactory(
            type='Service', principalId='dynamodb.amazonaws.com')))
    assert DurableVPCModel.count() == 5