def test_failed_connection_logged(caplog, tanczos_bucket):
    """A network failure during fetch is logged and nothing reaches S3."""
    with requests_mock.Mocker() as mocker:
        # Every outbound GET raises, simulating a dead upstream.
        mocker.get(requests_mock.ANY, exc=requests.exceptions.RequestException)
        function.lambda_handler(0, 0)
    # caplog is built into pytest and captures logs
    assert 'Fetch failed' in caplog.text
    # no file written to s3 bucket
    assert not list(tanczos_bucket.objects.all())
def test_failed_s3_interaction_logged(caplog, tanczos_bucket):
    """An S3 ClientError is logged as "AWS error" and no object is written."""
    # Read the fixture inside a context manager — the original leaked the
    # file handle by calling open(...).read() without closing it.
    with open('test_files/bom.csv') as fixture:
        function.download_data = MagicMock(return_value=fixture.read())
    # Make save_to_s3 raise a realistic botocore ClientError (403).
    response = {'Error': {'Code': '403', 'Message': 'Fake Exception'}}
    function.save_to_s3 = MagicMock(
        side_effect=botocore.exceptions.ClientError(response, 'get'))
    function.lambda_handler(0, 0)
    # Nothing should have been written to the bucket.
    assert len(list(tanczos_bucket.objects.all())) == 0
    assert "AWS error" in caplog.text
def test_lambda_handler(sqs_ingest_data_event):
    """Smoke test: the handler processes an SQS ingest event without raising."""
    # Setup — point the handler's SQS client at the localstack endpoint.
    localstack_session = localstack_client.session.Session()
    function.sqs_client = localstack_session.client('sqs')
    # Act
    function.lambda_handler(sqs_ingest_data_event, "")
    # Assert — reaching this line means no exception was raised.
    assert 1 == 1
def test_lambda_handler():
    """The handler returns HTTP 200 and the incremented visit count."""
    dynamodb = boto3.resource('dynamodb', 'us-east-1')
    # Create our mock DynamoDB table, keyed on the numeric 'Site' attribute.
    table = dynamodb.create_table(
        TableName=os.environ['TABLE_NAME'],
        KeySchema=[{'AttributeName': 'Site', 'KeyType': 'HASH'}],
        AttributeDefinitions=[{'AttributeName': 'Site', 'AttributeType': 'N'}],
        BillingMode='PAY_PER_REQUEST',
    )
    # Seed a single site row with zero visits.
    table.put_item(Item={'Site': 0, 'Visits': 0})
    # Call our lambda_handler and let it run against our mock AWS resources.
    # It should return this:
    # {'statusCode': 200,
    #  'body': 1,
    #  'headers': {'Content-Type': 'applications/json',
    #              'Access-Control-Allow-Origin': '*'}}
    result = lambda_handler("", "")
    assert result['statusCode'] == 200
    assert 'headers' in result
    assert result['body'] == 1
def test_success_adds_one_file(caplog, tanczos_bucket):
    """A successful fetch writes exactly one well-formed CSV to the bucket.

    Fixes: both open() calls in the original leaked file handles
    (open(...).read() with no close); dead commented-out mock removed.
    """
    with open('test_files/bom.csv') as fixture:
        fixture_body = fixture.read()
    with requests_mock.Mocker() as mocker:
        mocker.get(requests_mock.ANY, text=fixture_body)
        function.lambda_handler(0, 0)
    assert len(list(tanczos_bucket.objects.all())) == 1
    assert "Success" in caplog.text
    # UGH, getting the contents of a file is not easy
    s3 = boto3.resource('s3')
    for key in tanczos_bucket.objects.all():
        # YYYYMMDDTHHMMSS.csv is 19 characters
        assert len(key.key) == 19
        obj = s3.Object('tanczos-data', key.key)
        text = obj.get()['Body'].read().decode('utf-8')
        lines = text.split('\n')
        # last entry is empty because we split on \n
        lines.pop()
        headers = lines.pop(0).split(',')
        assert 'Name' in headers[0]
        assert 'timestamp' in headers[-1]
        # Every data row should have exactly 8 comma-separated fields.
        for line in lines:
            assert len(line.split(',')) == 8
def main():
    """Run the lambda handler locally under the TEST execution environment."""
    # Flag to the handler that it is running outside real AWS.
    os.environ['EXEC_ENV'] = 'TEST'
    function.lambda_handler({}, {})
from function import lambda_handler


# driver context
class MyContext:
    """Minimal stand-in for the AWS Lambda context object."""

    def __init__(self):
        self.invoked_function_arn = 'arn:development'


# lambda args event and context (gzipped, base64-encoded CloudWatch Logs
# payload; the two adjacent literals join into the original single string)
event = {
    "awslogs": {
        "data": "H4sIAAAAAAAAAHWPwQqCQBCGX0Xm7EFtK+smZBEUgXoLCdMhFtKV3akI8d0bLYmibvPPN3wz00CJxmQnTO41whwWQRIctmEcB6sQbFC3CjW3XW8kxpOpP+OC22d1Wml1qZkQGtoMsScxaczKN3"
                "plG8zlaHIta5KqWsozoTYw3/djzwhpLwivWFGHGpAFe7DL68JlBUk+l7KSN7tCOEJ4M3/qOI49vMHj+zCKdlFqLaU2ZHV2a4Ct/an0/ivdX8oYc1UVX860fQDQiMdxRQEAAA=="
    }
}
context = MyContext()

# invoke lambda function
try:
    lambda_handler(event, context)
except Exception:
    # Fix: the original bare ``except:`` also swallowed SystemExit and
    # KeyboardInterrupt; catch Exception and print the traceback instead.
    import traceback
    traceback.print_exc()
#!/usr/bin/env python3
"""Local driver: invoke the lambda handler once and print its result."""
import function

result = function.lambda_handler(0, 0)
print(result)
# print function.get_day_urls()
from function import lambda_handler

if __name__ == '__main__':
    # Exercise the 'testSecret' rotation step against the stored secret.
    rotation_event = {
        'Step': 'testSecret',
        'SecretId': 'arn:aws:secretsmanager:us-east-2:581361757134:secret:HomeNet-CoreSvc-Ameritrade-Secrets-okJ0H3',
        'ClientRequestToken': '6c160f55-3074-442b-b51b-fc2c0cd667f1',
    }
    result = lambda_handler(event=rotation_event, context={})