def test_run(self):
    """Run the handler with a building-type campaign event; skipped on Travis CI."""
    if os.environ.get('ON_TRAVIS', 'false') != 'false':
        return
    payload = {
        'campaign_uuid': 'dd7b3627dc6942dc9798ea094f8c680b',
        'type': 'Building',
    }
    lambda_handler(payload, {})
def test_logs(self, mock_urlopen):
    """A plain-text logs payload is forwarded once with the access token attached."""
    lambda_function.lambda_handler({'logs': 'logs'}, Mock())
    expected_body = b'{"access_token": "123", "logs": "logs"}'
    mock_urlopen.assert_called_once_with(ANY, data=expected_body)
def test_create():
    """Create a server via the handler, then verify the host answers ping.

    Raises:
        Exception: if the ping output does not contain a "Received = N," field.
    """
    lambda_handler({"action": "create", "app_name": "factorio"}, object())
    ip = controller.get_ip_address()
    status = check_output(["ping", ip]).decode()
    # BUG FIX: the original pattern r"Received = (\d)+?," repeated the GROUP,
    # so only the last digit was captured ("Received = 10," -> "0"), making
    # the received-count check wrong for multi-digit values. Capture all
    # digits inside the group instead.
    result_str = re.search(r"Received = (\d+),", status)
    if not result_str:
        raise Exception("Unexpected terminal response: " + status)
    received = int(result_str.groups()[0])
    assert received > 0
def test_download_document_error(boto3_client_mock, pg_connect_mock, db_config, event, caplog):
    """An S3 download failure is logged as a 'Download document' error."""
    s3_stub = MagicMock()
    s3_stub.download_file.side_effect = Exception("Download document error")
    boto3_client_mock.return_value = s3_stub
    lambda_handler(event, None)
    assert all_in(["ERROR", "Download document"], caplog.text)
def test_backup_rds_verification(self):
    """End-to-end run of the RDS backup-verification handler with all AWS calls stubbed."""
    # Stub every RDS API call the handler makes.
    lambda_function.rds_source.describe_db_snapshots = self.side_describe_db_snapshots
    lambda_function.rds.describe_db_instances = self.side_describe_db_instances
    lambda_function.rds.restore_db_instance_from_db_snapshot = self.side_restore_db_instance_from_db_snapshot
    lambda_function.rds.describe_events = self.side_describe_events
    lambda_function.rds.copy_db_snapshot = self.side_snapshot_copy
    lambda_function.rds.modify_db_instance = self.side_modify_db_instance
    # Stub account/AZ lookups.
    lambda_function.get_account_id.return_value = "123456789012"
    lambda_function.get_first_az.return_value = "us-west-2"
    # Stub event publication to Datadog and Jira.
    lambda_function.requests.post.return_value.content = "==== Publish event ===="
    # Create the SNS topic the handler notifies.
    sns = boto.sns.connect_to_region("us-west-2")
    sns.create_topic("hipchat-rds-mysql-db-check")
    # Allow automation on this instance.
    lambda_function.gatekeeper_allow_automation_instance_only.return_value = True
    # BUG FIX: the original was print('...').format(self.EVENT_ID), which calls
    # .format() on print()'s return value (None) and raises AttributeError.
    print('==== Start testing with an event id {0} ===='.format(self.EVENT_ID))
    lambda_function.lambda_handler(self.side_events(self.EVENT_ID), '')
def test_session_state(self):
    """A launch leaves the session open; a ListBreweries intent ends it."""
    launch_event = {
        "request": {"type": "LaunchRequest"},
        "session": {"new": True},
    }
    response = lambda_function.lambda_handler(event=launch_event, context=None)
    assert not response['response']['shouldEndSession']

    # Session is open, now ask for a list of breweries.
    list_breweries_event = {
        "request": {
            "type": "IntentRequest",
            "intent": {"name": "ListBreweries", "mocked": True},
        },
        "session": {"new": False},
    }
    response = lambda_function.lambda_handler(event=list_breweries_event, context=None)
    assert response['response']['shouldEndSession']
def test_get_home_brewery(self):
    """Test that we can set a home brewery and get it back"""
    set_home_event = {
        "request": {
            "type": "IntentRequest",
            "intent": {
                "name": "SetHomeBrewery",
                "mocked": True,
                "slots": {"brewery": {"value": "Village Idiot"}},
            },
        },
        "session": {"new": False, "user": {"userId": "valid_user_id"}},
    }
    response = lambda_function.lambda_handler(event=set_home_event, context=None)
    assert response is not None
    speech = response['response']['outputSpeech']
    assert speech['type'] == 'PlainText'
    assert speech['text'].startswith('Your home brewery has been set to')
    assert response['response']['shouldEndSession']

    # Now read it back.
    get_home_event = {
        "request": {
            "type": "IntentRequest",
            "intent": {"name": "GetHomeBrewery", "mocked": True},
        },
        "session": {"new": False, "user": {"userId": "valid_user_id"}},
    }
    response = lambda_function.lambda_handler(event=get_home_event, context=None)
    assert response is not None
    speech = response['response']['outputSpeech']
    assert speech['type'] == 'PlainText'
    assert 'Village Idiot' in speech['text']
    assert response['response']['shouldEndSession']
def main():
    """Load the sample S3 event from disk and invoke the handler with it."""
    with open('events/s3.json', 'r', encoding='utf8') as f:
        event = json.load(f)
    lambda_function.lambda_handler(event, None)
def test_lambda():
    """Smoke-test the handler with a UI notification event."""
    payload = {
        "notification_type": "ui",
        "test_id": "d3caeeb7-32bd-4b4b-824d-702708a7116c",
        "report_id": 176,
    }
    lambda_handler(payload, {})
def test_create(create_event, context, monkeypatch):
    """Tests resource creation."""
    opener_factory = Mock(return_value=Mock())
    monkeypatch.setattr("urllib2.build_opener", opener_factory)
    # Don't sleep!
    monkeypatch.setattr("time.sleep", lambda _t: None)
    lambda_function.lambda_handler(create_event, context)
def test_non_existing_product_error(boto3_client_mock, os_remove_mock, put_partner_in_db_ids, success_event, rds, caplog):
    """A missing product fails the import and stores no infringements."""
    lambda_handler(success_event, None)
    expected_markers = ["ERROR", "Put record in database", "IMPORT_FAILURE"]
    assert all_in(expected_markers, caplog.text)
    assert get_dummy_infringement_count(rds) == 0
def test_import_document_success(boto3_client_mock, os_remove_mock, put_partner_in_db_ids, success_event, rds, caplog):
    """A clean import logs only success markers and stores both products."""
    lambda_handler(success_event, None)
    assert all_not_in(["CRITICAL", "ERROR", "WARNING"], caplog.text)
    assert all_in(["INFO", "SUCCESS", "IMPORT_SUCCESS"], caplog.text)
    assert get_dummy_product_count(rds) == 2
def test_dispatch_wrong_appointment(self):
    """An unrecognized intent name must make the handler raise."""
    # GIVEN an event whose intent name matches no dispatcher entry
    event = self.fetch_event()
    event['currentIntent']['name'] = 'WrongName'
    # WHEN / THEN
    with self.assertRaises(Exception):
        lf.lambda_handler(event, {})
def test_lambda_function_dummy(self):
    """ Really run locally """
    with open('test_event.json', 'r') as test_event:
        payload = json.load(test_event)
    lambda_handler(payload, None)
def test_notification(self):
    """The handler accepts a batch of domain/size records."""
    records = [
        {'domain': 'fb', 'size': 2000},
        {'domain': '9gag', 'size': 40000},
    ]
    lambda_function.lambda_handler(records, {})
def test_process_request_success(boto3_client_mock, pg_connect_mock, os_remove_mock, db_config, success_event, caplog):
    """A successful import commits and closes the DB connection exactly once."""
    db_stub = MagicMock()
    pg_connect_mock.return_value = db_stub
    lambda_handler(success_event, None)
    assert all_in(["INFO", "SUCCESS", "IMPORT_SUCCESS"], caplog.text)
    db_stub.commit.assert_called()
    db_stub.close.assert_called_once()
def test_parse_empty_document_error(boto3_client_mock, pg_connect_mock, os_remove_mock, db_config, empty_document_event, caplog):
    """An empty document is reported as a warning, not a hard failure."""
    lambda_handler(empty_document_event, None)
    warning_markers = ["WARNING", "Empty document", "EMPTY_DOCUMENT"]
    assert all_in(warning_markers, caplog.text)
def test_base64_bz2_logs(self, mock_urlopen):
    """A bz2-compressed, base64-encoded payload is decoded before forwarding."""
    encoded_logs = base64.b64encode(bz2.compress('logs'.encode())).decode()
    lambda_function.lambda_handler({'base64_bz2_logs': encoded_logs}, Mock())
    mock_urlopen.assert_called_once_with(
        ANY,
        data=b'{"access_token": "123", "logs": "logs"}',
    )
def test_process_request_with_invalid_records_below_threshold_success(boto3_client_mock, pg_connect_mock, os_remove_mock, db_config, invalid_records_below_threshold_event, caplog):
    """Few enough invalid records: the import succeeds with a warning."""
    lambda_handler(invalid_records_below_threshold_event, None)
    markers = ["WARNING", "SUCCESS", "FAILED_RECORDS_BELOW_THRESHOLD"]
    assert all_in(markers, caplog.text)
def test_validate_record_error(boto3_client_mock, pg_connect_mock, os_remove_mock, db_config, invalid_records_above_threshold_event, caplog):
    """Record-validation failures are logged as errors."""
    lambda_handler(invalid_records_above_threshold_event, None)
    assert all_in(["ERROR", "Validate record"], caplog.text)
def test_put_record_in_db_error(boto3_client_mock, pg_connect_mock, os_remove_mock, db_config, success_event, caplog):
    """A DB insert failure rolls back and still closes the connection."""
    db_stub = MagicMock()
    db_stub.cursor.side_effect = Exception("Put record in database error")
    pg_connect_mock.return_value = db_stub
    lambda_handler(success_event, None)
    assert all_in(["ERROR", "Put record in database"], caplog.text)
    db_stub.rollback.assert_called()
    db_stub.close.assert_called_once()
def test_process_request_with_invalid_records_above_threshold_error(boto3_client_mock, pg_connect_mock, os_remove_mock, db_config, invalid_records_above_threshold_event, caplog):
    """Too many invalid records fail the whole import."""
    lambda_handler(invalid_records_above_threshold_event, None)
    assert all_in(["ERROR", "IMPORT_FAILURE"], caplog.text)
def test_parse_non_existing_document_error(boto3_client_mock, pg_connect_mock, os_remove_mock, db_config, non_existing_document_event, caplog):
    """A missing local file surfaces as a 'Parse document' error."""
    lambda_handler(non_existing_document_event, None)
    markers = ["ERROR", "Parse document", "No such file or directory"]
    assert all_in(markers, caplog.text)
def test_run(self):
    """Run the handler with a date-ranged feature query; skipped on Travis CI."""
    if os.environ.get('ON_TRAVIS', 'false') != 'false':
        return
    payload = {
        'campaign_uuid': '76674eefa4524a6fbc20e147fe01f1fd',
        'feature': 'building=yes',
        'date': {'from': '2018-01-01', 'to': '2019-12-31'},
    }
    lambda_handler(payload, {})
def test_move_document_to_processed_error(boto3_client_mock, pg_connect_mock, os_remove_mock, db_config, success_event, caplog):
    """A failed copy to 'processed' is logged, but the import still succeeds."""
    s3_stub = MagicMock()
    s3_stub.copy_object.side_effect = Exception("Move document to processed error")
    boto3_client_mock.return_value = s3_stub
    lambda_handler(success_event, None)
    markers = ["ERROR", "Move document to processed", "INFO", "SUCCESS", "IMPORT_SUCCESS"]
    assert all_in(markers, caplog.text)
def test_terraform_upgrade_by_hashicorp_release(self):
    """A new Hashicorp release bumps the Dockerfile version once; a re-run
    with the same release leaves the stores untouched.

    Exercises two consecutive releases (vXX.YY.ZZ -> Dockerfile "2",
    vAA.BB.CC -> Dockerfile "3"), comparing the S3 store, the GitHub store,
    and the expected rendered store after each run.
    """
    self.setup_dockerfile_test_repo(self.clone_dir, "test-terraform-upgrade")
    initial_values = {
        "TF_VERSION": "v0.11.8",
        "DOCKERFILE_VERSION": "1",
        "PK_VERSION": "v1.3.2",
    }
    self.mngr.write_object(self.internal_store_path,
                           JSON_CONTENT_WITHOUT_FORCE_TEMPLATE.format(**initial_values))
    try:
        with mock.patch.dict("os.environ", self.environ):
            dockerfile_repo_name = os.environ.get("dockerfile_github_repository")
            github_access_token = os.environ.get("github_access_token")
            dockerfile_repo = GitHubRepository(dockerfile_repo_name, github_access_token)
            repo_basename = os.path.basename(dockerfile_repo_name)

            def run_with_release(release_ver):
                # Run the handler with both Hashicorp version lookups stubbed
                # to the same release string.
                with mock.patch("lambda_function.get_latest_hashicorp_terraform_version",
                                return_value=release_ver):
                    with mock.patch("lambda_function.get_latest_hashicorp_packer_version",
                                    return_value=release_ver):
                        return lambda_function.lambda_handler(None, None)

            def read_stores():
                # Re-read the store from S3 and from the GitHub repo.
                store_s3 = Store(self.mngr.read_object(self.internal_store_path),
                                 dockerfile_repo_name=dockerfile_repo_name)
                store_github = Store(dockerfile_repo.get_file_content(self.internal_store_path),
                                     dockerfile_repo_name=dockerfile_repo_name)
                return store_s3, store_github

            for release_ver, dockerfile_ver in (("vXX.YY.ZZ", "2"), ("vAA.BB.CC", "3")):
                retcode = run_with_release(release_ver)
                self.assertEqual(retcode, 0)
                expected_values = {
                    "TF_VERSION": release_ver,
                    "DOCKERFILE_VERSION": dockerfile_ver,
                    "PK_VERSION": release_ver,
                }
                store_expected = Store(
                    JSON_CONTENT_WITHOUT_FORCE_TEMPLATE.format(**expected_values),
                    dockerfile_repo_name=dockerfile_repo_name)
                store_s3, store_github = read_stores()
                self.assertTrue(store_github.equals(store_s3))
                self.assertTrue(store_github.equals(store_expected))
                # BUG FIX: the original used assertTrue(version(...), expected),
                # where the second argument is only the failure MESSAGE — the
                # version was never actually compared. assertEqual performs the
                # intended comparison.
                self.assertEqual(store_github.version(repo_basename), dockerfile_ver)

                # Run the lambda function again with the same release: the
                # version and Dockerfile must not change.
                run_with_release(release_ver)
                store_s3, store_github = read_stores()
                self.assertEqual(store_github.version(repo_basename), dockerfile_ver)
                self.assertTrue(store_github.equals(store_expected))
    except lambda_function.LambdaException as e:
        self.fail(str(e))
def test_import_document_error(boto3_client_mock, os_remove_mock, put_partner_in_db_ids, invalid_records_above_threshold_event, rds, caplog):
    """A failed import leaves the previously stored products untouched."""
    lambda_handler(invalid_records_above_threshold_event, None)
    assert all_in(["ERROR", "IMPORT_FAILURE"], caplog.text)
    assert get_dummy_product_count(rds) == 2
def test_parse_invalid_document_error(boto3_client_mock, pg_connect_mock, os_remove_mock, db_config, invalid_document_event, caplog):
    """Malformed JSON in a record fails the import with a parse error."""
    lambda_handler(invalid_document_event, None)
    markers = ["ERROR", "Parse record", "Expecting value", "IMPORT_FAILURE"]
    assert all_in(markers, caplog.text)
def test_intent():
    """Drive the handler with a captured IntentActivity request payload."""
    app_id = "amzn1.ask.skill.78652fba-466f-4a54-b5c0-ea7930de5788"
    user_id = (
        "amzn1.ask.account.AESP3HPYTOIQHOFKO4XBHIIC74XJTON52GCQNBQR2ARB6XFIX5M"
        "KFNQ2QKBOLQTUYGXTM476RL4F7WKPYCMIJGGFT2QD6PNDSEHVIZN4KRLB5U2X7AEZM66C"
        "WD7YN2YJEMQBU6IJDFHMF6RQ44V5EGIWI62Y2N3IVFI3O7QNMEKAMGTBV5XJLKDKBYMN2"
        "XZF6UHHGKHQL45M2IY"
    )
    test_event = {
        "session": {
            "new": True,
            "sessionId": "SessionId.323e732c-a975-49b3-b796-82a5709b0c31",
            "application": {"applicationId": app_id},
            "attributes": {},
            "user": {"userId": user_id},
        },
        "request": {
            "type": "IntentRequest",
            "requestId": "EdwRequestId.41fae6de-d61f-488c-936e-18f5abad6a0e",
            "intent": {
                "name": "IntentActivity",
                "slots": {
                    "activityname": {"name": "activityname", "value": "raid"},
                },
            },
            "locale": "en-US",
            "timestamp": "2017-09-15T06:05:20Z",
        },
        "context": {
            "AudioPlayer": {"playerActivity": "IDLE"},
            "System": {
                "application": {"applicationId": app_id},
                "user": {"userId": user_id},
                "device": {"supportedInterfaces": {}},
            },
        },
        "version": "1.0",
    }
    lambda_handler(test_event, None)
def test_lambda_handler(self):
    """Content negotiation rewrites pack URIs to .br, then .gz, when the
    client accepts those encodings; media files are never rewritten."""
    cases = [
        ('gzip, deflate, br', '/packs/application.0332f160.js', '/packs/application.0332f160.js.br'),
        ('gzip', '/packs/application.0332f160.js', '/packs/application.0332f160.js.gz'),
        ('deflate', '/packs/application.0332f160.js', '/packs/application.0332f160.js'),
        ('gzip, br', '/packs/css/foobar.css', '/packs/css/foobar.css.br'),
        ('gzip', '/packs/css/foobar.css', '/packs/css/foobar.css.gz'),
        ('br', '/packs/css/foobar.css', '/packs/css/foobar.css.br'),
        ('deflate', '/packs/css/foobar.css', '/packs/css/foobar.css'),
        ('gzip', '/packs/media/foobar.svg', '/packs/media/foobar.svg'),
        ('br', '/packs/media/foobar.svg', '/packs/media/foobar.svg'),
        ('deflate', '/packs/media/foobar.svg', '/packs/media/foobar.svg'),
    ]
    for accept_encoding, uri, expected_uri in cases:
        event = self.cloudfront_event(accept_encoding=accept_encoding, uri=uri)
        self.assertEqual(lambda_handler(event, None)['uri'], expected_uri)
def test_response(self):
    """Each country gets a 200 JSON response containing its greeting."""
    print("testing response.")
    for country in ('USA', 'INDIA'):
        event = {'Country': country}
        result = lambda_function.lambda_handler(event, None)
        print(result)
        self.assertEqual(result['statusCode'], 200)
        self.assertEqual(result['headers']['Content-Type'], 'application/json')
        self.assertIn('Hello from ' + event['Country'], result['body'])
# Captured Messenger postback message used to exercise the handler locally.
# NOTE(review): sender/recipient ids look like placeholders — confirm before
# reusing against a real page.
MSG = {
    u'timestamp': 1460870880139,
    u'postback': {
        u'payload': u'NEXT_USER'
    },
    u'recipient': {
        u'id': 12345
    },
    u'sender': {
        u'id': 12345
    }
}

# Page-scoped webhook envelope wrapping MSG, shaped like a Facebook POST body.
EVENT = {
    'body': {
        "object": "page",
        "entry": [
            {
                "id": "page_id",
                "time": 1457764198246,
                "messaging": [
                    MSG
                ],
            }
        ]
    }
}

# The handler ignores the context here, so None suffices.
CONTEXT = None

if __name__ == '__main__':
    # Print the handler's response for quick manual inspection.
    print('Response:', lambda_function.lambda_handler(EVENT, CONTEXT))
"site_admin": false } } """ for search, replace in { 'pr_id': str(pr_id), 'pr_author': pr_author, 'repo_owner': repo_owner, 'repo': repo, 'base_branch': base_branch, 'head_branch': head_branch, 'head_sha': head_sha, }.items(): github_event = github_event.replace('{' + search + '}', replace) sns_wrapper = { 'Records': [{ 'Sns': { 'Message': github_event, 'MessageAttributes': { 'X-Github-Event': { 'Value': 'pull_request' } } } }] } lambda_function.lambda_handler(sns_wrapper, None, debug=True)
def alexa_skills_kit_requests():
    """Bridge an incoming HTTP request body straight to the Alexa handler."""
    body = request.get_json()
    return lambda_handler(body)
#!/usr/bin/env python
"""Minimal local driver: invoke the handler once with a tiny fixed event."""
from __future__ import print_function

import lambda_function

if __name__ == '__main__':
    # Trivial event and dummy context for a smoke run.
    event = {'A': 1}
    context = 1
    lambda_function.lambda_handler(event, context)
''' Script for testing out the response for any given request ''' from __future__ import print_function from lambda_function import lambda_handler import json import sys from argparse import ArgumentParser if __name__ == '__main__': parser = ArgumentParser() parser.add_argument('-i', '--input_json', required=True) args = parser.parse_args() request_obj = json.load(open(args.input_json)) print ('Request JSON') print (json.dumps(request_obj, indent=2)) response = lambda_handler(request_obj) print ('Response JSON') print (json.dumps(response, indent=2))