def test_update_request_status_for_job_error(self):
    """
    Tests updating an inprogress job to an 'error' status
    """
    self.create_test_requests()
    boto3.client = Mock()
    mock_ssm_get_parameter(3)
    request_id = REQUEST_ID8
    row = requests_db.get_job_by_request_id(request_id)
    self.assertEqual("inprogress", row[0]["job_status"])
    print_rows("begin")
    utc_now_exp = "2019-07-31 21:07:15.234362+00:00"
    requests_db.get_utc_now_iso = Mock(return_value=utc_now_exp)
    job_status = "error"
    err_msg = "oh no an error"
    try:
        result = requests_db.update_request_status_for_job(
            request_id, job_status, err_msg)
        print_rows("end")
        self.assertEqual([], result)
        row = requests_db.get_job_by_request_id(request_id)
        self.assertEqual(job_status, row[0]["job_status"])
        self.assertEqual(err_msg, row[0]["err_msg"])
        self.assertIn(utc_now_exp, row[0]["last_update_time"])
    except requests_db.DatabaseError as err:
        self.fail(f"update_request_status_for_job. {str(err)}")
def test_handler_db_update_err(self):
    """
    Test copy lambda with error updating db.
    """
    boto3.client = Mock()
    s3_cli = boto3.client('s3')
    s3_cli.copy_object = Mock(side_effect=[None])
    exp_request_ids = [REQUEST_ID7]
    _, exp_result = create_select_requests(exp_request_ids)
    time.sleep = Mock(side_effect=None)
    exp_err = 'Database Error. Internal database error, please contact LP DAAC User Services'
    # First call returns the select result; the status update attempts raise DatabaseError.
    database.single_query = Mock(side_effect=[
        exp_result,
        requests_db.DatabaseError(exp_err),
        requests_db.DatabaseError(exp_err)
    ])
    mock_ssm_get_parameter(3)
    result = copy_files_to_archive.handler(self.handler_input_event, None)
    exp_result = [{
        'success': True,
        'source_bucket': 'my-dr-fake-glacier-bucket',
        'source_key': self.exp_file_key1,
        'request_id': REQUEST_ID7,
        'target_bucket': PROTECTED_BUCKET,
        'err_msg': ''
    }]
    self.assertEqual(exp_result, result)
def test_task_no_retries_env_var(self):
    """
    Test environment var RESTORE_REQUEST_RETRIES not set - use default.
    """
    del os.environ['RESTORE_REQUEST_RETRIES']
    exp_event = {}
    granule_id = "MOD09GQ.A0219114.N5aUCG.006.0656338553321"
    exp_event["input"] = {
        "granules": [{
            "granuleId": granule_id,
            "keys": [KEY1]
        }]
    }
    exp_event["config"] = {"glacier-bucket": "some_bucket"}
    boto3.client = Mock()
    s3_cli = boto3.client('s3')
    s3_cli.head_object = Mock()
    s3_cli.restore_object = Mock(side_effect=[None])
    CumulusLogger.info = Mock()
    requests_db.request_id_generator = Mock(return_value=REQUEST_ID1)
    exp_gran = {}
    exp_gran['granuleId'] = granule_id
    exp_files = []
    exp_file = {}
    exp_file['key'] = FILE1
    exp_file['dest_bucket'] = PROTECTED_BUCKET
    exp_file['success'] = True
    exp_file['err_msg'] = ''
    exp_files.append(exp_file)
    exp_gran['files'] = exp_files
    qresult_1_inprogress, _ = create_insert_request(
        REQUEST_ID1, REQUEST_GROUP_ID_EXP_1, granule_id, FILE1, "restore",
        "some_bucket", "inprogress", UTC_NOW_EXP_1, None, None)
    database.single_query = Mock(side_effect=[qresult_1_inprogress])
    mock_ssm_get_parameter(1)
    try:
        result = request_files.task(exp_event, self.context)
        os.environ['RESTORE_REQUEST_RETRIES'] = '3'
        self.assertEqual(exp_gran, result)
        boto3.client.assert_called_with('ssm')
        s3_cli.head_object.assert_called_with(Bucket='some_bucket',
                                              Key=FILE1)
        restore_req_exp = {
            'Days': 5,
            'GlacierJobParameters': {
                'Tier': 'Standard'
            }
        }
        s3_cli.restore_object.assert_called_with(
            Bucket='some_bucket',
            Key=FILE1,
            RestoreRequest=restore_req_exp)
        database.single_query.assert_called_once()
    except request_files.RestoreRequestError as err:
        os.environ['RESTORE_REQUEST_RETRIES'] = '3'
        self.fail(str(err))
def test_handler_db_read_err(self):
    """
    Test copy lambda with error reading db.
    """
    boto3.client = Mock()
    s3_cli = boto3.client('s3')
    s3_cli.copy_object = Mock(side_effect=[None])
    exp_request_ids = [REQUEST_ID7]
    _, exp_result = create_select_requests(exp_request_ids)
    time.sleep = Mock(side_effect=None)
    exp_err = 'Database Error. Internal database error, please contact LP DAAC User Services'
    database.single_query = Mock(side_effect=[
        requests_db.DatabaseError(exp_err),
        requests_db.DatabaseError(exp_err)
    ])
    mock_ssm_get_parameter(2)
    try:
        copy_files_to_archive.handler(self.handler_input_event, None)
    except copy_files_to_archive.CopyRequestError as err:
        exp_result = [{
            'success': False,
            'source_bucket': 'my-dr-fake-glacier-bucket',
            'source_key': self.exp_file_key1
        }]
        exp_err = f"File copy failed. {exp_result}"
        self.assertEqual(exp_err, str(err))
def test_get_jobs_by_status_exceptions(self):
    """
    Tests getting a BadRequestError and a DatabaseError reading jobs by status
    """
    mock_ssm_get_parameter(2)
    database.single_query = Mock(side_effect=[requests_db.BadRequestError(
        'A status must be provided')])
    status = None
    try:
        requests_db.get_jobs_by_status(status)
        self.fail("expected BadRequestError")
    except requests_db.BadRequestError as err:
        self.assertEqual('A status must be provided', str(err))
    status = "error"
    err_msg = 'Database Error. could not connect to server'
    database.single_query = Mock(side_effect=[DbError(err_msg)])
    os.environ["DATABASE_HOST"] = "unknown.cr.usgs.gov"
    try:
        requests_db.get_jobs_by_status(status)
        self.fail("expected DatabaseError")
    except requests_db.DatabaseError as err:
        database.single_query.assert_called_once()
        self.assertEqual(err_msg, str(err))
def test_get_all_requests(self):
    """
    Tests reading all requests
    """
    exp_request_ids = [REQUEST_ID1, REQUEST_ID2, REQUEST_ID3, REQUEST_ID4,
                       REQUEST_ID5, REQUEST_ID6, REQUEST_ID7, REQUEST_ID8,
                       REQUEST_ID9, REQUEST_ID10, REQUEST_ID11]
    qresult, exp_result = create_select_requests(exp_request_ids)
    mock_ssm_get_parameter(1)
    database.single_query = Mock(side_effect=[qresult])
    expected = result_to_json(exp_result)
    result = requests_db.get_all_requests()
    database.single_query.assert_called_once()
    self.assertEqual(expected, result)

    mock_ssm_get_parameter(1)
    err_msg = 'Database Error. could not connect to server'
    database.single_query = Mock(side_effect=[DbError(err_msg)])
    try:
        requests_db.get_all_requests()
        self.fail("expected DatabaseError")
    except requests_db.DatabaseError as err:
        database.single_query.assert_called_once()
        self.assertEqual(err_msg, str(err))
def test_submit_request_error_status(self):
    """
    Tests that an error job is written to the db
    """
    utc_now_exp = UTC_NOW_EXP_4
    requests_db.get_utc_now_iso = Mock(return_value=utc_now_exp)
    requests_db.request_id_generator = Mock(side_effect=[REQUEST_ID4])
    data = {}
    data["request_id"] = REQUEST_ID4
    data["err_msg"] = "Error message goes here"
    data["request_group_id"] = REQUEST_GROUP_ID_EXP_2
    data["granule_id"] = "granule_4"
    data["object_key"] = "objectkey_4"
    data["job_type"] = "restore"
    data["job_status"] = "error"
    data["request_time"] = utc_now_exp
    qresult, exp_result = create_insert_request(
        REQUEST_ID4, data["request_group_id"], data["granule_id"],
        data["object_key"], data["job_type"], None, None,
        data["job_status"], data["request_time"], None, data["err_msg"])
    database.single_query = Mock(side_effect=[qresult, exp_result, None, None])
    mock_ssm_get_parameter(4)
    try:
        requests_db.submit_request(data)
        database.single_query.assert_called_once()
    except requests_db.DatabaseError as err:
        self.fail(f"submit_request. {str(err)}")
    try:
        result = requests_db.get_job_by_request_id(REQUEST_ID4)
        expected = result_to_json(exp_result)
        self.assertEqual(expected, result)
    except requests_db.DatabaseError as err:
        self.fail(f"get_job_by_request_id. {str(err)}")
def test_handler_one_file_success(self):
    """
    Test copy lambda with one file, expecting successful result.
    """
    del os.environ['COPY_RETRY_SLEEP_SECS']
    del os.environ['COPY_RETRIES']
    boto3.client = Mock()
    s3_cli = boto3.client('s3')
    s3_cli.copy_object = Mock(side_effect=[None])
    exp_upd_result = []
    exp_request_ids = [REQUEST_ID7]
    _, exp_result = create_select_requests(exp_request_ids)
    database.single_query = Mock(side_effect=[exp_result, exp_upd_result])
    mock_ssm_get_parameter(2)
    result = copy_files_to_archive.handler(self.handler_input_event, None)
    os.environ['COPY_RETRIES'] = '2'
    os.environ['COPY_RETRY_SLEEP_SECS'] = '1'
    boto3.client.assert_called_with('ssm')
    s3_cli.copy_object.assert_called_with(Bucket=self.exp_target_bucket,
                                          CopySource={
                                              'Bucket': self.exp_src_bucket,
                                              'Key': self.exp_file_key1
                                          },
                                          Key=self.exp_file_key1)
    exp_result = [{
        "success": True,
        "source_bucket": self.exp_src_bucket,
        "source_key": self.exp_file_key1,
        "request_id": REQUEST_ID7,
        "target_bucket": self.exp_target_bucket,
        "err_msg": ""
    }]
    self.assertEqual(exp_result, result)
    database.single_query.assert_called()
def test_submit_request_error_status(self):
    """
    Tests that an error job is written to the db
    """
    self.create_test_requests()
    boto3.client = Mock()
    mock_ssm_get_parameter(2)
    utc_now_exp = "2019-07-31 18:05:19.161362+00:00"
    requests_db.get_utc_now_iso = Mock(return_value=utc_now_exp)
    requests_db.request_id_generator = Mock(side_effect=[REQUEST_ID12])
    data = {}
    data["request_id"] = REQUEST_ID12
    data["request_group_id"] = REQUEST_GROUP_ID_EXP_1
    data["granule_id"] = "granule_1"
    data["object_key"] = "thisisanobjectkey"
    data["job_type"] = "restore"
    data["restore_bucket_dest"] = "my_s3_bucket"
    data["archive_bucket_dest"] = PROTECTED_BUCKET
    data["job_status"] = "error"
    data["request_time"] = utc_now_exp
    data["err_msg"] = "restore request error message here"
    try:
        requests_db.submit_request(data)
    except requests_db.DatabaseError as err:
        self.fail(f"submit_request. {str(err)}")
    try:
        result = requests_db.get_job_by_request_id(REQUEST_ID12)
        data["last_update_time"] = utc_now_exp
        self.assertEqual(data, result[0])
    except requests_db.DatabaseError as err:
        self.fail(f"get_job_by_request_id. {str(err)}")
def test_update_request_status_for_job_exceptions(self):
    """
    Tests the exceptions raised when updating a job's status
    """
    utc_now_exp = "2019-07-31 21:07:15.234362+00:00"
    requests_db.get_utc_now_iso = Mock(return_value=utc_now_exp)
    request_id = REQUEST_ID3
    job_status = "inprogress"
    exp_err = 'A new status must be provided'
    try:
        requests_db.update_request_status_for_job(request_id, None)
        self.fail("expected BadRequestError")
    except requests_db.BadRequestError as err:
        self.assertEqual(exp_err, str(err))
    exp_err = 'No request_id provided'
    try:
        requests_db.update_request_status_for_job(None, job_status)
        self.fail("expected BadRequestError")
    except requests_db.BadRequestError as err:
        self.assertEqual(exp_err, str(err))
    exp_err = 'Database Error. Internal database error, please contact LP DAAC User Services'
    database.single_query = Mock(side_effect=[DbError(exp_err)])
    mock_ssm_get_parameter(1)
    try:
        requests_db.update_request_status_for_job(request_id, job_status)
        self.fail("expected DatabaseError")
    except requests_db.DatabaseError as err:
        self.assertEqual(exp_err, str(err))
        database.single_query.assert_called_once()
def test_get_jobs_by_request_group_id(self):
    """
    Tests reading jobs by request_group_id
    """
    mock_ssm_get_parameter(2)
    exp_request_ids = [REQUEST_ID5, REQUEST_ID6]
    _, exp_result = create_select_requests(exp_request_ids)
    database.single_query = Mock(side_effect=[exp_result])
    expected = result_to_json(exp_result)
    try:
        result = requests_db.get_jobs_by_request_group_id(None)
        self.fail("expected BadRequestError")
    except requests_db.BadRequestError as err:
        self.assertEqual("A request_group_id must be provided", str(err))
    try:
        result = requests_db.get_jobs_by_request_group_id(REQUEST_GROUP_ID_EXP_3)
    except requests_db.BadRequestError as err:
        self.fail(str(err))
    self.assertEqual(expected, result)
    database.single_query.assert_called_once()
    database.single_query = Mock(side_effect=[DbError("database error")])
    try:
        result = requests_db.get_jobs_by_request_group_id(REQUEST_GROUP_ID_EXP_3)
        self.fail("expected DatabaseError")
    except requests_db.DatabaseError as err:
        self.assertEqual("database error", str(err))
def test_handler_one_file_retry2_success(self):
    """
    Test copy lambda with one failed copy attempt, retry successful.
    """
    del os.environ['COPY_RETRY_SLEEP_SECS']
    del os.environ['COPY_RETRIES']
    time.sleep(1)
    boto3.client = Mock()
    s3_cli = boto3.client('s3')
    s3_cli.copy_object = Mock(side_effect=[
        ClientError({'Error': {'Code': 'AccessDenied'}}, 'copy_object'),
        None
    ])
    self.create_test_requests()
    mock_ssm_get_parameter(6)
    print_rows("begin")
    row = requests_db.get_job_by_request_id(REQUEST_ID3)
    self.assertEqual("inprogress", row[0]['job_status'])
    result = copy_files_to_archive.handler(self.handler_input_event, None)
    os.environ['COPY_RETRIES'] = '2'
    os.environ['COPY_RETRY_SLEEP_SECS'] = '1'
    exp_result = [{
        "success": True,
        "source_bucket": self.exp_src_bucket,
        "source_key": self.exp_file_key1,
        "request_id": REQUEST_ID3,
        "target_bucket": self.exp_target_bucket,
        "err_msg": ""
    }]
    self.assertEqual(exp_result, result)
    row = requests_db.get_job_by_request_id(REQUEST_ID3)
    self.assertEqual("complete", row[0]['job_status'])
    print_rows("end")
def test_submit_request_bad_status(self):
    """
    Tests adding a job with an invalid status
    """
    utc_now_exp = "2019-07-31 18:05:19.161362+00:00"
    requests_db.get_utc_now_iso = Mock(return_value=utc_now_exp)
    requests_db.request_id_generator = Mock(side_effect=[REQUEST_ID1])
    data = {}
    data["request_id"] = REQUEST_ID1
    data["request_group_id"] = REQUEST_GROUP_ID_EXP_1
    data["granule_id"] = "granule_1"
    data["object_key"] = "thisisanobjectkey"
    data["job_type"] = "restore"
    data["restore_bucket_dest"] = "my_s3_bucket"
    data["job_status"] = "invalid"
    data["last_update_time"] = utc_now_exp
    mock_err = ('Database Error. new row for relation "request_status" violates '
                'check constraint "request_status_job_status_check" '
                f'DETAIL: Failing row contains (1306, {REQUEST_GROUP_ID_EXP_1}, '
                'granule_1, thisisanobjectkey, restore, my_s3_bucket, invalid, '
                '2019-07-31 18:05:19.161362+00, 2019-07-31 18:05:19.161362+00, null).')
    exp_err = ('new row for relation "request_status" violates check constraint '
               '"request_status_job_status_check"')
    mock_ssm_get_parameter(1)
    database.single_query = Mock(side_effect=[requests_db.DatabaseError(mock_err)])
    try:
        requests_db.submit_request(data)
        self.fail("expected DatabaseError")
    except requests_db.DatabaseError as err:
        self.assertIn(exp_err, str(err))
        database.single_query.assert_called_once()
def test_handler_one_file_success(self):
    """
    Test copy lambda with one file, expecting successful result.
    """
    del os.environ['COPY_RETRY_SLEEP_SECS']
    del os.environ['COPY_RETRIES']
    boto3.client = Mock()
    s3_cli = boto3.client('s3')
    s3_cli.copy_object = Mock(side_effect=[None])
    self.create_test_requests()
    mock_ssm_get_parameter(6)
    print_rows("begin")
    row = requests_db.get_job_by_request_id(REQUEST_ID3)
    self.assertEqual("inprogress", row[0]['job_status'])
    result = copy_files_to_archive.handler(self.handler_input_event, None)
    os.environ['COPY_RETRIES'] = '2'
    os.environ['COPY_RETRY_SLEEP_SECS'] = '1'
    boto3.client.assert_called_with('ssm')
    exp_result = [{
        "success": True,
        "source_bucket": self.exp_src_bucket,
        "source_key": self.exp_file_key1,
        "request_id": REQUEST_ID3,
        "target_bucket": self.exp_target_bucket,
        "err_msg": ""
    }]
    self.assertEqual(exp_result, result)
    print_rows("end")
    row = requests_db.get_job_by_request_id(REQUEST_ID3)
    self.assertEqual("complete", row[0]['job_status'])
def test_find_job_in_db(self):
    """
    Test reading a job from the db when no matching job is found.
    """
    key = "nofilefound"
    boto3.client = Mock()
    mock_ssm_get_parameter(1)
    job = copy_files_to_archive.find_job_in_db(key)
    self.assertEqual(None, job)
def test_handler_two_records_one_fail_one_success(self):
    """
    Test copy lambda with two files, one successful copy, one failed copy.
    """
    exp_file_key = 'dr-glacier/MOD09GQ.A0219114.N5aUCG.006.0656338553321.hdf'
    exp_file2_key = 'dr-glacier/MOD09GQ.A0219114.N5aUCG.006.0656338553321.txt'
    exp_rec_2 = create_copy_event2()
    self.handler_input_event["Records"].append(exp_rec_2)
    exp_err_msg = ("An error occurred (AccessDenied) when calling "
                   "the copy_object operation: Unknown")
    exp_error = (
        "File copy failed. [{'success': False, "
        f"'source_bucket': '{self.exp_src_bucket}', "
        f"'source_key': '{exp_file2_key}', "
        f"'request_id': '{REQUEST_ID3}', "
        f"'target_bucket': '{self.exp_target_bucket}', "
        f"'err_msg': '{exp_err_msg}'"
        "}, {'success': True, "
        f"'source_bucket': '{self.exp_src_bucket}', "
        f"'source_key': '{exp_file_key}', "
        f"'request_id': '{REQUEST_ID4}', "
        f"'target_bucket': '{self.exp_target_bucket}', 'err_msg': ''"
        "}]")
    self.create_test_requests()
    boto3.client = Mock()
    mock_ssm_get_parameter(13)
    s3_cli = boto3.client('s3')
    s3_cli.copy_object = Mock(side_effect=[
        ClientError({'Error': {'Code': 'AccessDenied'}}, 'copy_object'),
        ClientError({'Error': {'Code': 'AccessDenied'}}, 'copy_object'),
        ClientError({'Error': {'Code': 'AccessDenied'}}, 'copy_object'),
        None
    ])
    print_rows("begin")
    row = requests_db.get_job_by_request_id(REQUEST_ID3)
    self.assertEqual("inprogress", row[0]['job_status'])
    row = requests_db.get_job_by_request_id(REQUEST_ID4)
    self.assertEqual("inprogress", row[0]['job_status'])
    try:
        copy_files_to_archive.handler(self.handler_input_event, None)
        self.fail("expected CopyRequestError")
    except copy_files_to_archive.CopyRequestError as ex:
        self.assertEqual(exp_error, str(ex))
    print_rows("end")
    row = requests_db.get_job_by_request_id(REQUEST_ID3)
    self.assertEqual("error", row[0]['job_status'])
    self.assertEqual(exp_err_msg, row[0]['err_msg'])
    row = requests_db.get_job_by_request_id(REQUEST_ID4)
    self.assertEqual("complete", row[0]['job_status'])
def test_get_all_requests(self):
    """
    Tests reading all requests
    """
    qresult = self.create_test_requests()
    boto3.client = Mock()
    mock_ssm_get_parameter(1)
    expected = result_to_json(qresult)
    result = requests_db.get_all_requests()
    self.assertEqual(expected, result)
def test_get_request_not_found(self):
    """
    Tests reading a job that doesn't exist
    """
    request_id = "ABCDEFG"
    exp_result = []
    mock_ssm_get_parameter(1)
    database.single_query = Mock(side_effect=[exp_result])
    result = requests_db.get_job_by_request_id(request_id)
    self.assertEqual(exp_result, result)
    database.single_query.assert_called_once()
def test_delete_all_requests(self):
    """
    Tests deleting all requests from the request_status table
    """
    try:
        self.create_test_requests()
        boto3.client = Mock()
        mock_ssm_get_parameter(1)
        result = requests_db.delete_all_requests()
        self.assertEqual([], result)
    except requests_db.DatabaseError as err:
        self.fail(f"delete_all_requests. {str(err)}")
def test_delete_request(self):
    """
    Tests deleting a job by request_id
    """
    try:
        self.create_test_requests()
        boto3.client = Mock()
        mock_ssm_get_parameter(1)
        result = requests_db.delete_request(REQUEST_ID1)
        self.assertEqual([], result)
    except requests_db.DatabaseError as err:
        self.fail(f"delete_request. {str(err)}")
def test_get_jobs_by_object_key_dberror(self):
    """
    Tests db error reading by object_key
    """
    mock_ssm_get_parameter(1)
    database.single_query = Mock(side_effect=[DbError("DbError reading requests")])
    try:
        requests_db.get_jobs_by_object_key("file_1.h5")
        self.fail("expected DatabaseError")
    except requests_db.DatabaseError as err:
        self.assertEqual("DbError reading requests", str(err))
        database.single_query.assert_called_once()
def test_get_jobs_by_object_key(self):
    """
    Tests reading by object_key
    """
    mock_ssm_get_parameter(1)
    exp_request_ids = [REQUEST_ID1, REQUEST_ID2, REQUEST_ID3]
    _, exp_result_1 = create_select_requests(exp_request_ids)
    object_key = " "
    expected = result_to_json(exp_result_1)
    database.single_query = Mock(side_effect=[exp_result_1])
    result = requests_db.get_jobs_by_object_key(object_key)
    self.assertEqual(expected, result)
    database.single_query.assert_called_once()
def test_delete_request_no_request_id(self):
    """
    Tests deleting a job when no request_id is given
    """
    try:
        mock_ssm_get_parameter(1)
        database.single_query = Mock(side_effect=[requests_db.BadRequestError(
            "No request_id provided")])
        requests_db.delete_request(None)
        self.fail("expected BadRequestError")
    except requests_db.BadRequestError as err:
        self.assertEqual("No request_id provided", str(err))
def test_delete_request_database_error(self):
    """
    Tests database error while deleting a job by request_id
    """
    exp_err = 'Database Error. Internal database error, please contact LP DAAC User Services'
    try:
        mock_ssm_get_parameter(1)
        database.single_query = Mock(side_effect=[DbError(exp_err)])
        requests_db.delete_request('x')
        self.fail("expected DatabaseError")
    except requests_db.DatabaseError as err:
        self.assertEqual(exp_err, str(err))
        database.single_query.assert_called_once()
def test_delete_request(self):
    """
    Tests deleting a job by request_id
    """
    try:
        exp_result = []
        mock_ssm_get_parameter(1)
        database.single_query = Mock(side_effect=[exp_result])
        result = requests_db.delete_request(REQUEST_ID1)
        self.assertEqual(exp_result, result)
        database.single_query.assert_called_once()
    except requests_db.DatabaseError as err:
        self.fail(f"delete_request. {str(err)}")
def test_handler_two_records_success(self):
    """
    Test copy lambda with two files, expecting successful result.
    """
    os.environ['DEVELOP_TESTS'] = "False"
    exp_file_key = 'dr-glacier/MOD09GQ.A0219114.N5aUCG.006.0656338553321.hdf'
    boto3.client = Mock()
    s3_cli = boto3.client('s3')
    s3_cli.copy_object = Mock(side_effect=[None, None])
    exp_upd_result = []
    exp_request_ids = [REQUEST_ID4, REQUEST_ID7]
    _, exp_result = create_select_requests(exp_request_ids)
    # One select result and one update result per record.
    database.single_query = Mock(side_effect=[
        exp_result, exp_upd_result, exp_result, exp_upd_result
    ])
    mock_ssm_get_parameter(4)
    exp_rec_2 = create_copy_event2()
    self.handler_input_event["Records"].append(exp_rec_2)
    result = copy_files_to_archive.handler(self.handler_input_event, None)
    boto3.client.assert_called_with('ssm')
    exp_result = [{
        "success": True,
        "source_bucket": self.exp_src_bucket,
        "source_key": self.exp_file_key1,
        "request_id": REQUEST_ID7,
        "target_bucket": self.exp_target_bucket,
        "err_msg": ""
    }, {
        "success": True,
        "source_bucket": self.exp_src_bucket,
        "source_key": exp_file_key,
        "request_id": REQUEST_ID7,
        "target_bucket": self.exp_target_bucket,
        "err_msg": ""
    }]
    self.assertEqual(exp_result, result)
    s3_cli.copy_object.assert_any_call(Bucket=self.exp_target_bucket,
                                       CopySource={
                                           'Bucket': self.exp_src_bucket,
                                           'Key': self.exp_file_key1
                                       },
                                       Key=self.exp_file_key1)
    s3_cli.copy_object.assert_any_call(Bucket=self.exp_target_bucket,
                                       CopySource={
                                           'Bucket': self.exp_src_bucket,
                                           'Key': exp_file_key
                                       },
                                       Key=exp_file_key)
def test_get_job_by_request_id_dberror(self):
    """
    Tests getting a DatabaseError reading a job by request_id
    """
    mock_ssm_get_parameter(1)
    exp_msg = 'Database Error. could not connect to server'
    database.single_query = Mock(side_effect=[DbError(exp_msg)])
    os.environ["DATABASE_HOST"] = "unknown.cr.usgs.gov"
    try:
        requests_db.get_job_by_request_id('x')
        self.fail("expected DatabaseError")
    except requests_db.DatabaseError as err:
        self.assertEqual(exp_msg, str(err))
        database.single_query.assert_called_once()
def test_update_request_status_missing_key(self):
    """
    Tests updating a job where the object_key isn't given
    """
    utc_now_exp = "2019-07-31 19:21:38.263364+00:00"
    requests_db.get_utc_now_iso = Mock(return_value=utc_now_exp)
    job_status = "invalid"
    exp_msg = "No object_key provided"
    database.single_query = Mock(side_effect=[requests_db.BadRequestError(exp_msg)])
    mock_ssm_get_parameter(1)
    try:
        requests_db.update_request_status_for_job(REQUEST_ID1, job_status)
        self.fail("expected requests_db.BadRequestError")
    except requests_db.BadRequestError as err:
        self.assertEqual(exp_msg, str(err))
def test_handler_one_file_fail_3x(self):
    """
    Test copy lambda with one failed copy after 3 retries.
    """
    boto3.client = Mock()
    s3_cli = boto3.client('s3')
    s3_cli.copy_object = Mock(side_effect=[
        ClientError({'Error': {'Code': 'AccessDenied'}}, 'copy_object'),
        ClientError({'Error': {'Code': 'AccessDenied'}}, 'copy_object'),
        ClientError({'Error': {'Code': 'AccessDenied'}}, 'copy_object')
    ])
    s3_cli.head_object = Mock()
    exp_error = ("File copy failed. [{'success': False, "
                 f"'source_bucket': '{self.exp_src_bucket}', "
                 f"'source_key': '{self.exp_file_key1}', "
                 f"'request_id': '{REQUEST_ID7}', "
                 f"'target_bucket': '{self.exp_target_bucket}', "
                 "'err_msg': 'An error occurred (AccessDenied) when calling "
                 "the copy_object operation: Unknown'}]")
    exp_upd_result = []
    exp_request_ids = [REQUEST_ID7, REQUEST_ID4]
    _, exp_result = create_select_requests(exp_request_ids)
    database.single_query = Mock(side_effect=[
        exp_result, exp_result, exp_upd_result, exp_result, exp_upd_result
    ])
    mock_ssm_get_parameter(5)
    try:
        copy_files_to_archive.handler(self.handler_input_event, None)
        self.fail("expected CopyRequestError")
    except copy_files_to_archive.CopyRequestError as ex:
        self.assertEqual(exp_error, str(ex))
    boto3.client.assert_called_with('ssm')
    s3_cli.copy_object.assert_called_with(Bucket=self.exp_target_bucket,
                                          CopySource={
                                              'Bucket': self.exp_src_bucket,
                                              'Key': self.exp_file_key1
                                          },
                                          Key=self.exp_file_key1)
    database.single_query.assert_called()
def test_update_request_status_complete(self):
    """
    Tests updating a job to a 'complete' status
    """
    self.create_test_requests()
    boto3.client = Mock()
    mock_ssm_get_parameter(1)
    utc_now_exp = "2019-07-31 21:07:15.234362+00:00"
    requests_db.get_utc_now_iso = Mock(return_value=utc_now_exp)
    job_status = "complete"
    try:
        result = requests_db.update_request_status_for_job(
            REQUEST_ID1, job_status)
        self.assertEqual([], result)
    except requests_db.DatabaseError as err:
        self.fail(f"update_request_status_for_job. {str(err)}")