def test_handler_one_file_retry2_success(self):
    """
    Test copy lambda: the first copy attempt fails, the retry succeeds.

    NOTE(review): the original docstring claimed two failed attempts
    before success, but the copy_object mock below supplies only one
    ClientError followed by None (one failure, then success) — confirm
    which scenario this test is meant to cover.
    """
    # Remove the retry env vars so the handler falls back to its defaults.
    del os.environ['COPY_RETRY_SLEEP_SECS']
    del os.environ['COPY_RETRIES']
    time.sleep(1)
    boto3.client = Mock()
    s3_cli = boto3.client('s3')
    # copy_object raises AccessDenied once, then returns None (success).
    s3_cli.copy_object = Mock(side_effect=[
        ClientError({'Error': {'Code': 'AccessDenied'}}, 'copy_object'),
        None
    ])
    self.create_test_requests()
    mock_ssm_get_parameter(6)
    print_rows("begin")
    row = requests_db.get_job_by_request_id(REQUEST_ID3)
    self.assertEqual("inprogress", row[0]['job_status'])
    result = copy_files_to_archive.handler(self.handler_input_event, None)
    # Restore the retry settings for subsequent tests.
    os.environ['COPY_RETRIES'] = '2'
    os.environ['COPY_RETRY_SLEEP_SECS'] = '1'
    # Expected per-file result the handler returns on success.
    exp_result = [{
        "success": True,
        "source_bucket": self.exp_src_bucket,
        "source_key": self.exp_file_key1,
        "request_id": REQUEST_ID3,
        "target_bucket": self.exp_target_bucket,
        "err_msg": ""
    }]
    self.assertEqual(exp_result, result)
    # The job's status moves from 'inprogress' to 'complete'.
    row = requests_db.get_job_by_request_id(REQUEST_ID3)
    self.assertEqual("complete", row[0]['job_status'])
    print_rows("end")
def test_handler_one_file_success(self):
    """
    Test copy lambda with one file, expecting a successful result.
    """
    # Drop the retry env vars so the handler uses its built-in defaults.
    del os.environ['COPY_RETRY_SLEEP_SECS']
    del os.environ['COPY_RETRIES']
    boto3.client = Mock()
    s3_client = boto3.client('s3')
    # A single copy_object call that succeeds.
    s3_client.copy_object = Mock(side_effect=[None])
    self.create_test_requests()
    mock_ssm_get_parameter(6)
    print_rows("begin")
    job_row = requests_db.get_job_by_request_id(REQUEST_ID3)
    self.assertEqual("inprogress", job_row[0]['job_status'])
    handler_result = copy_files_to_archive.handler(self.handler_input_event, None)
    # Put the retry settings back for the rest of the suite.
    os.environ['COPY_RETRIES'] = '2'
    os.environ['COPY_RETRY_SLEEP_SECS'] = '1'
    boto3.client.assert_called_with('ssm')
    expected = [{
        "success": True,
        "source_bucket": self.exp_src_bucket,
        "source_key": self.exp_file_key1,
        "request_id": REQUEST_ID3,
        "target_bucket": self.exp_target_bucket,
        "err_msg": ""
    }]
    self.assertEqual(expected, handler_result)
    print_rows("end")
    # The job should now be marked complete in the database.
    job_row = requests_db.get_job_by_request_id(REQUEST_ID3)
    self.assertEqual("complete", job_row[0]['job_status'])
def test_update_request_status_for_job_error(self):
    """
    Tests updating an inprogress job to an 'error' status.
    """
    self.create_test_requests()
    boto3.client = Mock()
    mock_ssm_get_parameter(3)
    request_id = REQUEST_ID8
    job_row = requests_db.get_job_by_request_id(request_id)
    self.assertEqual("inprogress", job_row[0]["job_status"])
    print_rows("begin")
    # Pin "now" so the last_update_time assertion is deterministic.
    utc_now_exp = "2019-07-31 21:07:15.234362+00:00"
    requests_db.get_utc_now_iso = Mock(return_value=utc_now_exp)
    new_status = "error"
    error_text = "oh no an error"
    try:
        update_result = requests_db.update_request_status_for_job(
            request_id, new_status, error_text)
        print_rows("end")
        self.assertEqual([], update_result)
        # Status, error message, and timestamp should all be persisted.
        job_row = requests_db.get_job_by_request_id(request_id)
        self.assertEqual(new_status, job_row[0]["job_status"])
        self.assertEqual(error_text, job_row[0]["err_msg"])
        self.assertIn(utc_now_exp, job_row[0]["last_update_time"])
    except requests_db.DatabaseError as err:
        self.fail(f"update_request_status_for_job. {err}")
def test_handler_two_records_one_fail_one_success(self):
    """
    Test copy lambda with two files, one successful copy, one failed copy.
    """
    # Keys of the two granule files handled by this event.
    exp_file_key = 'dr-glacier/MOD09GQ.A0219114.N5aUCG.006.0656338553321.hdf'
    exp_file2_key = 'dr-glacier/MOD09GQ.A0219114.N5aUCG.006.0656338553321.txt'
    exp_rec_2 = create_copy_event2()
    self.handler_input_event["Records"].append(exp_rec_2)
    # Error text botocore produces for the mocked AccessDenied ClientError.
    exp_err_msg = ("An error occurred (AccessDenied) when calling "
                   "the copy_object operation: Unknown")
    # Exact message of the CopyRequestError the handler raises when any
    # file in the batch fails (repr of the per-file result dicts).
    exp_error = (
        "File copy failed. [{'success': False, "
        f"'source_bucket': '{self.exp_src_bucket}', "
        f"'source_key': '{exp_file2_key}', "
        f"'request_id': '{REQUEST_ID3}', "
        f"'target_bucket': '{self.exp_target_bucket}', "
        f"'err_msg': '{exp_err_msg}'"
        "}, {'success': True, "
        f"'source_bucket': '{self.exp_src_bucket}', "
        f"'source_key': '{exp_file_key}', "
        f"'request_id': '{REQUEST_ID4}', "
        f"'target_bucket': '{self.exp_target_bucket}', 'err_msg': ''"
        "}]")
    self.create_test_requests()
    boto3.client = Mock()
    mock_ssm_get_parameter(13)
    s3_cli = boto3.client('s3')
    # copy_object raises AccessDenied three times, then succeeds once —
    # presumably the first file exhausts its retries while the second
    # file copies successfully (matches the expected error text above).
    s3_cli.copy_object = Mock(side_effect=[
        ClientError({'Error': {'Code': 'AccessDenied'}}, 'copy_object'),
        ClientError({'Error': {'Code': 'AccessDenied'}}, 'copy_object'),
        ClientError({'Error': {'Code': 'AccessDenied'}}, 'copy_object'),
        None
    ])
    print_rows("begin")
    # Both jobs start out 'inprogress'.
    row = requests_db.get_job_by_request_id(REQUEST_ID3)
    self.assertEqual("inprogress", row[0]['job_status'])
    row = requests_db.get_job_by_request_id(REQUEST_ID4)
    self.assertEqual("inprogress", row[0]['job_status'])
    try:
        copy_files_to_archive.handler(self.handler_input_event, None)
        self.fail("expected CopyRequestError")
    except copy_files_to_archive.CopyRequestError as ex:
        self.assertEqual(exp_error, str(ex))
    print_rows("end")
    # Failed file's job is marked 'error' with the botocore message;
    # the successful file's job is marked 'complete'.
    row = requests_db.get_job_by_request_id(REQUEST_ID3)
    self.assertEqual("error", row[0]['job_status'])
    self.assertEqual(exp_err_msg, row[0]['err_msg'])
    row = requests_db.get_job_by_request_id(REQUEST_ID4)
    self.assertEqual("complete", row[0]['job_status'])
def test_handler_one_file_fail_3x(self):
    """
    Test copy lambda with one failed copy after 3 retries.
    """
    # Error text botocore produces for the mocked AccessDenied ClientError.
    exp_err_msg = ("An error occurred (AccessDenied) when calling "
                   "the copy_object operation: Unknown")
    # Exact message of the CopyRequestError the handler raises when the
    # file fails every attempt (repr of the per-file result dict).
    exp_error = ("File copy failed. [{'success': False, "
                 f"'source_bucket': '{self.exp_src_bucket}', "
                 f"'source_key': '{self.exp_file_key1}', "
                 f"'request_id': '{REQUEST_ID3}', "
                 f"'target_bucket': '{self.exp_target_bucket}', "
                 f"'err_msg': '{exp_err_msg}'"
                 "}]")
    self.create_test_requests()
    # Freeze the timestamp used for database updates.
    utc_now_exp = requests_db.get_utc_now_iso()
    requests_db.get_utc_now_iso = Mock(return_value=utc_now_exp)
    boto3.client = Mock()
    mock_ssm_get_parameter(7)
    print_rows("begin")
    s3_cli = boto3.client('s3')
    # copy_object raises AccessDenied on all three attempts.
    s3_cli.copy_object = Mock(side_effect=[
        ClientError({'Error': {'Code': 'AccessDenied'}}, 'copy_object'),
        ClientError({'Error': {'Code': 'AccessDenied'}}, 'copy_object'),
        ClientError({'Error': {'Code': 'AccessDenied'}}, 'copy_object')
    ])
    s3_cli.head_object = Mock()
    # The job starts 'inprogress' with no error recorded.
    row = requests_db.get_job_by_request_id(REQUEST_ID3)
    self.assertEqual("inprogress", row[0]['job_status'])
    self.assertEqual(None, row[0]['err_msg'])
    try:
        copy_files_to_archive.handler(self.handler_input_event, None)
        self.fail("expected CopyRequestError")
    except copy_files_to_archive.CopyRequestError as ex:
        self.assertEqual(exp_error, str(ex))
    print_rows("end")
    # After exhausting retries the job is 'error' with the botocore message.
    row = requests_db.get_job_by_request_id(REQUEST_ID3)
    self.assertEqual("error", row[0]['job_status'])
    self.assertEqual(exp_err_msg, row[0]['err_msg'])
def test_handler_two_records_success(self):
    """
    Test copy lambda with two files, expecting a successful result.
    """
    second_file_key = 'dr-glacier/MOD09GQ.A0219114.N5aUCG.006.0656338553321.hdf'
    boto3.client = Mock()
    s3_client = boto3.client('s3')
    # Both copy_object calls succeed.
    s3_client.copy_object = Mock(side_effect=[None, None])
    second_record = create_copy_event2()
    self.handler_input_event["Records"].append(second_record)
    self.create_test_requests()
    mock_ssm_get_parameter(10)
    print_rows("begin")
    # Both jobs start out 'inprogress'.
    for request_id in (REQUEST_ID3, REQUEST_ID4):
        job_row = requests_db.get_job_by_request_id(request_id)
        self.assertEqual("inprogress", job_row[0]['job_status'])
    handler_result = copy_files_to_archive.handler(self.handler_input_event, None)
    boto3.client.assert_called_with('ssm')
    expected = [
        {
            "success": True,
            "source_bucket": self.exp_src_bucket,
            "source_key": self.exp_file_key1,
            "request_id": REQUEST_ID3,
            "target_bucket": self.exp_target_bucket,
            "err_msg": ""
        },
        {
            "success": True,
            "source_bucket": self.exp_src_bucket,
            "source_key": second_file_key,
            "request_id": REQUEST_ID4,
            "target_bucket": PROTECTED_BUCKET,
            "err_msg": ""
        },
    ]
    self.assertEqual(expected, handler_result)
    print_rows("end")
    # Both jobs should now be marked complete.
    for request_id in (REQUEST_ID3, REQUEST_ID4):
        job_row = requests_db.get_job_by_request_id(request_id)
        self.assertEqual("complete", job_row[0]['job_status'])
def test_get_jobs_by_object_key(self):
    """
    Tests reading jobs by object_key.

    Verifies get_jobs_by_object_key returns exactly the jobs for
    "objectkey_4", in the expected order (REQUEST_ID7 then REQUEST_ID4).
    """
    self.create_test_requests()
    boto3.client = Mock()
    mock_ssm_get_parameter(2)
    print_rows("begin")
    object_key = "objectkey_4"
    result = requests_db.get_jobs_by_object_key(object_key)
    exp_ids = [REQUEST_ID7, REQUEST_ID4]
    # Pin the result length first: the original index-based loop passed
    # silently if fewer rows came back than expected.
    self.assertEqual(len(exp_ids), len(result))
    for exp_id, job in zip(exp_ids, result):
        self.assertEqual(exp_id, job["request_id"])
    # NOTE(review): the original test queried "objectkey_5" but never
    # asserted anything about the result; the call is kept so behavior
    # is unchanged — add an assertion (e.g. expected row count) once the
    # intended fixture contents are confirmed.
    object_key = "objectkey_5"
    result = requests_db.get_jobs_by_object_key(object_key)
def test_update_request_status_for_job_inprogress(self):
    """
    Tests updating an 'error' job to an 'inprogress' status.
    """
    self.create_test_requests()
    boto3.client = Mock()
    mock_ssm_get_parameter(2)
    print_rows("begin")
    # Freeze the "now" timestamp used by the update.
    utc_now_exp = requests_db.get_utc_now_iso()
    requests_db.get_utc_now_iso = Mock(return_value=utc_now_exp)
    request_id = REQUEST_ID4
    new_status = "inprogress"
    try:
        update_result = requests_db.update_request_status_for_job(
            request_id, new_status)
        print_rows("end")
        self.assertEqual([], update_result)
        job_row = requests_db.get_job_by_request_id(request_id)
        self.assertEqual(new_status, job_row[0]["job_status"])
        # Moving back to 'inprogress' leaves no error message recorded.
        self.assertEqual(None, job_row[0]["err_msg"])
    except requests_db.DatabaseError as err:
        self.fail(f"update_request_status_for_job. {err}")
def test_task_client_error_2_times(self):
    """
    Test two files, first successful, second has two errors, then success.
    """
    granule_id = "MOD09GQ.A0219114.N5aUCG.006.0656338553321"
    dest = "sndbx-cumulus-protected"
    # Input event: one granule carrying both files.
    exp_event = {
        "config": {"glacier-bucket": "some_bucket"},
        "input": {
            "granules": [{
                "granuleId": granule_id,
                "keys": [
                    {"key": FILE1, "dest_bucket": dest},
                    {"key": FILE2, "dest_bucket": dest},
                ],
            }],
        },
    }
    requests_db.request_id_generator = Mock(side_effect=[
        REQUEST_GROUP_ID_EXP_1, REQUEST_ID1,
        REQUEST_GROUP_ID_EXP_2, REQUEST_ID2, REQUEST_ID3
    ])
    boto3.client = Mock()
    s3_client = boto3.client('s3')
    s3_client.head_object = Mock()
    # restore_object: success, two NoSuchBucket failures, then success.
    s3_client.restore_object = Mock(side_effect=[
        None,
        ClientError({'Error': {'Code': 'NoSuchBucket'}}, 'restore_object'),
        ClientError({'Error': {'Code': 'NoSuchBucket'}}, 'restore_object'),
        None
    ])
    CumulusLogger.info = Mock()
    CumulusLogger.error = Mock()
    mock_ssm_get_parameter(2)
    # Both files ultimately succeed despite the transient errors.
    exp_gran = {
        'granuleId': granule_id,
        'files': [
            {'key': FILE1, 'dest_bucket': dest,
             'success': True, 'err_msg': ''},
            {'key': FILE2, 'dest_bucket': dest,
             'success': True, 'err_msg': ''},
        ],
    }
    print_rows("begin")
    task_result = request_files.task(exp_event, self.context)
    self.assertEqual(exp_gran, task_result)
    print_rows("end")
def test_task_client_error_3_times(self):
    """
    Test three files, two successful, one errors on all retries and fails.
    """
    granule_id = "MOD09GQ.A0219114.N5aUCG.006.0656338553321"
    # Input event: one granule carrying three file keys.
    exp_event = {
        "config": {"glacier-bucket": "some_bucket"},
        "input": {
            "granules": [{"granuleId": granule_id,
                          "keys": [KEY1, KEY3, KEY4]}],
        },
    }
    requests_db.request_id_generator = Mock(side_effect=[
        REQUEST_GROUP_ID_EXP_1, REQUEST_ID1,
        REQUEST_GROUP_ID_EXP_3, REQUEST_ID2, REQUEST_ID3, REQUEST_ID4
    ])
    boto3.client = Mock()
    s3_client = boto3.client('s3')
    s3_client.head_object = Mock()
    # restore_object outcomes in call order across files and retries;
    # the final NoSuchKey error is the one that sticks.
    s3_client.restore_object = Mock(side_effect=[
        None,
        ClientError({'Error': {'Code': 'NoSuchBucket'}}, 'restore_object'),
        None,
        ClientError({'Error': {'Code': 'NoSuchBucket'}}, 'restore_object'),
        ClientError({'Error': {'Code': 'NoSuchKey'}}, 'restore_object')
    ])
    CumulusLogger.info = Mock()
    CumulusLogger.error = Mock()
    mock_ssm_get_parameter(3)
    # botocore's message for the final, unrecovered failure.
    failed_msg = ('An error occurred (NoSuchKey) when calling the '
                  'restore_object operation: Unknown')
    # Key insertion order here must match the original construction:
    # the dict's repr is embedded verbatim in the expected error string.
    exp_gran = {
        'granuleId': granule_id,
        'files': [
            {'key': FILE1, 'dest_bucket': PROTECTED_BUCKET,
             'success': True, 'err_msg': ''},
            {'key': FILE3, 'dest_bucket': None,
             'success': False, 'err_msg': failed_msg},
            {'key': FILE4, 'dest_bucket': PUBLIC_BUCKET,
             'success': True, 'err_msg': ''},
        ],
    }
    exp_err = f"One or more files failed to be requested. {exp_gran}"
    print_rows("begin")
    try:
        request_files.task(exp_event, self.context)
        self.fail("RestoreRequestError expected")
    except request_files.RestoreRequestError as err:
        self.assertEqual(exp_err, str(err))
    print_rows("end")