def test_update_request_status_for_job_error(self):
    """
    Tests updating an inprogress job to an 'error' status.
    """
    self.create_test_requests()
    boto3.client = Mock()
    mock_ssm_get_parameter(3)

    job_id = REQUEST_ID8
    row = requests_db.get_job_by_request_id(job_id)
    self.assertEqual("inprogress", row[0]["job_status"])
    print_rows("begin")

    # Freeze the clock so the row's last_update_time is predictable.
    utc_now_exp = "2019-07-31 21:07:15.234362+00:00"
    requests_db.get_utc_now_iso = Mock(return_value=utc_now_exp)
    new_status = "error"
    expected_err = "oh no an error"
    try:
        result = requests_db.update_request_status_for_job(
            job_id, new_status, expected_err)
        print_rows("end")
        self.assertEqual([], result)
        row = requests_db.get_job_by_request_id(job_id)
        self.assertEqual(new_status, row[0]["job_status"])
        self.assertEqual(expected_err, row[0]["err_msg"])
        self.assertIn(utc_now_exp, row[0]["last_update_time"])
    except requests_db.DatabaseError as err:
        self.fail(f"update_request_status_for_job. {str(err)}")
def test_handler_one_file_success(self):
    """
    Test copy lambda with one file, expecting successful result.
    """
    # Remove the retry settings so the handler falls back to its defaults.
    del os.environ['COPY_RETRY_SLEEP_SECS']
    del os.environ['COPY_RETRIES']
    boto3.client = Mock()
    s3_cli = boto3.client('s3')
    s3_cli.copy_object = Mock(side_effect=[None])
    self.create_test_requests()
    mock_ssm_get_parameter(6)
    print_rows("begin")
    row = requests_db.get_job_by_request_id(REQUEST_ID3)
    self.assertEqual("inprogress", row[0]['job_status'])
    try:
        result = copy_files_to_archive.handler(self.handler_input_event, None)
    finally:
        # Restore the env vars even if the handler raises, so the
        # deletions above cannot leak into later tests.
        os.environ['COPY_RETRIES'] = '2'
        os.environ['COPY_RETRY_SLEEP_SECS'] = '1'
    boto3.client.assert_called_with('ssm')
    exp_result = [{
        "success": True,
        "source_bucket": self.exp_src_bucket,
        "source_key": self.exp_file_key1,
        "request_id": REQUEST_ID3,
        "target_bucket": self.exp_target_bucket,
        "err_msg": ""
    }]
    self.assertEqual(exp_result, result)
    print_rows("end")
    row = requests_db.get_job_by_request_id(REQUEST_ID3)
    self.assertEqual("complete", row[0]['job_status'])
def test_handler_one_file_retry2_success(self):
    """
    Test copy lambda with two failed copy attempts, third attempt successful.

    NOTE(review): the copy_object mock below supplies only ONE ClientError
    before the successful None, so as written the copy succeeds on the
    second attempt, not the third — confirm whether the mock or the
    docstring/test name reflects the intent.
    """
    # Remove the retry settings so the handler falls back to its defaults.
    del os.environ['COPY_RETRY_SLEEP_SECS']
    del os.environ['COPY_RETRIES']
    time.sleep(1)
    boto3.client = Mock()
    s3_cli = boto3.client('s3')
    s3_cli.copy_object = Mock(side_effect=[
        ClientError({'Error': {'Code': 'AccessDenied'}}, 'copy_object'),
        None])
    self.create_test_requests()
    mock_secretsmanager_get_parameter(6)
    print_rows("begin")
    row = requests_db.get_job_by_request_id(REQUEST_ID3)
    self.assertEqual("inprogress", row[0]['job_status'])
    try:
        result = copy_files_to_archive.handler(self.handler_input_event, None)
    finally:
        # Restore the env vars even if the handler raises, so the
        # deletions above cannot leak into later tests.
        os.environ['COPY_RETRIES'] = '2'
        os.environ['COPY_RETRY_SLEEP_SECS'] = '1'
    exp_result = [{"success": True,
                   "source_bucket": self.exp_src_bucket,
                   "source_key": self.exp_file_key1,
                   "request_id": REQUEST_ID3,
                   "target_bucket": self.exp_target_bucket,
                   "err_msg": ""}]
    self.assertEqual(exp_result, result)
    row = requests_db.get_job_by_request_id(REQUEST_ID3)
    self.assertEqual("complete", row[0]['job_status'])
    print_rows("end")
def test_handler_two_records_one_fail_one_success(self):
    """
    Test copy lambda with two files, one successful copy, one failed copy.
    """
    exp_file_key = 'dr-glacier/MOD09GQ.A0219114.N5aUCG.006.0656338553321.hdf'
    exp_file2_key = 'dr-glacier/MOD09GQ.A0219114.N5aUCG.006.0656338553321.txt'
    exp_rec_2 = create_copy_event2()
    self.handler_input_event["Records"].append(exp_rec_2)
    exp_err_msg = ("An error occurred (AccessDenied) when calling "
                   "the copy_object operation: Unknown")
    # The handler raises with the full per-file result list in the message.
    exp_error = (
        "File copy failed. [{'success': False, "
        f"'source_bucket': '{self.exp_src_bucket}', "
        f"'source_key': '{exp_file2_key}', "
        f"'request_id': '{REQUEST_ID3}', "
        f"'target_bucket': '{self.exp_target_bucket}', "
        f"'err_msg': '{exp_err_msg}'"
        "}, {'success': True, "
        f"'source_bucket': '{self.exp_src_bucket}', "
        f"'source_key': '{exp_file_key}', "
        f"'request_id': '{REQUEST_ID4}', "
        f"'target_bucket': '{self.exp_target_bucket}', 'err_msg': ''"
        "}]")
    self.create_test_requests()
    boto3.client = Mock()
    mock_ssm_get_parameter(13)
    s3_cli = boto3.client('s3')
    # First file: three AccessDenied failures exhaust the retries;
    # second file: succeeds on the first attempt.
    s3_cli.copy_object = Mock(side_effect=[
        ClientError({'Error': {'Code': 'AccessDenied'}}, 'copy_object')
        for _ in range(3)
    ] + [None])
    print_rows("begin")
    for req_id in (REQUEST_ID3, REQUEST_ID4):
        row = requests_db.get_job_by_request_id(req_id)
        self.assertEqual("inprogress", row[0]['job_status'])
    try:
        copy_files_to_archive.handler(self.handler_input_event, None)
        self.fail("expected CopyRequestError")
    except copy_files_to_archive.CopyRequestError as ex:
        self.assertEqual(exp_error, str(ex))
    print_rows("end")
    row = requests_db.get_job_by_request_id(REQUEST_ID3)
    self.assertEqual("error", row[0]['job_status'])
    self.assertEqual(exp_err_msg, row[0]['err_msg'])
    row = requests_db.get_job_by_request_id(REQUEST_ID4)
    self.assertEqual("complete", row[0]['job_status'])
def test_get_job_by_request_id_dberror(self):
    """
    Tests getting a DatabaseError reading a job by request_id.
    """
    mock_ssm_get_parameter(1)
    exp_msg = 'Database Error. could not connect to server'
    database.single_query = Mock(side_effect=[DbError(exp_msg)])
    # Point at a host that does not exist to mirror a connection failure.
    os.environ["DATABASE_HOST"] = "unknown.cr.usgs.gov"
    try:
        requests_db.get_job_by_request_id('x')
    except requests_db.DatabaseError as err:
        self.assertEqual(exp_msg, str(err))
        database.single_query.assert_called_once()
    else:
        self.fail("expected DatabaseError")
def test_submit_request_error_status(self):
    """
    Tests that an error job is written to the db.
    """
    utc_now_exp = UTC_NOW_EXP_4
    requests_db.get_utc_now_iso = Mock(return_value=utc_now_exp)
    requests_db.request_id_generator = Mock(side_effect=[REQUEST_ID4])
    data = {
        "request_id": REQUEST_ID4,
        "err_msg": "Error message goes here",
        "request_group_id": REQUEST_GROUP_ID_EXP_2,
        "granule_id": "granule_4",
        "object_key": "objectkey_4",
        "job_type": "restore",
        "job_status": "error",
        "request_time": utc_now_exp,
    }
    qresult, exp_result = create_insert_request(
        REQUEST_ID4, data["request_group_id"], data["granule_id"],
        data["object_key"], data["job_type"], None, None,
        data["job_status"], data["request_time"], None, data["err_msg"])
    database.single_query = Mock(side_effect=[qresult, exp_result, None, None])
    mock_ssm_get_parameter(4)
    try:
        requests_db.submit_request(data)
        database.single_query.assert_called_once()
    except requests_db.DatabaseError as err:
        self.fail(f"submit_request. {str(err)}")
    try:
        result = requests_db.get_job_by_request_id(REQUEST_ID4)
        self.assertEqual(result_to_json(exp_result), result)
    except requests_db.DatabaseError as err:
        self.fail(f"get_job_by_request_id. {str(err)}")
def test_submit_request_error_status_with_db(self):
    """
    Tests that an error job is written to the db.

    NOTE(review): this method was originally named
    ``test_submit_request_error_status``, identical to another test in
    this file. If both lived in the same class, the later definition
    silently shadowed the earlier one so only one ever ran — renamed so
    unittest discovers both. TODO confirm they shared a class.
    """
    self.create_test_requests()
    boto3.client = Mock()
    mock_ssm_get_parameter(2)
    # Freeze the clock so request_time/last_update_time are predictable.
    utc_now_exp = "2019-07-31 18:05:19.161362+00:00"
    requests_db.get_utc_now_iso = Mock(return_value=utc_now_exp)
    requests_db.request_id_generator = Mock(side_effect=[REQUEST_ID12])
    data = {
        "request_id": REQUEST_ID12,
        "request_group_id": REQUEST_GROUP_ID_EXP_1,
        "granule_id": "granule_1",
        "object_key": "thisisanobjectkey",
        "job_type": "restore",
        "restore_bucket_dest": "my_s3_bucket",
        "archive_bucket_dest": PROTECTED_BUCKET,
        "job_status": "error",
        "request_time": utc_now_exp,
        "err_msg": "restore request error message here",
    }
    try:
        requests_db.submit_request(data)
    except requests_db.DatabaseError as err:
        self.fail(f"submit_request. {str(err)}")
    try:
        result = requests_db.get_job_by_request_id(REQUEST_ID12)
        data["last_update_time"] = utc_now_exp
        self.assertEqual(data, result[0])
    except requests_db.DatabaseError as err:
        self.fail(f"get_job_by_request_id. {str(err)}")
def add_request(event):
    """
    Adds a request to the database.

    Raises BadRequestError when 'granule_id' or 'request_group_id' is
    absent from the event. A missing 'status' defaults to "error".
    """
    if 'granule_id' not in event:
        raise BadRequestError("Missing 'granule_id' in input data")
    if 'request_group_id' not in event:
        raise BadRequestError("Missing 'request_group_id' in input data")
    # NOTE(review): defaulting a missing status to "error" looks odd —
    # confirm "inprogress" was not the intent.
    status = event.get('status', "error")
    data = {
        "request_id": requests_db.request_id_generator(),
        "request_group_id": event['request_group_id'],
        "granule_id": event['granule_id'],
        "object_key": "my_test_filename",
        "job_type": "restore",
        "restore_bucket_dest": "my_test_bucket",
        "job_status": status,
    }
    if status == "error":
        data["err_msg"] = "error message goes here"
    request_id = requests_db.submit_request(data)
    return requests_db.get_job_by_request_id(request_id)
def query_requests(event):
    """
    Queries the database for requests.

    Exactly one filter is applied, with the precedence:
    request_id > request_group_id > granule_id > object_key.
    When none of those keys is present (or all are falsy), every
    request is returned.

    Args:
        event: dict that may contain 'request_id', 'request_group_id',
            'granule_id', and/or 'object_key'.

    Returns:
        The matching rows from requests_db.
    """
    # dict.get replaces the original try/except-KeyError-to-None pattern.
    request_id = event.get('request_id')
    request_group_id = event.get('request_group_id')
    granule_id = event.get('granule_id')
    object_key = event.get('object_key')

    # Flat guard chain instead of a nested if/else pyramid; the
    # precedence order is unchanged.
    if request_id:
        return requests_db.get_job_by_request_id(request_id)
    if request_group_id:
        return requests_db.get_jobs_by_request_group_id(request_group_id)
    if granule_id:
        return requests_db.get_jobs_by_granule_id(granule_id)
    if object_key:
        return requests_db.get_jobs_by_object_key(object_key)
    return requests_db.get_all_requests()
def test_handler_one_file_fail_3x(self):
    """
    Test copy lambda with one failed copy after 3 retries.
    """
    exp_err_msg = ("An error occurred (AccessDenied) when calling "
                   "the copy_object operation: Unknown")
    exp_error = ("File copy failed. [{'success': False, "
                 f"'source_bucket': '{self.exp_src_bucket}', "
                 f"'source_key': '{self.exp_file_key1}', "
                 f"'request_id': '{REQUEST_ID3}', "
                 f"'target_bucket': '{self.exp_target_bucket}', "
                 f"'err_msg': '{exp_err_msg}'"
                 "}]")
    self.create_test_requests()
    # Capture the current time once, then freeze the clock on it.
    utc_now_exp = requests_db.get_utc_now_iso()
    requests_db.get_utc_now_iso = Mock(return_value=utc_now_exp)
    boto3.client = Mock()
    mock_ssm_get_parameter(7)
    print_rows("begin")
    s3_cli = boto3.client('s3')
    # Every copy attempt is denied, so all retries are exhausted.
    s3_cli.copy_object = Mock(side_effect=[
        ClientError({'Error': {'Code': 'AccessDenied'}}, 'copy_object')
        for _ in range(3)
    ])
    s3_cli.head_object = Mock()
    row = requests_db.get_job_by_request_id(REQUEST_ID3)
    self.assertEqual("inprogress", row[0]['job_status'])
    self.assertEqual(None, row[0]['err_msg'])
    try:
        copy_files_to_archive.handler(self.handler_input_event, None)
        self.fail("expected CopyRequestError")
    except copy_files_to_archive.CopyRequestError as ex:
        self.assertEqual(exp_error, str(ex))
    print_rows("end")
    row = requests_db.get_job_by_request_id(REQUEST_ID3)
    self.assertEqual("error", row[0]['job_status'])
    self.assertEqual(exp_err_msg, row[0]['err_msg'])
def test_handler_two_records_success(self):
    """
    Test copy lambda with two files, expecting successful result.
    """
    exp_file_key = 'dr-glacier/MOD09GQ.A0219114.N5aUCG.006.0656338553321.hdf'
    boto3.client = Mock()
    s3_cli = boto3.client('s3')
    # Both copies succeed on the first attempt.
    s3_cli.copy_object = Mock(side_effect=[None, None])
    self.handler_input_event["Records"].append(create_copy_event2())
    self.create_test_requests()
    mock_ssm_get_parameter(10)
    print_rows("begin")
    for req_id in (REQUEST_ID3, REQUEST_ID4):
        row = requests_db.get_job_by_request_id(req_id)
        self.assertEqual("inprogress", row[0]['job_status'])
    result = copy_files_to_archive.handler(self.handler_input_event, None)
    boto3.client.assert_called_with('ssm')
    exp_result = [
        {"success": True,
         "source_bucket": self.exp_src_bucket,
         "source_key": self.exp_file_key1,
         "request_id": REQUEST_ID3,
         "target_bucket": self.exp_target_bucket,
         "err_msg": ""},
        {"success": True,
         "source_bucket": self.exp_src_bucket,
         "source_key": exp_file_key,
         "request_id": REQUEST_ID4,
         "target_bucket": PROTECTED_BUCKET,
         "err_msg": ""},
    ]
    self.assertEqual(exp_result, result)
    print_rows("end")
    for req_id in (REQUEST_ID3, REQUEST_ID4):
        row = requests_db.get_job_by_request_id(req_id)
        self.assertEqual("complete", row[0]['job_status'])
def test_get_request_not_found(self):
    """
    Tests reading a job that doesn't exist.
    """
    mock_ssm_get_parameter(1)
    # An empty result set models "no matching job row".
    database.single_query = Mock(side_effect=[[]])
    result = requests_db.get_job_by_request_id("ABCDEFG")
    self.assertEqual([], result)
    database.single_query.assert_called_once()
def test_submit_request_inprogress_status(self):
    """
    Tests that an inprogress job is written to the db.
    """
    utc_now_exp = UTC_NOW_EXP_1
    requests_db.get_utc_now_iso = Mock(return_value=utc_now_exp)
    requests_db.request_id_generator = Mock(side_effect=[REQUEST_ID1])
    data = {
        "request_id": REQUEST_ID1,
        "request_group_id": REQUEST_GROUP_ID_EXP_1,
        "granule_id": "granule_1",
        "object_key": "thisisanobjectkey",
        "job_type": "restore",
        "restore_bucket_dest": "my_s3_bucket",
        "archive_bucket_dest": "your_s3_bucket",
        "job_status": "inprogress",
        "request_time": utc_now_exp,
    }
    # NOTE(review): the original call passed job_status BETWEEN the two
    # bucket args; the sibling error-status test passes both buckets,
    # then job_status, then request_time. Reordered here to match that
    # convention so the fixture row's fields line up — confirm against
    # create_insert_request's signature.
    qresult, exp_result = create_insert_request(
        REQUEST_ID1, data["request_group_id"], data["granule_id"],
        data["object_key"], data["job_type"], data["restore_bucket_dest"],
        data["archive_bucket_dest"], data["job_status"],
        data["request_time"], None, None)
    database.single_query = Mock(side_effect=[qresult, exp_result, None, None])
    mock_ssm_get_parameter(4)
    try:
        requests_db.submit_request(data)
        database.single_query.assert_called_once()
    except requests_db.DatabaseError as err:
        self.fail(f"submit_request. {str(err)}")
    try:
        result = requests_db.get_job_by_request_id(REQUEST_ID1)
        self.assertEqual(result_to_json(exp_result), result)
    except requests_db.DatabaseError as err:
        self.fail(f"get_job_by_request_id. {str(err)}")
def test_update_request_status_for_job_inprogress(self):
    """
    Tests updating an 'error' job to an 'inprogress' status.
    """
    self.create_test_requests()
    boto3.client = Mock()
    mock_ssm_get_parameter(2)
    print_rows("begin")
    # Capture the current time once, then freeze the clock on it.
    utc_now_exp = requests_db.get_utc_now_iso()
    requests_db.get_utc_now_iso = Mock(return_value=utc_now_exp)
    job_id = REQUEST_ID4
    new_status = "inprogress"
    try:
        result = requests_db.update_request_status_for_job(job_id, new_status)
        print_rows("end")
        self.assertEqual([], result)
        row = requests_db.get_job_by_request_id(job_id)
        self.assertEqual(new_status, row[0]["job_status"])
        self.assertEqual(None, row[0]["err_msg"])
    except requests_db.DatabaseError as err:
        self.fail(f"update_request_status_for_job. {str(err)}")