def test_get_jobs_by_request_group_id(self):
    """
    Tests reading a job by request_group_id.

    Verifies three cases: a missing id raises BadRequestError, a valid id
    returns the expected rows, and a DbError surfaces as DatabaseError.
    """
    mock_ssm_get_parameter(2)
    queried_ids = [REQUEST_ID5, REQUEST_ID6]
    _, db_rows = create_select_requests(queried_ids)
    database.single_query = Mock(side_effect=[db_rows])
    expected_json = result_to_json(db_rows)
    # Case 1: no request_group_id supplied -> BadRequestError expected.
    try:
        result = requests_db.get_jobs_by_request_group_id(None)
        self.fail("expected BadRequestError")
    except requests_db.BadRequestError as err:
        self.assertEqual("A request_group_id must be provided", str(err))
    # Case 2: valid id -> rows returned from the mocked query.
    try:
        result = requests_db.get_jobs_by_request_group_id(REQUEST_GROUP_ID_EXP_3)
    except requests_db.BadRequestError as err:
        self.fail(str(err))
    self.assertEqual(expected_json, result)
    database.single_query.assert_called_once()
    # Case 3: underlying DbError is translated into DatabaseError.
    database.single_query = Mock(side_effect=[DbError("database error")])
    try:
        result = requests_db.get_jobs_by_request_group_id(REQUEST_GROUP_ID_EXP_3)
        self.fail("expected DatabaseError")
    except requests_db.DatabaseError as err:
        self.assertEqual("database error", str(err))
def test_handler_db_update_err(self):
    """
    Test copy lambda with error updating db.

    The copy itself succeeds; only the status-update queries raise, so the
    handler still reports the file copy as successful.
    """
    boto3.client = Mock()
    s3_cli = boto3.client('s3')
    s3_cli.copy_object = Mock(side_effect=[None])
    request_ids = [REQUEST_ID7]
    _, select_rows = create_select_requests(request_ids)
    # Skip real retry sleeps to keep the test fast.
    time.sleep = Mock(side_effect=None)
    exp_err = 'Database Error. Internal database error, please contact LP DAAC User Services'
    # First query (the read) succeeds; the two update attempts raise.
    database.single_query = Mock(side_effect=[
        select_rows,
        requests_db.DatabaseError(exp_err),
        requests_db.DatabaseError(exp_err)
    ])
    mock_ssm_get_parameter(3)
    outcome = copy_files_to_archive.handler(self.handler_input_event, None)
    expected = [{
        'success': True,
        'source_bucket': 'my-dr-fake-glacier-bucket',
        'source_key': self.exp_file_key1,
        'request_id': REQUEST_ID7,
        'target_bucket': PROTECTED_BUCKET,
        'err_msg': ''
    }]
    self.assertEqual(expected, outcome)
def test_get_all_requests(self):
    """
    Tests reading all requests.

    First verifies the happy path returns every row as JSON, then verifies
    a DbError from the query layer surfaces as DatabaseError.
    """
    request_ids = [REQUEST_ID1, REQUEST_ID2, REQUEST_ID3, REQUEST_ID4,
                   REQUEST_ID5, REQUEST_ID6, REQUEST_ID7, REQUEST_ID8,
                   REQUEST_ID9, REQUEST_ID10, REQUEST_ID11]
    raw_rows, select_rows = create_select_requests(request_ids)
    mock_ssm_get_parameter(1)
    database.single_query = Mock(side_effect=[raw_rows])
    expected_json = result_to_json(select_rows)
    actual = requests_db.get_all_requests()
    database.single_query.assert_called_once()
    self.assertEqual(expected_json, actual)
    # Error path: the DbError message is preserved in the DatabaseError.
    mock_ssm_get_parameter(1)
    err_msg = 'Database Error. could not connect to server'
    database.single_query = Mock(side_effect=[DbError(err_msg)])
    try:
        requests_db.get_all_requests()
        self.fail("expected DatabaseError")
    except requests_db.DatabaseError as err:
        database.single_query.assert_called_once()
        self.assertEqual(err_msg, str(err))
def test_handler_db_read_err(self):
    """
    Test copy lambda with error reading db.

    Every database read raises, so the handler cannot look up the request
    and must raise CopyRequestError describing the failed file.
    """
    boto3.client = Mock()
    s3_cli = boto3.client('s3')
    s3_cli.copy_object = Mock(side_effect=[None])
    exp_request_ids = [REQUEST_ID7]
    _, exp_result = create_select_requests(exp_request_ids)
    # Skip real retry sleeps to keep the test fast.
    time.sleep = Mock(side_effect=None)
    exp_err = 'Database Error. Internal database error, please contact LP DAAC User Services'
    database.single_query = Mock(side_effect=[
        requests_db.DatabaseError(exp_err),
        requests_db.DatabaseError(exp_err)
    ])
    mock_ssm_get_parameter(2)
    try:
        copy_files_to_archive.handler(self.handler_input_event, None)
        # Bug fix: without this fail() the test silently passed when the
        # handler did NOT raise, asserting nothing at all.
        self.fail("expected CopyRequestError")
    except copy_files_to_archive.CopyRequestError as err:
        exp_result = [{
            'success': False,
            'source_bucket': 'my-dr-fake-glacier-bucket',
            'source_key': self.exp_file_key1
        }]
        exp_err = f"File copy failed. {exp_result}"
        self.assertEqual(exp_err, str(err))
def test_handler_one_file_success(self):
    """
    Test copy lambda with one file, expecting successful result.

    Removes the retry env vars so the handler falls back to its defaults,
    and restores them afterwards even if the handler raises.
    """
    del os.environ['COPY_RETRY_SLEEP_SECS']
    del os.environ['COPY_RETRIES']
    boto3.client = Mock()
    s3_cli = boto3.client('s3')
    s3_cli.copy_object = Mock(side_effect=[None])
    exp_upd_result = []
    exp_request_ids = [REQUEST_ID7]
    _, exp_result = create_select_requests(exp_request_ids)
    database.single_query = Mock(side_effect=[exp_result, exp_upd_result])
    mock_ssm_get_parameter(2)
    try:
        result = copy_files_to_archive.handler(self.handler_input_event, None)
    finally:
        # Bug fix: restore the env vars in a finally block so a handler
        # failure does not leak deleted env vars into subsequent tests.
        os.environ['COPY_RETRIES'] = '2'
        os.environ['COPY_RETRY_SLEEP_SECS'] = '1'
    boto3.client.assert_called_with('ssm')
    s3_cli.copy_object.assert_called_with(Bucket=self.exp_target_bucket,
                                          CopySource={
                                              'Bucket': self.exp_src_bucket,
                                              'Key': self.exp_file_key1
                                          },
                                          Key=self.exp_file_key1)
    exp_result = [{
        "success": True,
        "source_bucket": self.exp_src_bucket,
        "source_key": self.exp_file_key1,
        "request_id": REQUEST_ID7,
        "target_bucket": self.exp_target_bucket,
        "err_msg": ""
    }]
    self.assertEqual(exp_result, result)
    database.single_query.assert_called()
def test_delete_all_requests(self):
    """
    Tests deleting all requests from the request_status table.
    """
    request_ids = [REQUEST_ID1, REQUEST_ID2, REQUEST_ID3, REQUEST_ID4,
                   REQUEST_ID5, REQUEST_ID6, REQUEST_ID7, REQUEST_ID8,
                   REQUEST_ID9, REQUEST_ID10, REQUEST_ID11]
    try:
        create_select_requests(request_ids)
        no_rows = []
        mock_ssm_get_parameter(1)
        # The delete returns an empty result set on success.
        database.single_query = Mock(side_effect=[no_rows])
        actual = requests_db.delete_all_requests()
        database.single_query.assert_called()
        self.assertEqual(no_rows, actual)
    except requests_db.DatabaseError as err:
        self.fail(f"delete_all_requests. {str(err)}")
def test_get_jobs_by_object_key(self):
    """
    Tests reading by object_key.
    """
    mock_ssm_get_parameter(1)
    request_ids = [REQUEST_ID1, REQUEST_ID2, REQUEST_ID3]
    _, db_rows = create_select_requests(request_ids)
    # A blank key is still passed through; the mocked query decides the rows.
    object_key = " "
    expected_json = result_to_json(db_rows)
    database.single_query = Mock(side_effect=[db_rows])
    actual = requests_db.get_jobs_by_object_key(object_key)
    self.assertEqual(expected_json, actual)
    database.single_query.assert_called_once()
def test_task_clear(self):
    """
    Test clearing the request_status table.

    The "clear" function deletes everything and then re-queries, so two
    empty result sets are queued on the mock.
    """
    clear_event = {}
    clear_event["function"] = "clear"
    expected = []
    request_ids = [
        REQUEST_ID1, REQUEST_ID2, REQUEST_ID3, REQUEST_ID4, REQUEST_ID5,
        REQUEST_ID6, REQUEST_ID7, REQUEST_ID8, REQUEST_ID9, REQUEST_ID10,
        REQUEST_ID11
    ]
    self.mock_ssm_get_parameter(2)
    try:
        create_select_requests(request_ids)
        no_rows = []
        database.single_query = Mock(side_effect=[no_rows, no_rows])
        actual = request_status.task(clear_event, None)
        self.assertEqual(expected, actual)
    except requests_db.NotFound as err:
        # An empty table reported as NotFound is also acceptable.
        self.assertEqual("No granules found", str(err))
def test_handler_two_records_success(self):
    """
    Test copy lambda with two files, expecting successful result.
    """
    os.environ['DEVELOP_TESTS'] = "False"
    second_file_key = 'dr-glacier/MOD09GQ.A0219114.N5aUCG.006.0656338553321.hdf'
    boto3.client = Mock()
    s3_cli = boto3.client('s3')
    s3_cli.copy_object = Mock(side_effect=[None, None])
    upd_rows = []
    request_ids = [REQUEST_ID4, REQUEST_ID7]
    _, select_rows = create_select_requests(request_ids)
    # read + update per file, two files -> four queued query results.
    database.single_query = Mock(side_effect=[
        select_rows, upd_rows, select_rows, upd_rows
    ])
    mock_ssm_get_parameter(4)
    second_record = create_copy_event2()
    self.handler_input_event["Records"].append(second_record)
    actual = copy_files_to_archive.handler(self.handler_input_event, None)
    boto3.client.assert_called_with('ssm')
    # NOTE(review): both records expect REQUEST_ID7 even though the select
    # covered REQUEST_ID4 and REQUEST_ID7 — presumably the mocked select
    # rows drive that; verify against create_select_requests.
    expected = [{
        "success": True,
        "source_bucket": self.exp_src_bucket,
        "source_key": self.exp_file_key1,
        "request_id": REQUEST_ID7,
        "target_bucket": self.exp_target_bucket,
        "err_msg": ""
    }, {
        "success": True,
        "source_bucket": self.exp_src_bucket,
        "source_key": second_file_key,
        "request_id": REQUEST_ID7,
        "target_bucket": self.exp_target_bucket,
        "err_msg": ""
    }]
    self.assertEqual(expected, actual)
    s3_cli.copy_object.assert_any_call(Bucket=self.exp_target_bucket,
                                       CopySource={
                                           'Bucket': self.exp_src_bucket,
                                           'Key': self.exp_file_key1
                                       },
                                       Key=self.exp_file_key1)
    s3_cli.copy_object.assert_any_call(Bucket=self.exp_target_bucket,
                                       CopySource={
                                           'Bucket': self.exp_src_bucket,
                                           'Key': second_file_key
                                       },
                                       Key=second_file_key)
def test_handler_one_file_fail_3x(self):
    """
    Test copy lambda with one failed copy after 3 retries.
    """
    boto3.client = Mock()
    s3_cli = boto3.client('s3')
    # Three AccessDenied failures exhaust the retry budget.
    denied = {'Error': {'Code': 'AccessDenied'}}
    s3_cli.copy_object = Mock(side_effect=[
        ClientError(denied, 'copy_object'),
        ClientError(denied, 'copy_object'),
        ClientError(denied, 'copy_object')
    ])
    s3_cli.head_object = Mock()
    exp_error = ("File copy failed. [{'success': False, "
                 f"'source_bucket': '{self.exp_src_bucket}', "
                 f"'source_key': '{self.exp_file_key1}', "
                 f"'request_id': '{REQUEST_ID7}', "
                 f"'target_bucket': '{self.exp_target_bucket}', "
                 "'err_msg': 'An error occurred (AccessDenied) when calling "
                 "the copy_object operation: Unknown'}]")
    upd_rows = []
    request_ids = [REQUEST_ID7, REQUEST_ID4]
    _, select_rows = create_select_requests(request_ids)
    database.single_query = Mock(side_effect=[
        select_rows, select_rows, upd_rows, select_rows, upd_rows
    ])
    mock_ssm_get_parameter(5)
    try:
        copy_files_to_archive.handler(self.handler_input_event, None)
        self.fail("expected CopyRequestError")
    except copy_files_to_archive.CopyRequestError as ex:
        self.assertEqual(exp_error, str(ex))
    boto3.client.assert_called_with('ssm')
    s3_cli.copy_object.assert_called_with(Bucket=self.exp_target_bucket,
                                          CopySource={
                                              'Bucket': self.exp_src_bucket,
                                              'Key': self.exp_file_key1
                                          },
                                          Key=self.exp_file_key1)
    database.single_query.assert_called()
def test_get_jobs_by_status_max_days(self):
    """
    Tests reading by status for limited days.
    """
    request_ids = [REQUEST_ID1, REQUEST_ID2, REQUEST_ID3]
    _, db_rows = create_select_requests(request_ids)
    status = "noexist"
    mock_ssm_get_parameter(2)
    # First call finds nothing; second (with max_days) returns rows.
    database.single_query = Mock(side_effect=[[], db_rows])
    actual = requests_db.get_jobs_by_status(status)
    self.assertEqual([], actual)
    database.single_query.assert_called_once()
    status = "complete"
    expected_json = result_to_json(db_rows)
    actual = requests_db.get_jobs_by_status(status, 5)
    self.assertEqual(expected_json, actual)
    database.single_query.assert_called()
def test_task_query_object_key(self):
    """
    Test query by object_key.
    """
    query_event = {}
    object_key = "objectkey_2"
    query_event["object_key"] = object_key
    query_event["function"] = "query"
    request_ids = [REQUEST_ID4, REQUEST_ID7]
    _, db_rows = create_select_requests(request_ids)
    expected_json = result_to_json(db_rows)
    database.single_query = Mock(side_effect=[db_rows])
    self.mock_ssm_get_parameter(1)
    try:
        actual = request_status.task(query_event, None)
        self.assertEqual(expected_json, actual)
    except requests_db.NotFound as err:
        # NotFound with the matching message is also an accepted outcome.
        self.assertEqual(f"Unknown object_key: {object_key}", str(err))
def test_task_query_request_id(self):
    """
    Test query by request_id.
    """
    query_event = {}
    request_id = 1
    query_event["request_id"] = request_id
    query_event["function"] = "query"
    request_ids = [REQUEST_ID1]
    _, db_rows = create_select_requests(request_ids)
    expected_json = result_to_json(db_rows)
    database.single_query = Mock(side_effect=[db_rows])
    self.mock_ssm_get_parameter(1)
    try:
        actual = request_status.task(query_event, None)
        self.assertEqual(expected_json, actual)
    except requests_db.NotFound as err:
        # NotFound with the matching message is also an accepted outcome.
        self.assertEqual(f"Unknown request_id: {request_id}", str(err))
def test_handler_one_file_retry2_success(self):
    """
    Test copy lambda with two failed copy attempts, third attempt successful.

    Removes the retry env vars so the handler falls back to its defaults,
    and restores them afterwards even if the handler raises.
    """
    del os.environ['COPY_RETRY_SLEEP_SECS']
    del os.environ['COPY_RETRIES']
    time.sleep(1)
    boto3.client = Mock()
    s3_cli = boto3.client('s3')
    # First attempt fails with AccessDenied, second succeeds.
    s3_cli.copy_object = Mock(side_effect=[
        ClientError({'Error': {
            'Code': 'AccessDenied'
        }}, 'copy_object'),
        None
    ])
    exp_request_ids = [REQUEST_ID7, REQUEST_ID4]
    _, exp_result = create_select_requests(exp_request_ids)
    exp_upd_result = []
    database.single_query = Mock(side_effect=[
        exp_result, exp_upd_result, exp_result, exp_upd_result
    ])
    mock_ssm_get_parameter(4)
    try:
        result = copy_files_to_archive.handler(self.handler_input_event, None)
    finally:
        # Bug fix: restore the env vars in a finally block so a handler
        # failure does not leak deleted env vars into subsequent tests.
        os.environ['COPY_RETRIES'] = '2'
        os.environ['COPY_RETRY_SLEEP_SECS'] = '1'
    boto3.client.assert_called_with('ssm')
    exp_result = [{
        "success": True,
        "source_bucket": self.exp_src_bucket,
        "source_key": self.exp_file_key1,
        "request_id": REQUEST_ID7,
        "target_bucket": self.exp_target_bucket,
        "err_msg": ""
    }]
    self.assertEqual(exp_result, result)
    s3_cli.copy_object.assert_called_with(Bucket=self.exp_target_bucket,
                                          CopySource={
                                              'Bucket': self.exp_src_bucket,
                                              'Key': self.exp_file_key1
                                          },
                                          Key=self.exp_file_key1)
    database.single_query.assert_called()
def test_task_query_all(self):
    """
    Test query all.
    """
    request_ids = [
        REQUEST_ID1, REQUEST_ID2, REQUEST_ID3, REQUEST_ID4, REQUEST_ID5,
        REQUEST_ID6, REQUEST_ID7, REQUEST_ID8, REQUEST_ID9, REQUEST_ID10,
        REQUEST_ID11
    ]
    raw_rows, select_rows = create_select_requests(request_ids)
    query_event = {}
    query_event["function"] = "query"
    expected_json = result_to_json(select_rows)
    database.single_query = Mock(side_effect=[raw_rows])
    self.mock_ssm_get_parameter(1)
    try:
        actual = request_status.task(query_event, None)
        self.assertEqual(expected_json, actual)
        database.single_query.assert_called()
    except requests_db.NotFound as err:
        self.fail(str(err))
def test_update_request_status_error(self):
    """
    Tests updating a job to an 'error' status.

    After the update, re-reads the job by granule_id and checks the error
    message was persisted.
    """
    _, select_rows = create_select_requests([REQUEST_ID4])
    utc_now_exp = "2019-07-31 19:21:38.263364+00:00"
    # Pin "now" so the update uses a deterministic timestamp.
    requests_db.get_utc_now_iso = Mock(return_value=utc_now_exp)
    granule_id = "granule_4"
    job_status = "error"
    err_msg = "Error message goes here"
    no_rows = []
    # Update returns no rows; the follow-up select returns the job row.
    database.single_query = Mock(side_effect=[no_rows, select_rows])
    mock_ssm_get_parameter(2)
    try:
        actual = requests_db.update_request_status_for_job(REQUEST_ID4, job_status, err_msg)
        self.assertEqual([], actual)
        database.single_query.assert_called_once()
    except requests_db.DatabaseError as err:
        self.fail(f"update_request_status. {str(err)}")
    actual = requests_db.get_jobs_by_granule_id(granule_id)
    self.assertEqual(err_msg, actual[0]["err_msg"])