def test_single_mnp_job(self, args, expected, capsys, boto3_stubber, test_datadir, shared_datadir):
    """Verify awsbstat output for a single multi-node-parallel (MNP) job and its '#' children."""
    parent = json.loads(
        read_text(shared_datadir / "aws_api_responses/batch_describe-jobs_single_mnp_job.json")
    )
    children = json.loads(
        read_text(shared_datadir / "aws_api_responses/batch_describe-jobs_single_mnp_job_children.json")
    )
    # Expected call sequence: describe the parent first, then expand its child nodes.
    requests = [
        MockedBoto3Request(
            method="describe_jobs",
            response=parent,
            expected_params={"jobs": ["6abf3ecd-07a8-4faa-8a65-79e7404eb50f"]},
        ),
        MockedBoto3Request(
            method="describe_jobs",
            response=children,
            expected_params={
                "jobs": ["6abf3ecd-07a8-4faa-8a65-79e7404eb50f#0", "6abf3ecd-07a8-4faa-8a65-79e7404eb50f#1"]
            },
        ),
    ]
    boto3_stubber("batch", requests)
    awsbstat.main(["-c", "cluster"] + args)
    assert capsys.readouterr().out == read_text(test_datadir / expected)
def test_single_array_job(self, args, expected, capsys, boto3_stubber, test_datadir, shared_datadir):
    """Verify awsbstat output for a single array job and its ':' children."""
    parent = json.loads(
        read_text(shared_datadir / "aws_api_responses/batch_describe-jobs_single_array_job.json")
    )
    children = json.loads(
        read_text(shared_datadir / "aws_api_responses/batch_describe-jobs_single_array_job_children.json")
    )
    # Expected call sequence: describe the parent first, then expand its array children.
    requests = [
        MockedBoto3Request(
            method="describe_jobs",
            response=parent,
            expected_params={"jobs": ["3286a19c-68a9-47c9-8000-427d23ffc7ca"]},
        ),
        MockedBoto3Request(
            method="describe_jobs",
            response=children,
            expected_params={
                "jobs": ["3286a19c-68a9-47c9-8000-427d23ffc7ca:0", "3286a19c-68a9-47c9-8000-427d23ffc7ca:1"]
            },
        ),
    ]
    boto3_stubber("batch", requests)
    awsbstat.main(["-c", "cluster"] + args)
    assert capsys.readouterr().out == read_text(test_datadir / expected)
def test_default_ordering_by_id(self, args, expected, capsys, boto3_stubber, test_datadir, shared_datadir):
    """Verify awsbstat output ordering when mixed job types (MNP, array, simple) are requested."""

    def _merge_jobs(filenames):
        # Concatenate the "jobs" arrays from several canned describe-jobs response files.
        merged = []
        for filename in filenames:
            merged.extend(
                json.loads(read_text(shared_datadir / "aws_api_responses/{0}".format(filename)))["jobs"]
            )
        return {"jobs": merged}

    parents_response = _merge_jobs(
        [
            "batch_describe-jobs_single_mnp_job.json",
            "batch_describe-jobs_single_array_job.json",
            "batch_describe-jobs_single_job.json",
        ]
    )
    children_response = _merge_jobs(
        [
            "batch_describe-jobs_single_mnp_job_children.json",
            "batch_describe-jobs_single_array_job_children.json",
        ]
    )
    boto3_stubber(
        "batch",
        [
            MockedBoto3Request(
                method="describe_jobs",
                response=parents_response,
                expected_params={
                    "jobs": [
                        "3286a19c-68a9-47c9-8000-427d23ffc7ca",
                        "ab2cd019-1d84-43c7-a016-9772dd963f3b",
                        "6abf3ecd-07a8-4faa-8a65-79e7404eb50f",
                    ]
                },
            ),
            MockedBoto3Request(
                method="describe_jobs",
                response=children_response,
                expected_params={
                    "jobs": [
                        "6abf3ecd-07a8-4faa-8a65-79e7404eb50f#0",
                        "6abf3ecd-07a8-4faa-8a65-79e7404eb50f#1",
                        "3286a19c-68a9-47c9-8000-427d23ffc7ca:0",
                        "3286a19c-68a9-47c9-8000-427d23ffc7ca:1",
                    ]
                },
            ),
        ],
    )
    awsbstat.main(["-c", "cluster"] + args)
    assert capsys.readouterr().out == read_text(test_datadir / expected)
def test_all_status_detailed(self, capsys, boto3_stubber, test_datadir, shared_datadir):
    """Run awsbstat with -s ALL -d and verify the detailed table across every job status."""
    mocked_requests = []
    all_job_ids = []
    describe_response = {"jobs": []}
    for status in ALL_JOB_STATUS:
        listing = json.loads(
            read_text(shared_datadir / "aws_api_responses/batch_list-jobs_{0}.json".format(status))
        )
        describe_response["jobs"].extend(
            json.loads(
                read_text(shared_datadir / "aws_api_responses/batch_describe-jobs_{0}.json".format(status))
            )["jobs"]
        )
        all_job_ids.extend(job["jobId"] for job in listing["jobSummaryList"])
        mocked_requests.append(
            MockedBoto3Request(
                method="list_jobs",
                response=listing,
                expected_params={
                    "jobQueue": DEFAULT_AWSBATCHCLICONFIG_MOCK_CONFIG["job_queue"],
                    "jobStatus": status,
                    "nextToken": "",
                },
            )
        )
    # A single describe-jobs call covering all collected job ids follows the per-status listings.
    mocked_requests.append(
        MockedBoto3Request(
            method="describe_jobs", response=describe_response, expected_params={"jobs": all_job_ids}
        )
    )
    boto3_stubber("batch", mocked_requests)
    awsbstat.main(["-c", "cluster", "-s", "ALL", "-d"])
    # describe-jobs api validation made by the Stubber requires startedAt to be always present.
    # Removing it from output when value is default (1970-01-01T00:00:00+00:00) since this is the
    # behavior for not stubbed calls.
    output = capsys.readouterr().out.replace("1970-01-01T00:00:00+00:00", "-")
    expected_jobs_count_by_status = {
        "SUBMITTED": 2,
        "PENDING": 1,
        "RUNNABLE": 2,
        "STARTING": 2,
        "RUNNING": 2,
        "SUCCEEDED": 3,
        "FAILED": 3,
    }
    for status, count in expected_jobs_count_by_status.items():
        assert output.count(status) == count
    assert output.count("jobId") == 15
    assert output == read_text(test_datadir / "expected_output.txt")
def test_single_job_detailed(self, capsys, boto3_stubber, test_datadir, shared_datadir):
    """Verify the detailed awsbstat output produced when a single job id is passed explicitly."""
    describe_response = json.loads(
        read_text(shared_datadir / "aws_api_responses/batch_describe-jobs_single_job.json")
    )
    boto3_stubber(
        "batch",
        MockedBoto3Request(
            method="describe_jobs",
            response=describe_response,
            expected_params={"jobs": ["ab2cd019-1d84-43c7-a016-9772dd963f3b"]},
        ),
    )
    awsbstat.main(["-c", "cluster", "ab2cd019-1d84-43c7-a016-9772dd963f3b"])
    assert capsys.readouterr().out == read_text(test_datadir / "expected_output.txt")
# NOTE(review): end-to-end CLI test — runs `xgmunpack -v -d <tmpdir>/actual_output *.XGM`
# on fixture data copied into tmpdir, checks the verbose stdout listing, compares the
# unpacked files against expected output with filecmp, and compares the generated TOML
# metadata by parsed values only (the optional "Item Help/Guide" section is ignored).
# Kept byte-identical: the expected-stdout multiline string literal must not be touched.
def test_xgmunpack(tmpdir, capsys): """xgmunpack -d tmpdir/actual_output *.XGM""" datapkg = f"{testdatapkg_parent}.unpack" contents2tmpdir = make_contents2destdir(datapkg, tmpdir) # 1. prepare data files & paths contents2tmpdir(recursive=True) input_wildcard_arg = tmpdir.join("*.XGM") output_dir_arg = tmpdir.join("actual_output") # 2. run the actual xgmunpack command args = f'-v -d "{output_dir_arg!s}" "{input_wildcard_arg}"' run_xgmunpack(shlex_split(args)) # 3. check verbose stdout stdout_line1, stdout_rest = capsys.readouterr().out.split(sep="\n", maxsplit=1) assert stdout_line1.startswith("unpacking ") assert ( stdout_rest == """\
 -> 'A.IMX' -> 'B.IMX' -> 'C.XG' -> 'D.XG' -> 'E.XG' """ ) # 4. prepare paths for file & dir comparison actual_output_dir = tmpdir.join("actual_output", "unpack_XGM") expected_ouput_dir = tmpdir.join("expected_output") toml_actual_output_path = actual_output_dir.join("unpack.XGM.toml") toml_expected_output_path = expected_ouput_dir.join("unpack.XGM.toml") # 5. check that the expected and actual output files are identical cmpdirs = filecmp.dircmp( actual_output_dir, expected_ouput_dir, ignore=["unpack.XGM.toml"] ) match, mismatch, errors = filecmp.cmpfiles( actual_output_dir, expected_ouput_dir, cmpdirs.common, shallow=False ) assert not mismatch assert not errors # In the case of toml files, compares just values, not comments and such toml_actual_output = tomlkit.parse(read_text(toml_actual_output_path)) toml_expected_output = tomlkit.parse(read_text(toml_expected_output_path)) # before comparing, remove the guide/help section if it exists toml_actual_output.pop("Item Help/Guide", None) toml_expected_output.pop("Item Help/Guide", None) assert toml_actual_output == toml_expected_output
def test_succeeded_status(self, capsys, boto3_stubber, test_datadir, shared_datadir):
    """Verify awsbstat output when filtering the job queue by SUCCEEDED status."""
    listing = json.loads(read_text(shared_datadir / "aws_api_responses/batch_list-jobs_SUCCEEDED.json"))
    request = MockedBoto3Request(
        method="list_jobs",
        response=listing,
        expected_params={
            "jobQueue": DEFAULT_AWSBATCHCLICONFIG_MOCK_CONFIG["job_queue"],
            "jobStatus": "SUCCEEDED",
            "nextToken": "",
        },
    )
    boto3_stubber("batch", request)
    awsbstat.main(["-c", "cluster", "-s", "SUCCEEDED"])
    assert capsys.readouterr().out == read_text(test_datadir / "expected_output.txt")
def test_get_compute_nodes_info(pbsnodes_mocked_response, expected_output, mocker, test_datadir):
    """Parse a canned pbsnodes XML response and compare the resulting node info to the fixture."""
    canned_output = read_text(test_datadir / pbsnodes_mocked_response)
    patched = mocker.patch(
        "common.schedulers.torque_commands.check_command_output", return_value=canned_output, autospec=True
    )
    result = get_compute_nodes_info(hostname_filter=["host1"])
    # The hostname filter must be forwarded to the pbsnodes command line.
    patched.assert_called_with("/opt/torque/bin/pbsnodes -x host1", raise_on_error=False)
    assert_that(result).is_equal_to(expected_output)
def test_get_compute_nodes_info(qstat_mocked_response, expected_output, test_datadir, mocker):
    """Parse a canned SGE qstat XML response and compare the resulting node info to the fixture."""
    canned_output = read_text(test_datadir / qstat_mocked_response)
    patched = mocker.patch(
        "common.schedulers.sge_commands.check_sge_command_output", return_value=canned_output, autospec=True
    )
    result = get_compute_nodes_info()
    patched.assert_called_with("qstat -xml -g dt -u '*' -f")
    assert_that(result).is_equal_to(expected_output)
def test_children_by_ids(self, args, expected, capsys, boto3_stubber, test_datadir, shared_datadir):
    """Verify awsbstat output when child job ids (array ':' and MNP '#') are requested directly."""
    describe_response = json.loads(
        read_text(shared_datadir / "aws_api_responses/batch_describe-jobs_children_jobs.json")
    )
    request = MockedBoto3Request(
        method="describe_jobs",
        response=describe_response,
        expected_params={
            "jobs": [
                "3286a19c-68a9-47c9-8000-427d23ffc7ca:0",
                "ab2cd019-1d84-43c7-a016-9772dd963f3b",
                "6abf3ecd-07a8-4faa-8a65-79e7404eb50f#1",
            ]
        },
    )
    boto3_stubber("batch", request)
    awsbstat.main(["-c", "cluster"] + args)
    assert capsys.readouterr().out == read_text(test_datadir / expected)
def test_get_jobs_info(
    qstat_mocked_response, filter_by_states, filter_by_exec_hosts, expected_output, mocker, test_datadir
):
    """Parse a canned Torque qstat XML response with state/host filters applied."""
    canned_output = read_text(test_datadir / qstat_mocked_response)
    patched = mocker.patch(
        "common.schedulers.torque_commands.check_command_output", return_value=canned_output, autospec=True
    )
    result = get_jobs_info(filter_by_states, filter_by_exec_hosts)
    # Filtering happens in Python; the qstat invocation itself is always unfiltered.
    patched.assert_called_with("/opt/torque/bin/qstat -t -x")
    assert_that(result).is_equal_to(expected_output)
def test_all_status(self, capsys, boto3_stubber, test_datadir, shared_datadir):
    """Verify awsbstat -s ALL issues one list-jobs call per status and prints the expected table."""
    requests = [
        MockedBoto3Request(
            method="list_jobs",
            response=json.loads(
                read_text(shared_datadir / "aws_api_responses/batch_list-jobs_{0}.json".format(status))
            ),
            expected_params={
                "jobQueue": DEFAULT_AWSBATCHCLICONFIG_MOCK_CONFIG["job_queue"],
                "jobStatus": status,
                "nextToken": "",
            },
        )
        for status in ALL_JOB_STATUS
    ]
    boto3_stubber("batch", requests)
    awsbstat.main(["-c", "cluster", "-s", "ALL"])
    assert capsys.readouterr().out == read_text(test_datadir / "expected_output.txt")
def test_get_jobs_info(squeue_mocked_response, expected_output, test_datadir, mocker):
    """Parse a canned squeue (short-format) response filtered to pending/running jobs."""
    canned_output = read_text(test_datadir / squeue_mocked_response)
    patched = mocker.patch(
        "common.schedulers.slurm_commands.check_command_output", return_value=canned_output, autospec=True
    )
    result = get_jobs_info(job_state_filter="PD,R")
    patched.assert_called_with("/opt/slurm/bin/squeue -r -o '%i|%t|%D|%C|%c|%r' --states PD,R")
    assert_that(result).is_equal_to(expected_output)
def test_get_jobs_info(squeue_mocked_response, expected_output, test_datadir, mocker):
    """Parse a canned squeue (long -O format) response filtered to pending/running jobs."""
    canned_output = read_text(test_datadir / squeue_mocked_response)
    patched = mocker.patch(
        "common.schedulers.slurm_commands.check_command_output", return_value=canned_output, autospec=True
    )
    result = get_jobs_info(job_state_filter="PD,R")
    # Fixed-width -O output; each field is padded to 200 chars so parsing can split by column.
    patched.assert_called_with(
        "/opt/slurm/bin/squeue -r -O 'jobid:200,statecompact:200,numnodes:200,numcpus:200,numtasks:200,"
        "cpus-per-task:200,mincpus:200,reason:200,tres-per-job:200,tres-per-task:200,tres-per-node:200,"
        "cpus-per-tres:200' --states PD,R"
    )
    assert_that(result).is_equal_to(expected_output)
def test_no_jobs_all_status(self, capsys, boto3_stubber, test_datadir):
    """Verify awsbstat -s ALL output when every status listing comes back empty."""
    empty_listing = {"jobSummaryList": []}
    requests = [
        MockedBoto3Request(
            method="list_jobs",
            response=empty_listing,
            expected_params={
                "jobQueue": DEFAULT_AWSBATCHCLICONFIG_MOCK_CONFIG["job_queue"],
                "jobStatus": status,
                "nextToken": "",
            },
        )
        for status in ALL_JOB_STATUS
    ]
    boto3_stubber("batch", requests)
    awsbstat.main(["-c", "cluster", "-s", "ALL"])
    assert capsys.readouterr().out == read_text(test_datadir / "expected_output.txt")
def test_expanded_children(self, capsys, boto3_stubber, test_datadir, shared_datadir):
    """Run awsbstat -s ALL -e and verify array/MNP parents are expanded into their children."""
    mocked_requests = []
    # One list-jobs call per status comes first.
    for status in ALL_JOB_STATUS:
        listing = json.loads(
            read_text(shared_datadir / "aws_api_responses/batch_list-jobs_{0}.json".format(status))
        )
        mocked_requests.append(
            MockedBoto3Request(
                method="list_jobs",
                response=listing,
                expected_params={
                    "jobQueue": DEFAULT_AWSBATCHCLICONFIG_MOCK_CONFIG["job_queue"],
                    "jobStatus": status,
                    "nextToken": "",
                },
            )
        )
    # Then a describe-jobs call for every parent that has children.
    parents_response = json.loads(
        read_text(shared_datadir / "aws_api_responses/batch_describe-jobs_ALL_parents.json")
    )
    parent_ids = [job["jobId"] for job in parents_response["jobs"]]
    mocked_requests.append(
        MockedBoto3Request(
            method="describe_jobs",
            response=parents_response,
            expected_params={"jobs": parent_ids},
        )
    )
    # Finally a single describe-jobs call that expands all children ('#' = MNP node, ':' = array index).
    children_response = json.loads(
        read_text(shared_datadir / "aws_api_responses/batch_describe-jobs_ALL_children.json")
    )
    mocked_requests.append(
        MockedBoto3Request(
            method="describe_jobs",
            response=children_response,
            expected_params={
                "jobs": [
                    "3c6ee190-9121-464e-a0ac-62e4084e6bf1#0",
                    "3c6ee190-9121-464e-a0ac-62e4084e6bf1#1",
                    "11aa9096-1e98-4a7c-a44b-5ac3442df177:0",
                    "11aa9096-1e98-4a7c-a44b-5ac3442df177:1",
                    "77712b12-71eb-4007-a865-85f05de13a71#0",
                    "77712b12-71eb-4007-a865-85f05de13a71#1",
                    "bbbbbcbc-2647-4d8b-a1ef-da65bffe0dd0#0",
                    "bbbbbcbc-2647-4d8b-a1ef-da65bffe0dd0#1",
                    "qwerfcbc-2647-4d8b-a1ef-da65bffe0dd0#0",
                    "qwerfcbc-2647-4d8b-a1ef-da65bffe0dd0#1",
                    "3286a19c-68a9-47c9-8000-427d23ffc7ca:0",
                    "3286a19c-68a9-47c9-8000-427d23ffc7ca:1",
                    "3ec00225-8b85-48ba-a321-f61d005bec46#0",
                    "3ec00225-8b85-48ba-a321-f61d005bec46#1",
                    "44db07a9-f8a2-48d9-8d67-dcb04ceca54c:0",
                    "44db07a9-f8a2-48d9-8d67-dcb04ceca54c:1",
                    "7a712b12-71eb-4007-a865-85f05de13a71#0",
                    "7a712b12-71eb-4007-a865-85f05de13a71#1",
                ]
            },
        )
    )
    boto3_stubber("batch", mocked_requests)
    awsbstat.main(["-c", "cluster", "-s", "ALL", "-e"])
    # describe-jobs api validation made by the Stubber requires startedAt to be always present.
    # Removing it from output when value is default (1970-01-01T00:00:00+00:00) since this is the
    # behavior for not stubbed calls.
    output = capsys.readouterr().out.replace("1970-01-01T00:00:00+00:00", "- ")
    assert output == read_text(test_datadir / "expected_output.txt")
def test_sge_host_parsing(sge_host_xml, expected_output, test_datadir):
    """Check SgeHost.from_xml against a canned host XML fixture."""
    xml_text = read_text(test_datadir / sge_host_xml)
    parsed = SgeHost.from_xml(xml_text)
    assert_that(parsed).is_equal_to(expected_output)
def test_sge_job_parsing(sge_job_xml, expected_output, test_datadir):
    """Check SgeJob.from_xml against a canned job XML fixture."""
    xml_text = read_text(test_datadir / sge_job_xml)
    parsed = SgeJob.from_xml(xml_text)
    assert_that(parsed).is_equal_to(expected_output)