def test_single_mnp_job(self, args, expected, capsys, boto3_stubber, test_datadir, shared_datadir):
    response_parent = json.loads(
        read_text(shared_datadir / "aws_api_responses/batch_describe-jobs_single_mnp_job.json")
    )
    response_children = json.loads(
        read_text(shared_datadir / "aws_api_responses/batch_describe-jobs_single_mnp_job_children.json")
    )
    boto3_stubber(
        "batch",
        [
            MockedBoto3Request(
                method="describe_jobs",
                response=response_parent,
                expected_params={"jobs": ["6abf3ecd-07a8-4faa-8a65-79e7404eb50f"]},
            ),
            MockedBoto3Request(
                method="describe_jobs",
                response=response_children,
                expected_params={
                    "jobs": ["6abf3ecd-07a8-4faa-8a65-79e7404eb50f#0", "6abf3ecd-07a8-4faa-8a65-79e7404eb50f#1"]
                },
            ),
        ],
    )
    awsbstat.main(["-c", "cluster"] + args)
    assert capsys.readouterr().out == read_text(test_datadir / expected)
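# The tests in this module rely on a `boto3_stubber` fixture and a `MockedBoto3Request` container that
# are not part of this excerpt. Below is a minimal sketch of how such a fixture could be built with
# botocore's Stubber; the fixture body, the region and the idea of returning the stubbed client are
# assumptions, not the project's actual implementation (the real fixture would also have to patch the
# code under test so that `awsbstat` uses the stubbed client).
from collections import namedtuple

import boto3
import pytest
from botocore.stub import Stubber

MockedBoto3Request = namedtuple("MockedBoto3Request", ["method", "response", "expected_params"])


@pytest.fixture()
def boto3_stubber():
    """Yield a callable that stubs a boto3 client with an ordered list of expected requests."""
    stubbers = []

    def _stub(service, mocked_requests):
        client = boto3.client(service, region_name="us-east-1")  # hypothetical region
        stubber = Stubber(client)
        # Register every expected call, in order, together with its canned response.
        requests = mocked_requests if isinstance(mocked_requests, list) else [mocked_requests]
        for request in requests:
            stubber.add_response(request.method, request.response, request.expected_params)
        stubber.activate()
        stubbers.append(stubber)
        return client

    yield _stub
    # Verify that every registered request was actually consumed by the code under test.
    for stubber in stubbers:
        stubber.assert_no_pending_responses()
        stubber.deactivate()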
def test_single_array_job(self, args, expected, capsys, boto3_stubber, test_datadir, shared_datadir):
    response_parent = json.loads(
        read_text(shared_datadir / "aws_api_responses/batch_describe-jobs_single_array_job.json")
    )
    response_children = json.loads(
        read_text(shared_datadir / "aws_api_responses/batch_describe-jobs_single_array_job_children.json")
    )
    boto3_stubber(
        "batch",
        [
            MockedBoto3Request(
                method="describe_jobs",
                response=response_parent,
                expected_params={"jobs": ["3286a19c-68a9-47c9-8000-427d23ffc7ca"]},
            ),
            MockedBoto3Request(
                method="describe_jobs",
                response=response_children,
                expected_params={
                    "jobs": ["3286a19c-68a9-47c9-8000-427d23ffc7ca:0", "3286a19c-68a9-47c9-8000-427d23ffc7ca:1"]
                },
            ),
        ],
    )
    awsbstat.main(["-c", "cluster"] + args)
    assert capsys.readouterr().out == read_text(test_datadir / expected)
def test_default_ordering_by_id(self, args, expected, capsys, boto3_stubber, test_datadir, shared_datadir):
    parent_jobs_response = {"jobs": []}
    for file in [
        "batch_describe-jobs_single_mnp_job.json",
        "batch_describe-jobs_single_array_job.json",
        "batch_describe-jobs_single_job.json",
    ]:
        parent_jobs_response["jobs"].extend(
            json.loads(read_text(shared_datadir / "aws_api_responses/{0}".format(file)))["jobs"]
        )
    children_jobs_response = {"jobs": []}
    for file in [
        "batch_describe-jobs_single_mnp_job_children.json",
        "batch_describe-jobs_single_array_job_children.json",
    ]:
        children_jobs_response["jobs"].extend(
            json.loads(read_text(shared_datadir / "aws_api_responses/{0}".format(file)))["jobs"]
        )
    boto3_stubber(
        "batch",
        [
            MockedBoto3Request(
                method="describe_jobs",
                response=parent_jobs_response,
                expected_params={
                    "jobs": [
                        "3286a19c-68a9-47c9-8000-427d23ffc7ca",
                        "ab2cd019-1d84-43c7-a016-9772dd963f3b",
                        "6abf3ecd-07a8-4faa-8a65-79e7404eb50f",
                    ]
                },
            ),
            MockedBoto3Request(
                method="describe_jobs",
                response=children_jobs_response,
                expected_params={
                    "jobs": [
                        "6abf3ecd-07a8-4faa-8a65-79e7404eb50f#0",
                        "6abf3ecd-07a8-4faa-8a65-79e7404eb50f#1",
                        "3286a19c-68a9-47c9-8000-427d23ffc7ca:0",
                        "3286a19c-68a9-47c9-8000-427d23ffc7ca:1",
                    ]
                },
            ),
        ],
    )
    awsbstat.main(["-c", "cluster"] + args)
    assert capsys.readouterr().out == read_text(test_datadir / expected)
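# `args` and `expected` in the tests above are injected parameters, which suggests the tests are driven
# by `pytest.mark.parametrize`. A hypothetical example of such a decorator is sketched below; the
# concrete argument combinations and the names of the expected-output files are assumptions, not the
# project's actual parametrization.
import pytest


@pytest.mark.parametrize(
    "args, expected",
    [
        (["6abf3ecd-07a8-4faa-8a65-79e7404eb50f"], "expected_output.txt"),
        (["-d", "6abf3ecd-07a8-4faa-8a65-79e7404eb50f"], "expected_output_detailed.txt"),
    ],
)
def test_parametrize_example(args, expected):
    # Placeholder body: in the real tests, `args` is appended to the awsbstat command line and
    # `expected` names the reference output file under test_datadir.
    assert isinstance(args, list) and isinstance(expected, str)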
def test_all_status_detailed(self, capsys, boto3_stubber, test_datadir, shared_datadir):
    mocked_requests = []
    jobs_ids = []
    describe_jobs_response = {"jobs": []}
    for status in ALL_JOB_STATUS:
        list_jobs_response = json.loads(
            read_text(shared_datadir / "aws_api_responses/batch_list-jobs_{0}.json".format(status))
        )
        describe_jobs_response["jobs"].extend(
            json.loads(read_text(shared_datadir / "aws_api_responses/batch_describe-jobs_{0}.json".format(status)))[
                "jobs"
            ]
        )
        jobs_ids.extend([job["jobId"] for job in list_jobs_response["jobSummaryList"]])
        mocked_requests.append(
            MockedBoto3Request(
                method="list_jobs",
                response=list_jobs_response,
                expected_params={
                    "jobQueue": DEFAULT_AWSBATCHCLICONFIG_MOCK_CONFIG["job_queue"],
                    "jobStatus": status,
                    "nextToken": "",
                },
            )
        )
    mocked_requests.append(
        MockedBoto3Request(
            method="describe_jobs", response=describe_jobs_response, expected_params={"jobs": jobs_ids}
        )
    )
    boto3_stubber("batch", mocked_requests)

    awsbstat.main(["-c", "cluster", "-s", "ALL", "-d"])

    # The describe-jobs API validation performed by the Stubber requires startedAt to always be present.
    # Remove it from the output when the value is the default (1970-01-01T00:00:00+00:00) since this is
    # the behavior for non-stubbed calls.
    output = capsys.readouterr().out.replace("1970-01-01T00:00:00+00:00", "-")
    expected_jobs_count_by_status = {
        "SUBMITTED": 2,
        "PENDING": 1,
        "RUNNABLE": 2,
        "STARTING": 2,
        "RUNNING": 2,
        "SUCCEEDED": 3,
        "FAILED": 3,
    }
    for status, count in expected_jobs_count_by_status.items():
        assert output.count(status) == count
    assert output.count("jobId") == 15
    assert output == read_text(test_datadir / "expected_output.txt")
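# `ALL_JOB_STATUS` and `DEFAULT_AWSBATCHCLICONFIG_MOCK_CONFIG` are module-level test constants that are
# not shown in this excerpt. The sketch below is consistent with how the tests use them (the status list
# matches the per-status counts asserted above); the queue name itself is a made-up placeholder.
ALL_JOB_STATUS = ["SUBMITTED", "PENDING", "RUNNABLE", "STARTING", "RUNNING", "SUCCEEDED", "FAILED"]
DEFAULT_AWSBATCHCLICONFIG_MOCK_CONFIG = {"job_queue": "cluster_job_queue"}  # hypothetical queue name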
def test_single_job_detailed(self, capsys, boto3_stubber, test_datadir, shared_datadir):
    response = json.loads(read_text(shared_datadir / "aws_api_responses/batch_describe-jobs_single_job.json"))
    boto3_stubber(
        "batch",
        MockedBoto3Request(
            method="describe_jobs",
            response=response,
            expected_params={"jobs": ["ab2cd019-1d84-43c7-a016-9772dd963f3b"]},
        ),
    )
    awsbstat.main(["-c", "cluster", "ab2cd019-1d84-43c7-a016-9772dd963f3b"])
    assert capsys.readouterr().out == read_text(test_datadir / "expected_output.txt")
def load(self, file=None):
    """
    Loads content from a file.

    :param file: If given, the file that content should be loaded from.
    """
    if file:
        # check if the file exists; if not, do nothing
        if not os.path.isfile(file):
            return
    else:
        # no file specified, show file open dialog
        file = QtWidgets.QFileDialog.getOpenFileName(
            self, 'Open File', settings['path.current'], self.file_filter)
        file = file[0]
        # if the open file dialog was canceled, do nothing
        if not file:
            return

    # file exists, we should load it
    settings['path.current'] = os.path.dirname(file)
    content = common.read_text(file)
    self.file = file
    self.tooltip_changer(self.file)
    self.read_content = content

    # replace tabs if desired and set as content
    if settings['options.edit.tabs.replace']:
        content = content.replace(
            '\t', ' ' * settings['options.edit.tabs.replacement_spaces'])
    self.setPlainText(content)
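# `common.read_text` is used above and by the tests in this collection, but is not part of this excerpt.
# A minimal sketch, assuming it simply returns the whole file content decoded as UTF-8:
def read_text(file_name):
    """Read a text file and return its content as a single string."""
    with open(file_name, mode='r', encoding='utf-8') as f:
        return f.read()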
def test_succeeded_status(self, capsys, boto3_stubber, test_datadir, shared_datadir):
    response = json.loads(read_text(shared_datadir / "aws_api_responses/batch_list-jobs_SUCCEEDED.json"))
    boto3_stubber(
        "batch",
        MockedBoto3Request(
            method="list_jobs",
            response=response,
            expected_params={
                "jobQueue": DEFAULT_AWSBATCHCLICONFIG_MOCK_CONFIG["job_queue"],
                "jobStatus": "SUCCEEDED",
                "nextToken": "",
            },
        ),
    )
    awsbstat.main(["-c", "cluster", "-s", "SUCCEEDED"])
    assert capsys.readouterr().out == read_text(test_datadir / "expected_output.txt")
def test_children_by_ids(self, args, expected, capsys, boto3_stubber, test_datadir, shared_datadir):
    boto3_stubber(
        "batch",
        MockedBoto3Request(
            method="describe_jobs",
            response=json.loads(
                read_text(shared_datadir / "aws_api_responses/batch_describe-jobs_children_jobs.json")
            ),
            expected_params={
                "jobs": [
                    "3286a19c-68a9-47c9-8000-427d23ffc7ca:0",
                    "ab2cd019-1d84-43c7-a016-9772dd963f3b",
                    "6abf3ecd-07a8-4faa-8a65-79e7404eb50f#1",
                ]
            },
        ),
    )
    awsbstat.main(["-c", "cluster"] + args)
    assert capsys.readouterr().out == read_text(test_datadir / expected)
def test_all_status(self, capsys, boto3_stubber, test_datadir, shared_datadir):
    mocked_requests = []
    for status in ALL_JOB_STATUS:
        response = json.loads(
            read_text(shared_datadir / "aws_api_responses/batch_list-jobs_{0}.json".format(status))
        )
        mocked_requests.append(
            MockedBoto3Request(
                method="list_jobs",
                response=response,
                expected_params={
                    "jobQueue": DEFAULT_AWSBATCHCLICONFIG_MOCK_CONFIG["job_queue"],
                    "jobStatus": status,
                    "nextToken": "",
                },
            )
        )
    boto3_stubber("batch", mocked_requests)
    awsbstat.main(["-c", "cluster", "-s", "ALL"])
    assert capsys.readouterr().out == read_text(test_datadir / "expected_output.txt")
def test_no_jobs_all_status(self, capsys, boto3_stubber, test_datadir):
    empty_response = {"jobSummaryList": []}
    mocked_requests = []
    for status in ALL_JOB_STATUS:
        mocked_requests.append(
            MockedBoto3Request(
                method="list_jobs",
                response=empty_response,
                expected_params={
                    "jobQueue": DEFAULT_AWSBATCHCLICONFIG_MOCK_CONFIG["job_queue"],
                    "jobStatus": status,
                    "nextToken": "",
                },
            )
        )
    boto3_stubber("batch", mocked_requests)
    awsbstat.main(["-c", "cluster", "-s", "ALL"])
    assert capsys.readouterr().out == read_text(test_datadir / "expected_output.txt")
def test_expanded_children(self, capsys, boto3_stubber, test_datadir, shared_datadir):
    mocked_requests = []
    # Mock all list-jobs requests
    for status in ALL_JOB_STATUS:
        list_jobs_response = json.loads(
            read_text(shared_datadir / "aws_api_responses/batch_list-jobs_{0}.json".format(status))
        )
        mocked_requests.append(
            MockedBoto3Request(
                method="list_jobs",
                response=list_jobs_response,
                expected_params={
                    "jobQueue": DEFAULT_AWSBATCHCLICONFIG_MOCK_CONFIG["job_queue"],
                    "jobStatus": status,
                    "nextToken": "",
                },
            )
        )
    # Mock describe-jobs on parents
    describe_parent_jobs_response = json.loads(
        read_text(shared_datadir / "aws_api_responses/batch_describe-jobs_ALL_parents.json")
    )
    jobs_with_children_ids = []
    for job in describe_parent_jobs_response["jobs"]:
        jobs_with_children_ids.append(job["jobId"])
    mocked_requests.append(
        MockedBoto3Request(
            method="describe_jobs",
            response=describe_parent_jobs_response,
            expected_params={"jobs": jobs_with_children_ids},
        )
    )
    # Mock describe-jobs on children
    describe_children_jobs_response = json.loads(
        read_text(shared_datadir / "aws_api_responses/batch_describe-jobs_ALL_children.json")
    )
    mocked_requests.append(
        MockedBoto3Request(
            method="describe_jobs",
            response=describe_children_jobs_response,
            expected_params={
                "jobs": [
                    "3c6ee190-9121-464e-a0ac-62e4084e6bf1#0",
                    "3c6ee190-9121-464e-a0ac-62e4084e6bf1#1",
                    "11aa9096-1e98-4a7c-a44b-5ac3442df177:0",
                    "11aa9096-1e98-4a7c-a44b-5ac3442df177:1",
                    "77712b12-71eb-4007-a865-85f05de13a71#0",
                    "77712b12-71eb-4007-a865-85f05de13a71#1",
                    "bbbbbcbc-2647-4d8b-a1ef-da65bffe0dd0#0",
                    "bbbbbcbc-2647-4d8b-a1ef-da65bffe0dd0#1",
                    "qwerfcbc-2647-4d8b-a1ef-da65bffe0dd0#0",
                    "qwerfcbc-2647-4d8b-a1ef-da65bffe0dd0#1",
                    "3286a19c-68a9-47c9-8000-427d23ffc7ca:0",
                    "3286a19c-68a9-47c9-8000-427d23ffc7ca:1",
                    "3ec00225-8b85-48ba-a321-f61d005bec46#0",
                    "3ec00225-8b85-48ba-a321-f61d005bec46#1",
                    "44db07a9-f8a2-48d9-8d67-dcb04ceca54c:0",
                    "44db07a9-f8a2-48d9-8d67-dcb04ceca54c:1",
                    "7a712b12-71eb-4007-a865-85f05de13a71#0",
                    "7a712b12-71eb-4007-a865-85f05de13a71#1",
                ]
            },
        )
    )
    boto3_stubber("batch", mocked_requests)
    awsbstat.main(["-c", "cluster", "-s", "ALL", "-e"])
    # The describe-jobs API validation performed by the Stubber requires startedAt to always be present.
    # Remove it from the output when the value is the default (1970-01-01T00:00:00+00:00) since this is
    # the behavior for non-stubbed calls.
    output = capsys.readouterr().out.replace("1970-01-01T00:00:00+00:00", "- ")
    assert output == read_text(test_datadir / "expected_output.txt")
""" Use sys.__excepthook__, the standard hook. """ sys.__excepthook__(type, value, traceback) if __name__ == '__main__': # fix PyQt5 eating exceptions (see http://stackoverflow.com/q/14493081/1536976) sys.excepthook = exception_hook # root path is file path root_path = os.path.dirname(__file__) # read readme file (for the help window) readme_text = common.read_text(os.path.join(root_path, 'README.md')) # read settings settings_file = os.path.join(root_path, 'settings.json') settings = default_settings try: text = common.read_text(settings_file) settings = json.loads(text) # we delete all keys in settings that are not in default_settings (cleanup obsolete keys) settings = {k: v for k, v in settings.items() if k in default_settings} except: pass # create app app = QtWidgets.QApplication([]) app.setWindowIcon(load_icon('app'))