def test_result(self):
    """Run the simple_exception2 example workflow and verify its outcome.

    Expects exactly 1 failed job (2 when aborted jobs are counted), then
    checks each regularly finished job's stdout/stderr and output files
    against the reference data held by ``self.wf_examples``.
    """
    workflow = self.wf_examples.example_simple_exception2()
    self.wf_id = self.wf_ctrl.submit_workflow(
        workflow=workflow,
        name=self.__class__.__name__,
        queue='Cati_run4')
    uses_transfer = self.path_management in (self.FILE_TRANSFER,
                                             self.SHARED_TRANSFER)
    # Transfer input files if file transfer
    if uses_transfer:
        Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
    # Wait for the workflow to finish
    Helper.wait_workflow(self.wf_id, self.wf_ctrl)
    # Transfer output files if file transfer
    if uses_transfer:
        Helper.transfer_output_files(self.wf_id, self.wf_ctrl)

    status = self.wf_ctrl.workflow_status(self.wf_id)
    self.assertTrue(
        status == constants.WORKFLOW_DONE,
        "workflow status : %s. Expected : %s"
        % (status, constants.WORKFLOW_DONE))
    nb_failed_jobs = len(Helper.list_failed_jobs(self.wf_id, self.wf_ctrl))
    self.assertTrue(
        nb_failed_jobs == 1,
        "nb failed jobs : %i. Expected : %i" % (nb_failed_jobs, 1))
    nb_failed_aborted_jobs = len(
        Helper.list_failed_jobs(self.wf_id, self.wf_ctrl,
                                include_aborted_jobs=True))
    self.assertTrue(
        nb_failed_aborted_jobs == 2,
        "nb failed jobs including aborted : %i. Expected : %i"
        % (nb_failed_aborted_jobs, 2))

    (jobs_info, transfers_info, workflow_status, workflow_queue,
     tmp_files) = self.wf_ctrl.workflow_elements_status(self.wf_id)
    for (job_id, tmp_status, queue, exit_info, dates,
         drmaa_id) in jobs_info:
        job_list = self.wf_ctrl.jobs([job_id])
        job_name, job_command, job_submission_date = job_list[job_id]
        self.tested_job = job_id
        if exit_info[0] != constants.FINISHED_REGULARLY:
            continue
        # Retrieve job standard output / error into throw-away files.
        stdout_path = tempfile.NamedTemporaryFile(
            prefix="job_soma_out_log_",
            suffix=repr(job_id),
            delete=False).name
        stderr_path = tempfile.NamedTemporaryFile(
            prefix="job_soma_outerr_log_",
            suffix=repr(job_id),
            delete=False).name
        try:
            self.wf_ctrl.retrieve_job_stdouterr(
                job_id, stdout_path, stderr_path)
            if job_name == 'job1':
                # Test stdout
                isSame, msg = identical_files(
                    stdout_path, self.wf_examples.lo_stdout[1])
                self.assertTrue(isSame, msg)
                # Test no stderr
                self.assertTrue(
                    os.stat(stderr_path).st_size == 0,
                    "job stderr not empty : cf %s" % stderr_path)
                # Test output files
                if self.path_management == self.LOCAL_PATH:
                    for num in (11, 12):
                        isSame, msg = identical_files(
                            self.wf_examples.lo_out_model_file[num],
                            self.wf_examples.lo_file[num])
                        self.assertTrue(isSame, msg)
                if uses_transfer:
                    for num in (11, 12):
                        isSame, msg = identical_files(
                            self.wf_examples.lo_out_model_file[num],
                            self.wf_examples.tr_file[num].client_path)
                        self.assertTrue(isSame, msg)
            if job_name == 'job2':
                # Test stdout
                isSame, msg = identical_files(
                    stdout_path, self.wf_examples.lo_stdout[2])
                self.assertTrue(isSame, msg)
                # Test no stderr
                self.assertTrue(
                    os.stat(stderr_path).st_size == 0,
                    "job stderr not empty : cf %s" % stderr_path)
                # Test output files
                if self.path_management == self.LOCAL_PATH:
                    isSame, msg = identical_files(
                        self.wf_examples.lo_out_model_file[2],
                        self.wf_examples.lo_file[2])
                    self.assertTrue(isSame, msg)
                if uses_transfer:
                    isSame, msg = identical_files(
                        self.wf_examples.lo_out_model_file[2],
                        self.wf_examples.tr_file[2].client_path)
                    self.assertTrue(isSame, msg)
            if job_name == 'job3 with exception':
                # Test stdout
                isSame, msg = identical_files(
                    stdout_path,
                    self.wf_examples.lo_stdout_exception_model)
                self.assertTrue(isSame, msg)
                # Test the last line of stderr
                with open(stderr_path) as f:
                    lines = f.readlines()
                expected_error = 'Exception: Paf Boum Boum Bada Boum !!!\n'
                isSame = (lines[-1] == expected_error)
                self.assertTrue(
                    isSame,
                    "Job exception : %s. Expected : %s"
                    % (lines[-1], expected_error))
        finally:
            os.unlink(stdout_path)
            os.unlink(stderr_path)
    del self.tested_job
def test_result(self):
    """Run the native_spec_pbs example workflow and verify it succeeds.

    No job is expected to fail; the job's stdout and its produced output
    files are compared with the reference data in ``self.wf_examples``.
    """
    workflow = self.wf_examples.example_native_spec_pbs()
    self.wf_id = self.wf_ctrl.submit_workflow(
        workflow=workflow, name=self.__class__.__name__)
    uses_transfer = self.path_management in (self.FILE_TRANSFER,
                                             self.SHARED_TRANSFER)
    # Transfer input files if file transfer
    if uses_transfer:
        Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
    # Wait for the workflow to finish
    Helper.wait_workflow(self.wf_id, self.wf_ctrl)
    # Transfer output files if file transfer
    if uses_transfer:
        Helper.transfer_output_files(self.wf_id, self.wf_ctrl)

    status = self.wf_ctrl.workflow_status(self.wf_id)
    self.assertTrue(
        status == constants.WORKFLOW_DONE,
        "workflow status : %s. Expected : %s"
        % (status, constants.WORKFLOW_DONE))
    nb_failed_jobs = len(Helper.list_failed_jobs(self.wf_id, self.wf_ctrl))
    self.assertTrue(
        nb_failed_jobs == 0,
        "nb failed jobs : %i. Expected : %i" % (nb_failed_jobs, 0))
    nb_failed_aborted_jobs = len(
        Helper.list_failed_jobs(self.wf_id, self.wf_ctrl,
                                include_aborted_jobs=True))
    self.assertTrue(
        nb_failed_aborted_jobs == 0,
        "nb failed jobs including aborted : %i. Expected : %i"
        % (nb_failed_aborted_jobs, 0))

    (jobs_info, transfers_info, workflow_status, workflow_queue,
     tmp_files) = self.wf_ctrl.workflow_elements_status(self.wf_id)
    for (job_id, tmp_status, queue, exit_info, dates) in jobs_info:
        job_list = self.wf_ctrl.jobs([job_id])
        job_name, job_command, job_submission_date = job_list[job_id]
        self.tested_job = job_id
        if exit_info[0] != constants.FINISHED_REGULARLY:
            continue
        # Retrieve job standard output / error into throw-away files.
        stdout_path = tempfile.NamedTemporaryFile(
            prefix="job_soma_out_log_",
            suffix=repr(job_id),
            delete=False).name
        stderr_path = tempfile.NamedTemporaryFile(
            prefix="job_soma_outerr_log_",
            suffix=repr(job_id),
            delete=False).name
        try:
            self.wf_ctrl.retrieve_job_stdouterr(
                job_id, stdout_path, stderr_path)
            # Test stdout
            isSame, msg = identical_files(
                stdout_path, self.wf_examples.lo_stdout[1])
            self.assertTrue(isSame, msg)
            # Test no stderr
            self.assertTrue(
                os.stat(stderr_path).st_size == 0,
                "job stderr not empty : cf %s" % stderr_path)
            # Test output files
            if self.path_management == self.LOCAL_PATH:
                for num in (11, 12):
                    isSame, msg = identical_files(
                        self.wf_examples.lo_out_model_file[num],
                        self.wf_examples.lo_file[num])
                    self.assertTrue(isSame, msg)
            if uses_transfer:
                for num in (11, 12):
                    isSame, msg = identical_files(
                        self.wf_examples.lo_out_model_file[num],
                        self.wf_examples.tr_file[num].client_path)
                    self.assertTrue(isSame, msg)
        finally:
            os.unlink(stdout_path)
            os.unlink(stderr_path)
    del self.tested_job
def test_result(self):
    """Run the simple example workflow and verify it succeeds.

    No job may fail.  For each regularly finished job, stdout is compared
    to a reference, stderr must be empty, and output files are compared
    to reference model files.

    Fix: the stderr diagnostic message previously used
    ``open(job_stderr_file).read()`` without closing the file handle;
    it now reads through a ``with`` block.
    """
    workflow = self.wf_examples.example_simple()
    self.wf_id = self.wf_ctrl.submit_workflow(
        workflow=workflow, name=self.__class__.__name__)
    # Transfer input files if file transfer
    if self.path_management == self.FILE_TRANSFER or \
            self.path_management == self.SHARED_TRANSFER:
        Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
    # Wait for the workflow to finish
    Helper.wait_workflow(self.wf_id, self.wf_ctrl)
    # Transfer output files if file transfer
    if self.path_management == self.FILE_TRANSFER or \
            self.path_management == self.SHARED_TRANSFER:
        Helper.transfer_output_files(self.wf_id, self.wf_ctrl)

    status = self.wf_ctrl.workflow_status(self.wf_id)
    self.assertTrue(status == constants.WORKFLOW_DONE,
                    "workflow status : %s. Expected : %s"
                    % (status, constants.WORKFLOW_DONE))
    nb_failed_jobs = len(Helper.list_failed_jobs(self.wf_id, self.wf_ctrl))
    self.assertTrue(nb_failed_jobs == 0,
                    "nb failed jobs : %i. Expected : %i"
                    % (nb_failed_jobs, 0))
    nb_failed_aborted_jobs = len(Helper.list_failed_jobs(
        self.wf_id, self.wf_ctrl, include_aborted_jobs=True))
    self.assertTrue(nb_failed_aborted_jobs == 0,
                    "nb failed jobs including aborted : %i. Expected : %i"
                    % (nb_failed_aborted_jobs, 0))

    (jobs_info, transfers_info, workflow_status, workflow_queue,
     tmp_files) = self.wf_ctrl.workflow_elements_status(self.wf_id)
    for (job_id, tmp_status, queue, exit_info, dates) in jobs_info:
        job_list = self.wf_ctrl.jobs([job_id])
        job_name, job_command, job_submission_date = job_list[job_id]
        self.tested_job = job_id
        if exit_info[0] == constants.FINISHED_REGULARLY:
            # To check job standard out and standard err
            job_stdout_file = tempfile.NamedTemporaryFile(
                prefix="job_soma_out_log_",
                suffix=repr(job_id),
                delete=False)
            job_stdout_file = job_stdout_file.name
            job_stderr_file = tempfile.NamedTemporaryFile(
                prefix="job_soma_outerr_log_",
                suffix=repr(job_id),
                delete=False)
            job_stderr_file = job_stderr_file.name
            try:
                self.wf_ctrl.retrieve_job_stdouterr(job_id,
                                                    job_stdout_file,
                                                    job_stderr_file)
                if job_name == 'job1':
                    # Test stdout
                    isSame, msg = identical_files(
                        job_stdout_file, self.wf_examples.lo_stdout[1])
                    self.assertTrue(isSame, msg)
                    # Test no stderr: include the stderr content in the
                    # failure message for easier debugging.
                    with open(job_stderr_file) as f:
                        stderr_content = f.read()
                    msg = "job stderr not empty : cf %s\n" \
                          "stderr:\n---\n%s\n---" \
                          % (job_stderr_file, stderr_content)
                    self.assertTrue(os.stat(job_stderr_file).st_size == 0,
                                    msg)
                    # Test output files
                    if self.path_management == self.LOCAL_PATH:
                        isSame, msg = identical_files(
                            self.wf_examples.lo_out_model_file[11],
                            self.wf_examples.lo_file[11])
                        self.assertTrue(isSame, msg)
                        isSame, msg = identical_files(
                            self.wf_examples.lo_out_model_file[12],
                            self.wf_examples.lo_file[12])
                        self.assertTrue(isSame, msg)
                    if self.path_management == self.FILE_TRANSFER or \
                            self.path_management == self.SHARED_TRANSFER:
                        isSame, msg = identical_files(
                            self.wf_examples.lo_out_model_file[11],
                            self.wf_examples.tr_file[11].client_path)
                        self.assertTrue(isSame, msg)
                        isSame, msg = identical_files(
                            self.wf_examples.lo_out_model_file[12],
                            self.wf_examples.tr_file[12].client_path)
                        self.assertTrue(isSame, msg)
                    # NOTE: comparing job1's stderr to a model raised
                    # unexplained errors, so it is not checked; see
                    # http://stackoverflow.com/questions/10496758/unexpected-end-of-file-and-error-importing-function-definition-error-running
                if job_name in ['job2', 'job3', 'job4']:
                    # Job number is the last character of the name.
                    job_nb = int(job_name[3])
                    # Test stdout
                    isSame, msg = identical_files(
                        job_stdout_file,
                        self.wf_examples.lo_stdout[job_nb])
                    self.assertTrue(isSame, msg)
                    # Test no stderr
                    self.assertTrue(
                        os.stat(job_stderr_file).st_size == 0,
                        "job stderr not empty : cf %s" % job_stderr_file)
                    # Test output files
                    if self.path_management == self.LOCAL_PATH:
                        isSame, msg = identical_files(
                            self.wf_examples.lo_out_model_file[job_nb],
                            self.wf_examples.lo_file[job_nb])
                        self.assertTrue(isSame, msg)
                    if self.path_management == self.FILE_TRANSFER or \
                            self.path_management == self.SHARED_TRANSFER:
                        isSame, msg = identical_files(
                            self.wf_examples.lo_out_model_file[job_nb],
                            self.wf_examples.tr_file[job_nb].client_path)
                        self.assertTrue(isSame, msg)
            finally:
                os.unlink(job_stdout_file)
                os.unlink(job_stderr_file)
    del self.tested_job
def test_result(self):
    """Run the special_command example workflow and verify its outcome.

    Building the workflow must emit exactly one UserWarning about single
    quotes in the command.  No job may fail; each regularly finished
    job's stdout is compared to a local/remote reference and stderr must
    be empty.
    """
    # Cause all warnings to always be triggered.
    warnings.simplefilter("always")
    with warnings.catch_warnings(record=True) as w:
        # Trigger a warning.
        workflow = self.wf_examples.example_special_command()
        # Verify some things
        self.assertTrue(len(w) == 1)
        self.assertTrue(issubclass(w[-1].category, UserWarning))
        self.assertTrue(
            "contains single quote. It could fail using DRMAA"
            in str(w[-1].message))

    self.wf_id = self.wf_ctrl.submit_workflow(
        workflow=workflow, name=self.__class__.__name__)
    # Transfer input files if file transfer
    if self.path_management in (self.FILE_TRANSFER, self.SHARED_TRANSFER):
        Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
    # Wait for the workflow to finish
    Helper.wait_workflow(self.wf_id, self.wf_ctrl)

    status = self.wf_ctrl.workflow_status(self.wf_id)
    self.assertTrue(
        status == constants.WORKFLOW_DONE,
        "workflow status : %s. Expected : %s"
        % (status, constants.WORKFLOW_DONE))
    # TODO : sometimes raises an error
    # because status = "workflow_in_progress"
    nb_failed_jobs = len(Helper.list_failed_jobs(self.wf_id, self.wf_ctrl))
    self.assertTrue(
        nb_failed_jobs == 0,
        "nb failed jobs : %i. Expected : %i" % (nb_failed_jobs, 0))
    nb_failed_aborted_jobs = len(
        Helper.list_failed_jobs(self.wf_id, self.wf_ctrl,
                                include_aborted_jobs=True))
    self.assertTrue(
        nb_failed_aborted_jobs == 0,
        "nb failed jobs including aborted : %i. Expected : %i"
        % (nb_failed_aborted_jobs, 0))

    (jobs_info, transfers_info, workflow_status, workflow_queue,
     tmp_files) = self.wf_ctrl.workflow_elements_status(self.wf_id)
    for (job_id, tmp_status, queue, exit_info, dates) in jobs_info:
        job_list = self.wf_ctrl.jobs([job_id])
        job_name, job_command, job_submission_date = job_list[job_id]
        self.tested_job = job_id
        if exit_info[0] != constants.FINISHED_REGULARLY:
            continue
        # Retrieve job standard output / error into throw-away files.
        stdout_path = tempfile.NamedTemporaryFile(
            prefix="job_soma_out_log_",
            suffix=repr(job_id),
            delete=False).name
        stderr_path = tempfile.NamedTemporaryFile(
            prefix="job_soma_outerr_log_",
            suffix=repr(job_id),
            delete=False).name
        try:
            self.wf_ctrl.retrieve_job_stdouterr(
                job_id, stdout_path, stderr_path)
            # Test job stdout: reference differs between local and
            # remote execution.
            if self.path_management == self.LOCAL_PATH:
                model = self.wf_examples.lo_stdout_command_local
            else:
                model = self.wf_examples.lo_stdout_command_remote
            isSame, msg = identical_files(stdout_path, model)
            self.assertTrue(isSame, msg)
            # Test no stderr
            self.assertTrue(
                os.stat(stderr_path).st_size == 0,
                "job stderr not empty : cf %s" % stderr_path)
        finally:
            os.unlink(stdout_path)
            os.unlink(stderr_path)
    del self.tested_job
def test_result(self):
    """Run the simple_exception1 example workflow and verify its outcome.

    Expects exactly 1 failed job (4 when aborted jobs are counted).  The
    job raising the exception must print the model stdout and end its
    stderr with the expected exception line.
    """
    workflow = self.wf_examples.example_simple_exception1()
    self.wf_id = self.wf_ctrl.submit_workflow(
        workflow=workflow, name=self.__class__.__name__)
    uses_transfer = self.path_management in (self.FILE_TRANSFER,
                                             self.SHARED_TRANSFER)
    # Transfer input files if file transfer
    if uses_transfer:
        Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
    # Wait for the workflow to finish
    Helper.wait_workflow(self.wf_id, self.wf_ctrl)
    # Transfer output files if file transfer
    if uses_transfer:
        Helper.transfer_output_files(self.wf_id, self.wf_ctrl)

    status = self.wf_ctrl.workflow_status(self.wf_id)
    self.assertTrue(
        status == constants.WORKFLOW_DONE,
        "workflow status : %s. Expected : %s"
        % (status, constants.WORKFLOW_DONE))
    nb_failed_jobs = len(Helper.list_failed_jobs(self.wf_id, self.wf_ctrl))
    self.assertTrue(
        nb_failed_jobs == 1,
        "nb failed jobs : %i. Expected : %i" % (nb_failed_jobs, 1))
    nb_failed_aborted_jobs = len(
        Helper.list_failed_jobs(self.wf_id, self.wf_ctrl,
                                include_aborted_jobs=True))
    self.assertTrue(
        nb_failed_aborted_jobs == 4,
        "nb failed jobs including aborted : %i. Expected : %i"
        % (nb_failed_aborted_jobs, 4))

    (jobs_info, transfers_info, workflow_status, workflow_queue,
     tmp_files) = self.wf_ctrl.workflow_elements_status(self.wf_id)
    for (job_id, tmp_status, queue, exit_info, dates) in jobs_info:
        job_list = self.wf_ctrl.jobs([job_id])
        job_name, job_command, job_submission_date = job_list[job_id]
        self.tested_job = job_id
        if exit_info[0] != constants.FINISHED_REGULARLY:
            continue
        # Retrieve job standard output / error into throw-away files.
        stdout_path = tempfile.NamedTemporaryFile(
            prefix="job_soma_out_log_",
            suffix=repr(job_id),
            delete=False).name
        stderr_path = tempfile.NamedTemporaryFile(
            prefix="job_soma_outerr_log_",
            suffix=repr(job_id),
            delete=False).name
        try:
            self.wf_ctrl.retrieve_job_stdouterr(
                job_id, stdout_path, stderr_path)
            if job_name == 'job1 with exception':
                # Test stdout
                isSame, msg = identical_files(
                    stdout_path,
                    self.wf_examples.lo_stdout_exception_model)
                self.assertTrue(isSame, msg)
                # Test the last line of stderr
                with open(stderr_path) as f:
                    lines = f.readlines()
                expected_error = 'Exception: Paf Boum Boum Bada Boum !!!\n'
                isSame = (lines[-1] == expected_error)
                self.assertTrue(
                    isSame,
                    "Job exception : %s. Expected : %s"
                    % (lines[-1], expected_error))
        finally:
            os.unlink(stdout_path)
            os.unlink(stderr_path)
    del self.tested_job
def test_result(self):
    """Run the special_transfer example workflow and verify its outcome.

    No job may fail.  The 'dir_contents' job must list the input
    directory contents, and the 'multi file format test' job's stdout
    must match the model; both must leave stderr empty.

    Fix: removed a duplicate ``workflow_status`` query whose result was
    immediately discarded and overwritten.
    """
    workflow = self.wf_examples.example_special_transfer()
    self.wf_id = self.wf_ctrl.submit_workflow(
        workflow=workflow, name=self.__class__.__name__)
    # Transfer input files
    Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
    # Wait for the workflow to finish
    Helper.wait_workflow(self.wf_id, self.wf_ctrl)
    # Transfer output files
    Helper.transfer_output_files(self.wf_id, self.wf_ctrl)

    status = self.wf_ctrl.workflow_status(self.wf_id)
    self.assertTrue(
        status == constants.WORKFLOW_DONE,
        "workflow status : %s. Expected : %s"
        % (status, constants.WORKFLOW_DONE))
    nb_failed_jobs = len(Helper.list_failed_jobs(self.wf_id, self.wf_ctrl))
    self.assertTrue(
        nb_failed_jobs == 0,
        "nb failed jobs : %i. Expected : %i" % (nb_failed_jobs, 0))
    nb_failed_aborted_jobs = len(
        Helper.list_failed_jobs(self.wf_id, self.wf_ctrl,
                                include_aborted_jobs=True))
    self.assertTrue(
        nb_failed_aborted_jobs == 0,
        "nb failed jobs including aborted : %i. Expected : %i"
        % (nb_failed_aborted_jobs, 0))

    (jobs_info, transfers_info, workflow_status, workflow_queue,
     tmp_files) = self.wf_ctrl.workflow_elements_status(self.wf_id)
    for (job_id, tmp_status, queue, exit_info, dates) in jobs_info:
        job_list = self.wf_ctrl.jobs([job_id])
        job_name, job_command, job_submission_date = job_list[job_id]
        self.tested_job = job_id
        if exit_info[0] == constants.FINISHED_REGULARLY:
            # To check job standard out and standard err
            job_stdout_file = tempfile.NamedTemporaryFile(
                prefix="job_soma_out_log_",
                suffix=repr(job_id),
                delete=False)
            job_stdout_file = job_stdout_file.name
            job_stderr_file = tempfile.NamedTemporaryFile(
                prefix="job_soma_outerr_log_",
                suffix=repr(job_id),
                delete=False)
            job_stderr_file = job_stderr_file.name
            try:
                self.wf_ctrl.retrieve_job_stdouterr(job_id,
                                                    job_stdout_file,
                                                    job_stderr_file)
                if job_name == "dir_contents":
                    # Test job standard out against the actual contents
                    # of the input directory.
                    with open(job_stdout_file, "r+") as f:
                        dir_contents = f.readlines()
                    dir_path_in = self.wf_examples.lo_in_dir
                    full_path_list = []
                    for element in os.listdir(dir_path_in):
                        full_path_list.append(
                            os.path.join(dir_path_in, element))
                    dir_contents_model = list_contents(full_path_list, [])
                    self.assertTrue(
                        sorted(dir_contents) == sorted(dir_contents_model))
                    # Test no stderr
                    self.assertTrue(
                        os.stat(job_stderr_file).st_size == 0,
                        "job stderr not empty : cf %s" % job_stderr_file)
                if job_name == "multi file format test":
                    # Test job standard out
                    isSame, msg = identical_files(
                        job_stdout_file, self.wf_examples.lo_mff_stdout)
                    self.assertTrue(isSame, msg)
                    # Test no stderr
                    self.assertTrue(
                        os.stat(job_stderr_file).st_size == 0,
                        "job stderr not empty : cf %s" % job_stderr_file)
            finally:
                os.unlink(job_stdout_file)
                os.unlink(job_stderr_file)
    del self.tested_job
def test_result(self):
    """Run the special_transfer example workflow and verify its outcome.

    No job may fail.  The 'dir_contents' job must list the input
    directory contents, and the 'multi file format test' job's stdout
    must match the model; both must leave stderr empty.

    Fix: removed a duplicate ``workflow_status`` query whose result was
    immediately discarded and overwritten.
    """
    workflow = self.wf_examples.example_special_transfer()
    self.wf_id = self.wf_ctrl.submit_workflow(
        workflow=workflow, name=self.__class__.__name__)
    # Transfer input files
    Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
    # Wait for the workflow to finish
    Helper.wait_workflow(self.wf_id, self.wf_ctrl)
    # Transfer output files
    Helper.transfer_output_files(self.wf_id, self.wf_ctrl)

    status = self.wf_ctrl.workflow_status(self.wf_id)
    self.assertTrue(status == constants.WORKFLOW_DONE,
                    "workflow status : %s. Expected : %s"
                    % (status, constants.WORKFLOW_DONE))
    nb_failed_jobs = len(Helper.list_failed_jobs(
        self.wf_id, self.wf_ctrl))
    self.assertTrue(nb_failed_jobs == 0,
                    "nb failed jobs : %i. Expected : %i"
                    % (nb_failed_jobs, 0))
    nb_failed_aborted_jobs = len(Helper.list_failed_jobs(
        self.wf_id, self.wf_ctrl, include_aborted_jobs=True))
    self.assertTrue(nb_failed_aborted_jobs == 0,
                    "nb failed jobs including aborted : %i. Expected : %i"
                    % (nb_failed_aborted_jobs, 0))

    (jobs_info, transfers_info, workflow_status, workflow_queue,
     tmp_files) = self.wf_ctrl.workflow_elements_status(self.wf_id)
    for (job_id, tmp_status, queue, exit_info, dates) in jobs_info:
        job_list = self.wf_ctrl.jobs([job_id])
        job_name, job_command, job_submission_date = job_list[job_id]
        self.tested_job = job_id
        if exit_info[0] == constants.FINISHED_REGULARLY:
            # To check job standard out and standard err
            job_stdout_file = tempfile.NamedTemporaryFile(
                prefix="job_soma_out_log_",
                suffix=repr(job_id),
                delete=False)
            job_stdout_file = job_stdout_file.name
            job_stderr_file = tempfile.NamedTemporaryFile(
                prefix="job_soma_outerr_log_",
                suffix=repr(job_id),
                delete=False)
            job_stderr_file = job_stderr_file.name
            try:
                self.wf_ctrl.retrieve_job_stdouterr(job_id,
                                                    job_stdout_file,
                                                    job_stderr_file)
                if job_name == 'dir_contents':
                    # Test job standard out against the actual contents
                    # of the input directory.
                    with open(job_stdout_file, 'r+') as f:
                        dir_contents = f.readlines()
                    dir_path_in = self.wf_examples.lo_in_dir
                    full_path_list = []
                    for element in os.listdir(dir_path_in):
                        full_path_list.append(
                            os.path.join(dir_path_in, element))
                    dir_contents_model = list_contents(full_path_list, [])
                    self.assertTrue(
                        sorted(dir_contents) == sorted(dir_contents_model))
                    # Test no stderr
                    self.assertTrue(
                        os.stat(job_stderr_file).st_size == 0,
                        "job stderr not empty : cf %s" % job_stderr_file)
                if job_name == 'multi file format test':
                    # Test job standard out
                    isSame, msg = identical_files(
                        job_stdout_file, self.wf_examples.lo_mff_stdout)
                    self.assertTrue(isSame, msg)
                    # Test no stderr
                    self.assertTrue(
                        os.stat(job_stderr_file).st_size == 0,
                        "job stderr not empty : cf %s" % job_stderr_file)
            finally:
                os.unlink(job_stdout_file)
                os.unlink(job_stderr_file)
    del self.tested_job
def test_result(self):
    """Run the special_command example workflow and verify its outcome.

    Building the workflow must emit exactly one UserWarning about single
    quotes in the command.  No job may fail; each regularly finished
    job's stdout is compared to a local/remote reference and stderr must
    be empty.
    """
    # Cause all warnings to always be triggered.
    warnings.simplefilter("always")
    with warnings.catch_warnings(record=True) as w:
        # Trigger a warning.
        workflow = self.wf_examples.example_special_command()
        # Verify some things
        self.assertTrue(len(w) == 1)
        self.assertTrue(issubclass(w[-1].category, UserWarning))
        self.assertTrue(
            "contains single quote. It could fail using DRMAA"
            in str(w[-1].message))

    self.wf_id = self.wf_ctrl.submit_workflow(
        workflow=workflow, name=self.__class__.__name__)
    # Transfer input files if file transfer
    if self.path_management in (self.FILE_TRANSFER, self.SHARED_TRANSFER):
        Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
    # Wait for the workflow to finish
    Helper.wait_workflow(self.wf_id, self.wf_ctrl)

    status = self.wf_ctrl.workflow_status(self.wf_id)
    self.assertTrue(
        status == constants.WORKFLOW_DONE,
        "workflow status : %s. Expected : %s"
        % (status, constants.WORKFLOW_DONE))
    # TODO : sometimes raises an error
    # because status = "workflow_in_progress"
    nb_failed_jobs = len(Helper.list_failed_jobs(self.wf_id, self.wf_ctrl))
    self.assertTrue(
        nb_failed_jobs == 0,
        "nb failed jobs : %i. Expected : %i" % (nb_failed_jobs, 0))
    nb_failed_aborted_jobs = len(
        Helper.list_failed_jobs(self.wf_id, self.wf_ctrl,
                                include_aborted_jobs=True))
    self.assertTrue(
        nb_failed_aborted_jobs == 0,
        "nb failed jobs including aborted : %i. Expected : %i"
        % (nb_failed_aborted_jobs, 0))

    (jobs_info, transfers_info, workflow_status, workflow_queue,
     tmp_files) = self.wf_ctrl.workflow_elements_status(self.wf_id)
    for (job_id, tmp_status, queue, exit_info, dates) in jobs_info:
        job_list = self.wf_ctrl.jobs([job_id])
        job_name, job_command, job_submission_date = job_list[job_id]
        self.tested_job = job_id
        if exit_info[0] != constants.FINISHED_REGULARLY:
            continue
        # Retrieve job standard output / error into throw-away files.
        stdout_path = tempfile.NamedTemporaryFile(
            prefix="job_soma_out_log_",
            suffix=repr(job_id),
            delete=False).name
        stderr_path = tempfile.NamedTemporaryFile(
            prefix="job_soma_outerr_log_",
            suffix=repr(job_id),
            delete=False).name
        try:
            self.wf_ctrl.retrieve_job_stdouterr(
                job_id, stdout_path, stderr_path)
            # Test job stdout: reference differs between local and
            # remote execution.
            if self.path_management == self.LOCAL_PATH:
                model = self.wf_examples.lo_stdout_command_local
            else:
                model = self.wf_examples.lo_stdout_command_remote
            isSame, msg = identical_files(stdout_path, model)
            self.assertTrue(isSame, msg)
            # Test no stderr
            self.assertTrue(
                os.stat(stderr_path).st_size == 0,
                "job stderr not empty : cf %s" % stderr_path)
        finally:
            os.unlink(stdout_path)
            os.unlink(stderr_path)
    del self.tested_job
def run_workflow(self, workflow, test_files=None, test_dyn_files=None):
    """Submit *workflow*, wait for it, and check it completed cleanly.

    Parameters
    ----------
    workflow:
        the workflow object to submit.
    test_files: list, optional
        file numbers whose outputs are compared to model files.
    test_dyn_files: dict, optional
        maps a job name to a dict ``{output_param: model_file_num}``;
        the job's dynamic output parameters are compared to the
        corresponding model files.

    Fixes: mutable default arguments replaced with ``None`` sentinels;
    removed a dead ``dyn_out_params`` literal that was immediately
    overwritten; ``six.iteritems`` replaced with ``dict.items``.
    """
    if test_files is None:
        test_files = []
    if test_dyn_files is None:
        test_dyn_files = {}
    self.wf_id = self.wf_ctrl.submit_workflow(
        workflow=workflow, name=self.__class__.__name__)
    # Transfer input files if file transfer
    if self.path_management == self.FILE_TRANSFER or \
            self.path_management == self.SHARED_TRANSFER:
        Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
    # Wait for the workflow to finish
    Helper.wait_workflow(self.wf_id, self.wf_ctrl)
    # Transfer output files if file transfer
    if self.path_management == self.FILE_TRANSFER or \
            self.path_management == self.SHARED_TRANSFER:
        Helper.transfer_output_files(self.wf_id, self.wf_ctrl)

    status = self.wf_ctrl.workflow_status(self.wf_id)
    self.assertTrue(status == constants.WORKFLOW_DONE,
                    "workflow status : %s. Expected : %s"
                    % (status, constants.WORKFLOW_DONE))
    failed_jobs = Helper.list_failed_jobs(self.wf_id, self.wf_ctrl)
    nb_failed_jobs = len(failed_jobs)
    if nb_failed_jobs != 0:
        # Print details before failing the assertion to ease debugging.
        self.print_jobs(failed_jobs, 'Failed jobs')
    self.assertTrue(nb_failed_jobs == 0,
                    "nb failed jobs : %i. Expected : %i"
                    % (nb_failed_jobs, 0))
    failed_aborted_jobs = Helper.list_failed_jobs(
        self.wf_id, self.wf_ctrl, include_aborted_jobs=True)
    nb_failed_aborted_jobs = len(failed_aborted_jobs)
    if nb_failed_aborted_jobs != 0:
        self.print_jobs(failed_aborted_jobs, 'Aborted jobs')
    self.assertTrue(nb_failed_aborted_jobs == 0,
                    "nb failed jobs including aborted : %i. Expected : %i"
                    % (nb_failed_aborted_jobs, 0))

    (jobs_info, transfers_info, workflow_status, workflow_queue,
     tmp_files) = self.wf_ctrl.workflow_elements_status(self.wf_id)
    # Collected dynamic output parameters, keyed by job name.
    dyn_out_params = {}
    for (job_id, tmp_status, queue, exit_info, dates,
         drmaa_id) in jobs_info:
        job_list = self.wf_ctrl.jobs([job_id])
        job_name, job_command, job_submission_date = job_list[job_id]
        self.tested_job = job_id
        if exit_info[0] == constants.FINISHED_REGULARLY:
            # To check job standard out and standard err
            job_stdout_file = tempfile.NamedTemporaryFile(
                prefix="job_soma_out_log_",
                suffix=repr(job_id),
                delete=False)
            job_stdout_file = job_stdout_file.name
            job_stderr_file = tempfile.NamedTemporaryFile(
                prefix="job_soma_outerr_log_",
                suffix=repr(job_id),
                delete=False)
            job_stderr_file = job_stderr_file.name
            try:
                self.wf_ctrl.retrieve_job_stdouterr(job_id,
                                                    job_stdout_file,
                                                    job_stderr_file)
                if job_name.startswith('job1'):
                    # Test stdout
                    isSame, msg = identical_files(
                        job_stdout_file, self.wf_examples.lo_stdout[1])
                    self.assertTrue(isSame, msg)
                    # Test no stderr
                    with open(job_stderr_file) as f:
                        msg = "job stderr not empty : cf %s\n" \
                              "stderr:\n---\n%s\n---" \
                              % (job_stderr_file, f.read())
                    self.assertTrue(os.stat(job_stderr_file).st_size == 0,
                                    msg)
                if job_name in test_dyn_files:
                    out_params = self.wf_ctrl.get_job_output_params(job_id)
                    dyn_out_params[job_name] = out_params
                # NOTE: comparing stderr to a model raised unexplained
                # errors, so it is not checked; see
                # http://stackoverflow.com/questions/10496758/unexpected-end-of-file-and-error-importing-function-definition-error-running
            finally:
                os.unlink(job_stdout_file)
                os.unlink(job_stderr_file)

    for out_file_num in test_files:
        # Test output files
        if self.path_management == self.LOCAL_PATH:
            out_file = self.wf_examples.lo_file[out_file_num]
        elif self.path_management == self.FILE_TRANSFER or \
                self.path_management == self.SHARED_TRANSFER:
            out_file = self.wf_examples.tr_file[out_file_num].client_path
        isSame, msg = identical_files(
            self.wf_examples.lo_out_model_file[out_file_num], out_file)
        self.assertTrue(isSame, msg)

    for job_name, ref_out_params in test_dyn_files.items():
        out_params = dyn_out_params[job_name]
        for param, file_num in ref_out_params.items():
            isSame, msg = identical_files(
                self.wf_examples.lo_out_model_file[file_num],
                out_params[param])
            self.assertTrue(isSame, msg)
    del self.tested_job
def test_result(self):
    """Run the multiple example workflow and verify its outcome.

    Expects exactly 2 failed jobs (6 when aborted jobs are counted).
    Regular jobs must match model stdout/outputs; exception jobs must
    end their stderr with the expected exception line.

    Fixes: the exception-job stdout assertion now reports the diff
    message on failure, and a leftover debug ``print`` was removed.
    """
    workflow = self.wf_examples.example_multiple()
    self.wf_id = self.wf_ctrl.submit_workflow(
        workflow=workflow, name=self.__class__.__name__)
    # Transfer input files if file transfer
    if self.path_management == self.FILE_TRANSFER or \
            self.path_management == self.SHARED_TRANSFER:
        Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
    # Wait for the workflow to finish
    Helper.wait_workflow(self.wf_id, self.wf_ctrl)
    # Transfer output files if file transfer
    if self.path_management == self.FILE_TRANSFER or \
            self.path_management == self.SHARED_TRANSFER:
        Helper.transfer_output_files(self.wf_id, self.wf_ctrl)

    status = self.wf_ctrl.workflow_status(self.wf_id)
    self.assertTrue(
        status == constants.WORKFLOW_DONE,
        "workflow status : %s. Expected : %s"
        % (status, constants.WORKFLOW_DONE))
    nb_failed_jobs = len(Helper.list_failed_jobs(self.wf_id, self.wf_ctrl))
    self.assertTrue(
        nb_failed_jobs == 2,
        "nb failed jobs : %i. Expected : %i" % (nb_failed_jobs, 2))
    nb_failed_aborted_jobs = len(
        Helper.list_failed_jobs(self.wf_id, self.wf_ctrl,
                                include_aborted_jobs=True))
    self.assertTrue(
        nb_failed_aborted_jobs == 6,
        "nb failed jobs including aborted : %i. Expected : %i"
        % (nb_failed_aborted_jobs, 6))

    (jobs_info, transfers_info, workflow_status, workflow_queue,
     tmp_files) = self.wf_ctrl.workflow_elements_status(self.wf_id)
    for (job_id, tmp_status, queue, exit_info, dates) in jobs_info:
        job_list = self.wf_ctrl.jobs([job_id])
        job_name, job_command, job_submission_date = job_list[job_id]
        self.tested_job = job_id
        if exit_info[0] == constants.FINISHED_REGULARLY:
            # To check job standard out and standard err
            job_stdout_file = tempfile.NamedTemporaryFile(
                prefix="job_soma_out_log_",
                suffix=repr(job_id),
                delete=False)
            job_stdout_file = job_stdout_file.name
            job_stderr_file = tempfile.NamedTemporaryFile(
                prefix="job_soma_outerr_log_",
                suffix=repr(job_id),
                delete=False)
            job_stderr_file = job_stderr_file.name
            try:
                self.wf_ctrl.retrieve_job_stdouterr(
                    job_id, job_stdout_file, job_stderr_file)
                if job_name == 'job1':
                    # Test stdout
                    isSame, msg = identical_files(
                        job_stdout_file, self.wf_examples.lo_stdout[1])
                    self.assertTrue(isSame, msg)
                    # Test no stderr
                    self.assertTrue(
                        os.stat(job_stderr_file).st_size == 0,
                        "job stderr not empty : cf %s" % job_stderr_file)
                    # Test output files
                    if self.path_management == self.LOCAL_PATH:
                        isSame, msg = identical_files(
                            self.wf_examples.lo_out_model_file[11],
                            self.wf_examples.lo_file[11])
                        self.assertTrue(isSame, msg)
                        isSame, msg = identical_files(
                            self.wf_examples.lo_out_model_file[12],
                            self.wf_examples.lo_file[12])
                        self.assertTrue(isSame, msg)
                    if self.path_management == self.FILE_TRANSFER or \
                            self.path_management == self.SHARED_TRANSFER:
                        isSame, msg = identical_files(
                            self.wf_examples.lo_out_model_file[11],
                            self.wf_examples.tr_file[11].client_path)
                        self.assertTrue(isSame, msg)
                        isSame, msg = identical_files(
                            self.wf_examples.lo_out_model_file[12],
                            self.wf_examples.tr_file[12].client_path)
                        self.assertTrue(isSame, msg)
                    # NOTE: comparing job1's stderr to a model raised
                    # unexplained errors, so it is not checked; see
                    # http://stackoverflow.com/questions/10496758/unexpected-end-of-file-and-error-importing-function-definition-error-running
                if job_name in ['job2', 'job3', 'job4']:
                    # Job number is the last character of the name.
                    job_nb = int(job_name[3])
                    # Test stdout
                    isSame, msg = identical_files(
                        job_stdout_file,
                        self.wf_examples.lo_stdout[job_nb])
                    self.assertTrue(isSame, msg)
                    # Test no stderr
                    self.assertTrue(
                        os.stat(job_stderr_file).st_size == 0,
                        "job stderr not empty : cf %s" % job_stderr_file)
                    # Test output files
                    if self.path_management == self.LOCAL_PATH:
                        isSame, msg = identical_files(
                            self.wf_examples.lo_out_model_file[job_nb],
                            self.wf_examples.lo_file[job_nb])
                        self.assertTrue(isSame, msg)
                    if self.path_management == self.FILE_TRANSFER or \
                            self.path_management == self.SHARED_TRANSFER:
                        isSame, msg = identical_files(
                            self.wf_examples.lo_out_model_file[job_nb],
                            self.wf_examples.tr_file[job_nb].client_path)
                        self.assertTrue(isSame, msg)
                if job_name in ['job1 with exception',
                                'job3 with exception']:
                    # Test stdout
                    isSame, msg = identical_files(
                        job_stdout_file,
                        self.wf_examples.lo_stdout_exception_model)
                    self.assertTrue(isSame, msg)
                    # Test stderr: the last line must be the expected
                    # exception message.
                    with open(job_stderr_file) as f:
                        lines = f.readlines()
                    isSame = (lines[-1] == 'Exception: Paf Boum '
                              'Boum Bada Boum !!!\n')
                    self.assertTrue(isSame,
                                    "Job Exception: %s" % lines[-1])
            finally:
                if os.path.exists(job_stdout_file):
                    os.unlink(job_stdout_file)
                if os.path.exists(job_stderr_file):
                    os.unlink(job_stderr_file)
    del self.tested_job