def test_result(self):
        """Submit the wrong-native-spec PBS example and check job failures.

        The workflow must reach WORKFLOW_DONE with no plainly failed jobs.
        Counting aborted jobs as well, one failure is expected unless local
        path management is in use.
        """
        workflow = self.wf_examples.example_wrong_native_spec_pbs()
        self.wf_id = self.wf_ctrl.submit_workflow(
            workflow=workflow, name=self.__class__.__name__)
        transfer_modes = (self.FILE_TRANSFER, self.SHARED_TRANSFER)
        # Transfer input files if file transfer
        if self.path_management in transfer_modes:
            Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
        # Wait for the workflow to finish
        Helper.wait_workflow(self.wf_id, self.wf_ctrl)
        # Transfer output files if file transfer
        if self.path_management in transfer_modes:
            Helper.transfer_output_files(self.wf_id, self.wf_ctrl)

        status = self.wf_ctrl.workflow_status(self.wf_id)
        self.assertTrue(
            status == constants.WORKFLOW_DONE,
            "workflow status : %s. Expected : %s"
            % (status, constants.WORKFLOW_DONE))
        failed_count = len(
            Helper.list_failed_jobs(self.wf_id, self.wf_ctrl))
        self.assertTrue(
            failed_count == 0,
            "nb failed jobs : %i. Expected : %i" % (failed_count, 0))
        failed_or_aborted = len(
            Helper.list_failed_jobs(self.wf_id, self.wf_ctrl,
                                    include_aborted_jobs=True))
        # Local path management is expected to abort nothing; otherwise
        # exactly one job should end up failed/aborted.
        if self.path_management == self.LOCAL_PATH:
            expected = 0
        else:
            expected = 1
        self.assertTrue(
            failed_or_aborted == expected,
            "nb failed jobs including aborted : %i. "
            "Expected : %i" % (failed_or_aborted, expected))
    def test_result(self):
        """Submit the wrong-native-spec PBS example and check job failures.

        The workflow must reach WORKFLOW_DONE with no plainly failed jobs.
        Counting aborted jobs too, one failure is expected only when a real
        PBS scheduler is used with non-local path management.
        """
        workflow = self.wf_examples.example_wrong_native_spec_pbs()
        self.wf_id = self.wf_ctrl.submit_workflow(
            workflow=workflow, name=self.__class__.__name__)
        uses_transfer = self.path_management in (self.FILE_TRANSFER,
                                                 self.SHARED_TRANSFER)
        # Transfer input files if file transfer
        if uses_transfer:
            Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
        # Wait for the workflow to finish
        Helper.wait_workflow(self.wf_id, self.wf_ctrl)
        # Transfer output files if file transfer
        if uses_transfer:
            Helper.transfer_output_files(self.wf_id, self.wf_ctrl)

        status = self.wf_ctrl.workflow_status(self.wf_id)
        self.assertTrue(
            status == constants.WORKFLOW_DONE,
            "workflow status : %s. Expected : %s"
            % (status, constants.WORKFLOW_DONE))
        n_failed = len(Helper.list_failed_jobs(self.wf_id, self.wf_ctrl))
        self.assertTrue(
            n_failed == 0,
            "nb failed jobs : %i. Expected : %i" % (n_failed, 0))
        n_failed_aborted = len(Helper.list_failed_jobs(
            self.wf_id, self.wf_ctrl, include_aborted_jobs=True))
        # Only a PBS scheduler is expected to reject the wrong native spec.
        if (self.path_management == self.LOCAL_PATH
                or self.wf_ctrl.get_scheduler_type() != 'pbs'):
            expected = 0
        else:
            expected = 1
        self.assertTrue(
            n_failed_aborted == expected,
            "nb failed jobs including aborted : %i. "
            "Expected : %i" % (n_failed_aborted, expected))
Ejemplo n.º 3
0
 def _get_workflow_status(self):
     """Map the soma-workflow status onto a Runner status constant.

     Returns Runner.RUNNING while the workflow is pending or in progress;
     afterwards, Runner.FAILED if any job failed, aborted or was killed by
     the user, and Runner.SUCCESS otherwise.
     """
     sw_status = self._workflow_controller.workflow_status(
         self._workflow_id)
     running_states = (sw.constants.WORKFLOW_IN_PROGRESS,
                       sw.constants.WORKFLOW_NOT_STARTED)
     if sw_status in running_states:
         return Runner.RUNNING
     # Count every job that did not succeed, including aborted and
     # user-killed ones.
     failed_jobs = Helper.list_failed_jobs(
         self._workflow_id, self._workflow_controller,
         include_aborted_jobs=True,
         include_user_killed_jobs=True)
     if failed_jobs:
         return Runner.FAILED
     return Runner.SUCCESS
Ejemplo n.º 4
0
    def test_result(self):
        """Run the native-spec PBS example workflow and check it succeeds.

        Submits the workflow, transfers files when a transfer mode is in
        use, waits for completion, then verifies that no job failed or was
        aborted and that every regularly-finished job produced the expected
        stdout, an empty stderr, and the expected output files.
        """
        workflow = self.wf_examples.example_native_spec_pbs()
        self.wf_id = self.wf_ctrl.submit_workflow(workflow=workflow, name=self.__class__.__name__)
        # Transfer input files if file transfer
        if self.path_management == self.FILE_TRANSFER or self.path_management == self.SHARED_TRANSFER:
            Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
        # Wait for the workflow to finish
        Helper.wait_workflow(self.wf_id, self.wf_ctrl)
        # Transfer output files if file transfer
        if self.path_management == self.FILE_TRANSFER or self.path_management == self.SHARED_TRANSFER:
            Helper.transfer_output_files(self.wf_id, self.wf_ctrl)

        status = self.wf_ctrl.workflow_status(self.wf_id)
        self.assertTrue(
            status == constants.WORKFLOW_DONE, "workflow status : %s. Expected : %s" % (status, constants.WORKFLOW_DONE)
        )
        nb_failed_jobs = len(Helper.list_failed_jobs(self.wf_id, self.wf_ctrl))
        self.assertTrue(nb_failed_jobs == 0, "nb failed jobs : %i. Expected : %i" % (nb_failed_jobs, 0))
        nb_failed_aborted_jobs = len(Helper.list_failed_jobs(self.wf_id, self.wf_ctrl, include_aborted_jobs=True))
        self.assertTrue(
            nb_failed_aborted_jobs == 0,
            "nb failed jobs including aborted : %i. Expected : %i" % (nb_failed_aborted_jobs, 0),
        )

        (jobs_info, transfers_info, workflow_status, workflow_queue, tmp_files) = self.wf_ctrl.workflow_elements_status(
            self.wf_id
        )

        for (job_id, tmp_status, queue, exit_info, dates) in jobs_info:
            job_list = self.wf_ctrl.jobs([job_id])
            job_name, job_command, job_submission_date = job_list[job_id]

            # Record the job under inspection so failures can be traced.
            self.tested_job = job_id

            if exit_info[0] == constants.FINISHED_REGULARLY:
                # To check job standard out and standard err
                # delete=False: the files are removed explicitly in the
                # finally block after the comparisons below.
                job_stdout_file = tempfile.NamedTemporaryFile(
                    prefix="job_soma_out_log_", suffix=repr(job_id), delete=False
                )
                job_stdout_file = job_stdout_file.name
                job_stderr_file = tempfile.NamedTemporaryFile(
                    prefix="job_soma_outerr_log_", suffix=repr(job_id), delete=False
                )
                job_stderr_file = job_stderr_file.name

                try:
                    self.wf_ctrl.retrieve_job_stdouterr(job_id, job_stdout_file, job_stderr_file)
                    # Test stdout
                    isSame, msg = identical_files(job_stdout_file, self.wf_examples.lo_stdout[1])
                    self.assertTrue(isSame, msg)
                    # Test no stderr
                    self.assertTrue(
                        os.stat(job_stderr_file).st_size == 0, "job stderr not empty : cf %s" % job_stderr_file
                    )
                    # Test output files
                    if self.path_management == self.LOCAL_PATH:
                        isSame, msg = identical_files(
                            self.wf_examples.lo_out_model_file[11], self.wf_examples.lo_file[11]
                        )
                        self.assertTrue(isSame, msg)
                        isSame, msg = identical_files(
                            self.wf_examples.lo_out_model_file[12], self.wf_examples.lo_file[12]
                        )
                        self.assertTrue(isSame, msg)
                    if self.path_management == self.FILE_TRANSFER or self.path_management == self.SHARED_TRANSFER:
                        isSame, msg = identical_files(
                            self.wf_examples.lo_out_model_file[11], self.wf_examples.tr_file[11].client_path
                        )
                        self.assertTrue(isSame, msg)
                        isSame, msg = identical_files(
                            self.wf_examples.lo_out_model_file[12], self.wf_examples.tr_file[12].client_path
                        )
                        self.assertTrue(isSame, msg)
                finally:
                    os.unlink(job_stdout_file)
                    os.unlink(job_stderr_file)

        del self.tested_job
Ejemplo n.º 5
0
    def test_result(self):
        """Run the simple-exception example and check the failure pattern.

        Exactly one job is expected to fail outright, and four jobs to fail
        or abort overall.  For the job raising the exception, its stdout is
        compared against a reference and the last stderr line must be the
        expected exception message.
        """
        workflow = self.wf_examples.example_simple_exception1()
        self.wf_id = self.wf_ctrl.submit_workflow(
            workflow=workflow,
            name=self.__class__.__name__)
        # Transfer input files if file transfer
        if self.path_management == self.FILE_TRANSFER or \
                self.path_management == self.SHARED_TRANSFER:
            Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
        # Wait for the workflow to finish
        Helper.wait_workflow(self.wf_id, self.wf_ctrl)
        # Transfer output files if file transfer
        if self.path_management == self.FILE_TRANSFER or \
                self.path_management == self.SHARED_TRANSFER:
            Helper.transfer_output_files(self.wf_id, self.wf_ctrl)

        status = self.wf_ctrl.workflow_status(self.wf_id)
        self.assertTrue(status == constants.WORKFLOW_DONE,
                        "workflow status : %s. Expected : %s" %
                        (status, constants.WORKFLOW_DONE))
        nb_failed_jobs = len(Helper.list_failed_jobs(self.wf_id,
                                                     self.wf_ctrl))
        self.assertTrue(nb_failed_jobs == 1,
                        "nb failed jobs : %i. Expected : %i" %
                        (nb_failed_jobs, 1))
        nb_failed_aborted_jobs = len(Helper.list_failed_jobs(
            self.wf_id,
            self.wf_ctrl,
            include_aborted_jobs=True))
        # NOTE(review): the expected count of 4 presumably corresponds to
        # the failed job plus jobs aborted because they depend on it —
        # confirm against example_simple_exception1's dependency graph.
        self.assertTrue(nb_failed_aborted_jobs == 4,
                        "nb failed jobs including aborted : %i. Expected : %i"
                        % (nb_failed_aborted_jobs, 4))

        (jobs_info, transfers_info, workflow_status, workflow_queue,
            tmp_files) = self.wf_ctrl.workflow_elements_status(self.wf_id)

        for (job_id, tmp_status, queue, exit_info, dates) in jobs_info:
            job_list = self.wf_ctrl.jobs([job_id])
            job_name, job_command, job_submission_date = job_list[job_id]

            # Record the job under inspection so failures can be traced.
            self.tested_job = job_id

            if exit_info[0] == constants.FINISHED_REGULARLY:
                # To check job standard out and standard err
                # delete=False: the files are removed explicitly in the
                # finally block after the comparisons below.
                job_stdout_file = tempfile.NamedTemporaryFile(
                    prefix="job_soma_out_log_",
                    suffix=repr(job_id),
                    delete=False)
                job_stdout_file = job_stdout_file.name
                job_stderr_file = tempfile.NamedTemporaryFile(
                    prefix="job_soma_outerr_log_",
                    suffix=repr(job_id),
                    delete=False)
                job_stderr_file = job_stderr_file.name
                try:
                    self.wf_ctrl.retrieve_job_stdouterr(job_id,
                                                        job_stdout_file,
                                                        job_stderr_file)

                    if job_name == 'job1 with exception':
                        # Test stdout
                        isSame, msg = identical_files(
                            job_stdout_file,
                            self.wf_examples.lo_stdout_exception_model)
                        self.assertTrue(isSame, msg)
                        # Test the last line of stderr
                        with open(job_stderr_file) as f:
                            lines = f.readlines()
                        expected_error = 'Exception: Paf Boum Boum Bada Boum !!!\n'
                        isSame = (lines[-1] == expected_error)
                        self.assertTrue(isSame,
                                        "Job exception : %s. Expected : %s" %
                                        (lines[-1], expected_error))
                finally:
                    os.unlink(job_stdout_file)
                    os.unlink(job_stderr_file)

        del self.tested_job
Ejemplo n.º 6
0
    def test_result(self):
        """Run the special-command example and check the quoting warning.

        Building the workflow must emit exactly one UserWarning about a
        single quote in the command.  The workflow must then finish with no
        failed or aborted jobs, with stdout matching the local or remote
        reference depending on path management, and with empty stderr.
        """
        # Cause all warnings to always be triggered.
        warnings.simplefilter("always")
        with warnings.catch_warnings(record=True) as w:
            # Trigger a warning.
            workflow = self.wf_examples.example_special_command()
            # Verify some things
            self.assertTrue(len(w) == 1)
            self.assertTrue(issubclass(w[-1].category, UserWarning))
            self.assertTrue("contains single quote. It could fail using DRMAA"
                            in str(w[-1].message))

        self.wf_id = self.wf_ctrl.submit_workflow(workflow=workflow,
                                                  name=self.__class__.__name__)
        # Transfer input files if file transfer
        if self.path_management == self.FILE_TRANSFER or \
                self.path_management == self.SHARED_TRANSFER:
            Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
        # Wait for the worklow to finish
        Helper.wait_workflow(self.wf_id, self.wf_ctrl)

        status = self.wf_ctrl.workflow_status(self.wf_id)
        self.assertTrue(
            status == constants.WORKFLOW_DONE,
            "workflow status : %s. Expected : %s" %
            (status, constants.WORKFLOW_DONE))
        # TODO : sometimes raises an error
        # because status = "workflow_in_progress"

        nb_failed_jobs = len(Helper.list_failed_jobs(self.wf_id, self.wf_ctrl))
        self.assertTrue(
            nb_failed_jobs == 0,
            "nb failed jobs : %i. Expected : %i" % (nb_failed_jobs, 0))

        nb_failed_aborted_jobs = len(
            Helper.list_failed_jobs(self.wf_id,
                                    self.wf_ctrl,
                                    include_aborted_jobs=True))
        self.assertTrue(
            nb_failed_aborted_jobs == 0,
            "nb failed jobs including aborted : %i. Expected : %i" %
            (nb_failed_aborted_jobs, 0))

        (jobs_info, transfers_info, workflow_status, workflow_queue,
         tmp_files) = self.wf_ctrl.workflow_elements_status(self.wf_id)

        for (job_id, tmp_status, queue, exit_info, dates) in jobs_info:
            job_list = self.wf_ctrl.jobs([job_id])
            job_name, job_command, job_submission_date = job_list[job_id]

            # Record the job under inspection so failures can be traced.
            self.tested_job = job_id

            if exit_info[0] == constants.FINISHED_REGULARLY:
                # To check job standard out and standard err
                # delete=False: the files are removed explicitly in the
                # finally block after the comparisons below.
                job_stdout_file = tempfile.NamedTemporaryFile(
                    prefix="job_soma_out_log_",
                    suffix=repr(job_id),
                    delete=False)
                job_stdout_file = job_stdout_file.name
                job_stderr_file = tempfile.NamedTemporaryFile(
                    prefix="job_soma_outerr_log_",
                    suffix=repr(job_id),
                    delete=False)
                job_stderr_file = job_stderr_file.name

                try:
                    self.wf_ctrl.retrieve_job_stdouterr(
                        job_id, job_stdout_file, job_stderr_file)
                    # Test job stdout
                    if self.path_management == self.LOCAL_PATH:
                        isSame, msg = identical_files(
                            job_stdout_file,
                            self.wf_examples.lo_stdout_command_local)
                        self.assertTrue(isSame, msg)
                    else:
                        isSame, msg = identical_files(
                            job_stdout_file,
                            self.wf_examples.lo_stdout_command_remote)
                        self.assertTrue(isSame, msg)
                    # Test no stderr
                    self.assertTrue(
                        os.stat(job_stderr_file).st_size == 0,
                        "job stderr not empty : cf %s" % job_stderr_file)
                finally:
                    os.unlink(job_stdout_file)
                    os.unlink(job_stderr_file)

        del self.tested_job
Ejemplo n.º 7
0
    def test_result(self):
        """Check Job configuration handling through the printed command line.

        Builds three jobs sharing one command: job1 without configuration,
        job2 with a configuration dict and an empty param_dict, and job3
        with a configuration dict and an input-params file.  Each job's
        stdout (its argv and received configuration, as printed by
        print_cmdline) must contain the corresponding expected lines.
        """
        cmd = [
            sys.executable, '-c',
            'from __future__ import print_function; import sys;'
            'from soma_workflow.test.workflow_tests.test_workflow_config import print_cmdline; '
            'print_cmdline(sys.argv[1:])', 'conf=%(configuration_dict)s'
        ]
        configuration = {'config1': 'value1', 'config2': 'value2'}
        print(cmd)
        job1 = Job(name='job1', command=cmd)
        job2 = Job(name='job2',
                   command=cmd,
                   configuration=configuration,
                   param_dict={})
        job3 = Job(name='job3',
                   command=cmd,
                   configuration=configuration,
                   use_input_params_file=True)
        # Expected stdout lines, keyed by workflow name then job name.
        expected_outputs = {
            'workflow': {
                'job1':
                'args: [\'conf=%(configuration_dict)s\']\n'
                'config:\n'
                'None',
                'job2':
                '''args: ['conf={"config1": "value1", "config2": "value2"}']
conf param
config:
{'config1': 'value1', 'config2': 'value2'}''',
                'job3':
                '''args: [\'conf=%(configuration_dict)s\']
with input file
params:
{'parameters': {}, 'configuration_dict': {'config1': 'value1', 'config2': 'value2'}}
config:
{'config1': 'value1', 'config2': 'value2'}''',
            },
        }
        workflow1 = Workflow(name='workflow', jobs=[job1, job2, job3])
        for workflow in (workflow1, ):
            self.wf_id = self.wf_ctrl.submit_workflow(
                workflow=workflow, name=self.__class__.__name__)
            # Transfer input files if file transfer
            if self.path_management == self.FILE_TRANSFER or \
                    self.path_management == self.SHARED_TRANSFER:
                Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
            # Wait for the workflow to finish
            Helper.wait_workflow(self.wf_id, self.wf_ctrl)
            status = self.wf_ctrl.workflow_status(self.wf_id)
            self.assertTrue(
                status == constants.WORKFLOW_DONE,
                "workflow status : %s. Expected : %s" %
                (status, constants.WORKFLOW_DONE))

            nb_failed_jobs = len(
                Helper.list_failed_jobs(self.wf_id, self.wf_ctrl))
            # On failure, print the failed job list before re-raising so the
            # test log shows which jobs broke.
            try:
                self.assertTrue(
                    nb_failed_jobs == 0,
                    "nb failed jobs : %i. Expected : %i" % (nb_failed_jobs, 0))
            except:  # noqa: E722
                print('jobs failed:', file=sys.stderr)
                print(Helper.list_failed_jobs(self.wf_id, self.wf_ctrl),
                      file=sys.stderr)
                raise
            nb_failed_aborted_jobs = len(
                Helper.list_failed_jobs(self.wf_id,
                                        self.wf_ctrl,
                                        include_aborted_jobs=True))
            try:
                self.assertTrue(
                    nb_failed_aborted_jobs == 0,
                    "nb failed jobs including aborted : %i. Expected : %i" %
                    (nb_failed_aborted_jobs, 0))
            except:  # noqa: E722
                print('aborted jobs:', file=sys.stderr)
                print(Helper.list_failed_jobs(self.wf_id,
                                              self.wf_ctrl,
                                              include_aborted_jobs=True),
                      file=sys.stderr)
                raise

            (jobs_info, transfers_info, workflow_status, workflow_queue,
             tmp_files) = self.wf_ctrl.workflow_elements_status(self.wf_id)

            # NOTE(review): this jobs_info variant carries a 6th element
            # (drmaa_id), unlike other snippets in this file — the
            # workflow_elements_status API version differs; confirm.
            for (job_id, tmp_status, queue, exit_info, dates, drmaa_id) \
                    in jobs_info:
                job_list = self.wf_ctrl.jobs([job_id])
                job_name, job_command, job_submission_date = job_list[job_id]

                # Record the job under inspection so failures can be traced.
                self.tested_job = job_id

                if exit_info[0] == constants.FINISHED_REGULARLY:
                    # To check job standard out and standard err
                    # delete=False: the files are removed explicitly in the
                    # finally block after the comparisons below.
                    job_stdout_file = tempfile.NamedTemporaryFile(
                        prefix="job_soma_out_log_",
                        suffix=repr(job_id),
                        delete=False)
                    job_stdout_file = job_stdout_file.name
                    job_stderr_file = tempfile.NamedTemporaryFile(
                        prefix="job_soma_outerr_log_",
                        suffix=repr(job_id),
                        delete=False)
                    job_stderr_file = job_stderr_file.name

                    try:
                        self.wf_ctrl.retrieve_job_stdouterr(
                            job_id, job_stdout_file, job_stderr_file)
                        output \
                            = open(job_stdout_file).read().strip().split('\n')
                        exp_wf_outputs = expected_outputs[workflow.name]
                        if job_name in exp_wf_outputs:
                            exp = exp_wf_outputs[job_name].split('\n')
                            #print('### job', job_name, 'output:', output, file=sys.stderr)
                            #print('### expected:', exp, file=sys.stderr)
                            #print('### res:', [o in output for o in exp], file=sys.stderr)
                            # Every expected line must appear in the output
                            # (order is not checked).
                            self.assertTrue(all([o in output for o in exp]))
                    finally:
                        os.unlink(job_stdout_file)
                        os.unlink(job_stderr_file)

            del self.tested_job
Ejemplo n.º 8
0
    def test_result(self):
        """Run a workflow of 20 one-second jobs and check clean success.

        All jobs must finish regularly with empty stdout and stderr, and no
        job may fail or abort.
        """
        nb = 20
        time_sleep = 1

        workflow = self.wf_examples.example_n_jobs(nb=nb, time=time_sleep)
        self.wf_id = self.wf_ctrl.submit_workflow(
            workflow=workflow,
            name=self.__class__.__name__)
        # Transfer input files if file transfer
        if self.path_management == self.FILE_TRANSFER or \
                self.path_management == self.SHARED_TRANSFER:
            Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
        # Wait for the workflow to finish
        Helper.wait_workflow(self.wf_id, self.wf_ctrl)
        # Transfer output files if file transfer
        if self.path_management == self.FILE_TRANSFER or \
                self.path_management == self.SHARED_TRANSFER:
            Helper.transfer_output_files(self.wf_id, self.wf_ctrl)

        status = self.wf_ctrl.workflow_status(self.wf_id)
        self.assertTrue(status == constants.WORKFLOW_DONE,
                        "workflow status : %s. Expected : %s" %
                        (status, constants.WORKFLOW_DONE))
        nb_failed_jobs = len(Helper.list_failed_jobs(
            self.wf_id,
            self.wf_ctrl))
        self.assertTrue(nb_failed_jobs == 0,
                        "nb failed jobs : %i. Expected : %i" %
                        (nb_failed_jobs, 0))
        nb_failed_aborted_jobs = len(Helper.list_failed_jobs(
            self.wf_id,
            self.wf_ctrl,
            include_aborted_jobs=True))
        self.assertTrue(nb_failed_aborted_jobs == 0,
                        "nb failed jobs including aborted : %i. Expected : %i"
                        % (nb_failed_aborted_jobs, 0))

        (jobs_info, transfers_info, workflow_status, workflow_queue,
            tmp_files) = self.wf_ctrl.workflow_elements_status(self.wf_id)

        for (job_id, tmp_status, queue, exit_info, dates) in jobs_info:
            job_list = self.wf_ctrl.jobs([job_id])
            job_name, job_command, job_submission_date = job_list[job_id]

            # Record the job under inspection so failures can be traced.
            self.tested_job = job_id

            if exit_info[0] == constants.FINISHED_REGULARLY:
                # To check job standard out and standard err
                # delete=False: the files are removed explicitly in the
                # finally block after the checks below.
                job_stdout_file = tempfile.NamedTemporaryFile(
                    prefix="job_soma_out_log_",
                    suffix=repr(job_id),
                    delete=False)
                job_stdout_file = job_stdout_file.name
                job_stderr_file = tempfile.NamedTemporaryFile(
                    prefix="job_soma_outerr_log_",
                    suffix=repr(job_id),
                    delete=False)
                job_stderr_file = job_stderr_file.name

                try:
                    self.wf_ctrl.retrieve_job_stdouterr(job_id,
                                                        job_stdout_file,
                                                        job_stderr_file)
                    # Test stdout
                    self.assertTrue(os.stat(job_stdout_file).st_size == 0,
                                    "job stdout not empty : file: %s, "
                                    "contents:\n%s" %
                                    (job_stdout_file,
                                     open(job_stdout_file).read()))
                    # Test no stderr
                    self.assertTrue(os.stat(job_stderr_file).st_size == 0,
                                    "job stderr not empty : file %s, "
                                    "contents:\n%s" %
                                    (job_stderr_file,
                                     open(job_stderr_file).read()))
                finally:
                    os.unlink(job_stdout_file)
                    os.unlink(job_stderr_file)

        del self.tested_job
Ejemplo n.º 9
0
    def test_result(self):
        """Run the special-command example and check the quoting warning.

        Building the workflow must emit exactly one UserWarning about a
        single quote in the command.  The workflow must then finish with no
        failed or aborted jobs, with stdout matching the local or remote
        reference depending on path management, and with empty stderr.
        """
        # Cause all warnings to always be triggered.
        warnings.simplefilter("always")
        with warnings.catch_warnings(record=True) as w:
            # Trigger a warning.
            workflow = self.wf_examples.example_special_command()
            # Verify some things
            self.assertTrue(len(w) == 1)
            self.assertTrue(issubclass(w[-1].category, UserWarning))
            self.assertTrue("contains single quote. It could fail using DRMAA"
                            in str(w[-1].message))

        self.wf_id = self.wf_ctrl.submit_workflow(
            workflow=workflow,
            name=self.__class__.__name__)
        # Transfer input files if file transfer
        if self.path_management == self.FILE_TRANSFER or \
                self.path_management == self.SHARED_TRANSFER:
            Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
        # Wait for the worklow to finish
        Helper.wait_workflow(self.wf_id, self.wf_ctrl)

        status = self.wf_ctrl.workflow_status(self.wf_id)
        self.assertTrue(status == constants.WORKFLOW_DONE,
                        "workflow status : %s. Expected : %s" %
                        (status, constants.WORKFLOW_DONE))
        # TODO : sometimes raises an error
        # because status = "workflow_in_progress"

        nb_failed_jobs = len(Helper.list_failed_jobs(
            self.wf_id,
            self.wf_ctrl))
        self.assertTrue(nb_failed_jobs == 0,
                        "nb failed jobs : %i. Expected : %i" %
                        (nb_failed_jobs, 0))

        nb_failed_aborted_jobs = len(Helper.list_failed_jobs(
            self.wf_id,
            self.wf_ctrl,
            include_aborted_jobs=True))
        self.assertTrue(nb_failed_aborted_jobs == 0,
                        "nb failed jobs including aborted : %i. Expected : %i"
                        % (nb_failed_aborted_jobs, 0))

        (jobs_info, transfers_info, workflow_status, workflow_queue,
            tmp_files) = self.wf_ctrl.workflow_elements_status(self.wf_id)

        for (job_id, tmp_status, queue, exit_info, dates) in jobs_info:
            job_list = self.wf_ctrl.jobs([job_id])
            job_name, job_command, job_submission_date = job_list[job_id]

            # Record the job under inspection so failures can be traced.
            self.tested_job = job_id

            if exit_info[0] == constants.FINISHED_REGULARLY:
                # To check job standard out and standard err
                # delete=False: the files are removed explicitly in the
                # finally block after the comparisons below.
                job_stdout_file = tempfile.NamedTemporaryFile(
                    prefix="job_soma_out_log_",
                    suffix=repr(job_id),
                    delete=False)
                job_stdout_file = job_stdout_file.name
                job_stderr_file = tempfile.NamedTemporaryFile(
                    prefix="job_soma_outerr_log_",
                    suffix=repr(job_id),
                    delete=False)
                job_stderr_file = job_stderr_file.name

                try:
                    self.wf_ctrl.retrieve_job_stdouterr(job_id,
                                                        job_stdout_file,
                                                        job_stderr_file)
                    # Test job stdout
                    if self.path_management == self.LOCAL_PATH:
                        isSame, msg = identical_files(
                            job_stdout_file,
                            self.wf_examples.lo_stdout_command_local)
                        self.assertTrue(isSame, msg)
                    else:
                        isSame, msg = identical_files(
                            job_stdout_file,
                            self.wf_examples.lo_stdout_command_remote)
                        self.assertTrue(isSame, msg)
                    # Test no stderr
                    self.assertTrue(os.stat(job_stderr_file).st_size == 0,
                                    "job stderr not empty : cf %s" %
                                    job_stderr_file)
                finally:
                    os.unlink(job_stdout_file)
                    os.unlink(job_stderr_file)

        del self.tested_job
Ejemplo n.º 10
0
    def test_result(self):
        """Run the fake T1 pipeline and check every job succeeds cleanly.

        The number of pipeline iterations scales with the scheduler's
        process count when available; every regularly-finished job must
        have empty stdout and stderr.
        """
        # Scale the workload to the scheduler when it exposes its CPU count.
        if hasattr(self.wf_ctrl.scheduler_config, 'get_proc_nb'):
            n_iter = 5 * self.wf_ctrl.scheduler_config.get_proc_nb()
        else:
            n_iter = 5
        workflow = self.wf_examples.example_fake_pipelineT1(n_iter)
        self.wf_id = self.wf_ctrl.submit_workflow(workflow=workflow,
                                                  name=self.__class__.__name__)
        # Transfer input files if file transfer
        if self.path_management == self.FILE_TRANSFER or \
                self.path_management == self.SHARED_TRANSFER:
            Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
        # Wait for the workflow to finish
        Helper.wait_workflow(self.wf_id, self.wf_ctrl)
        # Transfer output files if file transfer
        if self.path_management == self.FILE_TRANSFER or \
                self.path_management == self.SHARED_TRANSFER:
            Helper.transfer_output_files(self.wf_id, self.wf_ctrl)

        status = self.wf_ctrl.workflow_status(self.wf_id)
        self.assertTrue(status == constants.WORKFLOW_DONE)
        self.assertTrue(
            len(Helper.list_failed_jobs(self.wf_id, self.wf_ctrl)) == 0)
        self.assertTrue(
            len(
                Helper.list_failed_jobs(
                    self.wf_id, self.wf_ctrl, include_aborted_jobs=True)) == 0)

        (jobs_info, transfers_info, workflow_status, workflow_queue,
         tmp_files) = self.wf_ctrl.workflow_elements_status(self.wf_id)

        # NOTE(review): 'dmraa_id' looks like a typo for 'drmaa_id'; it is
        # only unpacked, never used, so behavior is unaffected.
        for (job_id, tmp_status, queue, exit_info, dates, dmraa_id) \
                in jobs_info:
            job_list = self.wf_ctrl.jobs([job_id])
            job_name, job_command, job_submission_date = job_list[job_id]

            # Record the job under inspection so failures can be traced.
            self.tested_job = job_id

            if exit_info[0] == constants.FINISHED_REGULARLY:
                # To check job standard out and standard err
                # delete=False: the files are removed explicitly in the
                # finally block after the checks below.
                job_stdout_file = tempfile.NamedTemporaryFile(
                    prefix="job_soma_out_log_",
                    suffix=repr(job_id),
                    delete=False)
                job_stdout_file = job_stdout_file.name
                job_stderr_file = tempfile.NamedTemporaryFile(
                    prefix="job_soma_outerr_log_",
                    suffix=repr(job_id),
                    delete=False)
                job_stderr_file = job_stderr_file.name

                try:
                    self.wf_ctrl.retrieve_job_stdouterr(
                        job_id, job_stdout_file, job_stderr_file)
                    # Test stdout
                    self.assertTrue(
                        os.stat(job_stdout_file).st_size == 0,
                        "job stdout not empty : cf %s" % job_stdout_file)
                    # Test no stderr
                    self.assertTrue(
                        os.stat(job_stderr_file).st_size == 0,
                        "job stderr not empty : cf %s" % job_stderr_file)
                finally:
                    if os.path.exists(job_stdout_file):
                        os.unlink(job_stdout_file)
                    if os.path.exists(job_stderr_file):
                        os.unlink(job_stderr_file)

            # NOTE(review): unlike sibling tests, this `del` sits inside the
            # loop body; self.tested_job is re-set each iteration so no
            # AttributeError occurs, but confirm the placement is intended.
            del self.tested_job
Ejemplo n.º 11
0
echo %s
""" % test_bash_script
    fileout.write(filecontent)
    fileout.close()
    os.chdir(cur_work_dir)

    job1 = Job(command=[u"touch", test_filepath],
               name="epac_job_test",
               working_directory=tmp_work_dir_path)
    job2 = Job(command=["%s/readfile" % cur_file_dir, test_bash_script],
               name="epac_job_test",
               working_directory=tmp_work_dir_path)

    soma_workflow = Workflow(jobs=[job1, job2])

    resource_id = socket.gethostname()
    controller = WorkflowController(resource_id, "", "")
    ## run soma-workflow
    ## =================
    wf_id = controller.submit_workflow(workflow=soma_workflow,
                                       name="epac workflow")
    Helper.wait_workflow(wf_id, controller)
    nb_failed_jobs = len(Helper.list_failed_jobs(wf_id, controller))
    if nb_failed_jobs > 0:
        raise ValueError("Soma-workflow error, cannot use working directory")

    if not os.path.isfile(os.path.join(tmp_work_dir_path, test_filepath)):
        raise ValueError("Soma-workflow cannot define working directory")
    else:
        print "OK for creating new file in working directory"
Ejemplo n.º 12
0
    def run_workflow(self, workflow, test_files=None, test_dyn_files=None):
        """Submit *workflow*, wait for its completion and check its results.

        Checks performed:
        - the workflow ends with status WORKFLOW_DONE;
        - no job failed, even when counting aborted jobs (failing jobs are
          printed through self.print_jobs() before the assertion fires);
        - for jobs whose name starts with 'job1', stdout matches the model
          output and stderr is empty;
        - output files listed in *test_files* are identical to their model
          files;
        - dynamic output parameters of the jobs listed in *test_dyn_files*
          point to files identical to their model files.

        Parameters
        ----------
        workflow: Workflow
            the workflow to submit through self.wf_ctrl
        test_files: list of int, optional
            indices (into self.wf_examples.lo_file / tr_file) of output
            files to compare with the model files
        test_dyn_files: dict, optional
            {job_name: {output_param_name: model_file_index}} mapping of
            dynamic output parameters to check
        """
        # None sentinels avoid the shared-mutable-default-argument pitfall.
        if test_files is None:
            test_files = []
        if test_dyn_files is None:
            test_dyn_files = {}

        self.wf_id = self.wf_ctrl.submit_workflow(
            workflow=workflow,
            name=self.__class__.__name__)
        # Transfer input files if file transfer
        if self.path_management == self.FILE_TRANSFER or \
                self.path_management == self.SHARED_TRANSFER:
            Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
        # Wait for the workflow to finish
        Helper.wait_workflow(self.wf_id, self.wf_ctrl)
        # Transfer output files if file transfer
        if self.path_management == self.FILE_TRANSFER or \
                self.path_management == self.SHARED_TRANSFER:
            Helper.transfer_output_files(self.wf_id, self.wf_ctrl)

        status = self.wf_ctrl.workflow_status(self.wf_id)
        self.assertTrue(status == constants.WORKFLOW_DONE,
                        "workflow status : %s. Expected : %s" %
                        (status, constants.WORKFLOW_DONE))

        failed_jobs = Helper.list_failed_jobs(self.wf_id, self.wf_ctrl)
        nb_failed_jobs = len(failed_jobs)
        if nb_failed_jobs != 0:
            # Print job details before failing to ease debugging.
            self.print_jobs(failed_jobs, 'Failed jobs')

        self.assertTrue(nb_failed_jobs == 0,
                        "nb failed jobs : %i. Expected : %i" %
                        (nb_failed_jobs, 0))
        failed_aborted_jobs = Helper.list_failed_jobs(
            self.wf_id,
            self.wf_ctrl,
            include_aborted_jobs=True)
        nb_failed_aborted_jobs = len(failed_aborted_jobs)
        if nb_failed_aborted_jobs != 0:
            self.print_jobs(failed_aborted_jobs, 'Aborted jobs')
        self.assertTrue(nb_failed_aborted_jobs == 0,
                        "nb failed jobs including aborted : %i. Expected : %i"
                        % (nb_failed_aborted_jobs, 0))

        (jobs_info, transfers_info, workflow_status, workflow_queue,
            tmp_files) = self.wf_ctrl.workflow_elements_status(self.wf_id)

        # Dynamic output parameters, collected below while walking the jobs.
        # (A stale hard-coded initial value was removed here: it was dead
        # code, immediately overwritten by the empty dict.)
        dyn_out_params = {}

        for (job_id, tmp_status, queue, exit_info, dates, drmaa_id) \
                in jobs_info:
            job_list = self.wf_ctrl.jobs([job_id])
            job_name, job_command, job_submission_date = job_list[job_id]

            self.tested_job = job_id

            if exit_info[0] == constants.FINISHED_REGULARLY:
                # To check job standard out and standard err
                job_stdout_file = tempfile.NamedTemporaryFile(
                    prefix="job_soma_out_log_",
                    suffix=repr(job_id),
                    delete=False)
                job_stdout_file = job_stdout_file.name
                job_stderr_file = tempfile.NamedTemporaryFile(
                    prefix="job_soma_outerr_log_",
                    suffix=repr(job_id),
                    delete=False)
                job_stderr_file = job_stderr_file.name

                try:
                    self.wf_ctrl.retrieve_job_stdouterr(job_id,
                                                        job_stdout_file,
                                                        job_stderr_file)
                    if job_name.startswith('job1'):
                        # Test stdout
                        isSame, msg = identical_files(
                            job_stdout_file,
                            self.wf_examples.lo_stdout[1])
                        self.assertTrue(isSame, msg)
                        # Test no stderr (the message embeds the stderr
                        # content to make a failure self-explanatory)
                        with open(job_stderr_file) as f:
                            msg = "job stderr not empty : cf %s\n" \
                                "stderr:\n---\n%s\n---" \
                                % (job_stderr_file, f.read())
                        self.assertTrue(os.stat(job_stderr_file).st_size == 0,
                                        msg)

                    if job_name in test_dyn_files:
                        out_params = self.wf_ctrl.get_job_output_params(job_id)
                        dyn_out_params[job_name] = out_params

                    # For unknown reason, it raises some errors
                    # http://stackoverflow.com/questions/10496758/unexpected-end-of-file-and-error-importing-function-definition-error-running
                    # isSame,	msg	= identical_files(job_stderr_file,self.wf_examples.lo_stderr[1])
                    # self.failUnless(isSame == True)

                finally:
                    # Guard the unlinks so a retrieval failure does not get
                    # masked by a FileNotFoundError during cleanup
                    # (consistent with the other tests of this file).
                    if os.path.exists(job_stdout_file):
                        os.unlink(job_stdout_file)
                    if os.path.exists(job_stderr_file):
                        os.unlink(job_stderr_file)

        for out_file_num in test_files:
            # Test output files
            if self.path_management == self.LOCAL_PATH:
                out_file = self.wf_examples.lo_file[out_file_num]
            elif self.path_management == self.FILE_TRANSFER or \
                    self.path_management == self.SHARED_TRANSFER:
                out_file = self.wf_examples.tr_file[out_file_num].client_path

            isSame, msg = identical_files(
                self.wf_examples.lo_out_model_file[out_file_num], out_file)
            self.assertTrue(isSame, msg)

        for job_name, ref_out_params in six.iteritems(test_dyn_files):
            out_params = dyn_out_params[job_name]
            for param, file_num in six.iteritems(ref_out_params):
                isSame, msg = identical_files(
                    self.wf_examples.lo_out_model_file[file_num],
                    out_params[param])
                self.assertTrue(isSame, msg)

        del self.tested_job
Ejemplo n.º 13
0
    def test_result(self):
        """Run the 'multiple' example workflow, which deliberately contains
        failing jobs, and check the exact failure counts and per-job outputs.

        Expectations asserted below: workflow status is WORKFLOW_DONE,
        exactly 2 failed jobs, exactly 6 failed jobs when aborted jobs are
        included; 'job1'..'job4' have model-identical stdout, empty stderr
        and model-identical output files; the 'with exception' jobs end
        their stderr with the expected exception message.
        """
        workflow = self.wf_examples.example_multiple()
        self.wf_id = self.wf_ctrl.submit_workflow(workflow=workflow,
                                                  name=self.__class__.__name__)

        # Transfer input files if file transfer
        if self.path_management == self.FILE_TRANSFER or \
                self.path_management == self.SHARED_TRANSFER:
            Helper.transfer_input_files(self.wf_id, self.wf_ctrl)

        # Wait for the workflow to finish
        Helper.wait_workflow(self.wf_id, self.wf_ctrl)

        # Transfer output files if file transfer
        if self.path_management == self.FILE_TRANSFER or \
                self.path_management == self.SHARED_TRANSFER:
            Helper.transfer_output_files(self.wf_id, self.wf_ctrl)

        status = self.wf_ctrl.workflow_status(self.wf_id)
        self.assertTrue(
            status == constants.WORKFLOW_DONE,
            "workflow status : %s. Expected : %s" %
            (status, constants.WORKFLOW_DONE))
        # This workflow is built to fail partially: 2 plainly failed jobs,
        # 6 when the jobs aborted as a consequence are counted as well.
        nb_failed_jobs = len(Helper.list_failed_jobs(self.wf_id, self.wf_ctrl))
        self.assertTrue(
            nb_failed_jobs == 2,
            "nb failed jobs : %i. Expected : %i" % (nb_failed_jobs, 2))
        nb_failed_aborted_jobs = len(
            Helper.list_failed_jobs(self.wf_id,
                                    self.wf_ctrl,
                                    include_aborted_jobs=True))
        self.assertTrue(
            nb_failed_aborted_jobs == 6,
            "nb failed jobs including aborted : %i. Expected : %i" %
            (nb_failed_aborted_jobs, 6))

        (jobs_info, transfers_info, workflow_status, workflow_queue,
         tmp_files) = self.wf_ctrl.workflow_elements_status(self.wf_id)

        # NOTE(review): rows unpack to 5 fields here, while other tests in
        # this file expect 6 (with a trailing drmaa id) — confirm against
        # the soma-workflow version actually in use.
        for (job_id, tmp_status, queue, exit_info, dates) in jobs_info:
            job_list = self.wf_ctrl.jobs([job_id])
            job_name, job_command, job_submission_date = job_list[job_id]

            self.tested_job = job_id

            if exit_info[0] == constants.FINISHED_REGULARLY:
                # To check job standard out and standard err
                job_stdout_file = tempfile.NamedTemporaryFile(
                    prefix="job_soma_out_log_",
                    suffix=repr(job_id),
                    delete=False)
                job_stdout_file = job_stdout_file.name
                job_stderr_file = tempfile.NamedTemporaryFile(
                    prefix="job_soma_outerr_log_",
                    suffix=repr(job_id),
                    delete=False)
                job_stderr_file = job_stderr_file.name

                try:
                    self.wf_ctrl.retrieve_job_stdouterr(
                        job_id, job_stdout_file, job_stderr_file)
                    if job_name == 'job1':
                        # Test stdout
                        isSame, msg = identical_files(
                            job_stdout_file, self.wf_examples.lo_stdout[1])
                        self.assertTrue(isSame, msg)
                        # Test no stderr
                        self.assertTrue(
                            os.stat(job_stderr_file).st_size == 0,
                            "job stderr not empty : cf %s" % job_stderr_file)
                        # Test output files (indices 11 and 12 belong to job1)
                        if self.path_management == self.LOCAL_PATH:
                            isSame, msg = identical_files(
                                self.wf_examples.lo_out_model_file[11],
                                self.wf_examples.lo_file[11])
                            self.assertTrue(isSame, msg)
                            isSame, msg = identical_files(
                                self.wf_examples.lo_out_model_file[12],
                                self.wf_examples.lo_file[12])
                            self.assertTrue(isSame, msg)
                        if self.path_management == self.FILE_TRANSFER or \
                                self.path_management == self.SHARED_TRANSFER:
                            isSame, msg = identical_files(
                                self.wf_examples.lo_out_model_file[11],
                                self.wf_examples.tr_file[11].client_path)
                            self.assertTrue(isSame, msg)
                            isSame, msg = identical_files(
                                self.wf_examples.lo_out_model_file[12],
                                self.wf_examples.tr_file[12].client_path)
                            self.assertTrue(isSame, msg)
                            # For unknown reason, it raises some errors
                            # http://stackoverflow.com/questions/10496758/unexpected-end-of-file-and-error-importing-function-definition-error-running
                            # isSame,	msg	= identical_files(job_stderr_file,self.wf_examples.lo_stderr[1])
                            # self.failUnless(isSame == True)

                    if job_name in ['job2', 'job3', 'job4']:
                        # The job number is encoded in the name ('jobN')
                        job_nb = int(job_name[3])
                        # Test stdout
                        isSame, msg = identical_files(
                            job_stdout_file,
                            self.wf_examples.lo_stdout[job_nb])

                        self.assertTrue(isSame, msg)
                        # Test no stderr
                        self.assertTrue(
                            os.stat(job_stderr_file).st_size == 0,
                            "job stderr not empty : cf %s" % job_stderr_file)
                        # Test output files
                        if self.path_management == self.LOCAL_PATH:
                            isSame, msg = identical_files(
                                self.wf_examples.lo_out_model_file[job_nb],
                                self.wf_examples.lo_file[job_nb])
                            self.assertTrue(isSame, msg)
                        if self.path_management == self.FILE_TRANSFER or \
                                self.path_management == self.SHARED_TRANSFER:
                            isSame, msg = identical_files(
                                self.wf_examples.lo_out_model_file[job_nb],
                                self.wf_examples.tr_file[job_nb].client_path)
                            self.assertTrue(isSame, msg)

                    if job_name in [
                            'job1 with exception', 'job3 with exception'
                    ]:
                        # Test stdout
                        isSame, msg = identical_files(
                            job_stdout_file,
                            self.wf_examples.lo_stdout_exception_model)
                        # NOTE(review): msg is not passed here, unlike the
                        # other assertions of this method.
                        self.assertTrue(isSame)
                        # Test stderr: the last line must carry the
                        # exception raised on purpose by the example job
                        with open(job_stderr_file) as f:
                            lines = f.readlines()
                        print(lines)
                        isSame = (lines[-1] == 'Exception: Paf Boum '
                                  'Boum Bada Boum !!!\n')
                        self.assertTrue(isSame,
                                        "Job Exception: %s" % lines[-1])
                finally:
                    # Clean up the temporary stream files in every case
                    if os.path.exists(job_stdout_file):
                        os.unlink(job_stdout_file)
                    if os.path.exists(job_stderr_file):
                        os.unlink(job_stderr_file)
        del self.tested_job
Ejemplo n.º 14
0
    def test_result(self):
        """Stress-run the fake pipelineT1 workflow and expect a clean end.

        The number of iterations scales with the scheduler's processor
        count (10 per processor) and falls back to 100 without that hint.
        The workflow must end in WORKFLOW_DONE with no failed or aborted
        job, and every finished job must have empty stdout and stderr.
        """
        scheduler_conf = self.wf_ctrl.scheduler_config
        if hasattr(scheduler_conf, 'get_proc_nb'):
            n_iter = 10 * scheduler_conf.get_proc_nb()
        else:
            n_iter = 100
        workflow = self.wf_examples.example_fake_pipelineT1(n_iter)
        self.wf_id = self.wf_ctrl.submit_workflow(
            workflow=workflow, name=self.__class__.__name__)

        with_transfer = self.path_management in (self.FILE_TRANSFER,
                                                 self.SHARED_TRANSFER)
        if with_transfer:
            # Push input files to the computing resource.
            Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
        # Block until the whole workflow has run.
        Helper.wait_workflow(self.wf_id, self.wf_ctrl)
        if with_transfer:
            # Bring the produced files back to the client side.
            Helper.transfer_output_files(self.wf_id, self.wf_ctrl)

        self.assertTrue(self.wf_ctrl.workflow_status(self.wf_id)
                        == constants.WORKFLOW_DONE)
        self.assertTrue(
            not Helper.list_failed_jobs(self.wf_id, self.wf_ctrl))
        self.assertTrue(
            not Helper.list_failed_jobs(self.wf_id, self.wf_ctrl,
                                        include_aborted_jobs=True))

        (jobs_info, transfers_info, workflow_status, workflow_queue,
         tmp_files) = self.wf_ctrl.workflow_elements_status(self.wf_id)

        for (job_id, tmp_status, queue, exit_info, dates) in jobs_info:
            job_name, job_command, job_submission_date = \
                self.wf_ctrl.jobs([job_id])[job_id]

            self.tested_job = job_id

            if exit_info[0] == constants.FINISHED_REGULARLY:
                # Dump the job streams into temporary files for inspection.
                out_path = tempfile.NamedTemporaryFile(
                    prefix="job_soma_out_log_", suffix=repr(job_id),
                    delete=False).name
                err_path = tempfile.NamedTemporaryFile(
                    prefix="job_soma_outerr_log_", suffix=repr(job_id),
                    delete=False).name

                try:
                    self.wf_ctrl.retrieve_job_stdouterr(
                        job_id, out_path, err_path)
                    # Every job of this pipeline is expected to stay silent.
                    self.assertTrue(os.stat(out_path).st_size == 0,
                                    "job stdout not empty : cf %s" % out_path)
                    self.assertTrue(os.stat(err_path).st_size == 0,
                                    "job stderr not empty : cf %s" % err_path)
                finally:
                    # Always clean up the temporary stream files.
                    for path in (out_path, err_path):
                        if os.path.exists(path):
                            os.unlink(path)

        del self.tested_job
Ejemplo n.º 15
0
    def test_result(self):
        """Run the 'simple' example workflow and check its full results.

        Expectations asserted below: workflow status is WORKFLOW_DONE, no
        failed job (even including aborted ones); 'job1' has
        model-identical stdout, empty stderr and model-identical output
        files (indices 11 and 12); 'job2'..'job4' have model-identical
        stdout, empty stderr and model-identical output files (index N for
        'jobN').
        """
        workflow = self.wf_examples.example_simple()
        self.wf_id = self.wf_ctrl.submit_workflow(
            workflow=workflow,
            name=self.__class__.__name__)
        # Transfer input files if file transfer
        if self.path_management == self.FILE_TRANSFER or \
                self.path_management == self.SHARED_TRANSFER:
            Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
        # Wait for the workflow to finish
        Helper.wait_workflow(self.wf_id, self.wf_ctrl)
        # Transfer output files if file transfer
        if self.path_management == self.FILE_TRANSFER or \
                self.path_management == self.SHARED_TRANSFER:
            Helper.transfer_output_files(self.wf_id, self.wf_ctrl)

        status = self.wf_ctrl.workflow_status(self.wf_id)
        self.assertTrue(status == constants.WORKFLOW_DONE,
                        "workflow status : %s. Expected : %s" %
                        (status, constants.WORKFLOW_DONE))

        nb_failed_jobs = len(Helper.list_failed_jobs(self.wf_id,
                                                     self.wf_ctrl))
        self.assertTrue(nb_failed_jobs == 0,
                        "nb failed jobs : %i. Expected : %i" %
                        (nb_failed_jobs, 0))
        nb_failed_aborted_jobs = len(Helper.list_failed_jobs(
            self.wf_id,
            self.wf_ctrl,
            include_aborted_jobs=True))
        self.assertTrue(nb_failed_aborted_jobs == 0,
                        "nb failed jobs including aborted : %i. Expected : %i"
                        % (nb_failed_aborted_jobs, 0))

        (jobs_info, transfers_info, workflow_status, workflow_queue,
            tmp_files) = self.wf_ctrl.workflow_elements_status(self.wf_id)

        for (job_id, tmp_status, queue, exit_info, dates) in jobs_info:
            job_list = self.wf_ctrl.jobs([job_id])
            job_name, job_command, job_submission_date = job_list[job_id]

            self.tested_job = job_id

            if exit_info[0] == constants.FINISHED_REGULARLY:
                # To check job standard out and standard err
                job_stdout_file = tempfile.NamedTemporaryFile(
                    prefix="job_soma_out_log_",
                    suffix=repr(job_id),
                    delete=False)
                job_stdout_file = job_stdout_file.name
                job_stderr_file = tempfile.NamedTemporaryFile(
                    prefix="job_soma_outerr_log_",
                    suffix=repr(job_id),
                    delete=False)
                job_stderr_file = job_stderr_file.name

                try:
                    self.wf_ctrl.retrieve_job_stdouterr(job_id,
                                                        job_stdout_file,
                                                        job_stderr_file)
                    if job_name == 'job1':
                        # Test stdout
                        isSame, msg = identical_files(
                            job_stdout_file,
                            self.wf_examples.lo_stdout[1])
                        self.assertTrue(isSame, msg)
                        # Test no stderr; the message embeds the stderr
                        # content so a failure is self-explanatory.
                        # (Fixed: use a context manager instead of a bare
                        # open().read() that leaked the file handle.)
                        with open(job_stderr_file) as stderr_f:
                            msg = "job stderr not empty : cf %s\n" \
                                "stderr:\n---\n%s\n---" \
                                % (job_stderr_file, stderr_f.read())
                        self.assertTrue(os.stat(job_stderr_file).st_size == 0,
                                        msg)
                        # Test output files (indices 11 and 12 are job1's)
                        if self.path_management == self.LOCAL_PATH:
                            isSame, msg = identical_files(
                                self.wf_examples.lo_out_model_file[11],
                                self.wf_examples.lo_file[11])
                            self.assertTrue(isSame, msg)
                            isSame, msg = identical_files(
                                self.wf_examples.lo_out_model_file[12],
                                self.wf_examples.lo_file[12])
                            self.assertTrue(isSame, msg)
                        if self.path_management == self.FILE_TRANSFER or \
                                self.path_management == self.SHARED_TRANSFER:
                            isSame, msg = identical_files(
                                self.wf_examples.lo_out_model_file[11],
                                self.wf_examples.tr_file[11].client_path)
                            self.assertTrue(isSame, msg)
                            isSame, msg = identical_files(
                                self.wf_examples.lo_out_model_file[12],
                                self.wf_examples.tr_file[12].client_path)
                            self.assertTrue(isSame, msg)
                            # For unknown reason, it raises some errors
                            # http://stackoverflow.com/questions/10496758/unexpected-end-of-file-and-error-importing-function-definition-error-running
                            # isSame,	msg	= identical_files(job_stderr_file,self.wf_examples.lo_stderr[1])
                            # self.failUnless(isSame == True)

                    if job_name in ['job2', 'job3', 'job4']:
                        # The job number is encoded in the name ('jobN')
                        job_nb = int(job_name[3])
                        # Test stdout
                        isSame, msg = identical_files(
                            job_stdout_file,
                            self.wf_examples.lo_stdout[job_nb])
                        self.assertTrue(isSame, msg)
                        # Test no stderr
                        self.assertTrue(os.stat(job_stderr_file).st_size == 0,
                                        "job stderr not empty : cf %s" %
                                        job_stderr_file)
                        # Test output files
                        if self.path_management == self.LOCAL_PATH:
                            isSame, msg = identical_files(
                                self.wf_examples.lo_out_model_file[job_nb],
                                self.wf_examples.lo_file[job_nb])
                            self.assertTrue(isSame, msg)
                        if self.path_management == self.FILE_TRANSFER or \
                                self.path_management == self.SHARED_TRANSFER:
                            isSame, msg = identical_files(
                                self.wf_examples.lo_out_model_file[job_nb],
                                self.wf_examples.tr_file[job_nb].client_path)
                            self.assertTrue(isSame, msg)
                finally:
                    # Guard the unlinks so a retrieval failure does not get
                    # masked by a FileNotFoundError during cleanup
                    # (consistent with the other tests of this file).
                    if os.path.exists(job_stdout_file):
                        os.unlink(job_stdout_file)
                    if os.path.exists(job_stderr_file):
                        os.unlink(job_stderr_file)

        del self.tested_job
    def test_result(self):
        """Run the 'special transfer' example workflow and check it.

        Expectations asserted below: workflow status is WORKFLOW_DONE, no
        failed job (even including aborted ones); the 'dir_contents' job
        lists exactly the contents of the example input directory; the
        'multi file format test' job reproduces the model stdout. Both
        jobs must write nothing on stderr.
        """
        workflow = self.wf_examples.example_special_transfer()
        self.wf_id = self.wf_ctrl.submit_workflow(
            workflow=workflow,
            name=self.__class__.__name__)

        # Transfer input files
        Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
        # Wait for the workflow to finish
        Helper.wait_workflow(self.wf_id, self.wf_ctrl)
        # Transfer output files
        Helper.transfer_output_files(self.wf_id, self.wf_ctrl)

        # (Fixed: a redundant workflow_status() call before the output
        # transfer was removed; its result was immediately overwritten.)
        status = self.wf_ctrl.workflow_status(self.wf_id)
        self.assertTrue(status == constants.WORKFLOW_DONE,
                        "workflow status : %s. Expected : %s" %
                        (status, constants.WORKFLOW_DONE))
        nb_failed_jobs = len(Helper.list_failed_jobs(
            self.wf_id,
            self.wf_ctrl))
        self.assertTrue(nb_failed_jobs == 0,
                        "nb failed jobs : %i. Expected : %i" %
                        (nb_failed_jobs, 0))
        nb_failed_aborted_jobs = len(Helper.list_failed_jobs(
            self.wf_id,
            self.wf_ctrl,
            include_aborted_jobs=True))
        self.assertTrue(nb_failed_aborted_jobs == 0,
                        "nb failed jobs including aborted : %i. Expected : %i"
                        % (nb_failed_aborted_jobs, 0))

        (jobs_info, transfers_info, workflow_status, workflow_queue,
            tmp_files) = self.wf_ctrl.workflow_elements_status(self.wf_id)

        for (job_id, tmp_status, queue, exit_info, dates) in jobs_info:
            job_list = self.wf_ctrl.jobs([job_id])
            job_name, job_command, job_submission_date = job_list[job_id]

            self.tested_job = job_id

            if exit_info[0] == constants.FINISHED_REGULARLY:
                # To check job standard out and standard err
                job_stdout_file = tempfile.NamedTemporaryFile(
                    prefix="job_soma_out_log_",
                    suffix=repr(job_id),
                    delete=False)
                job_stdout_file = job_stdout_file.name
                job_stderr_file = tempfile.NamedTemporaryFile(
                    prefix="job_soma_outerr_log_",
                    suffix=repr(job_id),
                    delete=False)
                job_stderr_file = job_stderr_file.name

                try:
                    self.wf_ctrl.retrieve_job_stdouterr(job_id,
                                                        job_stdout_file,
                                                        job_stderr_file)
                    if job_name == 'dir_contents':
                        # Test job standard out against a listing of the
                        # example input directory built on the client side
                        with open(job_stdout_file, 'r+') as f:
                            dir_contents = f.readlines()
                        dir_path_in = self.wf_examples.lo_in_dir
                        full_path_list = []
                        for element in os.listdir(dir_path_in):
                            full_path_list.append(os.path.join(dir_path_in,
                                                               element))
                        dir_contents_model = list_contents(full_path_list, [])
                        self.assertTrue(
                            sorted(dir_contents) == sorted(dir_contents_model))
                        # Test no stderr
                        self.assertTrue(os.stat(job_stderr_file).st_size == 0,
                                        "job stderr not empty : cf %s" %
                                        job_stderr_file)

                    if job_name == 'multi file format test':
                        # Test job standard out
                        isSame, msg = identical_files(
                            job_stdout_file,
                            self.wf_examples.lo_mff_stdout)
                        self.assertTrue(isSame, msg)
                        # Test no stderr
                        self.assertTrue(os.stat(job_stderr_file).st_size == 0,
                                        "job stderr not empty : cf %s" %
                                        job_stderr_file)
                finally:
                    # Guard the unlinks so a retrieval failure does not get
                    # masked by a FileNotFoundError during cleanup
                    # (consistent with the other tests of this file).
                    if os.path.exists(job_stdout_file):
                        os.unlink(job_stdout_file)
                    if os.path.exists(job_stderr_file):
                        os.unlink(job_stderr_file)

        del self.tested_job
Ejemplo n.º 17
0
    def test_result(self):
        """Run the simple-exception2 workflow and verify failure accounting.

        The workflow deliberately contains a job raising an exception:
        exactly one failed job is expected (two when aborted jobs are
        included).  For each regularly-finished job, stdout/stderr and the
        produced output files are checked against reference models.
        """
        workflow = self.wf_examples.example_simple_exception2()
        self.wf_id = self.wf_ctrl.submit_workflow(workflow=workflow,
                                                  name=self.__class__.__name__,
                                                  queue='Cati_run4')
        # Transfer input files if file transfer
        if self.path_management in (self.FILE_TRANSFER, self.SHARED_TRANSFER):
            Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
        # Wait for the workflow to finish
        Helper.wait_workflow(self.wf_id, self.wf_ctrl)
        # Transfer output files if file transfer
        if self.path_management in (self.FILE_TRANSFER, self.SHARED_TRANSFER):
            Helper.transfer_output_files(self.wf_id, self.wf_ctrl)

        status = self.wf_ctrl.workflow_status(self.wf_id)
        self.assertTrue(
            status == constants.WORKFLOW_DONE,
            "workflow status : %s. Expected : %s" %
            (status, constants.WORKFLOW_DONE))
        # One job fails with an exception by design.
        nb_failed_jobs = len(Helper.list_failed_jobs(self.wf_id, self.wf_ctrl))
        self.assertTrue(
            nb_failed_jobs == 1,
            "nb failed jobs : %i. Expected : %i" % (nb_failed_jobs, 1))
        # Including aborted jobs, a dependent job is aborted as well.
        nb_failed_aborted_jobs = len(
            Helper.list_failed_jobs(self.wf_id,
                                    self.wf_ctrl,
                                    include_aborted_jobs=True))
        self.assertTrue(
            nb_failed_aborted_jobs == 2,
            "nb failed jobs including aborted : %i. Expected : %i" %
            (nb_failed_aborted_jobs, 2))

        (jobs_info, transfers_info, workflow_status, workflow_queue,
         tmp_files) = self.wf_ctrl.workflow_elements_status(self.wf_id)

        for (job_id, tmp_status, queue, exit_info, dates, drmaa_id) \
                in jobs_info:
            job_list = self.wf_ctrl.jobs([job_id])
            job_name, job_command, job_submission_date = job_list[job_id]

            self.tested_job = job_id

            if exit_info[0] == constants.FINISHED_REGULARLY:
                # Create closed temporary files for retrieve_job_stdouterr
                # to fill in.  Using the context manager closes the handle
                # immediately, avoiding the file-descriptor leak of keeping
                # the NamedTemporaryFile object open and only taking .name.
                with tempfile.NamedTemporaryFile(
                        prefix="job_soma_out_log_",
                        suffix=repr(job_id),
                        delete=False) as f:
                    job_stdout_file = f.name
                with tempfile.NamedTemporaryFile(
                        prefix="job_soma_outerr_log_",
                        suffix=repr(job_id),
                        delete=False) as f:
                    job_stderr_file = f.name

                try:
                    self.wf_ctrl.retrieve_job_stdouterr(
                        job_id, job_stdout_file, job_stderr_file)
                    if job_name == 'job1':
                        # Test stdout
                        isSame, msg = identical_files(
                            job_stdout_file, self.wf_examples.lo_stdout[1])
                        self.assertTrue(isSame, msg)
                        # Test no stderr
                        self.assertTrue(
                            os.stat(job_stderr_file).st_size == 0,
                            "job stderr not empty : cf %s" % job_stderr_file)
                        # Test output files
                        if self.path_management == self.LOCAL_PATH:
                            isSame, msg = identical_files(
                                self.wf_examples.lo_out_model_file[11],
                                self.wf_examples.lo_file[11])
                            self.assertTrue(isSame, msg)
                            isSame, msg = identical_files(
                                self.wf_examples.lo_out_model_file[12],
                                self.wf_examples.lo_file[12])
                            self.assertTrue(isSame, msg)
                        if self.path_management in (self.FILE_TRANSFER,
                                                    self.SHARED_TRANSFER):
                            isSame, msg = identical_files(
                                self.wf_examples.lo_out_model_file[11],
                                self.wf_examples.tr_file[11].client_path)
                            self.assertTrue(isSame, msg)
                            isSame, msg = identical_files(
                                self.wf_examples.lo_out_model_file[12],
                                self.wf_examples.tr_file[12].client_path)
                            self.assertTrue(isSame, msg)

                    if job_name == 'job2':
                        # Test stdout
                        isSame, msg = identical_files(
                            job_stdout_file, self.wf_examples.lo_stdout[2])
                        self.assertTrue(isSame, msg)
                        # Test no stderr
                        self.assertTrue(
                            os.stat(job_stderr_file).st_size == 0,
                            "job stderr not empty : cf %s" % job_stderr_file)
                        # Test output files
                        if self.path_management == self.LOCAL_PATH:
                            isSame, msg = identical_files(
                                self.wf_examples.lo_out_model_file[2],
                                self.wf_examples.lo_file[2])
                            self.assertTrue(isSame, msg)
                        if self.path_management in (self.FILE_TRANSFER,
                                                    self.SHARED_TRANSFER):
                            isSame, msg = identical_files(
                                self.wf_examples.lo_out_model_file[2],
                                self.wf_examples.tr_file[2].client_path)
                            self.assertTrue(isSame, msg)

                    if job_name == 'job3 with exception':
                        # Test stdout
                        isSame, msg = identical_files(
                            job_stdout_file,
                            self.wf_examples.lo_stdout_exception_model)
                        self.assertTrue(isSame, msg)
                        # Test the last line of stderr
                        with open(job_stderr_file) as f:
                            lines = f.readlines()
                        expected_error = 'Exception: Paf Boum Boum Bada Boum !!!\n'
                        isSame = (lines[-1] == expected_error)
                        self.assertTrue(
                            isSame, "Job exception : %s. Expected : %s" %
                            (lines[-1], expected_error))
                finally:
                    os.unlink(job_stdout_file)
                    os.unlink(job_stderr_file)

        del self.tested_job
Ejemplo n.º 18
0
    def test_result(self):
        """Run the special-transfer workflow and verify jobs and outputs.

        Expects a fully successful run (no failed or aborted jobs), then
        checks the stdout of the "dir_contents" job against the actual
        input-directory listing, and the "multi file format test" job's
        stdout against its reference model.
        """
        workflow = self.wf_examples.example_special_transfer()
        self.wf_id = self.wf_ctrl.submit_workflow(workflow=workflow, name=self.__class__.__name__)

        # Transfer input files
        Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
        # Wait for the worklow to finish
        Helper.wait_workflow(self.wf_id, self.wf_ctrl)
        # Transfer output files
        Helper.transfer_output_files(self.wf_id, self.wf_ctrl)

        # Query the status once, after the output transfer (the previous
        # version also queried it before the transfer and discarded the
        # result — a redundant round-trip).
        status = self.wf_ctrl.workflow_status(self.wf_id)
        self.assertTrue(
            status == constants.WORKFLOW_DONE, "workflow status : %s. Expected : %s" % (status, constants.WORKFLOW_DONE)
        )
        nb_failed_jobs = len(Helper.list_failed_jobs(self.wf_id, self.wf_ctrl))
        self.assertTrue(nb_failed_jobs == 0, "nb failed jobs : %i. Expected : %i" % (nb_failed_jobs, 0))
        nb_failed_aborted_jobs = len(Helper.list_failed_jobs(self.wf_id, self.wf_ctrl, include_aborted_jobs=True))
        self.assertTrue(
            nb_failed_aborted_jobs == 0,
            "nb failed jobs including aborted : %i. Expected : %i" % (nb_failed_aborted_jobs, 0),
        )

        (jobs_info, transfers_info, workflow_status, workflow_queue, tmp_files) = self.wf_ctrl.workflow_elements_status(
            self.wf_id
        )

        for (job_id, tmp_status, queue, exit_info, dates) in jobs_info:
            job_list = self.wf_ctrl.jobs([job_id])
            job_name, job_command, job_submission_date = job_list[job_id]

            self.tested_job = job_id

            if exit_info[0] == constants.FINISHED_REGULARLY:
                # Create closed temporary files for retrieve_job_stdouterr to
                # fill in; the context manager closes the handle immediately,
                # fixing the descriptor leak of keeping the NamedTemporaryFile
                # object open and only taking .name.
                with tempfile.NamedTemporaryFile(
                    prefix="job_soma_out_log_", suffix=repr(job_id), delete=False
                ) as f:
                    job_stdout_file = f.name
                with tempfile.NamedTemporaryFile(
                    prefix="job_soma_outerr_log_", suffix=repr(job_id), delete=False
                ) as f:
                    job_stderr_file = f.name

                try:
                    self.wf_ctrl.retrieve_job_stdouterr(job_id, job_stdout_file, job_stderr_file)
                    if job_name == "dir_contents":
                        # Test job standard out
                        with open(job_stdout_file, "r+") as f:
                            dir_contents = f.readlines()
                        dir_path_in = self.wf_examples.lo_in_dir
                        full_path_list = []
                        for element in os.listdir(dir_path_in):
                            full_path_list.append(os.path.join(dir_path_in, element))
                        dir_contents_model = list_contents(full_path_list, [])
                        self.assertTrue(sorted(dir_contents) == sorted(dir_contents_model))
                        # Test no stderr
                        self.assertTrue(
                            os.stat(job_stderr_file).st_size == 0, "job stderr not empty : cf %s" % job_stderr_file
                        )

                    if job_name == "multi file format test":
                        # Test job standard out
                        isSame, msg = identical_files(job_stdout_file, self.wf_examples.lo_mff_stdout)
                        self.assertTrue(isSame, msg)
                        # Test no stderr
                        self.assertTrue(
                            os.stat(job_stderr_file).st_size == 0, "job stderr not empty : cf %s" % job_stderr_file
                        )
                finally:
                    os.unlink(job_stdout_file)
                    os.unlink(job_stderr_file)

        del self.tested_job
Ejemplo n.º 19
0
    def test_result(self):
        """Run a workflow of 20 one-second sleep jobs and verify a clean run.

        Expects no failed or aborted jobs, and empty stdout/stderr for every
        regularly-finished job.
        """
        nb = 20
        time_sleep = 1

        workflow = self.wf_examples.example_n_jobs(nb=nb, time=time_sleep)
        self.wf_id = self.wf_ctrl.submit_workflow(workflow=workflow,
                                                  name=self.__class__.__name__)
        # Transfer input files if file transfer
        if self.path_management in (self.FILE_TRANSFER, self.SHARED_TRANSFER):
            Helper.transfer_input_files(self.wf_id, self.wf_ctrl)
        # Wait for the workflow to finish
        Helper.wait_workflow(self.wf_id, self.wf_ctrl)
        # Transfer output files if file transfer
        if self.path_management in (self.FILE_TRANSFER, self.SHARED_TRANSFER):
            Helper.transfer_output_files(self.wf_id, self.wf_ctrl)

        status = self.wf_ctrl.workflow_status(self.wf_id)
        self.assertTrue(
            status == constants.WORKFLOW_DONE,
            "workflow status : %s. Expected : %s" %
            (status, constants.WORKFLOW_DONE))
        nb_failed_jobs = len(Helper.list_failed_jobs(self.wf_id, self.wf_ctrl))
        self.assertTrue(
            nb_failed_jobs == 0,
            "nb failed jobs : %i. Expected : %i" % (nb_failed_jobs, 0))
        nb_failed_aborted_jobs = len(
            Helper.list_failed_jobs(self.wf_id,
                                    self.wf_ctrl,
                                    include_aborted_jobs=True))
        self.assertTrue(
            nb_failed_aborted_jobs == 0,
            "nb failed jobs including aborted : %i. Expected : %i" %
            (nb_failed_aborted_jobs, 0))

        (jobs_info, transfers_info, workflow_status, workflow_queue,
         tmp_files) = self.wf_ctrl.workflow_elements_status(self.wf_id)

        for (job_id, tmp_status, queue, exit_info, dates, drmaa_id) \
                in jobs_info:
            job_list = self.wf_ctrl.jobs([job_id])
            job_name, job_command, job_submission_date = job_list[job_id]

            self.tested_job = job_id

            if exit_info[0] == constants.FINISHED_REGULARLY:
                # Create closed temporary files for retrieve_job_stdouterr
                # to fill in; the context manager closes the handle
                # immediately, fixing the descriptor leak of keeping the
                # NamedTemporaryFile object open and only taking .name.
                with tempfile.NamedTemporaryFile(
                        prefix="job_soma_out_log_",
                        suffix=repr(job_id),
                        delete=False) as f:
                    job_stdout_file = f.name
                with tempfile.NamedTemporaryFile(
                        prefix="job_soma_outerr_log_",
                        suffix=repr(job_id),
                        delete=False) as f:
                    job_stderr_file = f.name

                try:
                    self.wf_ctrl.retrieve_job_stdouterr(
                        job_id, job_stdout_file, job_stderr_file)
                    # Read the contents up front with context managers so the
                    # assertion messages do not leak open file handles
                    # (previously open(...).read() was never closed).
                    with open(job_stdout_file) as f:
                        stdout_contents = f.read()
                    with open(job_stderr_file) as f:
                        stderr_contents = f.read()
                    # Test stdout
                    self.assertTrue(
                        os.stat(job_stdout_file).st_size == 0,
                        "job stdout not empty : file: %s, "
                        "contents:\n%s" %
                        (job_stdout_file, stdout_contents))
                    # Test no stderr
                    self.assertTrue(
                        os.stat(job_stderr_file).st_size == 0,
                        "job stderr not empty : file %s, "
                        "contents:\n%s" %
                        (job_stderr_file, stderr_contents))
                finally:
                    os.unlink(job_stdout_file)
                    os.unlink(job_stderr_file)

        del self.tested_job
Ejemplo n.º 20
0
echo %s
""" % test_bash_script
    fileout.write(filecontent)
    fileout.close()
    os.chdir(cur_work_dir)

    job1 = Job(command=[u"touch", test_filepath],
               name="epac_job_test",
               working_directory=tmp_work_dir_path)
    job2 = Job(command=["%s/readfile" % cur_file_dir, test_bash_script],
               name="epac_job_test",
               working_directory=tmp_work_dir_path)

    soma_workflow = Workflow(jobs=[job1, job2])

    resource_id = socket.gethostname()
    controller = WorkflowController(resource_id, "", "")
    ## run soma-workflow
    ## =================
    wf_id = controller.submit_workflow(workflow=soma_workflow,
                                       name="epac workflow")
    Helper.wait_workflow(wf_id, controller)
    nb_failed_jobs = len(Helper.list_failed_jobs(wf_id, controller))
    if nb_failed_jobs > 0:
        raise ValueError("Soma-workflow error, cannot use working directory")

    if not os.path.isfile(os.path.join(tmp_work_dir_path, test_filepath)):
        raise ValueError("Soma-workflow cannot define working directory")
    else:
        print("OK for creating new file in working directory")