Example #1
0
    def setUpClass(cls):
        """Prepare shared fixtures for the test class.

        Resolves the registered pipeline by id, materializes temp files for
        each named entry point, builds the binding graph, creates a scratch
        output dir, and loads workflow options and the SGE cluster template.
        """
        pipeline = REGISTERED_PIPELINES[cls.PB_PIPELINE_ID]
        log.debug(pipeline)

        cls.bindings = pipeline.all_bindings
        # dict.items() is portable across Python 2/3 (iteritems is Py2-only)
        cls.EPOINTS_D = {
            k: get_temp_file(v)
            for k, v in cls.EPOINTS_NAMES.items()
        }

        log.debug(pprint.pformat(cls.bindings, indent=4))
        log.debug(
            "Number of registered tasks {n}".format(n=len(REGISTERED_TASKS)))

        cls.bgraph = B.binding_strs_to_binding_graph(REGISTERED_TASKS,
                                                     cls.bindings)
        # Developer-specific scratch location; dir=None falls back to the
        # system default temp directory.
        d = os.path.expanduser('~/scratch/tmp_pbsmrtpipe') if getpass.getuser(
        ) == 'mkocher' else None
        cls.output_dir = tempfile.mkdtemp(prefix='job_test_', dir=d)

        preset_record = IO.parse_pipeline_preset_xml(
            os.path.join(TEST_DATA_DIR, cls.PRESET_XML))
        cls.workflow_options = preset_record.to_workflow_level_opt()

        # leave this for now
        cls.envs = []
        cls.cluster_engine = C.load_installed_cluster_templates_by_name("sge")
Example #2
0
    def test_hello_world_job(self):
        """Submit a trivial shell script via the installed SGE cluster
        template and assert the rendered qsub command exits with code 0.

        On success the scratch output dir is removed; on failure the qsub
        stderr file is logged to aid debugging.
        """
        r = C.load_installed_cluster_templates_by_name('sge')
        log.debug(r)

        job_name = "int_job_hello"
        output_dir = get_temp_cluster_dir(job_name)

        cmd = "pbsmrtpipe --help"

        def _to_p(x_):
            # Resolve a file name relative to the job output dir.
            return os.path.join(output_dir, x_)

        sh_script = _to_p('qsub_test.sh')

        with open(sh_script, 'w') as f:
            f.write(cmd + "\n")

        # qsub output file paths. Kept distinct from the captured stdout/
        # stderr text returned by run_command below: the original code
        # reused the names, so os.path.exists(stderr) later tested the
        # captured output string instead of the file path.
        stdout_path = _to_p('stdout')
        stderr_path = _to_p('stderr')

        # Pre-create empty output files for qsub to append to.
        for x in [stdout_path, stderr_path]:
            with open(x, 'w') as f:
                f.write("")

        log.info(sh_script)
        cmd = r.render("start",
                       sh_script,
                       'test_job_01',
                       stdout=stdout_path,
                       stderr=stderr_path,
                       nproc=1)
        log.debug("Running qsub command '{c}'".format(c=cmd))
        time_out = 60 * 5
        rcode, stdout, stderr, run_time = run_command(cmd,
                                                      None,
                                                      None,
                                                      time_out=time_out)
        log.debug((rcode, stdout, stderr, run_time))

        if rcode != 0:
            log.info(stdout)
            log.error(stderr)
            log.error("Failed Integration Job {i} with exit code {r}".format(
                i=job_name, r=rcode))
            if os.path.exists(stderr_path):
                with open(stderr_path, 'r') as f:
                    log.error(f.read())
        else:
            try:
                shutil.rmtree(output_dir)
            except Exception as e:
                # str(e) instead of e.message: .message is deprecated in
                # Python 2.7 and absent in Python 3.
                log.warn("Unable to cleanup testdir {o}. {m}".format(
                    o=output_dir, m=str(e)))

        self.assertEqual(rcode, 0, stderr)
Example #3
0
    def test_hello_world_job(self):
        """Submit a trivial shell script via the installed SGE cluster
        template and assert the rendered qsub command exits with code 0.

        run_command is given temporary files to stream into; on success the
        scratch output dir is removed, on failure the qsub stderr file is
        logged to aid debugging.
        """
        r = C.load_installed_cluster_templates_by_name('sge')
        log.debug(r)

        job_name = "int_job_hello"
        output_dir = get_temp_cluster_dir(job_name)

        cmd = "pbsmrtpipe --help"

        def _to_p(x_):
            # Resolve a file name relative to the job output dir.
            return os.path.join(output_dir, x_)

        sh_script = _to_p('qsub_test.sh')

        with open(sh_script, 'w') as f:
            f.write(cmd + "\n")

        # qsub output file paths. Kept distinct from the captured stdout/
        # stderr text returned by run_command below: the original code
        # reused the names, so os.path.exists(stderr) later tested the
        # captured output string instead of the file path.
        stdout_path = _to_p('stdout')
        stderr_path = _to_p('stderr')

        # Pre-create empty output files for qsub to append to.
        for x in [stdout_path, stderr_path]:
            with open(x, 'w') as f:
                f.write("")

        log.info(sh_script)
        cmd = r.render("start", sh_script, 'test_job_01', stdout=stdout_path, stderr=stderr_path, nproc=1)
        log.debug("Running qsub command '{c}'".format(c=cmd))
        time_out = 60 * 5
        with tempfile.TemporaryFile() as stdout_tmp:
            with tempfile.TemporaryFile() as stderr_tmp:
                rcode, stdout, stderr, run_time = run_command(cmd, stdout_tmp, stderr_tmp, time_out=time_out)
        log.debug((rcode, stdout, stderr, run_time))

        if rcode != 0:
            log.info(stdout)
            log.error(stderr)
            log.error("Failed Integration Job {i} with exit code {r}".format(i=job_name, r=rcode))
            if os.path.exists(stderr_path):
                with open(stderr_path, 'r') as f:
                    log.error(f.read())
        else:
            try:
                shutil.rmtree(output_dir)
            except Exception as e:
                # str(e) instead of e.message: .message is deprecated in
                # Python 2.7 and absent in Python 3.
                log.warn("Unable to cleanup testdir {o}. {m}".format(o=output_dir, m=str(e)))

        self.assertEqual(rcode, 0, stderr)
Example #4
0
    def setUpClass(cls):
        """Prepare shared fixtures for the test class.

        Resolves the registered pipeline by id, materializes temp files for
        each named entry point, builds the binding graph, creates a scratch
        output dir, and loads workflow options and the SGE cluster template.
        """
        pipeline = REGISTERED_PIPELINES[cls.PB_PIPELINE_ID]
        log.debug(pipeline)

        cls.bindings = pipeline.all_bindings
        # dict.items() is portable across Python 2/3 (iteritems is Py2-only)
        cls.EPOINTS_D = {k: get_temp_file(v) for k, v in cls.EPOINTS_NAMES.items()}

        log.debug(pprint.pformat(cls.bindings, indent=4))
        log.debug("Number of registered tasks {n}".format(n=len(REGISTERED_TASKS)))

        cls.bgraph = B.binding_strs_to_binding_graph(REGISTERED_TASKS, cls.bindings)
        # Developer-specific scratch location; dir=None falls back to the
        # system default temp directory.
        d = os.path.expanduser('~/scratch/tmp_pbsmrtpipe') if getpass.getuser() == 'mkocher' else None
        cls.output_dir = tempfile.mkdtemp(prefix='job_test_', dir=d)

        preset_record = IO.parse_pipeline_preset_xml(os.path.join(TEST_DATA_DIR, cls.PRESET_XML))
        cls.workflow_options = preset_record.to_workflow_level_opt()

        # leave this for now
        cls.envs = []
        cls.cluster_engine = C.load_installed_cluster_templates_by_name("sge")