def execute(self) -> Tuple[UUID, Dict, Optional[Exception]]:
    """
    :return: Run ID, dict with new files, exception if there is any
    """
    run_id = uuid4()

    # Run everything inside the file manager's root directory and silence
    # the default standard streams.
    runtime_context = RuntimeContext()
    runtime_context.outdir = self.file_manager.ROOT_DIRECTORY
    runtime_context.basedir = self.file_manager.ROOT_DIRECTORY
    runtime_context.default_stdin = subprocess.DEVNULL
    runtime_context.default_stdout = subprocess.DEVNULL
    runtime_context.default_stderr = subprocess.DEVNULL
    os.chdir(self.file_manager.ROOT_DIRECTORY)

    factory = Factory(runtime_context=runtime_context)
    executable = factory.make(self._workflow_path)

    # Merge the job-order data files; on key collisions, earlier files win.
    data = {}
    for data_file in self._data_paths:
        with open(data_file) as f:
            new_data = yaml.load(f, Loader=yaml.Loader)
            data = {**new_data, **data}

    try:
        result: Dict = executable(**data)
        return run_id, result, None
    except Exception as e:
        traceback.print_exc(file=sys.stderr)
        return run_id, {}, e
def test_sequential_workflow(tmp_path: Path) -> None:
    test_file = "tests/wf/count-lines1-wf.cwl"
    executor = MultithreadedJobExecutor()
    runtime_context = RuntimeContext()
    runtime_context.outdir = str(tmp_path)
    runtime_context.select_resources = executor.select_resources
    factory = get_windows_safe_factory(
        executor=executor, runtime_context=runtime_context
    )
    echo = factory.make(get_data(test_file))
    file_contents = {"class": "File", "location": get_data("tests/wf/whale.txt")}
    assert echo(file1=file_contents) == {"count_output": 16}
def test_sequential_workflow(tmpdir):
    test_file = "tests/wf/count-lines1-wf.cwl"
    executor = MultithreadedJobExecutor()
    runtime_context = RuntimeContext()
    runtime_context.outdir = str(tmpdir)
    runtime_context.select_resources = executor.select_resources
    factory = get_windows_safe_factory(
        executor=executor, runtime_context=runtime_context)
    echo = factory.make(get_data(test_file))
    file_contents = {"class": "File", "location": get_data("tests/wf/whale.txt")}
    assert echo(file1=file_contents) == {"count_output": 16}
def test_scatter_output_filenames(tmpdir: py.path.local) -> None:
    """If a scatter step produces identically named output then confirm that the final output is renamed correctly."""
    cwd = tmpdir.chdir()
    rtc = RuntimeContext()
    rtc.outdir = str(cwd)
    factory = cwltool.factory.Factory(runtime_context=rtc)
    output_names = ["output.txt", "output.txt_2", "output.txt_3"]
    scatter_workflow = factory.make(get_data("tests/scatter_numbers.cwl"))
    result = scatter_workflow(range=3)
    assert isinstance(result, dict)
    assert "output" in result
    locations = sorted([element["location"] for element in result["output"]])
    assert (
        locations[0].endswith("output.txt")
        and locations[1].endswith("output.txt_2")
        and locations[2].endswith("output.txt_3")
    ), "Locations {} do not end with {}".format(locations, output_names)
def test_repo2cwl(self):
    output_dir = tempfile.mkdtemp()
    print(f'output directory:\t{output_dir}')

    # Resolve the jupyter-repo2cwl console script and convert the repository.
    repo2cwl = pkg_resources.load_entry_point('ipython2cwl', 'console_scripts', 'jupyter-repo2cwl')
    self.assertEqual(0, repo2cwl(['-o', output_dir, self.repo_like_dir]))
    self.assertListEqual(
        ['example1.cwl'],
        [f for f in os.listdir(output_dir) if not f.startswith('.')])

    with open(os.path.join(output_dir, 'example1.cwl')) as f:
        print('workflow file')
        print(20 * '=')
        print(f.read())
        print(20 * '=')

    # Execute the generated CWL tool through cwltool's Python factory.
    runtime_context = RuntimeContext()
    runtime_context.outdir = output_dir
    runtime_context.basedir = output_dir
    runtime_context.default_stdout = DEVNULL
    runtime_context.default_stderr = DEVNULL
    fac = cwltool.factory.Factory(runtime_context=runtime_context)
    example1_tool = fac.make(os.path.join(output_dir, 'example1.cwl'))
    result = example1_tool(
        datafilename={
            'class': 'File',
            'location': os.path.join(self.repo_like_dir, 'data.yaml')
        },
        messages=["hello", "test", "!!!"])

    # Output locations are file:// URIs; strip the 7-character scheme prefix
    # to get a local path.
    with open(result['results_filename']['location'][7:]) as f:
        new_data = yaml.safe_load(f)
    self.assertDictEqual({
        'entry1': 2,
        'entry2': 'foo',
        'entry3': 'bar'
    }, new_data)

    with open(result['messages_outputs']['location'][7:]) as f:
        message = f.read()
    self.assertEqual("hello test !!!", message)

    shutil.rmtree(output_dir)