def test_ix_attach_to(self, pipeline_settings, example_pipelines, tmpdir):
    sparcc_params = setup_external(
        pipeline_settings,
        example_pipelines,
        module="network_inference",
        pipeline="network_inference.correlation.sparcc",
    )
    json_params = setup_internal(pipeline_settings, example_pipelines)
    # Replace each "interaction_table" input with a location-less copy so
    # that attach_to has to wire it to the upstream process's output
    to_remove = [
        io_elem
        for io_elem in json_params.input
        if io_elem.datatype in {"interaction_table"}
    ]
    for remove in to_remove:
        json_params.input.remove(remove)
        json_params.input.add(Input(datatype=remove.datatype, format=remove.format))
    sparcc_process = Process(sparcc_params, profile="local", output_location=".")
    json_process = Process(json_params, profile="local", output_location=".")
    output_dir = pathlib.Path.cwd() / "tests/data"
    sparcc_process.update_location(str(output_dir), "input")
    sparcc_process.build(tmpdir)
    json_process.attach_to(sparcc_process)
    json_process.update_location(str(output_dir), "input")
    # The upstream outputs do not exist yet, so building must fail
    with pytest.raises(FileNotFoundError):
        json_process.build(tmpdir)
    sparcc_process.run()
    sparcc_process.wait()
    # Once the upstream process has finished, the build succeeds
    json_process.build(tmpdir)
    json_process.run()
    json_process.wait()
def test_attach_to(self, pipeline_settings, example_pipelines, tmpdir):
    previous_params = setup_internal(
        pipeline_settings,
        example_pipelines,
        module="otu_processing",
        pipeline="otu_processing.transform.normalize",
    )
    previous_process = Process(previous_params, profile="local", output_location=".")
    curr_params = setup_internal(
        pipeline_settings,
        example_pipelines,
        module="otu_processing",
        pipeline="otu_processing.filter.group",
    )
    curr_process = Process(curr_params, profile="local", output_location=".")
    output_dir = pathlib.Path.cwd() / "tests/data"
    previous_process.update_location(str(output_dir), "input")
    previous_process.build(tmpdir)
    curr_process.attach_to(previous_process)
    # Building before the previous process has produced its outputs must fail
    with pytest.raises(FileNotFoundError):
        curr_process.build(tmpdir)
    previous_process.run()
    # Reading output ensures the run has completed before building downstream
    previous_process.output
    curr_process.update_location(str(output_dir), "input")
    curr_process.build(tmpdir)
    curr_process.run()
def test_clean(self, pipeline_settings, example_pipelines, tmpdir):
    params = setup_internal(
        pipeline_settings,
        example_pipelines,
        module="otu_processing",
        pipeline="otu_processing.filter.group",
    )
    process = Process(params, profile="local", output_location=".")
    process_dir = tmpdir.mkdir("test_process_clean")
    output_dir = pathlib.Path.cwd() / "tests/data"
    process.update_location(str(output_dir), "input")
    process.build(process_dir)
    cmd = process.cmd
    # Collect the file paths referenced in the command (skip option flags)
    paths = [p for p in str(cmd).split(" ") if not p.startswith("-") and "/" in p]
    process.run()
    process.wait()
    for path in paths:
        assert pathlib.Path(path).exists()
    # clean("all") should remove every generated file
    process.clean("all")
    for path in paths:
        assert not pathlib.Path(path).exists()
    process.build(process_dir)
    # clean("work_dir") should remove only paths inside the work directory;
    # log files are excluded from the existence check
    process.clean("work_dir")
    for path in paths:
        if "work" in path:
            assert not pathlib.Path(path).exists()
        elif not path.endswith(".log"):
            assert pathlib.Path(path).exists()
def test_run(self, pipeline_settings, example_pipelines, tmpdir):
    params = setup_external(
        pipeline_settings,
        example_pipelines,
        module="network_inference",
        pipeline="network_inference.correlation.sparcc",
    )
    process = Process(params, profile="local", output_location=".")
    process_dir = tmpdir.mkdir("test_process_run")
    output_dir = pathlib.Path.cwd() / "tests/data"
    process.update_location(str(output_dir), "input")
    process.build(process_dir)
    process.run()
    assert process.output
    assert not process.error
    process.log()
    # Every declared output must exist; glob patterns must match at least one file
    for output in process.params.output:
        str_loc = str(output.location)
        if "*" in str_loc:
            ind = str_loc.find("*")
            files = list(pathlib.Path(str_loc[:ind]).glob(str_loc[ind:]))
            assert len(files) > 0
        else:
            assert output.location.exists()