def test_ix_attach_to(self, pipeline_settings, example_pipelines, tmpdir):
    """Attach a json process to a sparcc process and verify build ordering.

    Building the downstream (json) process must raise ``FileNotFoundError``
    while the upstream (sparcc) process has not yet produced its outputs,
    and must succeed once the upstream run completes.
    """
    sparcc_params = setup_external(
        pipeline_settings,
        example_pipelines,
        module="network_inference",
        pipeline="network_inference.correlation.sparcc",
    )
    json_params = setup_internal(pipeline_settings, example_pipelines)
    # Recreate every interaction_table input as a fresh Input carrying only
    # datatype/format (presumably so it has to be satisfied via attach_to —
    # NOTE(review): confirm against the Input/Params API).
    stale_inputs = [
        io_elem
        for io_elem in json_params.input
        if io_elem.datatype in {"interaction_table"}
    ]
    for stale in stale_inputs:
        json_params.input.remove(stale)
        json_params.input.add(Input(datatype=stale.datatype, format=stale.format))
    sparcc_process = Process(sparcc_params, profile="local", output_location=".")
    json_process = Process(json_params, profile="local", output_location=".")
    data_dir = pathlib.Path.cwd() / "tests/data"
    sparcc_process.update_location(str(data_dir), "input")
    sparcc_process.build(tmpdir)
    json_process.attach_to(sparcc_process)
    json_process.update_location(str(data_dir), "input")
    # Upstream outputs do not exist yet, so this build must fail.
    with pytest.raises(FileNotFoundError):
        json_process.build(tmpdir)
    sparcc_process.run()
    sparcc_process.wait()
    # After the upstream process has finished, building and running succeed.
    json_process.build(tmpdir)
    json_process.run()
    json_process.wait()
def test_attach_to(self, pipeline_settings, example_pipelines, tmpdir):
    """Attach a downstream internal process to an upstream one.

    The downstream build must raise ``FileNotFoundError`` before the
    upstream process has run, and succeed afterwards.
    """
    previous_params = setup_internal(
        pipeline_settings,
        example_pipelines,
        module="otu_processing",
        pipeline="otu_processing.transform.normalize",
    )
    previous_process = Process(previous_params, profile="local", output_location=".")
    curr_params = setup_internal(
        pipeline_settings,
        example_pipelines,
        module="otu_processing",
        pipeline="otu_processing.filter.group",
    )
    curr_process = Process(curr_params, profile="local", output_location=".")
    data_dir = pathlib.Path.cwd() / "tests/data"
    previous_process.update_location(str(data_dir), "input")
    previous_process.build(tmpdir)
    curr_process.attach_to(previous_process)
    # Upstream outputs are not available yet, so this build must fail.
    with pytest.raises(FileNotFoundError):
        curr_process.build(tmpdir)
    previous_process.run()
    # Bare attribute access — presumably evaluating `output` resolves or
    # waits on the upstream results. NOTE(review): confirm this is an
    # intentional side effect and not a stray expression statement.
    previous_process.output
    curr_process.update_location(str(data_dir), "input")
    curr_process.build(tmpdir)
    curr_process.run()