def _pack_idempotently(document):
    document_loader, workflowobj, uri = fetch_document(
        get_data(document))
    document_loader, _, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri, [], {})
    # generate pack output dict
    packed = json.loads(print_pack(document_loader, processobj, uri, metadata))

    document_loader, workflowobj, uri2 = fetch_document(packed)
    # validate against the URI of the re-fetched, packed document
    document_loader, _, processobj, metadata, uri2 = validate_document(
        document_loader, workflowobj, uri2, [], {})
    double_packed = json.loads(print_pack(document_loader, processobj, uri2, metadata))

    assert packed == double_packed

def _pack_idempotently(self, document):
    document_loader, workflowobj, uri = fetch_document(
        get_data(document))
    document_loader, avsc_names, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri)
    # generate pack output dict
    packed = json.loads(print_pack(document_loader, processobj, uri, metadata))

    document_loader, workflowobj, uri2 = fetch_document(packed)
    # validate against the URI of the re-fetched, packed document
    document_loader, avsc_names, processobj, metadata, uri2 = validate_document(
        document_loader, workflowobj, uri2)
    double_packed = json.loads(print_pack(document_loader, processobj, uri2, metadata))

    self.assertEqual(packed, double_packed)

def test_packed_workflow_execution(wf_path, job_path, namespaced, tmpdir):
    loadingContext = LoadingContext()
    loadingContext.resolver = tool_resolver
    loadingContext, workflowobj, uri = fetch_document(
        get_data(wf_path), loadingContext)
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    processobj = loadingContext.loader.resolve_ref(uri)[0]
    packed = json.loads(print_pack(loadingContext.loader, processobj, uri,
                                   loadingContext.metadata))

    assert not namespaced or "$namespaces" in packed

    wf_packed_handle, wf_packed_path = tempfile.mkstemp()
    with open(wf_packed_path, 'w') as temp_file:
        json.dump(packed, temp_file)

    normal_output = StringIO()
    packed_output = StringIO()

    normal_params = ['--outdir', str(tmpdir), get_data(wf_path), get_data(job_path)]
    packed_params = ['--outdir', str(tmpdir), '--debug',
                     get_data(wf_packed_path), get_data(job_path)]

    assert main(normal_params, stdout=normal_output) == 0
    assert main(packed_params, stdout=packed_output) == 0
    assert json.loads(packed_output.getvalue()) == json.loads(normal_output.getvalue())

    os.close(wf_packed_handle)
    os.remove(wf_packed_path)

def test_packed_workflow_execution(wf_path, job_path, namespaced):
    load_tool.loaders = {}
    document_loader, workflowobj, uri = fetch_document(
        get_data(wf_path), resolver=tool_resolver)
    document_loader, _, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri)
    packed = json.loads(print_pack(document_loader, processobj, uri, metadata))

    assert not namespaced or "$namespaces" in packed

    wf_packed_handle, wf_packed_path = tempfile.mkstemp()
    with open(wf_packed_path, 'w') as temp_file:
        json.dump(packed, temp_file)

    normal_output = StringIO()
    packed_output = StringIO()

    normal_params = [get_data(wf_path), get_data(job_path)]
    packed_params = ['--debug', get_data(wf_packed_path), get_data(job_path)]

    assert main(normal_params, stdout=normal_output) == 0
    assert main(packed_params, stdout=packed_output) == 0
    assert json.loads(packed_output.getvalue()) == json.loads(
        normal_output.getvalue())

    os.close(wf_packed_handle)
    os.remove(wf_packed_path)

def get_file_dependencies_obj(cwl_obj, basedir):
    """Return a dictionary which contains the CWL workflow file dependencies.

    :param cwl_obj: A CWL tool or job which might contain file dependencies.
    :param basedir: Workflow base dir.
    :returns: A dictionary composed of valid CWL file dependencies.
    """
    # Load the document
    loading_context = LoadingContext()
    document_loader, workflow_obj, uri = fetch_document(
        cwl_obj,
        resolver=loading_context.resolver,
        fetcher_constructor=loading_context.fetcher_constructor,
    )

    in_memory_buffer = io.StringIO() if PY3 else io.BytesIO()
    # Get dependencies
    printdeps(workflow_obj, document_loader, in_memory_buffer, "primary", uri,
              basedir=basedir)
    file_dependencies_obj = yaml.load(in_memory_buffer.getvalue(),
                                      Loader=yaml.FullLoader)
    in_memory_buffer.close()
    return file_dependencies_obj

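# Usage sketch (illustrative values, not from the original source): collect
# the file dependencies of a local workflow. printdeps() emits a File object
# whose "secondaryFiles" list, when present, holds the discovered dependencies.
deps = get_file_dependencies_obj("workflow.cwl", ".")
for dep in deps.get("secondaryFiles", []):
    print(dep["location"])
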
def test_pack_input_named_name() -> None:
    loadingContext, workflowobj, uri = fetch_document(
        get_data("tests/wf/trick_revsort.cwl"))
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    loader = loadingContext.loader
    assert loader
    loader.resolve_ref(uri)[0]

    with open(get_data("tests/wf/expect_trick_packed.cwl")) as packed_file:
        expect_packed = yaml.main.round_trip_load(packed_file)

    packed = cwltool.pack.pack(loadingContext, uri)
    adjustFileObjs(
        packed, partial(make_relative, os.path.abspath(get_data("tests/wf"))))
    adjustDirObjs(
        packed, partial(make_relative, os.path.abspath(get_data("tests/wf"))))

    assert "$schemas" in packed
    packed_schemas = packed["$schemas"]
    assert isinstance(packed_schemas, Sized)
    assert len(packed_schemas) == len(expect_packed["$schemas"])
    del packed["$schemas"]
    del expect_packed["$schemas"]

    assert packed == expect_packed

def test_packed_workflow_execution(self):
    load_tool.loaders = {}
    test_wf = "tests/wf/count-lines1-wf.cwl"
    test_wf_job = "tests/wf/wc-job.json"
    document_loader, workflowobj, uri = fetch_document(
        get_data(test_wf), resolver=tool_resolver)
    document_loader, avsc_names, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri)
    packed = json.loads(
        print_pack(document_loader, processobj, uri, metadata))

    temp_packed_handle, temp_packed_path = tempfile.mkstemp()
    with open(temp_packed_path, 'w') as f:
        json.dump(packed, f)

    normal_output = StringIO()
    packed_output = StringIO()
    self.assertEqual(
        main(
            ['--debug', get_data(temp_packed_path), get_data(test_wf_job)],
            stdout=packed_output),
        0)
    self.assertEqual(
        main([get_data(test_wf), get_data(test_wf_job)],
             stdout=normal_output),
        0)
    self.assertEqual(json.loads(packed_output.getvalue()),
                     json.loads(normal_output.getvalue()))

    os.close(temp_packed_handle)
    os.remove(temp_packed_path)

def test_packing(unpacked: str, expected: str) -> None:
    """Compare expected packed output against reality for various workflows and --pack."""
    loadingContext, workflowobj, uri = fetch_document(get_data(unpacked))
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri
    )
    packed = json.loads(print_pack(loadingContext, uri))

    context_dir = os.path.abspath(os.path.dirname(get_data(unpacked)))
    adjustFileObjs(packed, partial(make_relative, context_dir))
    adjustDirObjs(packed, partial(make_relative, context_dir))

    with open(get_data(expected)) as packed_file:
        expect_packed = json.load(packed_file)

    if "$schemas" in expect_packed:
        assert "$schemas" in packed
        packed_schemas = packed["$schemas"]
        assert isinstance(packed_schemas, Sized)
        assert len(packed_schemas) == len(expect_packed["$schemas"])
        del packed["$schemas"]
        del expect_packed["$schemas"]

    assert packed == expect_packed

def test_packed_workflow_execution(wf_path, job_path, namespaced, tmpdir):
    load_tool.loaders = {}
    document_loader, workflowobj, uri = fetch_document(
        get_data(wf_path), resolver=tool_resolver)
    document_loader, _, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri, [], {})
    packed = json.loads(print_pack(document_loader, processobj, uri, metadata))

    assert not namespaced or "$namespaces" in packed

    wf_packed_handle, wf_packed_path = tempfile.mkstemp()
    with open(wf_packed_path, 'w') as temp_file:
        json.dump(packed, temp_file)

    normal_output = StringIO()
    packed_output = StringIO()

    normal_params = ['--outdir', str(tmpdir), get_data(wf_path), get_data(job_path)]
    packed_params = ['--outdir', str(tmpdir), '--debug',
                     get_data(wf_packed_path), get_data(job_path)]

    assert main(normal_params, stdout=normal_output) == 0
    assert main(packed_params, stdout=packed_output) == 0
    assert json.loads(packed_output.getvalue()) == json.loads(normal_output.getvalue())

    os.close(wf_packed_handle)
    os.remove(wf_packed_path)

def test_preserving_namespaces(self):
    test_wf = "tests/wf/formattest.cwl"
    test_wf_job = "tests/wf/formattest-job.json"
    document_loader, workflowobj, uri = fetch_document(get_data(test_wf))
    document_loader, avsc_names, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri)
    packed = json.loads(
        print_pack(document_loader, processobj, uri, metadata))
    assert "$namespaces" in packed

    temp_packed_handle, temp_packed_path = tempfile.mkstemp()
    with open(temp_packed_path, 'w') as f:
        json.dump(packed, f)

    normal_output = StringIO()
    packed_output = StringIO()
    self.assertEqual(
        main(
            ['--debug', get_data(temp_packed_path), get_data(test_wf_job)],
            stdout=packed_output),
        0)
    self.assertEqual(
        main([get_data(test_wf), get_data(test_wf_job)],
             stdout=normal_output),
        0)
    self.assertEqual(json.loads(packed_output.getvalue()),
                     json.loads(normal_output.getvalue()))

    os.close(temp_packed_handle)
    os.remove(temp_packed_path)

def fetch_workflow(remote_storage, local_input_dir, remote_workflow_file,
                   workflow_file='workflow.cwl'):
    logging.warning('Downloading cwl')
    local_workflow_file = local_input_dir + '/' + workflow_file
    remote_storage.download(remote_workflow_file, local_workflow_file)

    logging.warning('Validating cwl')
    document_loader, workflowobj, uri = fetch_document(local_workflow_file)
    validate_document(document_loader, workflowobj, uri)

    return local_workflow_file

def test_default_path(self):
    document_loader, workflowobj, uri = fetch_document(
        get_data("tests/wf/default_path.cwl"))
    document_loader, avsc_names, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri)
    self.assertIsInstance(document_loader, Loader)
    self.assertIn("cwlVersion", processobj)

def _pack_idempotently(document):
    loadingContext, workflowobj, uri = fetch_document(
        get_data(document))
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    processobj = loadingContext.loader.resolve_ref(uri)[0]
    # generate pack output dict
    packed = json.loads(print_pack(loadingContext.loader, processobj, uri,
                                   loadingContext.metadata))

    loadingContext, workflowobj, uri2 = fetch_document(packed)
    loadingContext.do_update = False
    # re-validate against the URI of the re-fetched, packed document
    loadingContext, uri2 = resolve_and_validate_document(
        loadingContext, workflowobj, uri2)
    processobj = loadingContext.loader.resolve_ref(uri2)[0]
    double_packed = json.loads(print_pack(loadingContext.loader, processobj, uri2,
                                          loadingContext.metadata))

    assert packed == double_packed

def test_pack_rewrites(self):
    load_tool.loaders = {}
    document_loader, workflowobj, uri = fetch_document(
        get_data("tests/wf/default-wf5.cwl"))
    document_loader, avsc_names, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri)
    rewrites = {}
    packed = cwltool.pack.pack(document_loader, processobj, uri, metadata,
                               rewrite_out=rewrites)
    self.assertEqual(6, len(rewrites))

def test_default_path(self):
    """Test that no error is raised when the default path is not present."""
    document_loader, workflowobj, uri = fetch_document(
        get_data("tests/wf/default_path.cwl"))
    document_loader, avsc_names, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri)
    self.assertIsInstance(document_loader, Loader)
    self.assertIn("cwlVersion", processobj)

def test_default_path():
    """Error is not raised when default path is not present."""
    loadingContext, workflowobj, uri = fetch_document(
        get_data("tests/wf/default_path.cwl"))
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    processobj = loadingContext.loader.resolve_ref(uri)[0]
    assert "cwlVersion" in processobj

def arvados_job_spec(self, dry_run=False, pull_image=True, **kwargs):
    """Create an Arvados job specification for this workflow.

    The returned dict can be used to create a job (i.e., passed as
    the +body+ argument to jobs().create()), or as a component in
    a pipeline template or pipeline instance.
    """
    self.upload_docker(self.tool)

    workflowfiles = set()
    jobfiles = set()
    workflowfiles.add(self.tool.tool["id"])

    self.name = os.path.basename(self.tool.tool["id"])

    def visitFiles(files, path):
        files.add(path)
        return path

    document_loader, workflowobj, uri = fetch_document(self.tool.tool["id"])

    def loadref(b, u):
        return document_loader.fetch(urlparse.urljoin(b, u))

    sc = scandeps(uri, workflowobj,
                  set(("$import", "run")),
                  set(("$include", "$schemas", "path")),
                  loadref)
    adjustFiles(sc, partial(visitFiles, workflowfiles))
    adjustFiles(self.job_order, partial(visitFiles, jobfiles))

    workflowmapper = ArvPathMapper(self.arvrunner, workflowfiles, "",
                                   "%s",
                                   "%s/%s",
                                   name=self.name,
                                   **kwargs)

    jobmapper = ArvPathMapper(self.arvrunner, jobfiles, "",
                              "%s",
                              "%s/%s",
                              name=os.path.basename(self.job_order.get("id", "#")),
                              **kwargs)

    adjustFiles(self.job_order, lambda p: jobmapper.mapper(p)[1])

    if "id" in self.job_order:
        del self.job_order["id"]

    self.job_order["cwl:tool"] = workflowmapper.mapper(self.tool.tool["id"])[1]

    return {
        "script": "cwl-runner",
        "script_version": "master",
        "repository": "arvados",
        "script_parameters": self.job_order,
        "runtime_constraints": {
            "docker_image": "arvados/jobs"
        }
    }

def _pack_idempotently(document):
    loadingContext, workflowobj, uri = fetch_document(
        get_data(document))
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    processobj = loadingContext.loader.resolve_ref(uri)[0]
    # generate pack output dict
    packed = json.loads(print_pack(loadingContext.loader, processobj, uri,
                                   loadingContext.metadata))

    loadingContext, workflowobj, uri2 = fetch_document(packed)
    loadingContext.do_update = False
    # re-validate and re-resolve against the URI of the packed document
    loadingContext, uri2 = resolve_and_validate_document(
        loadingContext, workflowobj, uri2)
    processobj = loadingContext.loader.resolve_ref(uri2)[0]
    double_packed = json.loads(print_pack(loadingContext.loader, processobj, uri2,
                                          loadingContext.metadata))

    assert packed == double_packed

def load_cwl(fname):
    """Load and validate CWL file using cwltool"""
    # Fetching, preprocessing and validating cwl
    (document_loader, workflowobj, uri) = fetch_document(fname)
    (document_loader, _, processobj, metadata, uri) = \
        validate_document(document_loader, workflowobj, uri)

    return document_loader, processobj, metadata, uri

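# Usage sketch (the path is illustrative): load_cwl() returns the document
# loader, the resolved process object, its metadata, and the document URI.
loader, processobj, metadata, uri = load_cwl("workflow.cwl")
print(processobj["class"], metadata.get("cwlVersion"))
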
def test_default_path():
    """Test that no error is raised when the default path is not present."""
    document_loader, workflowobj, uri = fetch_document(
        get_data("tests/wf/default_path.cwl"))
    document_loader, _, processobj, _, uri = validate_document(
        document_loader, workflowobj, uri, [], {})
    assert isinstance(document_loader, Loader)
    assert "cwlVersion" in processobj

def test_default_path():
    """Test that no error is raised when the default path is not present."""
    loadingContext, workflowobj, uri = fetch_document(
        get_data("tests/wf/default_path.cwl"))
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    processobj = loadingContext.loader.resolve_ref(uri)[0]
    assert "cwlVersion" in processobj

def arvados_job_spec(self, *args, **kwargs):
    self.upload_docker(self.tool)

    workflowfiles = []
    jobfiles = []
    workflowfiles.append({"class": "File", "location": self.tool.tool["id"]})

    self.name = os.path.basename(self.tool.tool["id"])

    def visitFiles(files, path):
        files.append(path)

    document_loader, workflowobj, uri = fetch_document(self.tool.tool["id"])
    loaded = set()

    def loadref(b, u):
        joined = urlparse.urljoin(b, u)
        if joined not in loaded:
            loaded.add(joined)
            return document_loader.fetch(urlparse.urljoin(b, u))
        else:
            return {}

    sc = scandeps(uri, workflowobj,
                  set(("$import", "run")),
                  set(("$include", "$schemas", "path", "location")),
                  loadref)
    adjustFileObjs(sc, partial(visitFiles, workflowfiles))
    adjustFileObjs(self.job_order, partial(visitFiles, jobfiles))
    adjustDirObjs(sc, partial(visitFiles, workflowfiles))
    adjustDirObjs(self.job_order, partial(visitFiles, jobfiles))

    normalizeFilesDirs(jobfiles)
    normalizeFilesDirs(workflowfiles)

    keepprefix = kwargs.get("keepprefix", "")
    workflowmapper = ArvPathMapper(self.arvrunner, workflowfiles, "",
                                   keepprefix + "%s",
                                   keepprefix + "%s/%s",
                                   name=self.name,
                                   **kwargs)

    jobmapper = ArvPathMapper(self.arvrunner, jobfiles, "",
                              keepprefix + "%s",
                              keepprefix + "%s/%s",
                              name=os.path.basename(self.job_order.get("id", "#")),
                              **kwargs)

    def setloc(p):
        p["location"] = jobmapper.mapper(p["location"])[1]

    adjustFileObjs(self.job_order, setloc)
    adjustDirObjs(self.job_order, setloc)

    if "id" in self.job_order:
        del self.job_order["id"]

    return workflowmapper

def test_pack_single_tool():
    loadingContext, workflowobj, uri = fetch_document(
        get_data("tests/wf/formattest.cwl"))
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    processobj = loadingContext.loader.resolve_ref(uri)[0]
    packed = cwltool.pack.pack(loadingContext.loader, processobj, uri,
                               loadingContext.metadata)
    assert "$schemas" in packed

def test_pack(self):
    self.maxDiff = None

    document_loader, workflowobj, uri = fetch_document("tests/wf/revsort.cwl")
    document_loader, avsc_names, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri)
    packed = cwltool.pack.pack(document_loader, processobj, uri, metadata)
    with open("tests/wf/expect_packed.cwl") as f:
        expect_packed = json.load(f)
    self.assertEqual(expect_packed, packed)

def test_pack_single_tool():
    load_tool.loaders = {}
    document_loader, workflowobj, uri = fetch_document(
        get_data("tests/wf/formattest.cwl"))
    document_loader, _, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri, [], {})
    packed = cwltool.pack.pack(document_loader, processobj, uri, metadata)
    assert "$schemas" in packed

def pack_cwl(cwl_path):
    # cwltool needs to be imported on demand, since repeatedly calling
    # functions on a document with the same name caused errors.
    from cwltool.load_tool import fetch_document
    from cwltool.main import print_pack

    cwltool_version = get_distribution("cwltool").version
    if StrictVersion(cwltool_version) > StrictVersion("1.0.20181201184214"):
        from cwltool.load_tool import resolve_and_validate_document
        loadingContext, workflowobj, uri = fetch_document(cwl_path)
        loadingContext.do_update = False
        loadingContext, uri = resolve_and_validate_document(loadingContext,
                                                            workflowobj, uri)
        processobj = loadingContext.loader.resolve_ref(uri)[0]
        packed_cwl = json.loads(print_pack(loadingContext.loader, processobj,
                                           uri, loadingContext.metadata))
    else:
        from cwltool.load_tool import validate_document
        document_loader, workflowobj, uri = fetch_document(cwl_path)
        document_loader, _, processobj, metadata, uri = validate_document(
            document_loader, workflowobj, uri, [], {})
        packed_cwl = json.loads(print_pack(document_loader, processobj, uri,
                                           metadata))
    return packed_cwl

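# Usage sketch: pack a workflow (illustrative path) and persist it as JSON;
# assumes json is imported at module level, as pack_cwl() above requires.
packed = pack_cwl("workflow.cwl")
with open("workflow_packed.cwl", "w") as handle:
    json.dump(packed, handle, indent=2)
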
def test_pack_rewrites():
    load_tool.loaders = {}
    rewrites = {}
    document_loader, workflowobj, uri = fetch_document(
        get_data("tests/wf/default-wf5.cwl"))
    document_loader, _, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri, [], {})
    cwltool.pack.pack(document_loader, processobj, uri, metadata,
                      rewrite_out=rewrites)
    assert len(rewrites) == 6

def cwl_graph_generate(cwl_path: str):
    if cwl_path[:5] != "file:":
        cwl_path = f"file://{path.abspath(cwl_path)}"
    document_loader, workflowobj, uri = fetch_document(cwl_path)
    document_loader, avsc_names, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri, strict=False, preprocess_only=True)
    loadingContext = LoadingContext()
    tool = make_tool(document_loader, avsc_names, metadata, uri, loadingContext)
    cwl_viewer_dot(tool)

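# Usage sketch: a relative path is turned into a file:// URI by the guard
# above; the workflow name here is illustrative.
cwl_graph_generate("workflows/revsort.cwl")
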
def test_pack_rewrites():
    rewrites = {}
    loadingContext, workflowobj, uri = fetch_document(
        get_data("tests/wf/default-wf5.cwl"))
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    processobj = loadingContext.loader.resolve_ref(uri)[0]
    cwltool.pack.pack(loadingContext.loader, processobj, uri,
                      loadingContext.metadata, rewrite_out=rewrites)
    assert len(rewrites) == 6

def validate_cwl(cwl: str, cmd: Command = None, explore: bool = True):
    parsed = yaml.load(cwl)
    with tempfile.TemporaryDirectory() as tmpdir:
        tmpdir = Path(tmpdir)
        tmpfile = tmpdir / parsed["id"]
        tmpfile.write_text(cwl)
        loading_context, workflowobj, uri = fetch_document(str(tmpfile))
        resolve_and_validate_document(loading_context, workflowobj, uri)
        if cmd:
            assert len(
                workflowobj["inputs"]) == len(cmd.positional) + len(cmd.named)

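# Usage sketch: validate an in-memory tool definition. Note that the
# document's "id" is used above as the temporary file name, so it must look
# like a file name; this document is illustrative, not from the original source.
doc = """\
id: echo.cwl
cwlVersion: v1.0
class: CommandLineTool
baseCommand: echo
inputs: []
outputs: []
"""
validate_cwl(doc)
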
def test_pack_missing_cwlVersion(cwl_path):
    """Test that the generated pack output is not missing `cwlVersion`
    in the case of a single-tool or single-step workflow."""
    # Testing single tool workflow
    document_loader, workflowobj, uri = fetch_document(get_data(cwl_path))
    document_loader, _, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri, [], {})
    # generate pack output dict
    packed = json.loads(print_pack(document_loader, processobj, uri, metadata))

    assert packed["cwlVersion"] == 'v1.0'

def test_pack_missing_cwlVersion(cwl_path):
    """Ensure the generated pack output is not missing the `cwlVersion`
    in case of single tool workflow and single step workflow."""
    # Testing single tool workflow
    loadingContext, workflowobj, uri = fetch_document(get_data(cwl_path))
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    processobj = loadingContext.loader.resolve_ref(uri)[0]
    # generate pack output dict
    packed = json.loads(print_pack(loadingContext.loader, processobj, uri,
                                   loadingContext.metadata))

    assert packed["cwlVersion"] == 'v1.0'

def test_pack(self):
    self.maxDiff = None

    document_loader, workflowobj, uri = fetch_document("tests/wf/revsort.cwl")
    document_loader, avsc_names, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri)
    packed = cwltool.pack.pack(document_loader, processobj, uri, metadata)
    with open("tests/wf/expect_packed.cwl") as f:
        expect_packed = json.load(f)
    adjustFileObjs(packed, partial(makeRelative, os.path.abspath("tests/wf")))
    adjustDirObjs(packed, partial(makeRelative, os.path.abspath("tests/wf")))
    self.assertEqual(expect_packed, packed)

def test_pack_missing_cwlVersion(self):
    """Test that the generated pack output is not missing `cwlVersion`
    for a single-tool workflow and a single-step workflow."""
    # Testing single tool workflow
    document_loader, workflowobj, uri = fetch_document(
        get_data("tests/wf/hello_single_tool.cwl"))
    document_loader, _, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri)
    # generate pack output dict
    packed = json.loads(print_pack(document_loader, processobj, uri, metadata))
    self.assertEqual('v1.0', packed["cwlVersion"])

    # Testing single step workflow
    document_loader, workflowobj, uri = fetch_document(
        get_data("tests/wf/hello-workflow.cwl"))
    document_loader, _, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri)
    # generate pack output dict
    packed = json.loads(print_pack(document_loader, processobj, uri, metadata))
    self.assertEqual('v1.0', packed["cwlVersion"])

def test_pack_fragment() -> None:
    with open(get_data("tests/wf/scatter2_subwf.cwl")) as packed_file:
        expect_packed = yaml.main.safe_load(packed_file)

    loadingContext, workflowobj, uri = fetch_document(get_data("tests/wf/scatter2.cwl"))
    packed = cwltool.pack.pack(loadingContext, uri + "#scatterstep/mysub")
    adjustFileObjs(
        packed, partial(make_relative, os.path.abspath(get_data("tests/wf")))
    )
    adjustDirObjs(packed, partial(make_relative, os.path.abspath(get_data("tests/wf"))))

    assert json.dumps(packed, sort_keys=True, indent=2) == json.dumps(
        expect_packed, sort_keys=True, indent=2
    )

def test_pack_missing_cwlVersion(self):
    """Test that the generated pack output is not missing `cwlVersion`
    for a single-tool workflow and a single-step workflow."""
    # Testing single tool workflow
    document_loader, workflowobj, uri = fetch_document(
        get_data("tests/wf/hello_single_tool.cwl"))
    document_loader, avsc_names, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri)
    # generate pack output dict
    packed = json.loads(print_pack(document_loader, processobj, uri, metadata))
    self.assertEqual('v1.0', packed["cwlVersion"])

    # Testing single step workflow
    document_loader, workflowobj, uri = fetch_document(
        get_data("tests/wf/hello-workflow.cwl"))
    document_loader, avsc_names, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri)
    # generate pack output dict
    packed = json.loads(print_pack(document_loader, processobj, uri, metadata))
    self.assertEqual('v1.0', packed["cwlVersion"])

def validate_cwl_doc(cwl_doc):
    """
    Validate a CWL document.

    This is adapted from cwltool.main.main and avoids the unnecessary
    work that calling cwltool.main.main directly would do.

    :param cwl_doc: Path, URI, or string pointing to the CWL document.
    :return:
    """
    if isinstance(cwl_doc, (Path, str)):  # Can also be CWLObjectType
        cwl_doc = str(cwl_doc)
        if not (urlparse(cwl_doc)[0] and
                urlparse(cwl_doc)[0] in ['http', 'https', 'file']):
            cwl_doc = file_uri(os.path.abspath(cwl_doc))
    loading_context, workflow_object, uri = fetch_document(cwl_doc)
    resolve_and_validate_document(loading_context, workflow_object, uri)
    return

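# Usage sketch: schema-salad raises ValidationException for an invalid
# document; the path is illustrative.
from schema_salad.validate import ValidationException

try:
    validate_cwl_doc("workflow.cwl")
except ValidationException as err:
    print("invalid CWL: {}".format(err))
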
def _pack_idempotently(document: str) -> None:
    loadingContext, workflowobj, uri = fetch_document(get_data(document))
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri
    )
    loader = loadingContext.loader
    assert loader
    loader.resolve_ref(uri)[0]
    # generate pack output dict
    packed_text = print_pack(loadingContext, uri)
    packed = json.loads(packed_text)

    tmp = NamedTemporaryFile(mode="w", delete=False)
    try:
        tmp.write(packed_text)
        tmp.flush()
        tmp.close()

        loadingContext, workflowobj, uri2 = fetch_document(tmp.name)
        loadingContext.do_update = False
        loadingContext, uri2 = resolve_and_validate_document(
            loadingContext, workflowobj, uri2
        )
        loader2 = loadingContext.loader
        assert loader2
        loader2.resolve_ref(uri2)[0]
        # generate pack output dict
        packed_text = print_pack(loadingContext, uri2)
        double_packed = json.loads(packed_text)
    finally:
        os.remove(tmp.name)

    assert uri != uri2
    assert packed == double_packed

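# The helper above is typically driven from a pytest parametrization along
# these lines; the workflow paths are illustrative test-data names.
import pytest

@pytest.mark.parametrize("document", [
    "tests/wf/hello_single_tool.cwl",
    "tests/wf/count-lines1-wf.cwl",
])
def test_pack_idempotence(document: str) -> None:
    _pack_idempotently(document)
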
def test_pack_missing_cwlVersion(cwl_path):
    """Test that the generated pack output is not missing `cwlVersion`
    in the case of a single-tool or single-step workflow."""
    # Testing single tool workflow
    loadingContext, workflowobj, uri = fetch_document(get_data(cwl_path))
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    processobj = loadingContext.loader.resolve_ref(uri)[0]
    # generate pack output dict
    packed = json.loads(print_pack(loadingContext.loader, processobj, uri,
                                   loadingContext.metadata))

    assert packed["cwlVersion"] == 'v1.0'

def __init__(self, fname, abspath=True, start=os.curdir):
    if abspath:
        self.run = os.path.abspath(fname)
    else:
        self.run = os.path.relpath(fname, start)

    bn = os.path.basename(fname)
    self.name = os.path.splitext(bn)[0]
    self.python_name = python_name(self.name)

    self.step_inputs = {}
    self.input_names = []
    self.input_types = {}
    self.optional_input_names = []
    self.optional_input_types = {}
    self.output_names = []
    self.step_outputs = {}
    self.is_workflow = False
    self.is_scattered = False
    self.scattered_inputs = []

    # Fetching, preprocessing and validating cwl
    (document_loader, workflowobj, uri) = fetch_document(fname)
    (document_loader, avsc_names, processobj, metadata, uri) = \
        validate_document(document_loader, workflowobj, uri)
    s = processobj

    valid_classes = ('CommandLineTool', 'Workflow', 'ExpressionTool')
    if 'class' in s and s['class'] in valid_classes:
        self.is_workflow = s['class'] == 'Workflow'
        for inp in s['inputs']:
            # Due to cwltool's preprocessing the id has become an absolute
            # IRI; for ease of use we keep only the fragment
            short_id = iri2fragment(inp['id'])
            if self._input_optional(inp):
                self.optional_input_names.append(short_id)
                self.optional_input_types[short_id] = inp['type']
            else:
                self.input_names.append(short_id)
                self.input_types[short_id] = inp['type']

        for o in s['outputs']:
            short_id = iri2fragment(o['id'])
            self.output_names.append(short_id)
            self.step_outputs[short_id] = o['type']
    else:
        msg = '"{}" is unsupported'
        raise NotImplementedError(msg.format(self.name))

def test_pack(self):
    load_tool.loaders = {}
    document_loader, workflowobj, uri = fetch_document(
        get_data("tests/wf/revsort.cwl"))
    document_loader, avsc_names, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri)
    packed = cwltool.pack.pack(document_loader, processobj, uri, metadata)
    with open(get_data("tests/wf/expect_packed.cwl")) as f:
        expect_packed = json.load(f)
    adjustFileObjs(packed, partial(make_relative,
                                   os.path.abspath(get_data("tests/wf"))))
    adjustDirObjs(packed, partial(make_relative,
                                  os.path.abspath(get_data("tests/wf"))))
    self.assertIn("$schemas", packed)
    del packed["$schemas"]
    del expect_packed["$schemas"]
    self.assertEqual(expect_packed, packed)

def test_pack():
    loadingContext, workflowobj, uri = fetch_document(get_data("tests/wf/revsort.cwl"))
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    processobj = loadingContext.loader.resolve_ref(uri)[0]

    with open(get_data("tests/wf/expect_packed.cwl")) as packed_file:
        expect_packed = json.load(packed_file)

    packed = cwltool.pack.pack(loadingContext.loader, processobj, uri,
                               loadingContext.metadata)
    adjustFileObjs(packed, partial(make_relative,
                                   os.path.abspath(get_data("tests/wf"))))
    adjustDirObjs(packed, partial(make_relative,
                                  os.path.abspath(get_data("tests/wf"))))

    assert "$schemas" in packed
    assert len(packed["$schemas"]) == len(expect_packed["$schemas"])
    del packed["$schemas"]
    del expect_packed["$schemas"]

    assert packed == expect_packed

def test_pack():
    load_tool.loaders = {}
    document_loader, workflowobj, uri = fetch_document(
        get_data("tests/wf/revsort.cwl"))
    document_loader, _, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri, [], {})

    with open(get_data("tests/wf/expect_packed.cwl")) as packed_file:
        expect_packed = json.load(packed_file)

    packed = cwltool.pack.pack(document_loader, processobj, uri, metadata)
    adjustFileObjs(packed, partial(make_relative,
                                   os.path.abspath(get_data("tests/wf"))))
    adjustDirObjs(packed, partial(make_relative,
                                  os.path.abspath(get_data("tests/wf"))))

    assert "$schemas" in packed
    assert len(packed["$schemas"]) == len(expect_packed["$schemas"])
    del packed["$schemas"]
    del expect_packed["$schemas"]

    assert packed == expect_packed

def test_packed_workflow_execution(self):
    load_tool.loaders = {}
    test_wf = "tests/wf/count-lines1-wf.cwl"
    test_wf_job = "tests/wf/wc-job.json"
    document_loader, workflowobj, uri = fetch_document(
        get_data(test_wf), resolver=tool_resolver)
    document_loader, avsc_names, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri)
    packed = json.loads(print_pack(document_loader, processobj, uri, metadata))

    temp_packed_path = tempfile.mkstemp()[1]
    with open(temp_packed_path, 'w') as f:
        json.dump(packed, f)

    normal_output = StringIO()
    packed_output = StringIO()
    self.assertEqual(main(['--debug', get_data(temp_packed_path),
                           get_data(test_wf_job)],
                          stdout=packed_output), 0)
    self.assertEqual(main([get_data(test_wf), get_data(test_wf_job)],
                          stdout=normal_output), 0)
    self.assertEqual(json.loads(packed_output.getvalue()),
                     json.loads(normal_output.getvalue()))

    os.remove(temp_packed_path)

def test_preserving_namespaces(self):
    test_wf = "tests/wf/formattest.cwl"
    test_wf_job = "tests/wf/formattest-job.json"
    document_loader, workflowobj, uri = fetch_document(
        get_data(test_wf))
    document_loader, avsc_names, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri)
    packed = json.loads(print_pack(document_loader, processobj, uri, metadata))
    assert "$namespaces" in packed

    temp_packed_path = tempfile.mkstemp()[1]
    with open(temp_packed_path, 'w') as f:
        json.dump(packed, f)

    normal_output = StringIO()
    packed_output = StringIO()
    self.assertEqual(main(['--debug', get_data(temp_packed_path),
                           get_data(test_wf_job)],
                          stdout=packed_output), 0)
    self.assertEqual(main([get_data(test_wf), get_data(test_wf_job)],
                          stdout=normal_output), 0)
    self.assertEqual(json.loads(packed_output.getvalue()),
                     json.loads(normal_output.getvalue()))

    os.remove(temp_packed_path)