def _pack_idempotently(document: str, tmp_path: Path) -> None:
    """Pack a document, pack the packed result again, and assert both are identical.

    :param document: path of the CWL document, relative to the test data root
    :param tmp_path: per-test temporary directory (pytest fixture)
    """
    loadingContext, workflowobj, uri = fetch_document(get_data(document))
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    loader = loadingContext.loader
    assert loader
    loader.resolve_ref(uri)[0]

    # generate pack output dict
    packed_text = print_pack(loadingContext, uri)
    packed = json.loads(packed_text)

    tmp_name = tmp_path / "packed.cwl"
    # Use a context manager so the handle is flushed and closed even if the
    # write raises (the original opened/closed the file by hand).
    with tmp_name.open(mode="w") as tmp:
        tmp.write(packed_text)

    loadingContext, workflowobj, uri2 = fetch_document(str(tmp_name))
    loadingContext.do_update = False
    loadingContext, uri2 = resolve_and_validate_document(
        loadingContext, workflowobj, uri2)
    loader2 = loadingContext.loader
    assert loader2
    loader2.resolve_ref(uri2)[0]

    # generate pack output dict for the second round
    packed_text = print_pack(loadingContext, uri2)
    double_packed = json.loads(packed_text)

    assert uri != uri2
    assert packed == double_packed
def _pack_idempotently(document):
    """Pack a document, pack the packed result again, and assert both are identical.

    :param document: path of the CWL document, relative to the test data root
    """
    loadingContext, workflowobj, uri = fetch_document(get_data(document))
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    # resolve_ref is kept for its loader side effects; its return value is unused
    loadingContext.loader.resolve_ref(uri)[0]

    # generate pack output dict
    packed_text = print_pack(loadingContext, uri)
    packed = json.loads(packed_text)

    # delete=False so the file can be reopened by name (required on Windows);
    # removal is guaranteed by the finally block below.
    tmp = NamedTemporaryFile(mode="w", delete=False)
    try:
        tmp.write(packed_text)
        tmp.flush()
        tmp.close()

        loadingContext, workflowobj, uri2 = fetch_document(tmp.name)
        loadingContext.do_update = False
        loadingContext, uri2 = resolve_and_validate_document(
            loadingContext, workflowobj, uri2)
        loadingContext.loader.resolve_ref(uri2)[0]

        # generate pack output dict for the second round
        packed_text = print_pack(loadingContext, uri2)
        double_packed = json.loads(packed_text)
    finally:
        os.remove(tmp.name)

    assert uri != uri2
    assert packed == double_packed
def validate_cwl(cwl: str, cmd: Command = None, explore: bool = True):
    """Round-trip *cwl* through a temporary file and validate it with cwltool.

    When *cmd* is given, also check that the document declares exactly as many
    inputs as *cmd* has positional plus named arguments.
    """
    doc = yaml.load(cwl)
    with tempfile.TemporaryDirectory() as scratch:
        cwl_file = Path(scratch) / doc["id"]
        cwl_file.write_text(cwl)
        loading_context, workflowobj, uri = fetch_document(str(cwl_file))
        resolve_and_validate_document(loading_context, workflowobj, uri)
        if cmd:
            expected_inputs = len(cmd.positional) + len(cmd.named)
            assert len(workflowobj["inputs"]) == expected_inputs
def validate_cwl_doc(cwl_doc):
    """Fetch and validate a CWL document using cwltool.

    Adapted from ``cwltool.main.main``: performs only the fetch/validate
    steps without invoking the full command-line entry point.

    :param cwl_doc: path, URI, or CWL object describing the document
    :return: None; cwltool raises on validation failure
    """
    if isinstance(cwl_doc, (Path, str)):  # Can also be CWLObjectType
        cwl_doc = str(cwl_doc)
        # Parse once instead of twice (the original called urlparse twice).
        scheme = urlparse(cwl_doc)[0]
        if not (scheme and scheme in ['http', 'https', 'file']):
            cwl_doc = file_uri(os.path.abspath(cwl_doc))
    loading_context, workflow_object, uri = fetch_document(cwl_doc)
    resolve_and_validate_document(loading_context, workflow_object, uri)
    return
def test_packing(unpacked: str, expected: str) -> None:
    """Pack *unpacked* and compare the result with the *expected* packed file."""
    ctx, workflow_obj, doc_uri = fetch_document(get_data(unpacked))
    ctx.do_update = False
    ctx, doc_uri = resolve_and_validate_document(ctx, workflow_obj, doc_uri)
    packed = json.loads(print_pack(ctx, doc_uri))

    # Make all File/Directory paths relative to the source document directory.
    base_dir = os.path.abspath(os.path.dirname(get_data(unpacked)))
    relativize = partial(make_relative, base_dir)
    adjustFileObjs(packed, relativize)
    adjustDirObjs(packed, relativize)

    with open(get_data(expected)) as handle:
        want = json.load(handle)

    if "$schemas" in want:
        # $schemas ordering is not stable; compare counts, then drop them.
        assert "$schemas" in packed
        schemas = packed["$schemas"]
        assert isinstance(schemas, Sized)
        assert len(schemas) == len(want["$schemas"])
        del packed["$schemas"]
        del want["$schemas"]
    assert packed == want
def test_packed_workflow_execution(wf_path, job_path, namespaced, tmpdir):
    """Run a workflow normally and from its packed form; the outputs must match."""
    loadingContext = LoadingContext()
    loadingContext.resolver = tool_resolver
    loadingContext, workflowobj, uri = fetch_document(
        get_data(wf_path), loadingContext)
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    processobj = loadingContext.loader.resolve_ref(uri)[0]
    packed = json.loads(print_pack(loadingContext.loader, processobj, uri,
                                   loadingContext.metadata))
    assert not namespaced or "$namespaces" in packed

    wf_packed_handle, wf_packed_path = tempfile.mkstemp()
    with open(wf_packed_path, 'w') as temp_file:
        json.dump(packed, temp_file)

    normal_output = StringIO()
    packed_output = StringIO()
    normal_params = ['--outdir', str(tmpdir), get_data(wf_path),
                     get_data(job_path)]
    # FIX: wf_packed_path is already an absolute temp-file path; it must not
    # be passed through get_data(), which resolves names against the test
    # data directory.
    packed_params = ['--outdir', str(tmpdir), '--debug', wf_packed_path,
                     get_data(job_path)]
    assert main(normal_params, stdout=normal_output) == 0
    assert main(packed_params, stdout=packed_output) == 0
    assert json.loads(packed_output.getvalue()) == json.loads(normal_output.getvalue())

    os.close(wf_packed_handle)
    os.remove(wf_packed_path)
def test_packed_workflow_execution(wf_path, job_path, namespaced, tmpdir):
    """Execute a workflow before and after packing; results must agree."""
    ctx = LoadingContext()
    ctx.resolver = tool_resolver
    ctx, workflow_obj, doc_uri = fetch_document(get_data(wf_path), ctx)
    ctx.do_update = False
    ctx, doc_uri = resolve_and_validate_document(ctx, workflow_obj, doc_uri)
    process_obj = ctx.loader.resolve_ref(doc_uri)[0]
    packed = json.loads(
        print_pack(ctx.loader, process_obj, doc_uri, ctx.metadata))
    assert not namespaced or "$namespaces" in packed

    packed_fd, packed_path = tempfile.mkstemp()
    with open(packed_path, 'w') as handle:
        json.dump(packed, handle)

    out_plain = StringIO()
    out_packed = StringIO()
    args_plain = ['--outdir', str(tmpdir), get_data(wf_path),
                  get_data(job_path)]
    args_packed = ['--outdir', str(tmpdir), '--debug', packed_path,
                   get_data(job_path)]
    assert main(args_plain, stdout=out_plain) == 0
    assert main(args_packed, stdout=out_packed) == 0
    assert json.loads(out_packed.getvalue()) == json.loads(out_plain.getvalue())

    os.close(packed_fd)
    os.remove(packed_path)
def test_pack_input_named_name():
    """Pack tests/wf/trick_revsort.cwl and compare with the expected packed file."""
    ctx, workflow_obj, doc_uri = fetch_document(
        get_data("tests/wf/trick_revsort.cwl")
    )
    ctx.do_update = False
    ctx, doc_uri = resolve_and_validate_document(ctx, workflow_obj, doc_uri)
    process_obj = ctx.loader.resolve_ref(doc_uri)[0]

    with open(get_data("tests/wf/expect_trick_packed.cwl")) as handle:
        want = yaml.round_trip_load(handle)

    packed = cwltool.pack.pack(ctx.loader, process_obj, doc_uri, ctx.metadata)
    # Make all File/Directory paths relative to the test workflow directory.
    wf_dir = os.path.abspath(get_data("tests/wf"))
    relativize = partial(make_relative, wf_dir)
    adjustFileObjs(packed, relativize)
    adjustDirObjs(packed, relativize)

    # $schemas ordering is not stable; compare counts, then drop them.
    assert "$schemas" in packed
    assert len(packed["$schemas"]) == len(want["$schemas"])
    del packed["$schemas"]
    del want["$schemas"]
    assert packed == want
def pack_cwl(cwl_path):
    """Pack the CWL document at *cwl_path* into a single dict via cwltool.

    Supports both the pre- and post-1.0.20181201184214 cwltool APIs.
    """
    # cwltool needs to be imported on demand since repeatedly calling
    # functions on a document named with same name caused errors.
    from cwltool.load_tool import fetch_document
    from cwltool.main import print_pack

    cwltool_version = get_distribution("cwltool").version
    new_api = StrictVersion(cwltool_version) > StrictVersion("1.0.20181201184214")
    if new_api:
        from cwltool.load_tool import resolve_and_validate_document
        loading_context, workflow_obj, uri = fetch_document(cwl_path)
        loading_context.do_update = False
        loading_context, uri = resolve_and_validate_document(
            loading_context, workflow_obj, uri)
        process_obj = loading_context.loader.resolve_ref(uri)[0]
        return json.loads(print_pack(
            loading_context.loader, process_obj, uri, loading_context.metadata))

    # Legacy API: validate_document returns the loader/process/metadata tuple.
    from cwltool.load_tool import validate_document
    document_loader, workflow_obj, uri = fetch_document(cwl_path)
    document_loader, _, process_obj, metadata, uri = validate_document(
        document_loader, workflow_obj, uri, [], {})
    return json.loads(print_pack(document_loader, process_obj, uri, metadata))
def _pack_idempotently(document):
    """Pack a document, pack the packed result again, and assert both are identical.

    :param document: path of the CWL document, relative to the test data root
    """
    loadingContext, workflowobj, uri = fetch_document(
        get_data(document))
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    processobj = loadingContext.loader.resolve_ref(uri)[0]
    # generate pack output dict
    packed = json.loads(print_pack(loadingContext.loader, processobj, uri,
                                   loadingContext.metadata))

    loadingContext, workflowobj, uri2 = fetch_document(packed)
    loadingContext.do_update = False
    # FIX: the second validate/resolve/pack pass must use uri2 (the URI of
    # the packed document), not the original uri — otherwise the first
    # document was simply validated and packed a second time.
    loadingContext, uri2 = resolve_and_validate_document(
        loadingContext, workflowobj, uri2)
    processobj = loadingContext.loader.resolve_ref(uri2)[0]
    double_packed = json.loads(print_pack(loadingContext.loader, processobj,
                                          uri2, loadingContext.metadata))
    assert packed == double_packed
def _pack_idempotently(document):
    """Pack a document, pack the packed result again, and assert both are identical.

    :param document: path of the CWL document, relative to the test data root
    """
    loadingContext, workflowobj, uri = fetch_document(
        get_data(document))
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    processobj = loadingContext.loader.resolve_ref(uri)[0]
    # generate pack output dict
    packed = json.loads(print_pack(loadingContext.loader, processobj, uri,
                                   loadingContext.metadata))

    loadingContext, workflowobj, uri2 = fetch_document(packed)
    loadingContext.do_update = False
    # FIX: validate against uri2 (the URI of the packed document), not the
    # original uri, so the second pass actually re-validates the packed copy.
    loadingContext, uri2 = resolve_and_validate_document(
        loadingContext, workflowobj, uri2)
    processobj = loadingContext.loader.resolve_ref(uri2)[0]
    double_packed = json.loads(print_pack(loadingContext.loader, processobj,
                                          uri2, loadingContext.metadata))
    assert packed == double_packed
def test_default_path():
    """Error is not raised when default path is not present."""
    ctx, workflow_obj, doc_uri = fetch_document(
        get_data("tests/wf/default_path.cwl"))
    ctx, doc_uri = resolve_and_validate_document(ctx, workflow_obj, doc_uri)
    process_obj = ctx.loader.resolve_ref(doc_uri)[0]
    assert "cwlVersion" in process_obj
def test_default_path():
    """Validating a document with a missing default path must not raise."""
    loading_ctx, wf_obj, wf_uri = fetch_document(
        get_data("tests/wf/default_path.cwl"))
    loading_ctx, wf_uri = resolve_and_validate_document(
        loading_ctx, wf_obj, wf_uri)
    resolved = loading_ctx.loader.resolve_ref(wf_uri)[0]
    assert "cwlVersion" in resolved
def test_pack_single_tool():
    """Packing a single tool document keeps its $schemas entry."""
    ctx, workflow_obj, doc_uri = fetch_document(
        get_data("tests/wf/formattest.cwl"))
    ctx.do_update = False
    ctx, doc_uri = resolve_and_validate_document(ctx, workflow_obj, doc_uri)
    process_obj = ctx.loader.resolve_ref(doc_uri)[0]
    packed = cwltool.pack.pack(ctx.loader, process_obj, doc_uri, ctx.metadata)
    assert "$schemas" in packed
def test_pack_single_tool():
    """$schemas must survive packing of tests/wf/formattest.cwl."""
    loading_ctx, wf_obj, wf_uri = fetch_document(
        get_data("tests/wf/formattest.cwl"))
    loading_ctx.do_update = False
    loading_ctx, wf_uri = resolve_and_validate_document(
        loading_ctx, wf_obj, wf_uri)
    resolved = loading_ctx.loader.resolve_ref(wf_uri)[0]
    result = cwltool.pack.pack(
        loading_ctx.loader, resolved, wf_uri, loading_ctx.metadata)
    assert "$schemas" in result
def load_cwl(fname):
    """Load and validate CWL file using cwltool.

    Supports both the legacy and the modern cwltool loading APIs, selected
    by the module-level ``legacy_cwltool`` flag.

    :param fname: path or URI of the CWL document to load
    :return: tuple of (document_loader, processobj, metadata, uri)
    """
    logger.debug('Loading CWL file "{}"'.format(fname))
    # Fetching, preprocessing and validating cwl
    # Older versions of cwltool
    if legacy_cwltool:
        try:
            # Oldest API: validate_document takes only loader/workflowobj/uri.
            (document_loader, workflowobj, uri) = fetch_document(fname)
            (document_loader, _, processobj, metadata, uri) = \
                validate_document(document_loader, workflowobj, uri)
        except TypeError:
            # Intermediate API: validate_document grew extra required
            # arguments, so rebuild the call with a LoadingContext.
            from cwltool.context import LoadingContext, getdefault
            from cwltool import workflow
            from cwltool.resolver import tool_resolver
            from cwltool.load_tool import resolve_tool_uri
            loadingContext = LoadingContext()
            loadingContext.construct_tool_object = getdefault(
                loadingContext.construct_tool_object,
                workflow.default_make_tool)
            loadingContext.resolver = getdefault(loadingContext.resolver,
                                                 tool_resolver)
            uri, tool_file_uri = resolve_tool_uri(
                fname, resolver=loadingContext.resolver,
                fetcher_constructor=loadingContext.fetcher_constructor)
            document_loader, workflowobj, uri = fetch_document(
                uri, resolver=loadingContext.resolver,
                fetcher_constructor=loadingContext.fetcher_constructor)
            document_loader, avsc_names, processobj, metadata, uri = \
                validate_document(
                    document_loader, workflowobj, uri,
                    loadingContext.overrides_list, {},
                    enable_dev=loadingContext.enable_dev,
                    strict=loadingContext.strict,
                    preprocess_only=False,
                    fetcher_constructor=loadingContext.fetcher_constructor,
                    skip_schemas=False,
                    do_validate=loadingContext.do_validate)
    # Recent versions of cwltool
    else:
        (loading_context, workflowobj, uri) = fetch_document(fname)
        loading_context, uri = resolve_and_validate_document(
            loading_context, workflowobj, uri)
        document_loader = loading_context.loader
        # NOTE(review): the raw workflowobj (not a resolved process object)
        # is returned here — presumably adequate for callers; confirm.
        processobj = workflowobj
        metadata = loading_context.metadata
    return document_loader, processobj, metadata, uri
def test_pack_rewrites():
    """Packing default-wf5.cwl must record exactly six identifier rewrites."""
    rewrite_map = {}
    ctx, workflow_obj, doc_uri = fetch_document(
        get_data("tests/wf/default-wf5.cwl"))
    ctx.do_update = False
    ctx, doc_uri = resolve_and_validate_document(ctx, workflow_obj, doc_uri)
    process_obj = ctx.loader.resolve_ref(doc_uri)[0]
    cwltool.pack.pack(ctx.loader, process_obj, doc_uri, ctx.metadata,
                      rewrite_out=rewrite_map)
    assert len(rewrite_map) == 6
def test_pack_rewrites():
    """Six identifier rewrites are expected when packing default-wf5.cwl."""
    collected = {}
    loading_ctx, wf_obj, wf_uri = fetch_document(
        get_data("tests/wf/default-wf5.cwl"))
    loading_ctx.do_update = False
    loading_ctx, wf_uri = resolve_and_validate_document(
        loading_ctx, wf_obj, wf_uri)
    resolved = loading_ctx.loader.resolve_ref(wf_uri)[0]
    cwltool.pack.pack(
        loading_ctx.loader, resolved, wf_uri, loading_ctx.metadata,
        rewrite_out=collected)
    assert len(collected) == 6
def test_pack_missing_cwlVersion(cwl_path):
    """Ensure the generated pack output is not missing the `cwlVersion`
    in case of single tool workflow and single step workflow."""
    # Testing single tool workflow
    ctx, workflow_obj, doc_uri = fetch_document(get_data(cwl_path))
    ctx.do_update = False
    ctx, doc_uri = resolve_and_validate_document(ctx, workflow_obj, doc_uri)
    process_obj = ctx.loader.resolve_ref(doc_uri)[0]
    # generate pack output dict
    result = json.loads(
        print_pack(ctx.loader, process_obj, doc_uri, ctx.metadata))
    assert result["cwlVersion"] == 'v1.0'
def test_pack_missing_cwlVersion(cwl_path):
    """Packed output must always carry a `cwlVersion` key (single tool
    workflow and single step workflow cases)."""
    loading_ctx, wf_obj, wf_uri = fetch_document(get_data(cwl_path))
    loading_ctx.do_update = False
    loading_ctx, wf_uri = resolve_and_validate_document(
        loading_ctx, wf_obj, wf_uri)
    resolved = loading_ctx.loader.resolve_ref(wf_uri)[0]
    packed_doc = json.loads(print_pack(
        loading_ctx.loader, resolved, wf_uri, loading_ctx.metadata))
    assert packed_doc["cwlVersion"] == 'v1.0'
def test_packed_workflow_execution(wf_path: str, job_path: str,
                                   namespaced: bool,
                                   tmpdir: py.path.local) -> None:
    """Run a workflow normally and from its packed form; the outputs must match.

    :param wf_path: workflow document, relative to the test data root
    :param job_path: job order document, relative to the test data root
    :param namespaced: whether the packed result must contain $namespaces
    :param tmpdir: pytest-provided scratch directory used for --outdir
    """
    loadingContext = LoadingContext()
    loadingContext.resolver = tool_resolver
    loadingContext, workflowobj, uri = fetch_document(get_data(wf_path),
                                                      loadingContext)
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    loader = loadingContext.loader
    assert loader
    loader.resolve_ref(uri)[0]
    packed = json.loads(print_pack(loadingContext, uri))
    assert not namespaced or "$namespaces" in packed

    wf_packed_handle, wf_packed_path = tempfile.mkstemp()
    try:
        with open(wf_packed_path, "w") as temp_file:
            json.dump(packed, temp_file)

        normal_output = StringIO()
        packed_output = StringIO()
        normal_params = [
            "--outdir", str(tmpdir), get_data(wf_path), get_data(job_path)
        ]
        packed_params = [
            "--outdir", str(tmpdir), "--debug", wf_packed_path,
            get_data(job_path),
        ]
        assert main(normal_params, stdout=normal_output) == 0
        assert main(packed_params, stdout=packed_output) == 0
        assert json.loads(packed_output.getvalue()) == json.loads(
            normal_output.getvalue())
    finally:
        # Clean up even when an assertion fails; previously the temp file
        # leaked on failure because removal happened after the asserts.
        os.close(wf_packed_handle)
        os.remove(wf_packed_path)
def test_pack_rewrites() -> None:
    """Packing default-wf5.cwl must record exactly six identifier rewrites."""
    rewrite_map: Dict[str, str] = {}
    ctx, workflow_obj, doc_uri = fetch_document(
        get_data("tests/wf/default-wf5.cwl")
    )
    ctx.do_update = False
    ctx, doc_uri = resolve_and_validate_document(ctx, workflow_obj, doc_uri)
    loader = ctx.loader
    assert loader
    loader.resolve_ref(doc_uri)[0]
    cwltool.pack.pack(ctx, doc_uri, rewrite_out=rewrite_map)
    assert len(rewrite_map) == 6
def test_pack():
    """Pack tests/wf/revsort.cwl and compare with the expected packed file."""
    ctx, workflow_obj, doc_uri = fetch_document(get_data("tests/wf/revsort.cwl"))
    ctx.do_update = False
    ctx, doc_uri = resolve_and_validate_document(ctx, workflow_obj, doc_uri)
    process_obj = ctx.loader.resolve_ref(doc_uri)[0]

    with open(get_data("tests/wf/expect_packed.cwl")) as handle:
        want = json.load(handle)

    packed = cwltool.pack.pack(ctx.loader, process_obj, doc_uri, ctx.metadata)
    # Make all File/Directory paths relative to the test workflow directory.
    wf_dir = os.path.abspath(get_data("tests/wf"))
    relativize = partial(make_relative, wf_dir)
    adjustFileObjs(packed, relativize)
    adjustDirObjs(packed, relativize)

    # $schemas ordering is not stable; compare counts, then drop them.
    assert "$schemas" in packed
    assert len(packed["$schemas"]) == len(want["$schemas"])
    del packed["$schemas"]
    del want["$schemas"]
    assert packed == want
def load_tool(argsworkflow, loadingContext):
    """Fetch, validate, and construct a tool object for *argsworkflow*."""
    ctx, workflow_obj, doc_uri = fetch_document(argsworkflow, loadingContext)
    ctx, doc_uri = resolve_and_validate_document(
        ctx, workflow_obj, doc_uri, skip_schemas=True
    )
    return make_tool(doc_uri, ctx)
def run(args: argparse.Namespace) -> int:
    """Primary processing loop.

    For each input document: upgrade it to CWL v1.2, optionally refactor
    expressions, pack it if needed, and write the result into ``args.dir``.

    :param args: parsed command-line options
    :return: 0 on success, -1 on an unsupported cwlVersion
    """
    imports: Set[str] = set()
    for document in args.inputs:
        _logger.info("Processing %s.", document)
        with open(document) as doc_handle:
            result = yaml.main.round_trip_load(doc_handle, preserve_quotes=True)
        add_lc_filename(result, document)
        version = result.get("cwlVersion", None)
        if version in ("draft-3", "cwl:draft-3", "v1.0", "v1.1"):
            result = cwlupgrader.upgrade_document(
                result, False, False, args.dir, imports)
        else:
            # FIX: pass version and document as two separate lazy %-args;
            # the original passed one tuple for two %s placeholders, which
            # raises a formatting error inside logging.
            _logger.error(
                "Sorry, %s in %s is not a supported CWL version by this tool.",
                version,
                document,
            )
            return -1
        uri = Path(document).resolve().as_uri()
        if not args.no_expression_refactoring:
            refactored, _ = cwl_v1_2_expression_refactor.traverse(
                load_document_by_yaml(result, uri),
                not args.etools,
                False,
                args.skip_some1,
                args.skip_some2,
            )
            if not isinstance(refactored, MutableSequence):
                result = save(
                    refactored,
                    base_url=refactored.loadingOptions.fileuri
                    if refactored.loadingOptions.fileuri
                    else "",
                )
                # ^^ Setting the base_url and keeping the default value
                # for relative_uris=True means that the IDs in the generated
                # JSON/YAML are kept clean of the path to the input document
            else:
                result = [
                    save(result_item,
                         base_url=result_item.loadingOptions.fileuri)
                    for result_item in refactored
                ]
        if "$graph" in result:
            # Already packed (multi-process $graph document).
            packed = result
        else:
            with tempfile.TemporaryDirectory() as tmpdirname:
                path = Path(tmpdirname) / Path(document).name
                with open(path, "w") as handle:
                    yaml.main.round_trip_dump(result, handle)
                # TODO replace the cwltool based packing with a parser_v1_2
                # based packer
                loadingContext = LoadingContext()
                use_standard_schema("v1.2")
                # loadingContext.construct_tool_object = workflow.default_make_tool
                # loadingContext.resolver = tool_resolver
                loadingContext.do_update = False
                uri, tool_file_uri = resolve_tool_uri(
                    str(path),
                    resolver=loadingContext.resolver,
                    fetcher_constructor=loadingContext.fetcher_constructor,
                )
                loadingContext, workflowobj, uri = fetch_document(
                    uri, loadingContext)
                loadingContext, uri = resolve_and_validate_document(
                    loadingContext,
                    workflowobj,
                    uri,
                    preprocess_only=True,
                    skip_schemas=True,
                )
                packed = print_pack(loadingContext, uri)
                # NOTE: the unused RuntimeContext() instantiation from the
                # original was removed; nothing read it.
        output = Path(args.dir) / Path(document).name
        with open(output, "w", encoding="utf-8") as output_filehandle:
            output_filehandle.write(packed)
    return 0