Example #1
    def test_pack_missing_cwlVersion(self):
        """Test to ensure the generated pack output is not missing
        the `cwlVersion` in case of single tool workflow and single step workflow"""

        # Testing single tool workflow
        document_loader, workflowobj, uri = fetch_document(
            get_data("tests/wf/hello_single_tool.cwl"))
        document_loader, _, processobj, metadata, uri = validate_document(
            document_loader, workflowobj, uri)
        # generate pack output dict
        packed = json.loads(
            print_pack(document_loader, processobj, uri, metadata))

        self.assertEqual('v1.0', packed["cwlVersion"])

        # Testing single step workflow
        document_loader, workflowobj, uri = fetch_document(
            get_data("tests/wf/hello-workflow.cwl"))
        document_loader, _, processobj, metadata, uri = validate_document(
            document_loader, workflowobj, uri)
        # generate pack output dict
        packed = json.loads(
            print_pack(document_loader, processobj, uri, metadata))

        self.assertEqual('v1.0', packed["cwlVersion"])
Example #2
def pack_cwl(cwl_path):
    # cwltool needs to be imported on demand, since repeatedly
    # calling its functions on documents with the same name
    # caused errors.
    from cwltool.load_tool import fetch_document
    from cwltool.main import print_pack
    cwltool_version = get_distribution("cwltool").version
    if StrictVersion(cwltool_version) > StrictVersion("1.0.20181201184214"):
        from cwltool.load_tool import resolve_and_validate_document
        loadingContext, workflowobj, uri = fetch_document(cwl_path)
        loadingContext.do_update = False
        loadingContext, uri = resolve_and_validate_document(
            loadingContext, workflowobj, uri)
        processobj = loadingContext.loader.resolve_ref(uri)[0]
        packed_cwl = json.loads(
            print_pack(loadingContext.loader, processobj, uri,
                       loadingContext.metadata))
    else:
        from cwltool.load_tool import validate_document
        document_loader, workflowobj, uri = fetch_document(cwl_path)
        document_loader, _, processobj, metadata, uri = validate_document(
            document_loader, workflowobj, uri, [], {})
        packed_cwl = json.loads(
            print_pack(document_loader, processobj, uri, metadata))
    return packed_cwl
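
A minimal call site for the helper above might look like the following sketch; the workflow path is a placeholder, not taken from any of these projects:

import json

# Hypothetical usage of pack_cwl(); "workflow.cwl" is an assumed path.
packed = pack_cwl("workflow.cwl")
print(json.dumps(packed, indent=4))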
Example #3
def _pack_idempotently(document):
    loadingContext, workflowobj, uri = fetch_document(get_data(document))
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    processobj = loadingContext.loader.resolve_ref(uri)[0]

    # generate pack output dict
    packed_text = print_pack(loadingContext, uri)
    packed = json.loads(packed_text)

    tmp = NamedTemporaryFile(mode="w", delete=False)
    try:
        tmp.write(packed_text)
        tmp.flush()
        tmp.close()

        loadingContext, workflowobj, uri2 = fetch_document(tmp.name)
        loadingContext.do_update = False
        loadingContext, uri2 = resolve_and_validate_document(
            loadingContext, workflowobj, uri2)
        processobj = loadingContext.loader.resolve_ref(uri2)[0]

        # generate pack output dict
        packed_text = print_pack(loadingContext, uri2)
        double_packed = json.loads(packed_text)
    finally:
        os.remove(tmp.name)

    assert uri != uri2
    assert packed == double_packed
Example #4
def _pack_idempotently(document: str, tmp_path: Path) -> None:
    loadingContext, workflowobj, uri = fetch_document(get_data(document))
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    loader = loadingContext.loader
    assert loader
    loader.resolve_ref(uri)[0]

    # generate pack output dict
    packed_text = print_pack(loadingContext, uri)
    packed = json.loads(packed_text)

    tmp_name = tmp_path / "packed.cwl"
    tmp = tmp_name.open(mode="w")
    tmp.write(packed_text)
    tmp.flush()
    tmp.close()

    loadingContext, workflowobj, uri2 = fetch_document(tmp.name)
    loadingContext.do_update = False
    loadingContext, uri2 = resolve_and_validate_document(
        loadingContext, workflowobj, uri2)
    loader2 = loadingContext.loader
    assert loader2
    loader2.resolve_ref(uri2)[0]

    # generate pack output dict
    packed_text = print_pack(loadingContext, uri2)
    double_packed = json.loads(packed_text)

    assert uri != uri2
    assert packed == double_packed
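
A sketch of how such a helper is typically driven from pytest; the workflow path below is illustrative, and a real suite would parametrize over many documents:

import pytest
from pathlib import Path

# Illustrative wiring only; the CWL document path is an assumption.
@pytest.mark.parametrize("document", ["tests/wf/hello-workflow.cwl"])
def test_pack_is_idempotent(document: str, tmp_path: Path) -> None:
    _pack_idempotently(document, tmp_path)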
Example #5
    def _pack_idempotently(self, document):
        document_loader, workflowobj, uri = fetch_document(
            get_data(document))
        document_loader, avsc_names, processobj, metadata, uri = validate_document(
            document_loader, workflowobj, uri)
        # generate pack output dict
        packed = json.loads(print_pack(document_loader, processobj, uri, metadata))

        document_loader, workflowobj, uri2 = fetch_document(packed)
        document_loader, avsc_names, processobj, metadata, uri2 = validate_document(
            document_loader, workflowobj, uri2)
        double_packed = json.loads(print_pack(document_loader, processobj, uri2, metadata))
        self.assertEqual(packed, double_packed)
Example #6
def _pack_idempotently(document):
    document_loader, workflowobj, uri = fetch_document(
        get_data(document))
    document_loader, _, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri, [], {})
    # generate pack output dict
    packed = json.loads(print_pack(document_loader, processobj, uri, metadata))

    document_loader, workflowobj, uri2 = fetch_document(packed)
    document_loader, _, processobj, metadata, uri2 = validate_document(
        document_loader, workflowobj, uri2, [], {})
    double_packed = json.loads(print_pack(document_loader, processobj, uri2, metadata))
    assert packed == double_packed
Example #7
def test_packed_workflow_execution(wf_path, job_path, namespaced, tmpdir):
    loadingContext = LoadingContext()
    loadingContext.resolver = tool_resolver
    loadingContext, workflowobj, uri = fetch_document(
        get_data(wf_path), loadingContext)
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    processobj = loadingContext.loader.resolve_ref(uri)[0]
    packed = json.loads(print_pack(loadingContext.loader, processobj, uri, loadingContext.metadata))

    assert not namespaced or "$namespaces" in packed

    wf_packed_handle, wf_packed_path = tempfile.mkstemp()
    with open(wf_packed_path, 'w') as temp_file:
        json.dump(packed, temp_file)

    normal_output = StringIO()
    packed_output = StringIO()

    normal_params = ['--outdir', str(tmpdir), get_data(wf_path), get_data(job_path)]
    packed_params = ['--outdir', str(tmpdir), '--debug', get_data(wf_packed_path), get_data(job_path)]

    assert main(normal_params, stdout=normal_output) == 0
    assert main(packed_params, stdout=packed_output) == 0

    assert json.loads(packed_output.getvalue()) == json.loads(normal_output.getvalue())

    os.close(wf_packed_handle)
    os.remove(wf_packed_path)
Example #8
def test_packed_workflow_execution(wf_path, job_path, namespaced, tmpdir):
    load_tool.loaders = {}

    document_loader, workflowobj, uri = fetch_document(
        get_data(wf_path), resolver=tool_resolver)

    document_loader, _, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri, [], {})

    packed = json.loads(print_pack(document_loader, processobj, uri, metadata))

    assert not namespaced or "$namespaces" in packed

    wf_packed_handle, wf_packed_path = tempfile.mkstemp()
    with open(wf_packed_path, 'w') as temp_file:
        json.dump(packed, temp_file)

    normal_output = StringIO()
    packed_output = StringIO()

    normal_params = ['--outdir', str(tmpdir), get_data(wf_path), get_data(job_path)]
    packed_params = ['--outdir', str(tmpdir), '--debug', get_data(wf_packed_path), get_data(job_path)]

    assert main(normal_params, stdout=normal_output) == 0
    assert main(packed_params, stdout=packed_output) == 0

    assert json.loads(packed_output.getvalue()) == json.loads(normal_output.getvalue())

    os.close(wf_packed_handle)
    os.remove(wf_packed_path)
Example #9
 def test_preserving_namespaces(self):
     test_wf = "tests/wf/formattest.cwl"
     test_wf_job = "tests/wf/formattest-job.json"
     document_loader, workflowobj, uri = fetch_document(get_data(test_wf))
     document_loader, avsc_names, processobj, metadata, uri = validate_document(
         document_loader, workflowobj, uri)
     packed = json.loads(
         print_pack(document_loader, processobj, uri, metadata))
     assert "$namespaces" in packed
     temp_packed_path = tempfile.mkstemp()[1]
     with open(temp_packed_path, 'w') as f:
         json.dump(packed, f)
     normal_output = StringIO()
     packed_output = StringIO()
     self.assertEquals(
         main(
             ['--debug',
              get_data(temp_packed_path),
              get_data(test_wf_job)],
             stdout=packed_output), 0)
     self.assertEquals(
         main([get_data(test_wf), get_data(test_wf_job)],
              stdout=normal_output), 0)
     self.assertEquals(json.loads(packed_output.getvalue()),
                       json.loads(normal_output.getvalue()))
     os.remove(temp_packed_path)
Example #10
 def test_packed_workflow_execution(self):
     load_tool.loaders = {}
     test_wf = "tests/wf/count-lines1-wf.cwl"
     test_wf_job = "tests/wf/wc-job.json"
     document_loader, workflowobj, uri = fetch_document(
         get_data(test_wf), resolver=tool_resolver)
     document_loader, avsc_names, processobj, metadata, uri = validate_document(
         document_loader, workflowobj, uri)
     packed = json.loads(
         print_pack(document_loader, processobj, uri, metadata))
     temp_packed_path = tempfile.mkstemp()[1]
     with open(temp_packed_path, 'w') as f:
         json.dump(packed, f)
     normal_output = StringIO()
     packed_output = StringIO()
     self.assertEquals(
         main(
             ['--debug',
              get_data(temp_packed_path),
              get_data(test_wf_job)],
             stdout=packed_output), 0)
     self.assertEquals(
         main([get_data(test_wf), get_data(test_wf_job)],
              stdout=normal_output), 0)
     self.assertEquals(json.loads(packed_output.getvalue()),
                       json.loads(normal_output.getvalue()))
     os.remove(temp_packed_path)
Example #11
def test_packing(unpacked: str, expected: str) -> None:
    """Compare expected version reality with various workflows and --pack."""
    loadingContext, workflowobj, uri = fetch_document(get_data(unpacked))
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri
    )

    packed = json.loads(print_pack(loadingContext, uri))
    context_dir = os.path.abspath(os.path.dirname(get_data(unpacked)))
    adjustFileObjs(packed, partial(make_relative, context_dir))
    adjustDirObjs(packed, partial(make_relative, context_dir))

    with open(get_data(expected)) as packed_file:
        expect_packed = json.load(packed_file)

    if "$schemas" in expect_packed:
        assert "$schemas" in packed
        packed_schemas = packed["$schemas"]
        assert isinstance(packed_schemas, Sized)
        assert len(packed_schemas) == len(expect_packed["$schemas"])
        del packed["$schemas"]
        del expect_packed["$schemas"]

    assert packed == expect_packed
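
For context, make_relative rewrites absolute file:// locations inside the packed output so it can be compared across machines. A simplified sketch of the idea, not cwltool's exact implementation:

import os
import urllib.parse

def make_relative_sketch(base: str, file_obj: dict) -> None:
    # Rewrite an absolute file:// location to a path relative to base,
    # mutating the File/Directory object in place.
    location = file_obj.get("location", file_obj.get("path", ""))
    if location.startswith("file://"):
        path = urllib.parse.urlparse(location).path
        file_obj["location"] = os.path.relpath(path, base)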
Example #12
def test_packed_workflow_execution(wf_path, job_path, namespaced, tmpdir):
    loadingContext = LoadingContext()
    loadingContext.resolver = tool_resolver
    loadingContext, workflowobj, uri = fetch_document(
        get_data(wf_path), loadingContext)
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    processobj = loadingContext.loader.resolve_ref(uri)[0]
    packed = json.loads(print_pack(loadingContext.loader, processobj, uri, loadingContext.metadata))

    assert not namespaced or "$namespaces" in packed

    wf_packed_handle, wf_packed_path = tempfile.mkstemp()
    with open(wf_packed_path, 'w') as temp_file:
        json.dump(packed, temp_file)

    normal_output = StringIO()
    packed_output = StringIO()

    normal_params = ['--outdir', str(tmpdir), get_data(wf_path), get_data(job_path)]
    packed_params = ['--outdir', str(tmpdir), '--debug', wf_packed_path, get_data(job_path)]

    assert main(normal_params, stdout=normal_output) == 0
    assert main(packed_params, stdout=packed_output) == 0

    assert json.loads(packed_output.getvalue()) == json.loads(normal_output.getvalue())

    os.close(wf_packed_handle)
    os.remove(wf_packed_path)
Example #13
def _pack_idempotently(document):
    loadingContext, workflowobj, uri = fetch_document(
        get_data(document))
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    processobj = loadingContext.loader.resolve_ref(uri)[0]

    # generate pack output dict
    packed = json.loads(print_pack(loadingContext.loader, processobj, uri, loadingContext.metadata))

    loadingContext, workflowobj, uri2 = fetch_document(packed)
    loadingContext.do_update = False
    loadingContext, uri2 = resolve_and_validate_document(
        loadingContext, workflowobj, uri2)
    processobj = loadingContext.loader.resolve_ref(uri2)[0]
    double_packed = json.loads(print_pack(loadingContext.loader, processobj, uri2, loadingContext.metadata))
    assert packed == double_packed
Example #14
def test_pack_missing_cwlVersion(cwl_path):
    """Test to ensure the generated pack output is not missing
    the `cwlVersion` in case of single tool workflow and single step workflow"""

    # Testing single tool workflow
    document_loader, workflowobj, uri = fetch_document(get_data(cwl_path))
    document_loader, _, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri, [], {})
    # generate pack output dict
    packed = json.loads(print_pack(document_loader, processobj, uri, metadata))

    assert packed["cwlVersion"] == 'v1.0'
Example #15
def test_pack_missing_cwlVersion(cwl_path):
    """Ensure the generated pack output is not missing the `cwlVersion` in case of single tool workflow and single step workflow."""
    # Testing single tool workflow
    loadingContext, workflowobj, uri = fetch_document(get_data(cwl_path))
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    processobj = loadingContext.loader.resolve_ref(uri)[0]

    # generate pack output dict
    packed = json.loads(print_pack(loadingContext.loader, processobj, uri, loadingContext.metadata))

    assert packed["cwlVersion"] == 'v1.0'
Example #16
    def test_pack_missing_cwlVersion(self):
        """Test to ensure the generated pack output is not missing
        the `cwlVersion` in case of single tool workflow and single step workflow"""

        # Testing single tool workflow
        document_loader, workflowobj, uri = fetch_document(
            get_data("tests/wf/hello_single_tool.cwl"))
        document_loader, _, processobj, metadata, uri = validate_document(
            document_loader, workflowobj, uri)
        # generate pack output dict
        packed = json.loads(print_pack(document_loader, processobj, uri, metadata))

        self.assertEqual('v1.0', packed["cwlVersion"])

        # Testing single step workflow
        document_loader, workflowobj, uri = fetch_document(
            get_data("tests/wf/hello-workflow.cwl"))
        document_loader, _, processobj, metadata, uri = validate_document(
            document_loader, workflowobj, uri)
        # generate pack output dict
        packed = json.loads(print_pack(document_loader, processobj, uri, metadata))

        self.assertEqual('v1.0', packed["cwlVersion"])
Example #17
def test_pack_missing_cwlVersion(cwl_path):
    """Test to ensure the generated pack output is not missing
    the `cwlVersion` in case of single tool workflow and single step workflow"""

    # Testing single tool workflow
    loadingContext, workflowobj, uri = fetch_document(get_data(cwl_path))
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    processobj = loadingContext.loader.resolve_ref(uri)[0]

    # generate pack output dict
    packed = json.loads(print_pack(loadingContext.loader, processobj, uri, loadingContext.metadata))

    assert packed["cwlVersion"] == 'v1.0'
Example #18
def test_packed_workflow_execution(wf_path: str, job_path: str,
                                   namespaced: bool,
                                   tmpdir: py.path.local) -> None:
    loadingContext = LoadingContext()
    loadingContext.resolver = tool_resolver
    loadingContext, workflowobj, uri = fetch_document(get_data(wf_path),
                                                      loadingContext)
    loadingContext.do_update = False
    loadingContext, uri = resolve_and_validate_document(
        loadingContext, workflowobj, uri)
    loader = loadingContext.loader
    assert loader
    loader.resolve_ref(uri)[0]
    packed = json.loads(print_pack(loadingContext, uri))

    assert not namespaced or "$namespaces" in packed

    wf_packed_handle, wf_packed_path = tempfile.mkstemp()
    with open(wf_packed_path, "w") as temp_file:
        json.dump(packed, temp_file)

    normal_output = StringIO()
    packed_output = StringIO()

    normal_params = [
        "--outdir",
        str(tmpdir),
        get_data(wf_path),
        get_data(job_path)
    ]
    packed_params = [
        "--outdir",
        str(tmpdir),
        "--debug",
        wf_packed_path,
        get_data(job_path),
    ]

    assert main(normal_params, stdout=normal_output) == 0
    assert main(packed_params, stdout=packed_output) == 0

    assert json.loads(packed_output.getvalue()) == json.loads(
        normal_output.getvalue())

    os.close(wf_packed_handle)
    os.remove(wf_packed_path)
Example #19
    def _pack(self, fname, encoding):
        """Save workflow with ``--pack`` option

        This means that all tools and subworkflows are included in the workflow
        file that is created. A packed workflow cannot be loaded and used in
        scriptcwl.
        """
        (fd, tmpfile) = tempfile.mkstemp()
        os.close(fd)
        try:
            self.save(tmpfile, mode='abs', validate=False)
            document_loader, processobj, metadata, uri = load_cwl(tmpfile)
        finally:
            # cleanup tmpfile
            os.remove(tmpfile)

        with codecs.open(fname, 'wb', encoding=encoding) as f:
            f.write(print_pack(document_loader, processobj, uri, metadata))
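
A hypothetical call site, assuming a scriptcwl-style WorkflowGenerator instance named wf that provides the save() used above:

# Hypothetical: write a packed copy of the current workflow to disk.
wf._pack("packed_workflow.cwl", encoding="utf-8")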
Example #20
 def test_preserving_namespaces(self):
     test_wf = "tests/wf/formattest.cwl"
     test_wf_job = "tests/wf/formattest-job.json"
     document_loader, workflowobj, uri = fetch_document(
         get_data(test_wf))
     document_loader, avsc_names, processobj, metadata, uri = validate_document(
         document_loader, workflowobj, uri)
     packed = json.loads(print_pack(document_loader, processobj, uri, metadata))
     assert "$namespaces" in packed
     temp_packed_path = tempfile.mkstemp()[1]
     with open(temp_packed_path, 'w') as f:
         json.dump(packed, f)
     normal_output = StringIO()
     packed_output = StringIO()
     self.assertEquals(main(['--debug', get_data(temp_packed_path),
                             get_data(test_wf_job)],
                            stdout=packed_output), 0)
     self.assertEquals(main([get_data(test_wf),
                             get_data(test_wf_job)],
                            stdout=normal_output), 0)
     self.assertEquals(json.loads(packed_output.getvalue()), json.loads(normal_output.getvalue()))
     os.remove(temp_packed_path)
Example #21
 def test_packed_workflow_execution(self):
     load_tool.loaders = {}
     test_wf = "tests/wf/count-lines1-wf.cwl"
     test_wf_job = "tests/wf/wc-job.json"
     document_loader, workflowobj, uri = fetch_document(
         get_data(test_wf), resolver=tool_resolver)
     document_loader, avsc_names, processobj, metadata, uri = validate_document(
         document_loader, workflowobj, uri)
     packed = json.loads(print_pack(document_loader, processobj, uri, metadata))
     temp_packed_path = tempfile.mkstemp()[1]
     with open(temp_packed_path, 'w') as f:
         json.dump(packed, f)
     normal_output = StringIO()
     packed_output = StringIO()
     self.assertEquals(main(['--debug', get_data(temp_packed_path),
                             get_data(test_wf_job)],
                            stdout=packed_output), 0)
     self.assertEquals(main([get_data(test_wf),
                             get_data(test_wf_job)],
                            stdout=normal_output), 0)
     self.assertEquals(json.loads(packed_output.getvalue()), json.loads(normal_output.getvalue()))
     os.remove(temp_packed_path)
Example #22
def run(args: argparse.Namespace) -> int:
    """Primary processing loop."""
    imports: Set[str] = set()
    for document in args.inputs:
        _logger.info("Processing %s.", document)
        with open(document) as doc_handle:
            result = yaml.main.round_trip_load(doc_handle,
                                               preserve_quotes=True)
        add_lc_filename(result, document)
        version = result.get("cwlVersion", None)
        if version in ("draft-3", "cwl:draft-3", "v1.0", "v1.1"):
            result = cwlupgrader.upgrade_document(result, False, False,
                                                  args.dir, imports)
        else:
            _logger.error(
                "Sorry, %s in %s is not a supported CWL version by this tool.",
                version,
                document,
            )
            return -1
        uri = Path(document).resolve().as_uri()
        if not args.no_expression_refactoring:
            refactored, _ = cwl_v1_2_expression_refactor.traverse(
                load_document_by_yaml(result, uri),
                not args.etools,
                False,
                args.skip_some1,
                args.skip_some2,
            )
            if not isinstance(refactored, MutableSequence):
                result = save(
                    refactored,
                    base_url=refactored.loadingOptions.fileuri
                    if refactored.loadingOptions.fileuri else "",
                )
            #   ^^ Setting the base_url and keeping the default value
            #      for relative_uris=True means that the IDs in the generated
            #      JSON/YAML are kept clean of the path to the input document
            else:
                result = [
                    save(result_item,
                         base_url=result_item.loadingOptions.fileuri)
                    for result_item in refactored
                ]
        if "$graph" in result:
            # Already packed; serialize so it can be written out below.
            packed = json.dumps(result, indent=4)
        else:
            with tempfile.TemporaryDirectory() as tmpdirname:
                path = Path(tmpdirname) / Path(document).name
                with open(path, "w") as handle:
                    yaml.main.round_trip_dump(result, handle)
                # TODO replace the cwltool based packing with a parser_v1_2 based packer
                runtimeContext = RuntimeContext()
                loadingContext = LoadingContext()
                use_standard_schema("v1.2")
                # loadingContext.construct_tool_object = workflow.default_make_tool
                # loadingContext.resolver = tool_resolver
                loadingContext.do_update = False
                uri, tool_file_uri = resolve_tool_uri(
                    str(path),
                    resolver=loadingContext.resolver,
                    fetcher_constructor=loadingContext.fetcher_constructor,
                )
                loadingContext, workflowobj, uri = fetch_document(
                    uri, loadingContext)
                loadingContext, uri = resolve_and_validate_document(
                    loadingContext,
                    workflowobj,
                    uri,
                    preprocess_only=True,
                    skip_schemas=True,
                )
                packed = print_pack(loadingContext, uri)
        output = Path(args.dir) / Path(document).name
        with open(output, "w", encoding="utf-8") as output_filehandle:
            output_filehandle.write(packed)
    return 0
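
A sketch of an argument parser that would produce the Namespace consumed by run(); the flag spellings are inferred from the attributes accessed above and may not match the real CLI:

import argparse

def arg_parser_sketch() -> argparse.ArgumentParser:
    # Attribute names (inputs, dir, etools, ...) mirror what run() reads.
    parser = argparse.ArgumentParser(
        description="Upgrade, refactor, and pack CWL documents.")
    parser.add_argument("inputs", nargs="+", help="CWL document(s) to process")
    parser.add_argument("--dir", default=".", help="output directory")
    parser.add_argument("--etools", action="store_true",
                        help="prefer ExpressionTool during refactoring")
    parser.add_argument("--no-expression-refactoring", action="store_true")
    parser.add_argument("--skip-some1", action="store_true")
    parser.add_argument("--skip-some2", action="store_true")
    return parser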
Example #23
    def cwlmain(
        self,
        argsl=None,  # type: List[str]
        args=None,  # type: argparse.Namespace
        job_order_object=None,  # type: MutableMapping[Text, Any]
        stdin=sys.stdin,  # type: IO[Any]
        stdout=None,  # type: Union[TextIO, codecs.StreamWriter]
        stderr=sys.stderr,  # type: IO[Any]
        versionfunc=versionstring,  # type: Callable[[], Text]
        logger_handler=None,  # type: Optional[logging.Handler]
        custom_schema_callback=None,  # type: Callable[[], None]
        executor=None,  # type: Callable[..., Tuple[Dict[Text, Any], Text]]
        loadingContext=None,  # type: LoadingContext
        runtimeContext=None  # type: RuntimeContext
    ):  # type: (...) -> int

        if not stdout:
            stdout = codecs.getwriter('utf-8')(sys.stdout)
        _logger.removeHandler(defaultStreamHandler)
        if logger_handler:
            stderr_handler = logger_handler
        else:
            stderr_handler = logging.StreamHandler(stderr)
        _logger.addHandler(stderr_handler)
        try:
            if args is None:
                args = arg_parser().parse_args(argsl)
                if args.workflow and "--outdir" not in argsl:
                    outputPath = args.workflow.split('/')[-1].split('.')[0]
                    setattr(
                        args, "outdir",
                        os.getcwd() + "/" + outputPath + "/" +
                        datetime.datetime.now().strftime('%Y-%m-%d-%H%M'))
            if runtimeContext is None:
                runtimeContext = RuntimeContext(vars(args))
            else:
                runtimeContext = runtimeContext.copy()

            rdflib_logger = logging.getLogger("rdflib.term")
            rdflib_logger.addHandler(stderr_handler)
            rdflib_logger.setLevel(logging.ERROR)
            if args.quiet:
                _logger.setLevel(logging.WARN)
            if runtimeContext.debug:
                _logger.setLevel(logging.DEBUG)
                rdflib_logger.setLevel(logging.DEBUG)
            if args.timestamps:
                formatter = logging.Formatter("[%(asctime)s] %(message)s",
                                              "%Y-%m-%d %H:%M:%S")
                stderr_handler.setFormatter(formatter)
            # version
            if args.version:
                return versionfunc(), 0
            else:
                _logger.info(versionfunc())

            if args.print_supported_versions:
                return "\n".join(supportedCWLversions(args.enable_dev)), 0

            if not args.workflow:
                if os.path.isfile("CWLFile"):
                    setattr(args, "workflow", "CWLFile")
                else:
                    _logger.error("")
                    _logger.error(
                        "CWL document required, no input file was provided")
                    arg_parser().print_help()
                    return "CWL document required, no input file was provided", 1
            if args.relax_path_checks:
                command_line_tool.ACCEPTLIST_RE = command_line_tool.ACCEPTLIST_EN_RELAXED_RE

            if args.ga4gh_tool_registries:
                ga4gh_tool_registries[:] = args.ga4gh_tool_registries
            if not args.enable_ga4gh_tool_registry:
                del ga4gh_tool_registries[:]

            if custom_schema_callback:
                custom_schema_callback()
            elif args.enable_ext:
                res = pkg_resources.resource_stream(__name__, 'extensions.yml')
                use_custom_schema("v1.0", "http://commonwl.org/cwltool",
                                  res.read())
                res.close()
            else:
                use_standard_schema("v1.0")

            if loadingContext is None:
                loadingContext = LoadingContext(vars(args))
            else:
                loadingContext = loadingContext.copy()

            loadingContext.disable_js_validation = \
                args.disable_js_validation or (not args.do_validate)
            loadingContext.construct_tool_object = getdefault(
                loadingContext.construct_tool_object,
                workflow.default_make_tool)
            loadingContext.resolver = getdefault(loadingContext.resolver,
                                                 tool_resolver)
            try:
                uri, tool_file_uri = resolve_tool_uri(
                    args.workflow,
                    resolver=loadingContext.resolver,
                    fetcher_constructor=loadingContext.fetcher_constructor)
            except:
                return "Can't find file " + args.workflow, 0

            try_again_msg = "" if args.debug else ", try again with --debug for more information"

            try:
                job_order_object, input_basedir, jobloader = load_job_order(
                    args, stdin, loadingContext.fetcher_constructor,
                    loadingContext.overrides_list, tool_file_uri)

                if args.overrides:
                    loadingContext.overrides_list.extend(
                        load_overrides(
                            file_uri(os.path.abspath(args.overrides)),
                            tool_file_uri))

                document_loader, workflowobj, uri = fetch_document(
                    uri,
                    resolver=loadingContext.resolver,
                    fetcher_constructor=loadingContext.fetcher_constructor)

                if args.print_deps:
                    # printdeps(workflowobj, document_loader, stdout, args.relative_deps, uri)
                    result = returndeps(workflowobj, document_loader, stdout,
                                        args.relative_deps, uri)
                    return result, 0

                document_loader, avsc_names, processobj, metadata, uri \
                    = validate_document(document_loader, workflowobj, uri,
                                        enable_dev=loadingContext.enable_dev,
                                        strict=loadingContext.strict,
                                        preprocess_only=(args.print_pre or args.pack),
                                        fetcher_constructor=loadingContext.fetcher_constructor,
                                        skip_schemas=args.skip_schemas,
                                        overrides=loadingContext.overrides_list,
                                        do_validate=loadingContext.do_validate)

                if args.print_pre:
                    # stdout.write(json_dumps(processobj, indent=4))
                    return json_dumps(processobj, indent=4), 0

                loadingContext.overrides_list.extend(
                    metadata.get("cwltool:overrides", []))

                tool = make_tool(document_loader, avsc_names, metadata, uri,
                                 loadingContext)
                if args.make_template:
                    yaml.safe_dump(generate_input_template(tool),
                                   sys.stdout,
                                   default_flow_style=False,
                                   indent=4,
                                   block_seq_indent=2)
                    return yaml.safe_dump(generate_input_template(tool),
                                          indent=4), 0

                if args.validate:
                    _logger.info("Tool definition is valid")
                    return "Tool definition is valid", 0

                if args.pack:
                    stdout.write(
                        print_pack(document_loader, processobj, uri, metadata))
                    return print_pack(document_loader, processobj, uri,
                                      metadata), 0

                if args.print_rdf:
                    stdout.write(
                        printrdf(tool, document_loader.ctx,
                                 args.rdf_serializer))
                    return printrdf(tool, document_loader.ctx,
                                    args.rdf_serializer), 0

                if args.print_dot:
                    printdot(tool, document_loader.ctx, stdout)
                    return "args.print_dot still not solved", 0

            except (validate.ValidationException) as exc:
                _logger.error(u"Tool definition failed validation:\n%s",
                              exc,
                              exc_info=args.debug)
                infor = "Tool definition failed validation:\n%s" + exc + args.debug
                return infor, 1
            except (RuntimeError, WorkflowException) as exc:
                _logger.error(u"Tool definition failed initialization:\n%s",
                              exc,
                              exc_info=args.debug)
                infor = "Tool definition failed initialization:\n%s" + exc + args.debug
                return infor, 1
            except Exception as exc:
                _logger.error(
                    u"I'm sorry, I couldn't load this CWL file%s.\nThe error was: %s",
                    try_again_msg,
                    exc if not args.debug else "",
                    exc_info=args.debug)
                return "I'm sorry, I couldn't load this CWL file", 1

            if isinstance(tool, int):
                return tool, 0

            # If on MacOS platform, TMPDIR must be set to be under one of the
            # shared volumes in Docker for Mac
            # More info: https://dockstore.org/docs/faq
            if sys.platform == "darwin":
                default_mac_path = "/private/tmp/docker_tmp"
                if runtimeContext.tmp_outdir_prefix == DEFAULT_TMP_PREFIX:
                    runtimeContext.tmp_outdir_prefix = default_mac_path

            for dirprefix in ("tmpdir_prefix", "tmp_outdir_prefix",
                              "cachedir"):
                if getattr(runtimeContext, dirprefix) and getattr(
                        runtimeContext, dirprefix) != DEFAULT_TMP_PREFIX:
                    sl = "/" if getattr(runtimeContext, dirprefix).endswith("/") or dirprefix == "cachedir" \
                        else ""
                    setattr(
                        runtimeContext, dirprefix,
                        os.path.abspath(getattr(runtimeContext, dirprefix)) +
                        sl)
                    if not os.path.exists(
                            os.path.dirname(getattr(runtimeContext,
                                                    dirprefix))):
                        try:
                            os.makedirs(
                                os.path.dirname(
                                    getattr(runtimeContext, dirprefix)))
                        except Exception as e:
                            _logger.error("Failed to create directory: %s", e)
                            infor = "Failed to create directory: %s" + e + ""
                            return infor, 1

            if args.cachedir:
                if args.move_outputs == "move":
                    runtimeContext.move_outputs = "copy"
                runtimeContext.tmp_outdir_prefix = args.cachedir

            runtimeContext.secret_store = getdefault(
                runtimeContext.secret_store, SecretStore())

            try:
                initialized_job_order_object = init_job_order(
                    job_order_object,
                    args,
                    tool,
                    jobloader,
                    stdout,
                    print_input_deps=args.print_input_deps,
                    relative_deps=args.relative_deps,
                    input_basedir=input_basedir,
                    secret_store=runtimeContext.secret_store)
            except SystemExit as err:
                return err.code
            if not executor:
                if args.parallel:
                    executor = MultithreadedJobExecutor()
                else:
                    executor = SingleJobExecutor()
            assert executor is not None

            if isinstance(initialized_job_order_object, int):
                return initialized_job_order_object

            try:
                runtimeContext.basedir = input_basedir
                del args.workflow
                del args.job_order

                conf_file = getattr(args,
                                    "beta_dependency_resolvers_configuration",
                                    None)  # Text
                use_conda_dependencies = getattr(args,
                                                 "beta_conda_dependencies",
                                                 None)  # Text

                job_script_provider = None  # type: Optional[DependenciesConfiguration]
                if conf_file or use_conda_dependencies:
                    runtimeContext.job_script_provider = DependenciesConfiguration(
                        args)

                runtimeContext.find_default_container = \
                    functools.partial(find_default_container, args)
                runtimeContext.make_fs_access = getdefault(
                    runtimeContext.make_fs_access, StdFsAccess)

                (out, status) = executor(tool,
                                         initialized_job_order_object,
                                         runtimeContext,
                                         logger=_logger)
                # This is the workflow output, it needs to be written
                if out is not None:

                    def loc_to_path(obj):
                        for field in ("path", "nameext", "nameroot",
                                      "dirname"):
                            if field in obj:
                                del obj[field]
                        if obj["location"].startswith("file://"):
                            obj["path"] = uri_file_path(obj["location"])

                    visit_class(out, ("File", "Directory"), loc_to_path)

                    # Unset the Generation from the final output object
                    visit_class(out, ("File", ),
                                MutationManager().unset_generation)

                    if isinstance(out, string_types):
                        stdout.write(out)
                    else:
                        stdout.write(
                            json_dumps(
                                out,
                                indent=4,  # type: ignore
                                ensure_ascii=False))
                    stdout.write("\n")
                    if hasattr(stdout, "flush"):
                        stdout.flush()  # type: ignore

                if status != "success":
                    _logger.warning(u"Final process status is %s", status)
                    infor = "Final process status is %s" + status + ""
                    return infor, 1

                _logger.info(u"Final process status is %s", status)
                return out, status

            except (validate.ValidationException) as exc:
                _logger.error(u"Input object failed validation:\n%s",
                              exc,
                              exc_info=args.debug)
                infor = "Input object failed validation:\n%s" + exc + args.debug
                return infor, 1
            except UnsupportedRequirement as exc:
                _logger.error(
                    u"Workflow or tool uses unsupported feature:\n%s",
                    exc,
                    exc_info=args.debug)
                infor = "Workflow or tool uses unsupported feature:\n%s" + exc + args.debug
                return infor, 3
            except WorkflowException as exc:
                _logger.error(u"Workflow error%s:\n%s",
                              try_again_msg,
                              strip_dup_lineno(six.text_type(exc)),
                              exc_info=args.debug)
                infor = "Workflow error%s:\n%s" + try_again_msg + strip_dup_lineno(
                    six.text_type(exc)) + args.debug
                return infor, 1
            except Exception as exc:
                _logger.error(u"Unhandled error%s:\n  %s",
                              try_again_msg,
                              exc,
                              exc_info=args.debug)
                infor = "Unhandled error%s:\n  %s" + try_again_msg + exc + args.debug
                return infor, 1

        finally:
            _logger.removeHandler(stderr_handler)
            _logger.addHandler(defaultStreamHandler)