示例#1
0
    def test_pack_missing_cwlVersion(self):
        """Ensure pack output retains `cwlVersion` for both a single-tool
        workflow and a single-step workflow."""
        fixtures = (
            "tests/wf/hello_single_tool.cwl",   # single tool workflow
            "tests/wf/hello-workflow.cwl",      # single step workflow
        )
        for wf_path in fixtures:
            loader, workflowobj, uri = fetch_document(get_data(wf_path))
            loader, _, processobj, metadata, uri = validate_document(
                loader, workflowobj, uri)
            # Build the pack output dict and check the version survived.
            packed = json.loads(
                print_pack(loader, processobj, uri, metadata))
            self.assertEqual('v1.0', packed["cwlVersion"])
def fetch_workflow(remote_storage, local_input_dir, remote_workflow_file, workflow_file='workflow.cwl'):
    """Download a CWL workflow from remote storage and validate it locally.

    Args:
        remote_storage: storage client providing a ``download(src, dst)`` method.
        local_input_dir: local directory to place the downloaded file in.
        remote_workflow_file: remote path/key of the workflow to fetch.
        workflow_file: local filename to save the workflow as.

    Returns:
        The path of the downloaded, validated workflow file.

    Note: this block previously contained two byte-identical definitions of
    ``fetch_workflow``; the second silently shadowed the first, so only one
    is kept.
    """
    logging.warning('Downloading cwl')
    local_workflow_file = local_input_dir + '/' + workflow_file
    remote_storage.download(remote_workflow_file, local_workflow_file)
    logging.warning('Validating cwl')
    # Validation raises on an invalid document; the results are not needed here.
    document_loader, workflowobj, uri = fetch_document(local_workflow_file)
    validate_document(document_loader, workflowobj, uri)
    return local_workflow_file
示例#4
0
def load_cwl(fname):
    """Load and validate CWL file using cwltool.

    Supports both older and recent cwltool APIs, selected by the
    module-level ``legacy_cwltool`` flag (and, within the legacy branch, by
    catching the ``TypeError`` raised when ``validate_document`` requires
    extra arguments).

    Returns a ``(document_loader, processobj, metadata, uri)`` tuple.
    """
    logger.debug('Loading CWL file "{}"'.format(fname))
    # Fetching, preprocessing and validating cwl

    # Older versions of cwltool
    if legacy_cwltool:
        try:
            # Oldest API: fetch/validate take only positional arguments.
            (document_loader, workflowobj, uri) = fetch_document(fname)
            (document_loader, _, processobj, metadata, uri) = \
                validate_document(document_loader, workflowobj, uri)
        except TypeError:
            # Intermediate API: validate_document grew required arguments,
            # driven off a LoadingContext.  Imports are done lazily so these
            # symbols are only required on this code path.
            from cwltool.context import LoadingContext, getdefault
            from cwltool import workflow
            from cwltool.resolver import tool_resolver
            from cwltool.load_tool import resolve_tool_uri

            loadingContext = LoadingContext()
            # Fall back to cwltool defaults for unset context fields.
            loadingContext.construct_tool_object = getdefault(
                loadingContext.construct_tool_object,
                workflow.default_make_tool)
            loadingContext.resolver = getdefault(loadingContext.resolver,
                                                 tool_resolver)

            uri, tool_file_uri = resolve_tool_uri(
                fname,
                resolver=loadingContext.resolver,
                fetcher_constructor=loadingContext.fetcher_constructor)

            document_loader, workflowobj, uri = fetch_document(
                uri,
                resolver=loadingContext.resolver,
                fetcher_constructor=loadingContext.fetcher_constructor)
            document_loader, avsc_names, processobj, metadata, uri = \
                validate_document(
                    document_loader, workflowobj, uri,
                    loadingContext.overrides_list, {},
                    enable_dev=loadingContext.enable_dev,
                    strict=loadingContext.strict,
                    preprocess_only=False,
                    fetcher_constructor=loadingContext.fetcher_constructor,
                    skip_schemas=False,
                    do_validate=loadingContext.do_validate)
    # Recent versions of cwltool
    else:
        (loading_context, workflowobj, uri) = fetch_document(fname)
        loading_context, uri = resolve_and_validate_document(
            loading_context, workflowobj, uri)
        document_loader = loading_context.loader
        # NOTE(review): the raw workflowobj is returned as processobj here,
        # unlike the legacy branches which return the validated object —
        # confirm this is intentional for the new API.
        processobj = workflowobj
        metadata = loading_context.metadata

    return document_loader, processobj, metadata, uri
示例#5
0
    def _pack_idempotently(self, document):
        """Pack *document*, re-pack the packed result, and assert both
        packings are identical (i.e. pack is idempotent)."""
        document_loader, workflowobj, uri = fetch_document(
            get_data(document))
        document_loader, avsc_names, processobj, metadata, uri = validate_document(
            document_loader, workflowobj, uri)
        # generate pack output dict
        packed = json.loads(print_pack(document_loader, processobj, uri, metadata))

        document_loader, workflowobj, uri2 = fetch_document(packed)
        # Validate against the uri of the re-fetched packed document;
        # previously the original `uri` was passed here, leaving the freshly
        # fetched `uri2` unused.
        document_loader, avsc_names, processobj, metadata, uri2 = validate_document(
            document_loader, workflowobj, uri2)
        double_packed = json.loads(print_pack(document_loader, processobj, uri2, metadata))
        self.assertEqual(packed, double_packed)
示例#6
0
def _pack_idempotently(document):
    """Pack *document*, re-pack the packed result, and assert both packings
    are identical (i.e. pack is idempotent)."""
    document_loader, workflowobj, uri = fetch_document(
        get_data(document))
    document_loader, _, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri, [], {})
    # generate pack output dict
    packed = json.loads(print_pack(document_loader, processobj, uri, metadata))

    document_loader, workflowobj, uri2 = fetch_document(packed)
    # Validate against the uri of the re-fetched packed document; previously
    # the original `uri` was passed here, leaving `uri2` unused.
    document_loader, _, processobj, metadata, uri2 = validate_document(
        document_loader, workflowobj, uri2, [], {})
    double_packed = json.loads(print_pack(document_loader, processobj, uri2, metadata))
    assert packed == double_packed
示例#7
0
    def _pack_idempotently(self, document):
        """Pack *document*, re-pack the packed result, and assert both
        packings are identical (i.e. pack is idempotent)."""
        document_loader, workflowobj, uri = fetch_document(
            get_data(document))
        document_loader, avsc_names, processobj, metadata, uri = validate_document(
            document_loader, workflowobj, uri)
        # generate pack output dict
        packed = json.loads(print_pack(document_loader, processobj, uri, metadata))

        document_loader, workflowobj, uri2 = fetch_document(packed)
        # Validate against the uri of the re-fetched packed document;
        # previously the original `uri` was passed here, leaving `uri2` unused.
        document_loader, avsc_names, processobj, metadata, uri2 = validate_document(
            document_loader, workflowobj, uri2)
        double_packed = json.loads(print_pack(document_loader, processobj, uri2, metadata))
        self.assertEqual(packed, double_packed)
示例#8
0
 def test_preserving_namespaces(self):
     """Packed output must keep `$namespaces`, and the packed workflow must
     produce the same output as the original when executed."""
     test_wf = "tests/wf/formattest.cwl"
     test_wf_job = "tests/wf/formattest-job.json"
     document_loader, workflowobj, uri = fetch_document(get_data(test_wf))
     document_loader, avsc_names, processobj, metadata, uri = validate_document(
         document_loader, workflowobj, uri)
     packed = json.loads(
         print_pack(document_loader, processobj, uri, metadata))
     assert "$namespaces" in packed
     # mkstemp returns (fd, path); close the fd so the descriptor is not
     # leaked (previously only the path was kept).
     handle, temp_packed_path = tempfile.mkstemp()
     os.close(handle)
     with open(temp_packed_path, 'w') as f:
         json.dump(packed, f)
     normal_output = StringIO()
     packed_output = StringIO()
     # assertEquals is a deprecated alias (removed in Python 3.12);
     # use assertEqual.
     self.assertEqual(
         main(
             ['--debug',
              get_data(temp_packed_path),
              get_data(test_wf_job)],
             stdout=packed_output), 0)
     self.assertEqual(
         main([get_data(test_wf), get_data(test_wf_job)],
              stdout=normal_output), 0)
     self.assertEqual(json.loads(packed_output.getvalue()),
                      json.loads(normal_output.getvalue()))
     os.remove(temp_packed_path)
示例#9
0
 def test_packed_workflow_execution(self):
     """Executing the packed workflow must produce the same output as the
     original workflow."""
     load_tool.loaders = {}
     test_wf = "tests/wf/count-lines1-wf.cwl"
     test_wf_job = "tests/wf/wc-job.json"
     document_loader, workflowobj, uri = fetch_document(
         get_data(test_wf), resolver=tool_resolver)
     document_loader, avsc_names, processobj, metadata, uri = validate_document(
         document_loader, workflowobj, uri)
     packed = json.loads(
         print_pack(document_loader, processobj, uri, metadata))
     # mkstemp returns (fd, path); close the fd so the descriptor is not
     # leaked (previously only the path was kept).
     handle, temp_packed_path = tempfile.mkstemp()
     os.close(handle)
     with open(temp_packed_path, 'w') as f:
         json.dump(packed, f)
     normal_output = StringIO()
     packed_output = StringIO()
     # assertEquals is a deprecated alias (removed in Python 3.12);
     # use assertEqual.
     self.assertEqual(
         main(
             ['--debug',
              get_data(temp_packed_path),
              get_data(test_wf_job)],
             stdout=packed_output), 0)
     self.assertEqual(
         main([get_data(test_wf), get_data(test_wf_job)],
              stdout=normal_output), 0)
     self.assertEqual(json.loads(packed_output.getvalue()),
                      json.loads(normal_output.getvalue()))
     os.remove(temp_packed_path)
示例#10
0
def test_packed_workflow_execution(wf_path, job_path, namespaced, tmpdir):
    """Run a workflow both normally and from its packed form; outputs must
    match, and namespaced workflows must keep `$namespaces` when packed."""
    load_tool.loaders = {}

    document_loader, workflowobj, uri = fetch_document(
        get_data(wf_path), resolver=tool_resolver)
    document_loader, _, processobj, metadata, uri = validate_document(
        document_loader, workflowobj, uri, [], {})
    packed = json.loads(print_pack(document_loader, processobj, uri, metadata))

    if namespaced:
        assert "$namespaces" in packed

    # Persist the packed form to a temp file so it can be executed.
    packed_fd, packed_path = tempfile.mkstemp()
    with open(packed_path, 'w') as stream:
        json.dump(packed, stream)

    plain_output = StringIO()
    packed_output = StringIO()

    assert main(['--outdir', str(tmpdir), get_data(wf_path), get_data(job_path)],
                stdout=plain_output) == 0
    assert main(['--outdir', str(tmpdir), '--debug', get_data(packed_path),
                 get_data(job_path)],
                stdout=packed_output) == 0

    assert json.loads(packed_output.getvalue()) == json.loads(plain_output.getvalue())

    os.close(packed_fd)
    os.remove(packed_path)
示例#11
0
def pack_cwl(cwl_path):
    """Pack the CWL document at *cwl_path* and return the packed dict.

    Chooses between the old (``validate_document``) and new
    (``resolve_and_validate_document``) cwltool APIs based on the installed
    cwltool version.
    """
    # cwltool needs to be imported on demand since
    # repeatedly calling functions on a document named
    # with same name caused errors.
    from cwltool.load_tool import fetch_document
    from cwltool.main import print_pack
    cwltool_version = get_distribution("cwltool").version
    # NOTE(review): StrictVersion comes from distutils, which is deprecated
    # and removed in Python 3.12 — consider packaging.version.Version.
    if StrictVersion(cwltool_version) > StrictVersion("1.0.20181201184214"):
        from cwltool.load_tool import resolve_and_validate_document
        loadingContext, workflowobj, uri = fetch_document(cwl_path)
        # do_update=False: skip cwltool's document update step (presumably to
        # preserve the document's declared cwlVersion — confirm).
        loadingContext.do_update = False
        loadingContext, uri = resolve_and_validate_document(
            loadingContext, workflowobj, uri)
        processobj = loadingContext.loader.resolve_ref(uri)[0]
        packed_cwl = json.loads(
            print_pack(loadingContext.loader, processobj, uri,
                       loadingContext.metadata))
    else:
        from cwltool.load_tool import validate_document
        document_loader, workflowobj, uri = fetch_document(cwl_path)
        document_loader, _, processobj, metadata, uri = validate_document(
            document_loader, workflowobj, uri, [], {})
        packed_cwl = json.loads(
            print_pack(document_loader, processobj, uri, metadata))
    return packed_cwl
示例#12
0
def load_tool(
        argsworkflow,  # type: Union[Text, Dict[Text, Any]]
        makeTool,  # type: Callable[..., Process]
        kwargs=None,  # type: Dict
        enable_dev=False,  # type: bool
        strict=False,  # type: bool
        resolver=None,  # type: Callable[[Loader, Union[Text, Dict[Text, Any]]], Text]
        fetcher_constructor=None,  # type: FetcherConstructorType
        overrides=None):
    # type: (...) -> Process
    """Resolve, fetch and validate *argsworkflow*, then build a Process.

    ``kwargs`` may carry optional ``skip_schemas`` (default True) and
    ``metadata`` entries that are forwarded to validation.

    NOTE(review): the ``makeTool`` parameter is accepted but never used in
    this body — ``make_tool`` with a fresh LoadingContext is called instead;
    confirm whether that is intentional.
    """
    uri, tool_file_uri = resolve_tool_uri(
        argsworkflow,
        resolver=resolver,
        fetcher_constructor=fetcher_constructor)

    document_loader, workflowobj, uri = fetch_document(
        uri, resolver=resolver, fetcher_constructor=fetcher_constructor)

    document_loader, avsc_names, processobj, metadata, uri \
        = validate_document(document_loader, workflowobj, uri,
                            enable_dev=enable_dev,
                            strict=strict,
                            fetcher_constructor=fetcher_constructor,
                            overrides=overrides,
                            skip_schemas=kwargs.get('skip_schemas', True) if kwargs else True,
                            metadata=kwargs.get('metadata', None) if kwargs else None)
    return make_tool(document_loader, avsc_names, metadata, uri,
                     LoadingContext())
示例#13
0
    def test_default_path(self):
        """Testing that error is not raised when default path is not present.

        Note: this block previously contained two identical definitions of
        ``test_default_path``; the first was shadowed by the second and never
        ran, so only one (the documented copy) is kept.
        """
        document_loader, workflowobj, uri = fetch_document(
            get_data("tests/wf/default_path.cwl"))
        document_loader, avsc_names, processobj, metadata, uri = validate_document(
            document_loader, workflowobj, uri)

        # The loader must be a schema-salad Loader and the resolved process
        # object must still carry cwlVersion.
        self.assertIsInstance(document_loader, Loader)
        self.assertIn("cwlVersion", processobj)
示例#15
0
    def test_pack_rewrites(self):
        """Packing tests/wf/default-wf5.cwl must record exactly six
        identifier rewrites."""
        load_tool.loaders = {}

        rewrites = {}
        loader, workflowobj, uri = fetch_document(get_data("tests/wf/default-wf5.cwl"))
        loader, avsc_names, processobj, metadata, uri = validate_document(
            loader, workflowobj, uri)
        cwltool.pack.pack(loader, processobj, uri, metadata, rewrite_out=rewrites)
        self.assertEqual(6, len(rewrites))
示例#16
0
def load_cwl(fname):
    """Fetch, preprocess and validate a CWL document via cwltool.

    Returns a (document_loader, processobj, metadata, uri) tuple.
    """
    loader, workflowobj, uri = fetch_document(fname)
    loader, _, processobj, metadata, uri = validate_document(
        loader, workflowobj, uri)
    return loader, processobj, metadata, uri
示例#17
0
def test_default_path():
    """A workflow whose default path is absent must still validate cleanly."""
    loader, workflowobj, uri = fetch_document(
        get_data("tests/wf/default_path.cwl"))
    loader, _, processobj, _, uri = validate_document(
        loader, workflowobj, uri, [], {})

    # Loader instance comes back and the process object keeps cwlVersion.
    assert isinstance(loader, Loader)
    assert "cwlVersion" in processobj
示例#18
0
def test_pack_single_tool():
    """Packing a single tool must keep its `$schemas` entry."""
    load_tool.loaders = {}

    loader, workflowobj, uri = fetch_document(
        get_data("tests/wf/formattest.cwl"))
    loader, _, processobj, metadata, uri = validate_document(
        loader, workflowobj, uri, [], {})

    result = cwltool.pack.pack(loader, processobj, uri, metadata)
    assert "$schemas" in result
示例#19
0
    def test_pack(self):
        """Packed revsort.cwl must equal the stored expect_packed.cwl."""
        self.maxDiff = None

        loader, workflowobj, uri = fetch_document("tests/wf/revsort.cwl")
        loader, avsc_names, processobj, metadata, uri = validate_document(
            loader, workflowobj, uri)
        with open("tests/wf/expect_packed.cwl") as handle:
            expected = json.load(handle)
        packed = cwltool.pack.pack(loader, processobj, uri, metadata)
        self.assertEqual(expected, packed)
示例#20
0
def test_pack_rewrites():
    """Packing default-wf5.cwl must record exactly six identifier rewrites."""
    load_tool.loaders = {}
    rewrites = {}

    loader, workflowobj, uri = fetch_document(get_data("tests/wf/default-wf5.cwl"))
    loader, _, processobj, metadata, uri = validate_document(
        loader, workflowobj, uri, [], {})

    cwltool.pack.pack(loader, processobj, uri, metadata, rewrite_out=rewrites)

    assert len(rewrites) == 6
def cwl_graph_generate(cwl_path: str):
    """Render the CWL document at *cwl_path* as a cwl-viewer dot graph."""
    # Normalise to a file:// URI so cwltool can resolve local paths.
    if not cwl_path.startswith("file:"):
        cwl_path = f"file://{path.abspath(cwl_path)}"

    loader, workflowobj, uri = fetch_document(cwl_path)
    loader, avsc_names, processobj, metadata, uri = validate_document(
        loader, workflowobj, uri, strict=False, preprocess_only=True)
    tool = make_tool(loader, avsc_names, metadata, uri, LoadingContext())
    cwl_viewer_dot(tool)
示例#22
0
def expand_cwl(cwl, uri, g):
    """Parse CWL text, validate it, and add its RDF triples to graph *g*.

    Failures are reported on stderr instead of raised (best-effort import).
    """
    try:
        document_loader = Loader(
            {"cwl": "https://w3id.org/cwl/cwl#", "id": "@id"})
        # NOTE(review): yaml.load without an explicit Loader is unsafe on
        # untrusted input (and a deprecated signature in PyYAML 5+) —
        # prefer yaml.safe_load if documents may come from outside.
        cwl = yaml.load(cwl)
        document_loader, avsc_names, processobj, metadata, uri = validate_document(
            document_loader, cwl, uri, strict=False)
        jsonld_context.makerdf(uri, processobj, document_loader.ctx, graph=g)
        sys.stderr.write("\n%s: imported ok\n" % (uri))
    except Exception as e:
        # Deliberate broad catch: report and continue with other documents.
        sys.stderr.write("\n%s: %s\n" % (uri, e))
示例#23
0
def test_pack_missing_cwlVersion(cwl_path):
    """Packed output for single-tool and single-step workflows must carry
    `cwlVersion`."""
    loader, workflowobj, uri = fetch_document(get_data(cwl_path))
    loader, _, processobj, metadata, uri = validate_document(
        loader, workflowobj, uri, [], {})

    # Build the pack output dict and check the version survived packing.
    packed = json.loads(print_pack(loader, processobj, uri, metadata))
    assert packed["cwlVersion"] == 'v1.0'
def expand_cwl(cwl, uri, g):
    """Parse CWL text, validate it, and add its RDF triples to graph *g*.

    Failures are reported on stderr instead of raised (best-effort import).
    """
    try:
        document_loader = Loader({
            "cwl": "https://w3id.org/cwl/cwl#",
            "id": "@id"
        })
        # NOTE(review): yaml.load without an explicit Loader is unsafe on
        # untrusted input (and a deprecated signature in PyYAML 5+) —
        # prefer yaml.safe_load if documents may come from outside.
        cwl = yaml.load(cwl)
        document_loader, avsc_names, processobj, metadata, uri = validate_document(
            document_loader, cwl, uri, strict=False)
        jsonld_context.makerdf(uri, processobj, document_loader.ctx, graph=g)
        sys.stderr.write("\n%s: imported ok\n" % (uri))
    except Exception as e:
        # Deliberate broad catch: report and continue with other documents.
        sys.stderr.write("\n%s: %s\n" % (uri, e))
示例#25
0
    def test_pack(self):
        """Packed revsort.cwl, with paths relativised, must equal the stored
        expect_packed.cwl."""
        self.maxDiff = None

        loader, workflowobj, uri = fetch_document("tests/wf/revsort.cwl")
        loader, avsc_names, processobj, metadata, uri = validate_document(
            loader, workflowobj, uri)
        packed = cwltool.pack.pack(loader, processobj, uri, metadata)
        with open("tests/wf/expect_packed.cwl") as handle:
            expected = json.load(handle)
        # Make file/dir entries relative to the test dir before comparing.
        relativise = partial(makeRelative, os.path.abspath("tests/wf"))
        adjustFileObjs(packed, relativise)
        adjustDirObjs(packed, relativise)

        self.assertEqual(expected, packed)
示例#26
0
    def test_pack_rewrites(self):
        """Packing default-wf5.cwl must record exactly six identifier
        rewrites."""
        load_tool.loaders = {}

        loader, workflowobj, uri = fetch_document(
            get_data("tests/wf/default-wf5.cwl"))
        loader, avsc_names, processobj, metadata, uri = validate_document(
            loader, workflowobj, uri)
        rewrites = {}
        packed = cwltool.pack.pack(
            loader, processobj, uri, metadata, rewrite_out=rewrites)
        self.assertEqual(6, len(rewrites))
示例#27
0
    def test_pack_missing_cwlVersion(self):
        """Test to ensure the generated pack output is not missing
        the `cwlVersion` in case of single tool workflow and single step workflow"""

        # Testing single tool workflow
        document_loader, workflowobj, uri = fetch_document(
            get_data("tests/wf/hello_single_tool.cwl"))
        # second tuple element (avsc_names) is not needed here
        document_loader, _, processobj, metadata, uri = validate_document(
            document_loader, workflowobj, uri)
        # generate pack output dict
        packed = json.loads(print_pack(document_loader, processobj, uri, metadata))

        self.assertEqual('v1.0', packed["cwlVersion"])

        # Testing single step workflow
        document_loader, workflowobj, uri = fetch_document(
            get_data("tests/wf/hello-workflow.cwl"))
        document_loader, _, processobj, metadata, uri = validate_document(
            document_loader, workflowobj, uri)
        # generate pack output dict
        packed = json.loads(print_pack(document_loader, processobj, uri, metadata))

        self.assertEqual('v1.0', packed["cwlVersion"])
示例#28
0
    def __init__(self, fname, abspath=True, start=os.curdir):
        """Load the CWL step/workflow at *fname* and record its interface.

        Args:
            fname: path to the CWL file.
            abspath: if True store the absolute path in ``self.run``,
                otherwise a path relative to *start*.
            start: base directory for the relative path.

        Raises:
            NotImplementedError: if the document's class is not one of
                CommandLineTool, Workflow or ExpressionTool.
        """
        if abspath:
            self.run = os.path.abspath(fname)
        else:
            self.run = os.path.relpath(fname, start)

        # Step name: file basename without extension, plus a pythonized form.
        bn = os.path.basename(fname)
        self.name = os.path.splitext(bn)[0]
        self.python_name = python_name(self.name)

        # Interface bookkeeping, populated from the validated document below.
        self.step_inputs = {}
        self.input_names = []
        self.input_types = {}
        self.optional_input_names = []
        self.optional_input_types = {}
        self.output_names = []
        self.step_outputs = {}
        self.is_workflow = False
        self.is_scattered = False
        self.scattered_inputs = []

        # Fetching, preprocessing and validating cwl
        (document_loader, workflowobj, uri) = fetch_document(fname)
        (document_loader, avsc_names, processobj, metadata,
         uri) = validate_document(document_loader, workflowobj, uri)
        s = processobj

        valid_classes = ('CommandLineTool', 'Workflow', 'ExpressionTool')
        if 'class' in s and s['class'] in valid_classes:
            self.is_workflow = s['class'] == 'Workflow'
            for inp in s['inputs']:
                # Due to preprocessing of cwltool the id has become an absolute iri,
                # for ease of use we keep only the fragment
                short_id = iri2fragment(inp['id'])
                if self._input_optional(inp):
                    self.optional_input_names.append(short_id)
                    self.optional_input_types[short_id] = inp['type']
                else:
                    self.input_names.append(short_id)
                    self.input_types[short_id] = inp['type']

            for o in s['outputs']:
                short_id = iri2fragment(o['id'])
                self.output_names.append(short_id)
                self.step_outputs[short_id] = o['type']
        else:
            # NOTE(review): message reads '"..." is a unsupported' — grammar
            # suggests 'is unsupported'; left unchanged here.
            msg = '"{}" is a unsupported'
            raise NotImplementedError(msg.format(self.name))
示例#29
0
    def test_pack(self):
        """Packed revsort.cwl, with paths relativised, must match the stored
        expect_packed.cwl."""
        self.maxDiff = None

        loader, workflowobj, uri = fetch_document(
            "tests/wf/revsort.cwl")
        loader, avsc_names, processobj, metadata, uri = validate_document(
            loader, workflowobj, uri)
        packed = cwltool.pack.pack(loader, processobj, uri, metadata)
        with open("tests/wf/expect_packed.cwl") as handle:
            expected = json.load(handle)
        # Make file/dir entries relative to the test dir before comparing.
        relativise = partial(makeRelative, os.path.abspath("tests/wf"))
        adjustFileObjs(packed, relativise)
        adjustDirObjs(packed, relativise)

        self.assertEqual(expected, packed)
示例#30
0
    def test_pack(self):
        """Pack revsort.cwl, relativise its paths, and compare against the
        stored tests/wf/expect_packed.cwl (ignoring `$schemas` contents)."""
        load_tool.loaders = {}

        document_loader, workflowobj, uri = fetch_document(
            get_data("tests/wf/revsort.cwl"))
        document_loader, avsc_names, processobj, metadata, uri = validate_document(
            document_loader, workflowobj, uri)
        packed = cwltool.pack.pack(document_loader, processobj, uri, metadata)
        with open(get_data("tests/wf/expect_packed.cwl")) as f:
            expect_packed = json.load(f)
        # Rewrite paths relative to the test dir so the comparison is
        # location-independent.
        adjustFileObjs(packed, partial(make_relative,
            os.path.abspath(get_data("tests/wf"))))
        adjustDirObjs(packed, partial(make_relative,
            os.path.abspath(get_data("tests/wf"))))
        # `$schemas` must exist, but its exact contents are not compared.
        self.assertIn("$schemas", packed)
        del packed["$schemas"]
        del expect_packed["$schemas"]

        self.assertEqual(expect_packed, packed)
示例#31
0
def test_pack():
    """Packed revsort.cwl must match expect_packed.cwl (modulo `$schemas`
    contents, which are compared by length only)."""
    load_tool.loaders = {}

    loader, workflowobj, uri = fetch_document(get_data("tests/wf/revsort.cwl"))
    loader, _, processobj, metadata, uri = validate_document(
        loader, workflowobj, uri, [], {})

    with open(get_data("tests/wf/expect_packed.cwl")) as handle:
        expected = json.load(handle)

    packed = cwltool.pack.pack(loader, processobj, uri, metadata)
    # Rewrite paths relative to the test dir so the comparison is
    # location-independent.
    relativise = partial(make_relative, os.path.abspath(get_data("tests/wf")))
    adjustFileObjs(packed, relativise)
    adjustDirObjs(packed, relativise)

    assert "$schemas" in packed
    assert len(packed["$schemas"]) == len(expected["$schemas"])
    del packed["$schemas"]
    del expected["$schemas"]

    assert packed == expected
示例#32
0
    def test_pack(self):
        """Pack revsort.cwl, relativise its paths, and compare against the
        stored tests/wf/expect_packed.cwl (ignoring `$schemas` contents)."""
        load_tool.loaders = {}

        document_loader, workflowobj, uri = fetch_document(
            get_data("tests/wf/revsort.cwl"))
        document_loader, avsc_names, processobj, metadata, uri = validate_document(
            document_loader, workflowobj, uri)
        packed = cwltool.pack.pack(document_loader, processobj, uri, metadata)
        with open(get_data("tests/wf/expect_packed.cwl")) as f:
            expect_packed = json.load(f)
        # Rewrite paths relative to the test dir so the comparison is
        # location-independent.
        adjustFileObjs(
            packed, partial(makeRelative,
                            os.path.abspath(get_data("tests/wf"))))
        adjustDirObjs(
            packed, partial(makeRelative,
                            os.path.abspath(get_data("tests/wf"))))
        # `$schemas` must exist, but its exact contents are not compared.
        self.assertIn("$schemas", packed)
        del packed["$schemas"]
        del expect_packed["$schemas"]

        self.assertEqual(expect_packed, packed)
示例#33
0
File: test_pack.py  Project: bmeg/cwltool
 def test_packed_workflow_execution(self):
     """Executing the packed workflow must produce the same output as the
     original workflow."""
     load_tool.loaders = {}
     test_wf = "tests/wf/count-lines1-wf.cwl"
     test_wf_job = "tests/wf/wc-job.json"
     document_loader, workflowobj, uri = fetch_document(
         get_data(test_wf), resolver=tool_resolver)
     document_loader, avsc_names, processobj, metadata, uri = validate_document(
         document_loader, workflowobj, uri)
     packed = json.loads(print_pack(document_loader, processobj, uri, metadata))
     # mkstemp returns (fd, path); close the fd so the descriptor is not
     # leaked (previously only the path was kept).
     handle, temp_packed_path = tempfile.mkstemp()
     os.close(handle)
     with open(temp_packed_path, 'w') as f:
         json.dump(packed, f)
     normal_output = StringIO()
     packed_output = StringIO()
     # assertEquals is a deprecated alias (removed in Python 3.12);
     # use assertEqual.
     self.assertEqual(main(['--debug', get_data(temp_packed_path),
                            get_data(test_wf_job)],
                           stdout=packed_output), 0)
     self.assertEqual(main([get_data(test_wf),
                            get_data(test_wf_job)],
                           stdout=normal_output), 0)
     self.assertEqual(json.loads(packed_output.getvalue()), json.loads(normal_output.getvalue()))
     os.remove(temp_packed_path)
示例#34
0
File: test_pack.py  Project: bmeg/cwltool
 def test_preserving_namespaces(self):
     """Packed output must keep `$namespaces`, and the packed workflow must
     produce the same output as the original when executed."""
     test_wf = "tests/wf/formattest.cwl"
     test_wf_job = "tests/wf/formattest-job.json"
     document_loader, workflowobj, uri = fetch_document(
         get_data(test_wf))
     document_loader, avsc_names, processobj, metadata, uri = validate_document(
         document_loader, workflowobj, uri)
     packed = json.loads(print_pack(document_loader, processobj, uri, metadata))
     assert "$namespaces" in packed
     # mkstemp returns (fd, path); close the fd so the descriptor is not
     # leaked (previously only the path was kept).
     handle, temp_packed_path = tempfile.mkstemp()
     os.close(handle)
     with open(temp_packed_path, 'w') as f:
         json.dump(packed, f)
     normal_output = StringIO()
     packed_output = StringIO()
     # assertEquals is a deprecated alias (removed in Python 3.12);
     # use assertEqual.
     self.assertEqual(main(['--debug', get_data(temp_packed_path),
                            get_data(test_wf_job)],
                           stdout=packed_output), 0)
     self.assertEqual(main([get_data(test_wf),
                            get_data(test_wf_job)],
                           stdout=normal_output), 0)
     self.assertEqual(json.loads(packed_output.getvalue()), json.loads(normal_output.getvalue()))
     os.remove(temp_packed_path)
示例#35
0
def load_tool(
    argsworkflow,  # type: Union[Text, Dict[Text, Any]]
    loadingContext  # type: LoadingContext
):  # type: (...) -> Process
    """Resolve, validate and construct a Process for *argsworkflow*.

    The document is fetched with the resolver/fetcher configured on
    *loadingContext*, validated (schemas skipped), and handed to
    make_tool to build the runnable Process object.
    """
    fetcher = loadingContext.fetcher_constructor
    loader, workflowobj, uri = fetch_document(
        argsworkflow,
        resolver=loadingContext.resolver,
        fetcher_constructor=fetcher)
    loader, avsc_names, _, metadata, uri = validate_document(
        loader,
        workflowobj,
        uri,
        enable_dev=loadingContext.enable_dev,
        strict=loadingContext.strict,
        fetcher_constructor=fetcher,
        overrides=loadingContext.overrides_list,
        skip_schemas=True,
        metadata=loadingContext.metadata)
    return make_tool(loader, avsc_names, metadata, uri, loadingContext)
Example #36
0
    parser = argparse.ArgumentParser()
    parser.add_argument("dir")
    args = parser.parse_args()

    for dirpath, dirnames, filenames in os.walk(args.dir):
        for f in filenames:
            if f.endswith(".cwl"):
                path = os.path.realpath(os.path.join(args.dir, dirpath, f))
                try:
                    with open(path) as f2:
                        content = f2.read()
                        cwl = yaml.load(content)
                    document_loader = Loader(
                        {"cwl": "https://w3id.org/cwl/cwl#", "id": "@id"})
                    document_loader, avsc_names, processobj, metadata, uri = validate_document(
                        document_loader, cwl, "file://" + path, strict=False)

                    tools[f] = {
                        "url": "/api/ga4gh/v1/tools/%s" %
                        f,
                        "id": f,
                        "organization": "",
                        "author": "",
                        "meta-version": "",
                        "toolclass": {
                            "id": cwl["class"],
                        },
                        "versions": [
                            {
                                "url": "/api/ga4gh/v1/tools/%s/versions/0" %
                                f,
Example #37
0
    def cwlmain(
        self,
        argsl=None,  # type: List[str]
        args=None,  # type: argparse.Namespace
        job_order_object=None,  # type: MutableMapping[Text, Any]
        stdin=sys.stdin,  # type: IO[Any]
        stdout=None,  # type: Union[TextIO, codecs.StreamWriter]
        stderr=sys.stderr,  # type: IO[Any]
        versionfunc=versionstring,  # type: Callable[[], Text]
        logger_handler=None,  # optional handler replacing the default stderr one
        custom_schema_callback=None,  # type: Callable[[], None]
        executor=None,  # type: Callable[..., Tuple[Dict[Text, Any], Text]]
        loadingContext=None,  # type: LoadingContext
        runtimeContext=None  # type: RuntimeContext
    ):  # type: (...) -> int
        """Load, validate and (unless a print-only flag is set) execute a CWL
        document.

        Mirrors cwltool's stock main(), but most paths return a
        ``(message, status)`` pair so callers can surface the informational
        text as well as the exit code.  Print-only options (--print-deps,
        --print-pre, --pack, --print-rdf, --print-dot, --validate,
        --make-template, --version, --print-supported-versions) return early
        with the rendered text and status 0.
        """
        if not stdout:
            # Ensure unicode output is utf-8 encoded when writing to the
            # raw sys.stdout stream.
            stdout = codecs.getwriter('utf-8')(sys.stdout)
        _logger.removeHandler(defaultStreamHandler)
        if logger_handler:
            stderr_handler = logger_handler
        else:
            stderr_handler = logging.StreamHandler(stderr)
        _logger.addHandler(stderr_handler)
        try:
            if args is None:
                args = arg_parser().parse_args(argsl)
                if args.workflow and "--outdir" not in argsl:
                    # Default outdir: <cwd>/<workflow basename>/<timestamp>
                    outputPath = args.workflow.split('/')[-1].split('.')[0]
                    setattr(
                        args, "outdir",
                        os.getcwd() + "/" + outputPath + "/" +
                        datetime.datetime.now().strftime('%Y-%m-%d-%H%M'))
            if runtimeContext is None:
                runtimeContext = RuntimeContext(vars(args))
            else:
                runtimeContext = runtimeContext.copy()

            # Route rdflib's term warnings through our handler, quiet by
            # default unless --debug is set.
            rdflib_logger = logging.getLogger("rdflib.term")
            rdflib_logger.addHandler(stderr_handler)
            rdflib_logger.setLevel(logging.ERROR)
            if args.quiet:
                _logger.setLevel(logging.WARN)
            if runtimeContext.debug:
                _logger.setLevel(logging.DEBUG)
                rdflib_logger.setLevel(logging.DEBUG)
            if args.timestamps:
                formatter = logging.Formatter("[%(asctime)s] %(message)s",
                                              "%Y-%m-%d %H:%M:%S")
                stderr_handler.setFormatter(formatter)
            # version
            if args.version:
                return versionfunc(), 0
            else:
                _logger.info(versionfunc())

            if args.print_supported_versions:
                return "\n".join(supportedCWLversions(args.enable_dev)), 0

            if not args.workflow:
                if os.path.isfile("CWLFile"):
                    setattr(args, "workflow", "CWLFile")
                else:
                    _logger.error("")
                    _logger.error(
                        "CWL document required, no input file was provided")
                    arg_parser().print_help()
                    return "CWL document required, no input file was provided", 1
            if args.relax_path_checks:
                command_line_tool.ACCEPTLIST_RE = command_line_tool.ACCEPTLIST_EN_RELAXED_RE

            if args.ga4gh_tool_registries:
                ga4gh_tool_registries[:] = args.ga4gh_tool_registries
            if not args.enable_ga4gh_tool_registry:
                del ga4gh_tool_registries[:]

            # Schema selection: caller-provided callback wins, then the
            # cwltool extensions schema, then the standard v1.0 schema.
            if custom_schema_callback:
                custom_schema_callback()
            elif args.enable_ext:
                res = pkg_resources.resource_stream(__name__, 'extensions.yml')
                use_custom_schema("v1.0", "http://commonwl.org/cwltool",
                                  res.read())
                res.close()
            else:
                use_standard_schema("v1.0")

            if loadingContext is None:
                loadingContext = LoadingContext(vars(args))
            else:
                loadingContext = loadingContext.copy()

            loadingContext.disable_js_validation = \
                args.disable_js_validation or (not args.do_validate)
            loadingContext.construct_tool_object = getdefault(
                loadingContext.construct_tool_object,
                workflow.default_make_tool)
            loadingContext.resolver = getdefault(loadingContext.resolver,
                                                 tool_resolver)
            try:
                uri, tool_file_uri = resolve_tool_uri(
                    args.workflow,
                    resolver=loadingContext.resolver,
                    fetcher_constructor=loadingContext.fetcher_constructor)
            except Exception:
                # Was a bare `except:`; narrowed so SystemExit and
                # KeyboardInterrupt are no longer swallowed.
                # NOTE(review): returns status 0 for a missing file, unlike
                # the other error paths which return non-zero — confirm
                # callers rely on this before changing it.
                return "Can't find file " + args.workflow, 0

            try_again_msg = "" if args.debug else ", try again with --debug for more information"

            try:
                job_order_object, input_basedir, jobloader = load_job_order(
                    args, stdin, loadingContext.fetcher_constructor,
                    loadingContext.overrides_list, tool_file_uri)

                if args.overrides:
                    loadingContext.overrides_list.extend(
                        load_overrides(
                            file_uri(os.path.abspath(args.overrides)),
                            tool_file_uri))

                document_loader, workflowobj, uri = fetch_document(
                    uri,
                    resolver=loadingContext.resolver,
                    fetcher_constructor=loadingContext.fetcher_constructor)

                if args.print_deps:
                    # printdeps(workflowobj, document_loader, stdout, args.relative_deps, uri)
                    result = returndeps(workflowobj, document_loader, stdout,
                                        args.relative_deps, uri)
                    return result, 0

                document_loader, avsc_names, processobj, metadata, uri \
                    = validate_document(document_loader, workflowobj, uri,
                                        enable_dev=loadingContext.enable_dev,
                                        strict=loadingContext.strict,
                                        preprocess_only=(args.print_pre or args.pack),
                                        fetcher_constructor=loadingContext.fetcher_constructor,
                                        skip_schemas=args.skip_schemas,
                                        overrides=loadingContext.overrides_list,
                                        do_validate=loadingContext.do_validate)

                if args.print_pre:
                    # stdout.write(json_dumps(processobj, indent=4))
                    return json_dumps(processobj, indent=4), 0

                loadingContext.overrides_list.extend(
                    metadata.get("cwltool:overrides", []))

                tool = make_tool(document_loader, avsc_names, metadata, uri,
                                 loadingContext)
                if args.make_template:
                    yaml.safe_dump(generate_input_template(tool),
                                   sys.stdout,
                                   default_flow_style=False,
                                   indent=4,
                                   block_seq_indent=2)
                    return yaml.safe_dump(generate_input_template(tool),
                                          indent=4), 0

                if args.validate:
                    _logger.info("Tool definition is valid")
                    return "Tool definition is valid", 0

                if args.pack:
                    stdout.write(
                        print_pack(document_loader, processobj, uri, metadata))
                    return print_pack(document_loader, processobj, uri,
                                      metadata), 0

                if args.print_rdf:
                    stdout.write(
                        printrdf(tool, document_loader.ctx,
                                 args.rdf_serializer))
                    return printrdf(tool, document_loader.ctx,
                                    args.rdf_serializer), 0

                if args.print_dot:
                    printdot(tool, document_loader.ctx, stdout)
                    return "args.print_dot still not solved", 0

            except (validate.ValidationException) as exc:
                _logger.error(u"Tool definition failed validation:\n%s",
                              exc,
                              exc_info=args.debug)
                # %-interpolate: concatenating str + exception (and the
                # bool args.debug) raised TypeError and masked the real
                # error on this path.
                infor = "Tool definition failed validation:\n%s" % exc
                return infor, 1
            except (RuntimeError, WorkflowException) as exc:
                _logger.error(u"Tool definition failed initialization:\n%s",
                              exc,
                              exc_info=args.debug)
                infor = "Tool definition failed initialization:\n%s" % exc
                return infor, 1
            except Exception as exc:
                _logger.error(
                    u"I'm sorry, I couldn't load this CWL file%s.\nThe error was: %s",
                    try_again_msg,
                    exc if not args.debug else "",
                    exc_info=args.debug)
                return "I'm sorry, I couldn't load this CWL file", 1

            if isinstance(tool, int):
                return tool, 0

            # If on MacOS platform, TMPDIR must be set to be under one of the
            # shared volumes in Docker for Mac
            # More info: https://dockstore.org/docs/faq
            if sys.platform == "darwin":
                default_mac_path = "/private/tmp/docker_tmp"
                if runtimeContext.tmp_outdir_prefix == DEFAULT_TMP_PREFIX:
                    runtimeContext.tmp_outdir_prefix = default_mac_path

            # Normalize the tmp/cache prefixes to absolute paths and make
            # sure their parent directories exist.
            for dirprefix in ("tmpdir_prefix", "tmp_outdir_prefix",
                              "cachedir"):
                if getattr(runtimeContext, dirprefix) and getattr(
                        runtimeContext, dirprefix) != DEFAULT_TMP_PREFIX:
                    sl = "/" if getattr(runtimeContext, dirprefix).endswith("/") or dirprefix == "cachedir" \
                        else ""
                    setattr(
                        runtimeContext, dirprefix,
                        os.path.abspath(getattr(runtimeContext, dirprefix)) +
                        sl)
                    if not os.path.exists(
                            os.path.dirname(getattr(runtimeContext,
                                                    dirprefix))):
                        try:
                            os.makedirs(
                                os.path.dirname(
                                    getattr(runtimeContext, dirprefix)))
                        except Exception as e:
                            _logger.error("Failed to create directory: %s", e)
                            # %-interpolate: str + exception raised TypeError.
                            infor = "Failed to create directory: %s" % e
                            return infor, 1

            if args.cachedir:
                if args.move_outputs == "move":
                    runtimeContext.move_outputs = "copy"
                runtimeContext.tmp_outdir_prefix = args.cachedir

            runtimeContext.secret_store = getdefault(
                runtimeContext.secret_store, SecretStore())

            try:
                initialized_job_order_object = init_job_order(
                    job_order_object,
                    args,
                    tool,
                    jobloader,
                    stdout,
                    print_input_deps=args.print_input_deps,
                    relative_deps=args.relative_deps,
                    input_basedir=input_basedir,
                    secret_store=runtimeContext.secret_store)
            except SystemExit as err:
                # NOTE(review): returns a bare code here, not the
                # (message, code) pair used elsewhere — confirm callers.
                return err.code
            if not executor:
                if args.parallel:
                    executor = MultithreadedJobExecutor()
                else:
                    executor = SingleJobExecutor()
            assert executor is not None

            if isinstance(initialized_job_order_object, int):
                return initialized_job_order_object

            try:
                runtimeContext.basedir = input_basedir
                del args.workflow
                del args.job_order

                conf_file = getattr(args,
                                    "beta_dependency_resolvers_configuration",
                                    None)  # Text
                use_conda_dependencies = getattr(args,
                                                 "beta_conda_dependencies",
                                                 None)  # Text

                # (removed dead local `job_script_provider = None`; the
                # provider is set on runtimeContext below)
                if conf_file or use_conda_dependencies:
                    runtimeContext.job_script_provider = DependenciesConfiguration(
                        args)

                runtimeContext.find_default_container = \
                    functools.partial(find_default_container, args)
                runtimeContext.make_fs_access = getdefault(
                    runtimeContext.make_fs_access, StdFsAccess)

                (out, status) = executor(tool,
                                         initialized_job_order_object,
                                         runtimeContext,
                                         logger=_logger)
                # This is the workflow output, it needs to be written
                if out is not None:

                    # Strip derived name fields and translate file:// URIs
                    # back to local paths before emitting the output object.
                    def loc_to_path(obj):
                        for field in ("path", "nameext", "nameroot",
                                      "dirname"):
                            if field in obj:
                                del obj[field]
                        if obj["location"].startswith("file://"):
                            obj["path"] = uri_file_path(obj["location"])

                    visit_class(out, ("File", "Directory"), loc_to_path)

                    # Unsetting the Generation from final output object
                    visit_class(out, ("File", ),
                                MutationManager().unset_generation)

                    if isinstance(out, string_types):
                        stdout.write(out)
                    else:
                        stdout.write(
                            json_dumps(
                                out,
                                indent=4,  # type: ignore
                                ensure_ascii=False))
                    stdout.write("\n")
                    if hasattr(stdout, "flush"):
                        stdout.flush()  # type: ignore

                if status != "success":
                    _logger.warning(u"Final process status is %s", status)
                    # %-interpolate: the old concatenation left a literal
                    # "%s" inside the returned message.
                    infor = "Final process status is %s" % status
                    return infor, 1

                _logger.info(u"Final process status is %s", status)
                return out, status

            except (validate.ValidationException) as exc:
                _logger.error(u"Input object failed validation:\n%s",
                              exc,
                              exc_info=args.debug)
                infor = "Input object failed validation:\n%s" % exc
                return infor, 1
            except UnsupportedRequirement as exc:
                _logger.error(
                    u"Workflow or tool uses unsupported feature:\n%s",
                    exc,
                    exc_info=args.debug)
                infor = "Workflow or tool uses unsupported feature:\n%s" % exc
                return infor, 3
            except WorkflowException as exc:
                _logger.error(u"Workflow error%s:\n%s",
                              try_again_msg,
                              strip_dup_lineno(six.text_type(exc)),
                              exc_info=args.debug)
                infor = "Workflow error%s:\n%s" % (
                    try_again_msg, strip_dup_lineno(six.text_type(exc)))
                return infor, 1
            except Exception as exc:
                _logger.error(u"Unhandled error%s:\n  %s",
                              try_again_msg,
                              exc,
                              exc_info=args.debug)
                infor = "Unhandled error%s:\n  %s" % (try_again_msg, exc)
                return infor, 1

        finally:
            # Always restore the default logging configuration, even on
            # early returns and unhandled exceptions.
            _logger.removeHandler(stderr_handler)
            _logger.addHandler(defaultStreamHandler)
Example #38
0
    parser.add_argument("dir")
    args = parser.parse_args()

    for dirpath, dirnames, filenames in os.walk(args.dir):
        for f in filenames:
            if f.endswith(".cwl"):
                path = os.path.realpath(os.path.join(args.dir, dirpath, f))
                try:
                    with open(path) as f2:
                        content = f2.read()
                        cwl = yaml.load(content)
                    document_loader = Loader({
                        "cwl": "https://w3id.org/cwl/cwl#",
                        "id": "@id"
                    })
                    document_loader, avsc_names, processobj, metadata, uri = validate_document(
                        document_loader, cwl, "file://" + path, strict=False)

                    tools[f] = {
                        "url":
                        "/api/ga4gh/v1/tools/%s" % f,
                        "id":
                        f,
                        "organization":
                        "",
                        "author":
                        "",
                        "meta-version":
                        "",
                        "toolclass": {
                            "id": cwl["class"],
                        },