Example #1
    def _init_job(self, joborder, runtime_context):
        # type: (Mapping[str, str], RuntimeContext) -> Builder

        if self.metadata.get("cwlVersion") != INTERNAL_VERSION:
            raise WorkflowException(
                "Process object loaded with version '%s', must update to '%s' in order to execute."
                % (self.metadata.get("cwlVersion"), INTERNAL_VERSION))

        job = cast(Dict[str, expression.JSON], copy.deepcopy(joborder))

        make_fs_access = getdefault(runtime_context.make_fs_access,
                                    StdFsAccess)
        fs_access = make_fs_access(runtime_context.basedir)

        load_listing_req, _ = self.get_requirement("LoadListingRequirement")

        if load_listing_req is not None:
            load_listing = load_listing_req.get("loadListing")
        else:
            load_listing = "no_listing"

        # Validate job order
        try:
            fill_in_defaults(self.tool["inputs"], job, fs_access)

            normalizeFilesDirs(job)
            schema = self.names.get_name("input_record_schema", "")
            if schema is None:
                raise WorkflowException("Missing input record schema: "
                                        "{}".format(self.names))
            validate.validate_ex(schema,
                                 job,
                                 strict=False,
                                 logger=_logger_validation_warnings)

            if load_listing and load_listing != "no_listing":
                get_listing(fs_access,
                            job,
                            recursive=(load_listing == "deep_listing"))

            visit_class(job, ("File", ),
                        functools.partial(add_sizes, fs_access))

            if load_listing == "deep_listing":
                for i, inparm in enumerate(self.tool["inputs"]):
                    k = shortname(inparm["id"])
                    if k not in job:
                        continue
                    v = job[k]
                    dircount = [0]

                    def inc(d):  # type: (List[int]) -> None
                        d[0] += 1

                    visit_class(v, ("Directory", ), lambda x: inc(dircount))
                    if dircount[0] == 0:
                        continue
                    filecount = [0]
                    visit_class(v, ("File", ), lambda x: inc(filecount))
                    if filecount[0] > FILE_COUNT_WARNING:
                        # Long lines in this message are okay; they will be reflowed based on terminal columns.
                        _logger.warning(
                            strip_dup_lineno(
                                SourceLine(self.tool["inputs"], i, str).
                                makeError(
                                    """Recursive directory listing has resulted in a large number of File objects (%s) passed to the input parameter '%s'.  This may negatively affect workflow performance and memory use.

If this is a problem, use the hint 'cwltool:LoadListingRequirement' with "shallow_listing" or "no_listing" to change the directory listing behavior:

$namespaces:
  cwltool: "http://commonwl.org/cwltool#"
hints:
  cwltool:LoadListingRequirement:
    loadListing: shallow_listing

""" % (filecount[0], k))))

        except (validate.ValidationException, WorkflowException) as err:
            raise WorkflowException("Invalid job input record:\n" +
                                    str(err)) from err

        files = []  # type: List[Dict[str, str]]
        bindings = CommentedSeq()
        tmpdir = ""
        stagedir = ""

        docker_req, _ = self.get_requirement("DockerRequirement")
        default_docker = None

        if docker_req is None and runtime_context.default_container:
            default_docker = runtime_context.default_container

        if (docker_req or default_docker) and runtime_context.use_container:
            if docker_req is not None:
                # Check if docker output directory is absolute
                if docker_req.get("dockerOutputDirectory") and docker_req.get(
                        "dockerOutputDirectory").startswith("/"):
                    outdir = docker_req.get("dockerOutputDirectory")
                else:
                    outdir = (docker_req.get("dockerOutputDirectory")
                              or runtime_context.docker_outdir
                              or random_outdir())
            elif default_docker is not None:
                outdir = runtime_context.docker_outdir or random_outdir()
            tmpdir = runtime_context.docker_tmpdir or "/tmp"  # nosec
            stagedir = runtime_context.docker_stagedir or "/var/lib/cwl"
        else:
            outdir = fs_access.realpath(
                runtime_context.outdir or tempfile.mkdtemp(prefix=getdefault(
                    runtime_context.tmp_outdir_prefix, DEFAULT_TMP_PREFIX)))
            if self.tool["class"] != "Workflow":
                tmpdir = fs_access.realpath(runtime_context.tmpdir
                                            or tempfile.mkdtemp())
                stagedir = fs_access.realpath(runtime_context.stagedir
                                              or tempfile.mkdtemp())

        builder = Builder(
            job,
            files,
            bindings,
            self.schemaDefs,
            self.names,
            self.requirements,
            self.hints,
            {},
            runtime_context.mutation_manager,
            self.formatgraph,
            make_fs_access,
            fs_access,
            runtime_context.job_script_provider,
            runtime_context.eval_timeout,
            runtime_context.debug,
            runtime_context.js_console,
            runtime_context.force_docker_pull,
            load_listing,
            outdir,
            tmpdir,
            stagedir,
        )

        bindings.extend(
            builder.bind_input(
                self.inputs_record_schema,
                job,
                discover_secondaryFiles=getdefault(runtime_context.toplevel,
                                                   False),
            ))

        if self.tool.get("baseCommand"):
            for index, command in enumerate(aslist(self.tool["baseCommand"])):
                bindings.append({
                    "position": [-1000000, index],
                    "datum": command
                })

        if self.tool.get("arguments"):
            for i, arg in enumerate(self.tool["arguments"]):
                lc = self.tool["arguments"].lc.data[i]
                filename = self.tool["arguments"].lc.filename
                bindings.lc.add_kv_line_col(len(bindings), lc)
                if isinstance(arg, MutableMapping):
                    arg = copy.deepcopy(arg)
                    if arg.get("position"):
                        position = arg.get("position")
                        if isinstance(position, str):
                            # No need to test the CWLVersion, as the v1.0
                            # schema only allows ints here.
                            position = builder.do_eval(position)
                            if position is None:
                                position = 0
                        arg["position"] = [position, i]
                    else:
                        arg["position"] = [0, i]
                    bindings.append(arg)
                elif ("$(" in arg) or ("${" in arg):
                    cm = CommentedMap((("position", [0,
                                                     i]), ("valueFrom", arg)))
                    cm.lc.add_kv_line_col("valueFrom", lc)
                    cm.lc.filename = filename
                    bindings.append(cm)
                else:
                    cm = CommentedMap((("position", [0, i]), ("datum", arg)))
                    cm.lc.add_kv_line_col("datum", lc)
                    cm.lc.filename = filename
                    bindings.append(cm)

        # Use Python 2-like sorting of heterogeneous lists
        # (containing both str and int types).
        key = functools.cmp_to_key(cmp_like_py2)

        # This awkward construction replaces the contents of
        # "bindings" in place (because Builder expects it to be
        # mutated in place, sigh, I'm sorry) with its contents sorted,
        # supporting different versions of Python and ruamel.yaml with
        # different behaviors/bugs in CommentedSeq.
        bindings_copy = copy.deepcopy(bindings)
        del bindings[:]
        bindings.extend(sorted(bindings_copy, key=key))

        if self.tool["class"] != "Workflow":
            builder.resources = self.evalResources(builder, runtime_context)
        return builder
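
A minimal, self-contained sketch of the sorting step that closes the method above. py2_cmp is a hypothetical approximation of cwltool's cmp_like_py2 (the real comparator lives in cwltool.utils and may differ in detail); the del/extend pair mirrors the in-place CommentedSeq replacement described in the comment.

import functools

def py2_cmp(a, b):
    # Approximate Python 2 ordering: ints sort before strs, then compare within type.
    def rank(x):
        return (0, x, "") if isinstance(x, int) else (1, 0, x)
    ka = [rank(x) for x in a["position"]]
    kb = [rank(x) for x in b["position"]]
    return (ka > kb) - (ka < kb)

bindings = [
    {"position": [0, 1], "datum": "--flag"},
    {"position": [-1000000, 0], "datum": "echo"},         # baseCommand entry
    {"position": ["zzz", 0], "valueFrom": "$(inputs.x)"},
]

# Replace the list contents in place, as the code above does for the CommentedSeq.
bindings_sorted = sorted(bindings, key=functools.cmp_to_key(py2_cmp))
del bindings[:]
bindings.extend(bindings_sorted)
print([b["position"] for b in bindings])
# [[-1000000, 0], [0, 1], ['zzz', 0]]
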
Example #2
    def _init_job(self, joborder, runtimeContext):
        # type: (Dict[Text, Text], RuntimeContext) -> Builder

        job = cast(Dict[Text, Union[Dict[Text, Any], List[Any], Text, None]],
                   copy.deepcopy(joborder))

        make_fs_access = getdefault(runtimeContext.make_fs_access, StdFsAccess)
        fs_access = make_fs_access(runtimeContext.basedir)

        # Validate job order
        try:
            fill_in_defaults(self.tool[u"inputs"], job, fs_access)
            normalizeFilesDirs(job)
            validate.validate_ex(self.names.get_name("input_record_schema", ""),
                                 job, strict=False, logger=_logger_validation_warnings)
        except (validate.ValidationException, WorkflowException) as e:
            raise WorkflowException("Invalid job input record:\n" + Text(e))

        files = []  # type: List[Dict[Text, Text]]
        bindings = CommentedSeq()
        tmpdir = u""
        stagedir = u""

        loadListingReq, _ = self.get_requirement("http://commonwl.org/cwltool#LoadListingRequirement")
        if loadListingReq:
            loadListing = loadListingReq.get("loadListing")
        else:
            loadListing = "deep_listing"   # will default to "no_listing" in CWL v1.1

        dockerReq, _ = self.get_requirement("DockerRequirement")
        defaultDocker = None

        if dockerReq is None and runtimeContext.default_container:
            defaultDocker = runtimeContext.default_container

        if (dockerReq or defaultDocker) and runtimeContext.use_container:
            if dockerReq:
                # Check if docker output directory is absolute
                if dockerReq.get("dockerOutputDirectory") and \
                        dockerReq.get("dockerOutputDirectory").startswith('/'):
                    outdir = dockerReq.get("dockerOutputDirectory")
                else:
                    outdir = dockerReq.get("dockerOutputDirectory") or \
                        runtimeContext.docker_outdir or "/var/spool/cwl"
            elif defaultDocker:
                outdir = runtimeContext.docker_outdir or "/var/spool/cwl"
            tmpdir = runtimeContext.docker_tmpdir or "/tmp"
            stagedir = runtimeContext.docker_stagedir or "/var/lib/cwl"
        else:
            outdir = fs_access.realpath(
                runtimeContext.outdir or tempfile.mkdtemp(
                    prefix=getdefault(runtimeContext.tmp_outdir_prefix, DEFAULT_TMP_PREFIX)))
            if self.tool[u"class"] != 'Workflow':
                tmpdir = fs_access.realpath(runtimeContext.tmpdir or tempfile.mkdtemp())
                stagedir = fs_access.realpath(runtimeContext.stagedir or tempfile.mkdtemp())

        builder = Builder(job,
                          files,
                          bindings,
                          self.schemaDefs,
                          self.names,
                          self.requirements,
                          self.hints,
                          runtimeContext.eval_timeout,
                          runtimeContext.debug,
                          {},
                          runtimeContext.js_console,
                          runtimeContext.mutation_manager,
                          self.formatgraph,
                          make_fs_access,
                          fs_access,
                          runtimeContext.force_docker_pull,
                          loadListing,
                          outdir,
                          tmpdir,
                          stagedir,
                          runtimeContext.job_script_provider)

        bindings.extend(builder.bind_input(
            self.inputs_record_schema, job,
            discover_secondaryFiles=getdefault(runtimeContext.toplevel, False)))

        if self.tool.get("baseCommand"):
            for n, b in enumerate(aslist(self.tool["baseCommand"])):
                bindings.append({
                    "position": [-1000000, n],
                    "datum": b
                })

        if self.tool.get("arguments"):
            for i, a in enumerate(self.tool["arguments"]):
                lc = self.tool["arguments"].lc.data[i]
                fn = self.tool["arguments"].lc.filename
                bindings.lc.add_kv_line_col(len(bindings), lc)
                if isinstance(a, dict):
                    a = copy.copy(a)
                    if a.get("position"):
                        a["position"] = [a["position"], i]
                    else:
                        a["position"] = [0, i]
                    bindings.append(a)
                elif ("$(" in a) or ("${" in a):
                    cm = CommentedMap((
                        ("position", [0, i]),
                        ("valueFrom", a)
                    ))
                    cm.lc.add_kv_line_col("valueFrom", lc)
                    cm.lc.filename = fn
                    bindings.append(cm)
                else:
                    cm = CommentedMap((
                        ("position", [0, i]),
                        ("datum", a)
                    ))
                    cm.lc.add_kv_line_col("datum", lc)
                    cm.lc.filename = fn
                    bindings.append(cm)

        # Use Python 2-like sorting of heterogeneous lists
        # (containing both str and int types).
        # TODO: unify for both runtimes
        if six.PY3:
            key = cmp_to_key(cmp_like_py2)
        else:  # PY2
            key = lambda d: d["position"]
        bindings.sort(key=key)

        if self.tool[u"class"] != 'Workflow':
            builder.resources = self.evalResources(builder, runtimeContext)
        return builder
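
Illustrative only (not cwltool API): why baseCommand entries are given the primary position -1000000 above. Every baseCommand word sorts ahead of any argument once the bindings are ordered by "position". This toy version uses plain integer positions so a normal sort suffices; the real code needs the Python 2-style comparator because positions can mix ints and strings.

tool = {
    "baseCommand": ["tar", "xf"],
    "arguments": [{"position": 2, "valueFrom": "--verbose"}],
}

bindings = []
for n, word in enumerate(tool["baseCommand"]):
    # A very low primary position keeps the base command in front.
    bindings.append({"position": [-1000000, n], "datum": word})
for i, arg in enumerate(tool["arguments"]):
    bindings.append({"position": [arg.get("position", 0), i],
                     "valueFrom": arg["valueFrom"]})

bindings.sort(key=lambda b: b["position"])
print([b.get("datum", b.get("valueFrom")) for b in bindings])
# ['tar', 'xf', '--verbose']
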
Example #3
    def _init_job(self, joborder, runtime_context):
        # type: (Mapping[Text, Text], RuntimeContext) -> Builder

        job = cast(Dict[Text, Union[Dict[Text, Any], List[Any], Text, None]],
                   copy.deepcopy(joborder))

        make_fs_access = getdefault(runtime_context.make_fs_access, StdFsAccess)
        fs_access = make_fs_access(runtime_context.basedir)

        load_listing_req, _ = self.get_requirement(
            "LoadListingRequirement")

        if load_listing_req is not None:
            load_listing = load_listing_req.get("loadListing")
        else:
            load_listing = "no_listing"

        # Validate job order
        try:
            fill_in_defaults(self.tool[u"inputs"], job, fs_access)

            normalizeFilesDirs(job)
            schema = self.names.get_name("input_record_schema", "")
            if schema is None:
                raise WorkflowException("Missing input record schema: "
                    "{}".format(self.names))
            validate.validate_ex(schema, job, strict=False,
                                 logger=_logger_validation_warnings)

            if load_listing and load_listing != "no_listing":
                get_listing(fs_access, job, recursive=(load_listing == "deep_listing"))

            visit_class(job, ("File",), functools.partial(add_sizes, fs_access))

            if load_listing == "deep_listing":
                for i, inparm in enumerate(self.tool["inputs"]):
                    k = shortname(inparm["id"])
                    if k not in job:
                        continue
                    v = job[k]
                    dircount = [0]
                    def inc(d):  # type: (List[int]) -> None
                        d[0] += 1
                    visit_class(v, ("Directory",), lambda x: inc(dircount))
                    if dircount[0] == 0:
                        continue
                    filecount = [0]
                    visit_class(v, ("File",), lambda x: inc(filecount))
                    if filecount[0] > FILE_COUNT_WARNING:
                        # Long lines in this message are okay; they will be reflowed based on terminal columns.
                        _logger.warning(strip_dup_lineno(SourceLine(self.tool["inputs"], i, Text).makeError(
                    """Recursive directory listing has resulted in a large number of File objects (%s) passed to the input parameter '%s'.  This may negatively affect workflow performance and memory use.

If this is a problem, use the hint 'cwltool:LoadListingRequirement' with "shallow_listing" or "no_listing" to change the directory listing behavior:

$namespaces:
  cwltool: "http://commonwl.org/cwltool#"
hints:
  cwltool:LoadListingRequirement:
    loadListing: shallow_listing

""" % (filecount[0], k))))

        except (validate.ValidationException, WorkflowException) as err:
            raise WorkflowException("Invalid job input record:\n" + Text(err))

        files = []  # type: List[Dict[Text, Text]]
        bindings = CommentedSeq()
        tmpdir = u""
        stagedir = u""

        docker_req, _ = self.get_requirement("DockerRequirement")
        default_docker = None

        if docker_req is None and runtime_context.default_container:
            default_docker = runtime_context.default_container

        if (docker_req or default_docker) and runtime_context.use_container:
            if docker_req is not None:
                # Check if docker output directory is absolute
                if docker_req.get("dockerOutputDirectory") and \
                        docker_req.get("dockerOutputDirectory").startswith('/'):
                    outdir = docker_req.get("dockerOutputDirectory")
                else:
                    outdir = docker_req.get("dockerOutputDirectory") or \
                        runtime_context.docker_outdir or random_outdir()
            elif default_docker is not None:
                outdir = runtime_context.docker_outdir or random_outdir()
            tmpdir = runtime_context.docker_tmpdir or "/tmp"
            stagedir = runtime_context.docker_stagedir or "/var/lib/cwl"
        else:
            outdir = fs_access.realpath(
                runtime_context.outdir or tempfile.mkdtemp(
                    prefix=getdefault(runtime_context.tmp_outdir_prefix,
                                      DEFAULT_TMP_PREFIX)))
            if self.tool[u"class"] != 'Workflow':
                tmpdir = fs_access.realpath(runtime_context.tmpdir
                                            or tempfile.mkdtemp())
                stagedir = fs_access.realpath(runtime_context.stagedir
                                              or tempfile.mkdtemp())

        builder = Builder(job,
                          files,
                          bindings,
                          self.schemaDefs,
                          self.names,
                          self.requirements,
                          self.hints,
                          {},
                          runtime_context.mutation_manager,
                          self.formatgraph,
                          make_fs_access,
                          fs_access,
                          runtime_context.job_script_provider,
                          runtime_context.eval_timeout,
                          runtime_context.debug,
                          runtime_context.js_console,
                          runtime_context.force_docker_pull,
                          load_listing,
                          outdir,
                          tmpdir,
                          stagedir)

        bindings.extend(builder.bind_input(
            self.inputs_record_schema, job,
            discover_secondaryFiles=getdefault(runtime_context.toplevel, False)))

        if self.tool.get("baseCommand"):
            for index, command in enumerate(aslist(self.tool["baseCommand"])):
                bindings.append({
                    "position": [-1000000, index],
                    "datum": command
                })

        if self.tool.get("arguments"):
            for i, arg in enumerate(self.tool["arguments"]):
                lc = self.tool["arguments"].lc.data[i]
                filename = self.tool["arguments"].lc.filename
                bindings.lc.add_kv_line_col(len(bindings), lc)
                if isinstance(arg, MutableMapping):
                    arg = copy.deepcopy(arg)
                    if arg.get("position"):
                        arg["position"] = [arg["position"], i]
                    else:
                        arg["position"] = [0, i]
                    bindings.append(arg)
                elif ("$(" in arg) or ("${" in arg):
                    cm = CommentedMap((
                        ("position", [0, i]),
                        ("valueFrom", arg)
                    ))
                    cm.lc.add_kv_line_col("valueFrom", lc)
                    cm.lc.filename = filename
                    bindings.append(cm)
                else:
                    cm = CommentedMap((
                        ("position", [0, i]),
                        ("datum", arg)
                    ))
                    cm.lc.add_kv_line_col("datum", lc)
                    cm.lc.filename = filename
                    bindings.append(cm)

        # Use Python 2-like sorting of heterogeneous lists
        # (containing both str and int types).
        if PY3:
            key = functools.cmp_to_key(cmp_like_py2)
        else:  # PY2
            key = lambda d: d["position"]

        # This awkward construction replaces the contents of
        # "bindings" in place (because Builder expects it to be
        # mutated in place, sigh, I'm sorry) with its contents sorted,
        # supporting different versions of Python and ruamel.yaml with
        # different behaviors/bugs in CommentedSeq.
        bindings_copy = copy.deepcopy(bindings)
        del bindings[:]
        bindings.extend(sorted(bindings_copy, key=key))

        if self.tool[u"class"] != 'Workflow':
            builder.resources = self.evalResources(builder, runtime_context)
        return builder
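
A simplified stand-in for the visit_class counting pattern used in the deep-listing warning above. cwltool's real visit_class lives in cwltool.utils; this version only walks plain dicts and lists, but it shows how the one-element counter lists get mutated.

def visit_class(rec, cls, op):
    # Apply op to every mapping whose "class" is in cls, recursing into children.
    if isinstance(rec, dict):
        if rec.get("class") in cls:
            op(rec)
        for value in rec.values():
            visit_class(value, cls, op)
    elif isinstance(rec, list):
        for entry in rec:
            visit_class(entry, cls, op)

value = {
    "class": "Directory",
    "listing": [
        {"class": "File", "basename": "a.txt"},
        {"class": "Directory",
         "listing": [{"class": "File", "basename": "b.txt"}]},
    ],
}

filecount = [0]

def inc(counter):  # mutate the one-element counter list, as the code above does
    counter[0] += 1

visit_class(value, ("File",), lambda x: inc(filecount))
print(filecount[0])  # 2, which would then be compared against FILE_COUNT_WARNING
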
Example #4
    def _init_job(self, joborder, runtimeContext):
        # type: (MutableMapping[Text, Text], RuntimeContext) -> Builder

        job = cast(Dict[Text, Union[Dict[Text, Any], List[Any], Text, None]],
                   copy.deepcopy(joborder))

        make_fs_access = getdefault(runtimeContext.make_fs_access, StdFsAccess)
        fs_access = make_fs_access(runtimeContext.basedir)

        # Validate job order
        try:
            fill_in_defaults(self.tool[u"inputs"], job, fs_access)
            normalizeFilesDirs(job)
            validate.validate_ex(self.names.get_name("input_record_schema", ""),
                                 job, strict=False, logger=_logger_validation_warnings)
        except (validate.ValidationException, WorkflowException) as e:
            raise WorkflowException("Invalid job input record:\n" + Text(e))

        files = []  # type: List[Dict[Text, Text]]
        bindings = CommentedSeq()
        tmpdir = u""
        stagedir = u""

        loadListingReq, _ = self.get_requirement("http://commonwl.org/cwltool#LoadListingRequirement")
        if loadListingReq is not None:
            loadListing = loadListingReq.get("loadListing")
        else:
            loadListing = "deep_listing"   # will default to "no_listing" in CWL v1.1

        dockerReq, _ = self.get_requirement("DockerRequirement")
        defaultDocker = None

        if dockerReq is None and runtimeContext.default_container:
            defaultDocker = runtimeContext.default_container

        if (dockerReq or defaultDocker) and runtimeContext.use_container:
            if dockerReq is not None:
                # Check if docker output directory is absolute
                if dockerReq.get("dockerOutputDirectory") and \
                        dockerReq.get("dockerOutputDirectory").startswith('/'):
                    outdir = dockerReq.get("dockerOutputDirectory")
                else:
                    outdir = dockerReq.get("dockerOutputDirectory") or \
                        runtimeContext.docker_outdir or random_outdir()
            elif defaultDocker is not None:
                outdir = runtimeContext.docker_outdir or random_outdir()
            tmpdir = runtimeContext.docker_tmpdir or "/tmp"
            stagedir = runtimeContext.docker_stagedir or "/var/lib/cwl"
        else:
            outdir = fs_access.realpath(
                runtimeContext.outdir or tempfile.mkdtemp(
                    prefix=getdefault(runtimeContext.tmp_outdir_prefix, DEFAULT_TMP_PREFIX)))
            if self.tool[u"class"] != 'Workflow':
                tmpdir = fs_access.realpath(runtimeContext.tmpdir or tempfile.mkdtemp())
                stagedir = fs_access.realpath(runtimeContext.stagedir or tempfile.mkdtemp())

        builder = Builder(job,
                          files,
                          bindings,
                          self.schemaDefs,
                          self.names,
                          self.requirements,
                          self.hints,
                          {},
                          runtimeContext.mutation_manager,
                          self.formatgraph,
                          make_fs_access,
                          fs_access,
                          runtimeContext.job_script_provider,
                          runtimeContext.eval_timeout,
                          runtimeContext.debug,
                          runtimeContext.js_console,
                          runtimeContext.force_docker_pull,
                          loadListing,
                          outdir,
                          tmpdir,
                          stagedir)

        bindings.extend(builder.bind_input(
            self.inputs_record_schema, job,
            discover_secondaryFiles=getdefault(runtimeContext.toplevel, False)))

        if self.tool.get("baseCommand"):
            for n, b in enumerate(aslist(self.tool["baseCommand"])):
                bindings.append({
                    "position": [-1000000, n],
                    "datum": b
                })

        if self.tool.get("arguments"):
            for i, a in enumerate(self.tool["arguments"]):
                lc = self.tool["arguments"].lc.data[i]
                fn = self.tool["arguments"].lc.filename
                bindings.lc.add_kv_line_col(len(bindings), lc)
                if isinstance(a, MutableMapping):
                    a = copy.deepcopy(a)
                    if a.get("position"):
                        a["position"] = [a["position"], i]
                    else:
                        a["position"] = [0, i]
                    bindings.append(a)
                elif ("$(" in a) or ("${" in a):
                    cm = CommentedMap((
                        ("position", [0, i]),
                        ("valueFrom", a)
                    ))
                    cm.lc.add_kv_line_col("valueFrom", lc)
                    cm.lc.filename = fn
                    bindings.append(cm)
                else:
                    cm = CommentedMap((
                        ("position", [0, i]),
                        ("datum", a)
                    ))
                    cm.lc.add_kv_line_col("datum", lc)
                    cm.lc.filename = fn
                    bindings.append(cm)

        # Use Python 2-like sorting of heterogeneous lists
        # (containing both str and int types).
        # TODO: unify for both runtimes
        if PY3:
            key = functools.cmp_to_key(cmp_like_py2)
        else:  # PY2
            key = lambda d: d["position"]

        # This awkward construction replaces the contents of
        # "bindings" in place (because Builder expects it to be
        # mutated in place, sigh, I'm sorry) with its contents sorted,
        # supporting different versions of Python and ruamel.yaml with
        # different behaviors/bugs in CommentedSeq.
        bd = copy.deepcopy(bindings)
        del bindings[:]
        bindings.extend(sorted(bd, key=key))

        if self.tool[u"class"] != 'Workflow':
            builder.resources = self.evalResources(builder, runtimeContext)
        return builder
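
A condensed, hypothetical sketch of the output-directory selection above (choose_outdir is not a cwltool function, and the default_container branch is omitted for brevity). A requested dockerOutputDirectory wins, then the runtime context's docker_outdir, then a generated default; outside a container a host temp directory is created instead.

import tempfile

def choose_outdir(docker_req, docker_outdir, use_container, random_outdir):
    if docker_req is not None and use_container:
        requested = docker_req.get("dockerOutputDirectory")
        if requested and requested.startswith("/"):
            return requested  # absolute path inside the container, used as-is
        return requested or docker_outdir or random_outdir()
    # Host-side execution: create a real temporary output directory.
    return tempfile.mkdtemp(prefix="cwltool_out_")

print(choose_outdir({"dockerOutputDirectory": "/data/out"}, None, True, lambda: "/rnd"))
# /data/out
print(choose_outdir({}, None, True, lambda: "/rnd"))
# /rnd
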
Example #5
    def _init_job(self, joborder, runtimeContext):
        # type: (Dict[Text, Text], RuntimeContext) -> Builder
        """
        kwargs:

        use_container: do/don't use Docker when DockerRequirement hint provided
        make_fs_access: make an FsAccess() object with given basedir
        docker_outdir: output directory inside docker for this job
        docker_tmpdir: tmpdir inside docker for this job
        docker_stagedir: stagedir inside docker for this job
        outdir: outdir on host for this job
        tmpdir: tmpdir on host for this job
        stagedir: stagedir on host for this job
        select_resources: callback to select compute resources
        tmp_outdir_prefix: Path prefix for intermediate output directories
        """

        job = cast(Dict[Text, Union[Dict[Text, Any], List, Text]],
                   copy.deepcopy(joborder))
        # Validate job order
        try:
            fillInDefaults(self.tool[u"inputs"], job)
            normalizeFilesDirs(job)
            validate.validate_ex(self.names.get_name("input_record_schema",
                                                     ""),
                                 job,
                                 strict=False,
                                 logger=_logger_validation_warnings)
        except (validate.ValidationException, WorkflowException) as e:
            raise WorkflowException("Invalid job input record:\n" + Text(e))

        files = []  # type: List[Dict[Text, Text]]
        bindings = CommentedSeq()
        make_fs_access = getdefault(runtimeContext.make_fs_access, StdFsAccess)
        fs_access = make_fs_access(runtimeContext.basedir)
        tmpdir = u""
        stagedir = u""

        loadListingReq, _ = self.get_requirement(
            "http://commonwl.org/cwltool#LoadListingRequirement")
        if loadListingReq:
            loadListing = loadListingReq.get("loadListing")
        else:
            loadListing = "deep_listing"  # will default to "no_listing" in CWL v1.1

        dockerReq, _ = self.get_requirement("DockerRequirement")
        defaultDocker = None

        if dockerReq is None and runtimeContext.default_container:
            defaultDocker = runtimeContext.default_container

        if (dockerReq or defaultDocker) and runtimeContext.use_container:
            if dockerReq:
                # Check if docker output directory is absolute
                if dockerReq.get("dockerOutputDirectory") and \
                        dockerReq.get("dockerOutputDirectory").startswith('/'):
                    outdir = dockerReq.get("dockerOutputDirectory")
                else:
                    outdir = fs_access.docker_compatible_realpath(
                        dockerReq.get("dockerOutputDirectory")
                        or runtimeContext.docker_outdir or "/var/spool/cwl")
            elif defaultDocker:
                outdir = fs_access.docker_compatible_realpath(
                    runtimeContext.docker_outdir or "/var/spool/cwl")
            tmpdir = fs_access.docker_compatible_realpath(
                runtimeContext.docker_tmpdir or "/tmp")
            stagedir = fs_access.docker_compatible_realpath(
                runtimeContext.docker_stagedir or "/var/lib/cwl")
        else:
            outdir = fs_access.realpath(
                runtimeContext.outdir or tempfile.mkdtemp(prefix=getdefault(
                    runtimeContext.tmp_outdir_prefix, DEFAULT_TMP_PREFIX)))
            if self.tool[u"class"] != 'Workflow':
                tmpdir = fs_access.realpath(runtimeContext.tmpdir
                                            or tempfile.mkdtemp())
                stagedir = fs_access.realpath(runtimeContext.stagedir
                                              or tempfile.mkdtemp())

        builder = Builder(
            job, files, bindings, self.schemaDefs, self.names,
            self.requirements, self.hints, runtimeContext.eval_timeout,
            runtimeContext.debug, {}, runtimeContext.js_console,
            runtimeContext.mutation_manager, self.formatgraph, make_fs_access,
            fs_access, runtimeContext.force_docker_pull, loadListing, outdir,
            tmpdir, stagedir, runtimeContext.job_script_provider)

        bindings.extend(
            builder.bind_input(self.inputs_record_schema,
                               job,
                               discover_secondaryFiles=getdefault(
                                   runtimeContext.toplevel, False)))

        if self.tool.get("baseCommand"):
            for n, b in enumerate(aslist(self.tool["baseCommand"])):
                bindings.append({"position": [-1000000, n], "datum": b})

        if self.tool.get("arguments"):
            for i, a in enumerate(self.tool["arguments"]):
                lc = self.tool["arguments"].lc.data[i]
                fn = self.tool["arguments"].lc.filename
                bindings.lc.add_kv_line_col(len(bindings), lc)
                if isinstance(a, dict):
                    a = copy.copy(a)
                    if a.get("position"):
                        a["position"] = [a["position"], i]
                    else:
                        a["position"] = [0, i]
                    bindings.append(a)
                elif ("$(" in a) or ("${" in a):
                    cm = CommentedMap((("position", [0, i]), ("valueFrom", a)))
                    cm.lc.add_kv_line_col("valueFrom", lc)
                    cm.lc.filename = fn
                    bindings.append(cm)
                else:
                    cm = CommentedMap((("position", [0, i]), ("datum", a)))
                    cm.lc.add_kv_line_col("datum", lc)
                    cm.lc.filename = fn
                    bindings.append(cm)

        # Use Python 2-like sorting of heterogeneous lists
        # (containing both str and int types).
        # TODO: unify for both runtimes
        if six.PY3:
            key = cmp_to_key(cmp_like_py2)
        else:  # PY2
            key = lambda d: d["position"]
        bindings.sort(key=key)
        builder.resources = self.evalResources(builder, runtimeContext)
        return builder
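
Sketch of the getdefault()/make_fs_access pattern that appears near the top of each version above, under the assumption that getdefault simply falls back to the default when the runtime context supplies None. StdFsAccessStub and RuntimeContextStub are hypothetical stand-ins for StdFsAccess and RuntimeContext, not cwltool classes.

import os

def getdefault(val, default):
    return val if val is not None else default

class StdFsAccessStub:
    def __init__(self, basedir):
        self.basedir = basedir

    def realpath(self, path):
        return os.path.realpath(path)

class RuntimeContextStub:
    make_fs_access = None  # nothing supplied, so the default factory is used
    basedir = "."

runtime_context = RuntimeContextStub()
make_fs_access = getdefault(runtime_context.make_fs_access, StdFsAccessStub)
fs_access = make_fs_access(runtime_context.basedir)
print(type(fs_access).__name__, fs_access.realpath("."))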