Example #1
from typing import Tuple

# normalizeFilesDirs lives in cwltool.pathmapper in the versions these tests target
from cwltool.pathmapper import normalizeFilesDirs


def test_basename_field_generation(filename: str, expected: Tuple[str, str]) -> None:
    nameroot, nameext = expected
    expected2 = {
        "class": "File",
        "location": "/foo/" + filename,
        "basename": filename,
        "nameroot": nameroot,
        "nameext": nameext,
    }

    my_file = {"class": "File", "location": "/foo/" + filename}

    normalizeFilesDirs(my_file)
    assert my_file == expected2
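
For reference, a minimal sketch of the behaviour the test above exercises (field names taken from the expected dict in the test):

from cwltool.pathmapper import normalizeFilesDirs

my_file = {"class": "File", "location": "/foo/example.txt"}
normalizeFilesDirs(my_file)
# my_file now also carries the derived name fields:
#   "basename": "example.txt", "nameroot": "example", "nameext": ".txt"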
Example #2
from cwltool.pathmapper import normalizeFilesDirs
from cwltool.utils import CWLObjectType


def test_normalizeFilesDirs(name: str, file_dir: CWLObjectType,
                            expected: CWLObjectType) -> None:
    normalizeFilesDirs(file_dir)
    assert file_dir == expected, name
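
Both helpers take their cases from pytest parametrization, which this excerpt does not show; a hypothetical driver (case values invented for illustration, not taken from the source) could look like:

import pytest
from cwltool.pathmapper import normalizeFilesDirs

@pytest.mark.parametrize("filename,expected", [
    ("example.txt", ("example", ".txt")),        # hypothetical case
    ("archive.tar.gz", ("archive.tar", ".gz")),  # hypothetical case
])
def test_basename_field_generation(filename, expected):
    nameroot, nameext = expected
    my_file = {"class": "File", "location": "/foo/" + filename}
    normalizeFilesDirs(my_file)
    assert my_file["nameroot"] == nameroot
    assert my_file["nameext"] == nameext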
Example #3
    def job(self, joborder, output_callback, runtimeContext):
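        # When the RunInSingleContainer hint is present, pack the workflow
        # and wrap it in a generated CommandLineTool that runs cwltool
        # inside one container; otherwise defer to the standard job() path.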

        builder = make_builder(joborder, self.hints, self.requirements,
                               runtimeContext, self.metadata)
        runtimeContext = set_cluster_target(self.tool, self.arvrunner, builder,
                                            runtimeContext)

        req, _ = self.get_requirement(
            "http://arvados.org/cwl#RunInSingleContainer")
        if not req:
            return super(ArvadosWorkflow, self).job(joborder, output_callback,
                                                    runtimeContext)

        # RunInSingleContainer is true

        with SourceLine(self.tool, None, WorkflowException,
                        logger.isEnabledFor(logging.DEBUG)):
            if "id" not in self.tool:
                raise WorkflowException("%s object must have 'id'" %
                                        (self.tool["class"]))

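        # Resolve secondary files and normalize the File/Directory entries
        # in the job order before anything is uploaded.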
        discover_secondary_files(self.arvrunner.fs_access, builder,
                                 self.tool["inputs"], joborder)
        normalizeFilesDirs(joborder)

        with Perf(metrics, "subworkflow upload_deps"):
            upload_dependencies(self.arvrunner,
                                os.path.basename(joborder.get("id", "#")),
                                self.doc_loader, joborder,
                                joborder.get("id", "#"), False)

            if self.wf_pdh is None:
                packed = pack(self.loadingContext,
                              self.tool["id"],
                              loader=self.doc_loader)

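                # Graft this workflow's deduplicated requirements and hints
                # onto the packed graph's top-level (#main) process.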
                for p in packed["$graph"]:
                    if p["id"] == "#main":
                        p["requirements"] = dedup_reqs(self.requirements)
                        p["hints"] = dedup_reqs(self.hints)

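                # Strip DockerRequirements and split ResourceRequirements
                # into static values and dynamic (expression-valued) ones;
                # expressions are only permitted on #main.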
                def visit(item):
                    if "requirements" in item:
                        item["requirements"] = [
                            i for i in item["requirements"]
                            if i["class"] != "DockerRequirement"
                        ]
                    for t in ("hints", "requirements"):
                        if t not in item:
                            continue
                        for req in item[t]:
                            if req["class"] == "ResourceRequirement":
                                dyn = False
                                for k in max_res_pars + sum_res_pars:
                                    if k in req:
                                        # a string here is an unevaluated CWL expression
                                        if isinstance(req[k], str):
                                            if item["id"] == "#main":
                                                # only the top-level requirements/hints may contain expressions
                                                self.dynamic_resource_req.append(
                                                    req)
                                                dyn = True
                                                break
                                            else:
                                                with SourceLine(
                                                        req, k,
                                                        WorkflowException):
                                                    raise WorkflowException(
                                                        "Non-top-level ResourceRequirement in single container cannot have expressions"
                                                    )
                                if not dyn:
                                    self.static_resource_req.append(req)

                visit_class(packed["$graph"], ("Workflow", "CommandLineTool"),
                            visit)

                if self.static_resource_req:
                    self.static_resource_req = [
                        get_overall_res_req(self.static_resource_req)
                    ]

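                # Upload anything the packed workflow itself references.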
                upload_dependencies(self.arvrunner, runtimeContext.name,
                                    self.doc_loader, packed, self.tool["id"],
                                    False)

                # Discover files/directories referenced by the
                # workflow (mainly "default" values)
                visit_class(packed, ("File", "Directory"),
                            self.wf_reffiles.append)

        if self.dynamic_resource_req:
            # Evaluate dynamic resource requirements using current builder
            rs = copy.copy(self.static_resource_req)
            for dyn_rs in self.dynamic_resource_req:
                eval_req = {"class": "ResourceRequirement"}
                for a in max_res_pars + sum_res_pars:
                    if a in dyn_rs:
                        eval_req[a] = builder.do_eval(dyn_rs[a])
                rs.append(eval_req)
            job_res_reqs = [get_overall_res_req(rs)]
        else:
            job_res_reqs = self.static_resource_req

        with Perf(metrics, "subworkflow adjust"):
            joborder_resolved = copy.deepcopy(joborder)
            joborder_keepmount = copy.deepcopy(joborder)

            reffiles = []
            visit_class(joborder_keepmount, ("File", "Directory"),
                        reffiles.append)

            mapper = ArvPathMapper(self.arvrunner, reffiles + self.wf_reffiles,
                                   runtimeContext.basedir, "/keep/%s",
                                   "/keep/%s/%s")

            # For containers API, we need to make sure any extra
            # referenced files (ie referenced by the workflow but
            # not in the inputs) are included in the mounts.
            if self.wf_reffiles:
                runtimeContext = runtimeContext.copy()
                runtimeContext.extra_reffiles = copy.deepcopy(self.wf_reffiles)

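            # Rewrite keep: locations to their /keep/... mount paths for
            # use inside the container; literal (_:) locations are dropped.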
            def keepmount(obj):
                remove_redundant_fields(obj)
                with SourceLine(obj, None, WorkflowException,
                                logger.isEnabledFor(logging.DEBUG)):
                    if "location" not in obj:
                        raise WorkflowException(
                            "%s object is missing required 'location' field: %s"
                            % (obj["class"], obj))
                with SourceLine(obj, "location", WorkflowException,
                                logger.isEnabledFor(logging.DEBUG)):
                    if obj["location"].startswith("keep:"):
                        obj["location"] = mapper.mapper(obj["location"]).target
                        if "listing" in obj:
                            del obj["listing"]
                    elif obj["location"].startswith("_:"):
                        del obj["location"]
                    else:
                        raise WorkflowException(
                            "Location is not a keep reference or a literal: '%s'"
                            % obj["location"])

            visit_class(joborder_keepmount, ("File", "Directory"), keepmount)

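            # The "resolved" copy instead maps keep: references through the
            # path mapper's resolved form; it is what gets passed to the
            # generated tool's job() below.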
            def resolved(obj):
                if obj["location"].startswith("keep:"):
                    obj["location"] = mapper.mapper(obj["location"]).resolved

            visit_class(joborder_resolved, ("File", "Directory"), resolved)

            if self.wf_pdh is None:
                adjustFileObjs(packed, keepmount)
                adjustDirObjs(packed, keepmount)
                self.wf_pdh = upload_workflow_collection(
                    self.arvrunner, shortname(self.tool["id"]), packed)

        self.loadingContext = self.loadingContext.copy()
        self.loadingContext.metadata = self.loadingContext.metadata.copy()
        self.loadingContext.metadata[
            "http://commonwl.org/cwltool#original_cwlVersion"] = "v1.0"

        if len(job_res_reqs) == 1:
            # RAM request needs to be at least 128 MiB or the workflow
            # runner itself won't run reliably.
            if job_res_reqs[0].get("ramMin", 1024) < 128:
                job_res_reqs[0]["ramMin"] = 128

        arguments = [
            "--no-container", "--move-outputs",
            "--preserve-entire-environment", "workflow.cwl", "cwl.input.yml"
        ]
        if runtimeContext.debug:
            arguments.insert(0, '--debug')

        wf_runner = cmap({
            "class": "CommandLineTool",
            "baseCommand": "cwltool",
            "inputs": self.tool["inputs"],
            "outputs": self.tool["outputs"],
            "stdout": "cwl.output.json",
            "requirements": self.requirements + job_res_reqs + [
                {"class": "InlineJavascriptRequirement"},
                {
                    "class": "InitialWorkDirRequirement",
                    "listing": [{
                        "entryname": "workflow.cwl",
                        "entry": '$({"class": "File", "location": "keep:%s/workflow.cwl"})' % self.wf_pdh,
                    }, {
                        "entryname": "cwl.input.yml",
                        # Serialize the keep-mounted job order, escaping
                        # backslashes and the $(...) and ${...} markers so
                        # cwltool does not re-evaluate them as expressions.
                        "entry": json.dumps(
                            joborder_keepmount,
                            indent=2,
                            sort_keys=True,
                            separators=(',', ': '),
                        ).replace("\\", "\\\\").replace(
                            '$(', '\\$(').replace('${', '\\${'),
                    }],
                },
            ],
            "hints": self.hints,
            "arguments": arguments,
            "id": "#",
        })
        return ArvadosCommandTool(self.arvrunner, wf_runner,
                                  self.loadingContext).job(
                                      joborder_resolved, output_callback,
                                      runtimeContext)
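
Taken together, the generated tool stages workflow.cwl (from the uploaded keep collection) and cwl.input.yml into its working directory, so the container effectively runs (a sketch assembled from the arguments list above, not a captured command line):

cwltool --no-container --move-outputs --preserve-entire-environment \
    workflow.cwl cwl.input.yml

with --debug prepended when runtimeContext.debug is set.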