    def testPathMapperCallsFunctionWithKWArgs(self):
        self.kwargs = {}

        def f(*args, **kwargs):
            self.kwargs = kwargs

        p = PathMapper([(r"^/a/(?P<key>\w)$", f)])
        p.resolve("/a/b")
        self.assertEquals("b", self.kwargs["key"])
    def testPathMapperCallsFunctionWithArgs(self):
        self.args = ()

        def f(*args):
            self.args = args

        p = PathMapper([(r"^/a/(\w)$", f)])
        p.resolve("/a/b")
        self.assertEquals(("b",), self.args)
    def testPathMapperCallsFunction(self):
        self.called = False

        def f():
            self.called = True

        p = PathMapper([(r"^/abc$", f)])
        p.resolve("/abc")
        self.assertEquals(True, self.called)

    def testPathMapperHandlesBothCountAndKeywords(self):
        self.flag = False

        def f(count, key=None):
            if count == "1" and key == "test":
                self.flag = True
            else:
                print "'%d' '%d'" % (count, key)

        p = PathMapper([(r"^/a/(\d+)/(?P<key>\w+)$", f)])
        p.resolve("/a/1/test")

        self.assertEquals(True, self.flag)

# Example 5
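    # makePathMapper picks a DockerPathMapper when the tool has a DockerRequirement and
    # containers are enabled; otherwise it falls back to a plain PathMapper.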
    def makePathMapper(self, reffiles, input_basedir, **kwargs):
        dockerReq, _ = self.get_requirement("DockerRequirement")
        try:
            if dockerReq and kwargs.get("use_container"):
                return DockerPathMapper(reffiles, input_basedir)
            else:
                return PathMapper(reffiles, input_basedir)
        except OSError as e:
            if e.errno == errno.ENOENT:
                raise WorkflowException("Missing input file %s" % e)
            raise  # re-raise unexpected OSErrors rather than silently returning None
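
    # A second makePathMapper variant, without the OSError handling: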
    def makePathMapper(self, reffiles, input_basedir, **kwargs):
        dockerReq, _ = self.get_requirement("DockerRequirement")
        if dockerReq and kwargs.get("use_container"):
            return DockerPathMapper(reffiles, input_basedir)
        else:
            return PathMapper(reffiles, input_basedir)

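    # job() builds the command line from the tool's "adapter" section, maps referenced files
    # (optionally through Docker), and yields a CommandLineProcess.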
    def job(self, joborder, basedir, output_callback, use_container=True):
        inputs = joborder['inputs']
        Draft4Validator(self.tool['inputs']).validate(inputs)

        adapter = self.tool["adapter"]
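        # Seed the adapter list with the tool's baseCmd, ordered to the front of the command line.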
        adapters = [{
            "order": [-1000000],
            "schema":
            tool_schema_doc["properties"]["adapter"]["properties"]["baseCmd"],
            "value":
            adapter['baseCmd']
        }]

        builder = Builder()
        builder.base_url = "file:" + os.path.abspath(basedir) + "/"
        builder.ref_base_url = "file:" + toolpath
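        # NOTE: "toolpath" and "tool_schema_doc" are not defined in this snippet; they are
        # presumably module-level names in the original source.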

        requirements = self.tool.get("requirements")
        builder.jslib = ''
        if requirements and 'expressionlib' in requirements:
            for ex in requirements['expressionlib']:
                builder.jslib += builder.resolve_eval(joborder, ex) + "\n"

        if "args" in adapter:
            for i, a in enumerate(adapter["args"]):
                a = copy.copy(a)
                if "order" in a:
                    a["order"] = [a["order"]]
                else:
                    a["order"] = [0]
                a["schema"] = tool_schema_doc["definitions"]["strOrExpr"]
                adapters.append(a)

        adapters.extend(
            builder.adapt_inputs(self.tool['inputs'], inputs, inputs, ""))

        adapters.sort(key=lambda a: a["order"])

        referenced_files = filter(
            lambda a: a is not None,
            flatten(map(lambda a: builder.find_files(a, joborder), adapters)))

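        # Assemble the job object that will actually run the command line.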
        j = CommandLineProcess()
        j.joborder = joborder
        j.container = None

        if 'stdin' in adapter:
            j.stdin = flatten(
                builder.adapt(
                    {
                        "value":
                        adapter['stdin'],
                        "schema":
                        tool_schema_doc["properties"]["adapter"]["properties"]
                        ["stdin"]
                    }, joborder, None))[0]
            referenced_files.append(j.stdin)
        else:
            j.stdin = None

        if 'stdout' in adapter:
            j.stdout = flatten(
                builder.adapt(
                    {
                        "value":
                        adapter['stdout'],
                        "schema":
                        tool_schema_doc["properties"]["adapter"]["properties"]
                        ["stdout"]
                    }, joborder, None))[0]

            if os.path.isabs(j.stdout):
                raise Exception("stdout must be a relative path")
        else:
            j.stdout = None

        j.generatefiles = {}
        for t in adapter.get("generatefiles", []):
            j.generatefiles[builder.resolve_eval(
                inputs, t["name"])] = builder.resolve_eval(inputs, t["value"])

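        # Choose a Docker-aware path mapper when the environment requirements ask for a
        # docker container and containers are allowed; otherwise map paths directly.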
        d = None
        if requirements:
            b = requirements.get("environment")
            if b:
                c = b.get("container")
                if use_container and c:
                    if c.get("type") == "docker":
                        d = DockerPathMapper(referenced_files, basedir)
                        j.container = c

        if d is None:
            d = PathMapper(referenced_files, basedir)

        if j.stdin:
            j.stdin = j.stdin if os.path.isabs(j.stdin) else os.path.join(
                basedir, j.stdin)

        j.command_line = flatten(
            map(lambda a: builder.adapt(a, joborder, d.mapper), adapters))

        j.pathmapper = d
        j.collect_outputs = functools.partial(self.collect_outputs,
                                              self.tool.get("outputs", {}),
                                              joborder)
        j.output_callback = output_callback

        yield j

    def testPathMapperReturnsNoneOnMissedPath(self):
        p = PathMapper([(r"^/a/(?P<key>\w)$", None)])
        self.assertEquals(None, p.resolve("/b/asc"))
        self.assertEquals(None, p.resolve("/a/bc"))
        self.assertEquals(None, p.resolve("/"))

    def testPathMapperReturnsFunctionResult(self):
        def f(*args, **kwargs):
            return True

        p = PathMapper([(r"^/a/(?P<key>\w)$", f)])
        self.assertEquals(True, p.resolve("/a/b"))

# Example 10
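    # Another job() variant: builds command-line bindings from baseCommand and arguments,
    # resolves stdin/stdout, maps referenced files (optionally through Docker), and yields a
    # CommandLineJob.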
    def job(self,
            joborder,
            basedir,
            output_callback,
            use_container=True,
            **kwargs):
        builder = self._init_job(joborder, basedir, **kwargs)

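        # baseCommand tokens are positioned at [-1000000, n] so they always sort ahead of
        # arguments and input bindings.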
        if self.tool["baseCommand"]:
            for n, b in enumerate(aslist(self.tool["baseCommand"])):
                builder.bindings.append({
                    "position": [-1000000, n],
                    "valueFrom": b
                })

        if self.tool.get("arguments"):
            for i, a in enumerate(self.tool["arguments"]):
                if isinstance(a, dict):
                    a = copy.copy(a)
                    if a.get("position"):
                        a["position"] = [a["position"], i]
                    else:
                        a["position"] = [0, i]
                    a["do_eval"] = a["valueFrom"]
                    a["valueFrom"] = None
                    builder.bindings.append(a)
                else:
                    builder.bindings.append({
                        "position": [0, i],
                        "valueFrom": a
                    })

        builder.bindings.sort(key=lambda a: a["position"])

        _logger.debug(pprint.pformat(builder.bindings))
        _logger.debug(pprint.pformat(builder.files))

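        # Collect the paths of every file the job references so they can be path-mapped.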
        reffiles = [f["path"] for f in builder.files]

        j = CommandLineJob()
        j.joborder = builder.job
        j.stdin = None
        j.stdout = None
        builder.pathmapper = None

        if self.tool.get("stdin"):
            j.stdin = self.tool["stdin"]
            if isinstance(j.stdin, dict) and "ref" in j.stdin:
                j.stdin = builder.job[j.stdin["ref"][1:]]["path"]
            reffiles.append(j.stdin)

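        # stdout may be a literal filename or a reference to an output; a reference is turned
        # into a relative filename that is also used as that output's glob.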
        if self.tool.get("stdout"):
            if isinstance(self.tool["stdout"],
                          dict) and "ref" in self.tool["stdout"]:
                for out in self.tool.get("outputs", []):
                    if out["id"] == self.tool["stdout"]["ref"]:
                        filename = self.tool["stdout"]["ref"][1:]
                        j.stdout = filename
                        out["outputBinding"] = out.get("outputBinding", {})
                        out["outputBinding"]["glob"] = filename
                if not j.stdout:
                    raise Exception("stdout refers to invalid output")
            else:
                j.stdout = self.tool["stdout"]
            if os.path.isabs(j.stdout):
                raise Exception("stdout must be a relative path")

        j.requirements = self.requirements
        j.hints = self.hints

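        # Map paths through Docker when a DockerRequirement appears in the requirements or
        # hints and containers are enabled.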
        for r in (j.requirements + j.hints):
            if r["class"] == "DockerRequirement" and use_container:
                builder.pathmapper = DockerPathMapper(reffiles, basedir)

        if builder.pathmapper is None:
            builder.pathmapper = PathMapper(reffiles, basedir)

        for f in builder.files:
            f["path"] = builder.pathmapper.mapper(f["path"])

        builder.requirements = j.requirements

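        # Evaluate fileDefs (generated files) and environmentDefs expressions against the job order.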
        j.generatefiles = {}
        for t in self.tool.get("fileDefs", []):
            j.generatefiles[t["filename"]] = expression.do_eval(
                t["value"], builder.job, j.requirements, self.docpath)

        j.environment = {}
        for t in self.tool.get("environmentDefs", []):
            j.environment[t["env"]] = expression.do_eval(
                t["value"], builder.job, j.requirements, self.docpath)

        j.command_line = flatten(map(builder.generate_arg, builder.bindings))

        if j.stdin:
            j.stdin = j.stdin if os.path.isabs(j.stdin) else os.path.join(
                basedir, j.stdin)

        j.pathmapper = builder.pathmapper
        j.collect_outputs = functools.partial(self.collect_output_ports,
                                              self.tool["outputs"], builder)
        j.output_callback = output_callback

        yield j