Example #1
def _step_template(name, run_file, inputs, outputs, parallel, step_parallelism, scatter=None):
    """Templating function for writing a step to avoid repeating namespaces.

    Builds the dictionary for a single workflow step: remaps the fully
    namespaced workflow input identifiers onto the step's local input ids,
    collects the inputs to scatter over, and attaches the step's outputs.
    `workflow.get_base_id`, `workflow.get_step_prefix`, `_is_scatter_parallel`
    and `_do_scatter_var` are helpers defined elsewhere in the surrounding module.
    """
    scatter_inputs = []
    sinputs = []
    for inp in inputs:
        step_inp = {"id": workflow.get_base_id(inp["id"]), "source": inp["id"]}
        if inp.get("wf_duplicate"):
            step_inp["id"] += "_toolinput"
        for attr in ["source", "valueFrom"]:
            if attr in inp:
                step_inp[attr] = inp[attr]
        sinputs.append(step_inp)
        # An initial parallel scatter and multiple chained parallel sample scatters
        if (parallel == "multi-parallel" and
              (not step_parallelism or
               step_parallelism.get(workflow.get_step_prefix(inp["id"])) == "multi-parallel")):
            scatter_inputs.append(step_inp["id"])
        # scatter on inputs from previous processes that have been arrayed
        elif (_is_scatter_parallel(parallel) and (_do_scatter_var(inp, parallel)
                                                  or (scatter and inp["id"] in scatter))):
            scatter_inputs.append(step_inp["id"])
    out = {"run": run_file,
           "id": name,
           "in": sinputs,
           "out": [{"id": workflow.get_base_id(output["id"])} for output in outputs]}
    if _is_scatter_parallel(parallel):
        assert scatter_inputs, "Did not find items to scatter on: %s" % name
        out.update({"scatterMethod": "dotproduct",
                    "scatter": scatter_inputs})
    return out
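
To make the snippet self-contained, here is a minimal usage sketch. The stub implementations of workflow.get_base_id, workflow.get_step_prefix, _is_scatter_parallel and _do_scatter_var, as well as the example ids, the "single-parallel" mode and the "wf_scatter" flag, are simplified assumptions for illustration; the real helpers live in the module that defines _step_template and may behave differently.

# --- usage sketch (stand-in helpers, assumptions only) ---

class workflow:
    """Stand-in namespace for the workflow helper module (assumption)."""
    @staticmethod
    def get_base_id(wid):
        # Assume ids look like "stepname/variable"; keep only the variable part.
        return wid.split("/")[-1]

    @staticmethod
    def get_step_prefix(wid):
        parts = wid.split("/")
        return parts[0] if len(parts) > 1 else None

def _is_scatter_parallel(parallel):
    # Assumption: any "*-parallel" mode scatters over its inputs.
    return parallel.endswith("-parallel")

def _do_scatter_var(inp, parallel):
    # Assumption: inputs to scatter over are flagged on the input record.
    return bool(inp.get("wf_scatter"))

step = _step_template(
    name="alignment",
    run_file="steps/alignment.cwl",
    inputs=[{"id": "prep_samples/files", "wf_scatter": True},
            {"id": "config__algorithm__aligner"}],
    outputs=[{"id": "alignment/align_bam"}],
    parallel="single-parallel",
    step_parallelism={},
)
# With these stubs, step is:
# {"run": "steps/alignment.cwl", "id": "alignment",
#  "in": [{"id": "files", "source": "prep_samples/files"},
#         {"id": "config__algorithm__aligner", "source": "config__algorithm__aligner"}],
#  "out": [{"id": "align_bam"}],
#  "scatterMethod": "dotproduct", "scatter": ["files"]}
print(step)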