Example #1
def generate_inputs(
    tool: Union[str, j.CommandTool, j.Workflow],
    all=False,
    name=None,
    force=False,
    additional_inputs=None,
    with_resources=False,
    quality_type: List[InputQualityType] = None,
    recipes: List[str] = None,
    hints: dict = None,
):
    toolref = resolve_tool(tool, name, from_toolshed=True, force=force)
    inputsdict = None
    if additional_inputs:
        inputsfile = get_file_from_searchname(additional_inputs, ".")
        inputsdict = parse_dict(inputsfile)

    values_to_ignore = set()
    if recipes:
        # inputs already supplied by the selected recipes don't need to be generated again
        jc = JanisConfiguration.manager()
        for k in jc.recipes.get_recipe_for_keys(recipes):
            values_to_ignore.add(k)

    if not toolref:
        raise Exception("Couldn't find workflow with name: " + str(tool))

    return toolref.generate_inputs_override(
        additional_inputs=inputsdict,
        with_resource_overrides=with_resources,
        include_defaults=all,
        values_to_ignore=values_to_ignore,
        quality_type=quality_type,
        hints=hints,
    )
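
A minimal usage sketch (not part of the original example), assuming generate_inputs is importable from this module and that a workflow with the placeholder id "hello" is resolvable from the toolshed:

# Hypothetical call: request every input of the "hello" workflow, including
# those that already have defaults; the returned mapping is whatever
# generate_inputs_override produces (the CLI typically dumps it as YAML).
inputs = generate_inputs(
    "hello",               # placeholder tool/workflow id, resolved via resolve_tool
    all=True,              # include inputs that already have defaults
    with_resources=False,  # no resource-override keys
)
print(inputs)
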
def cascade_inputs(
    wf: Optional[Tool],
    inputs: Optional[Union[Dict, List[Union[str, Dict]]]],
    required_inputs: Optional[Dict],
    batchrun_options: Optional[BatchRunRequirements] = None,
    strict_inputs: bool = False,
):

    list_of_input_dicts: List[Dict] = []

    if inputs:
        if not isinstance(inputs, list):
            inputs = [inputs]
        for inp in inputs:
            if isinstance(inp, dict):
                list_of_input_dicts.append(inp)
            else:
                inputsfile = get_file_from_searchname(inp, ".")
                if inputsfile is None:
                    raise FileNotFoundError("Couldn't find inputs file: " +
                                            str(inp))
                list_of_input_dicts.append(parse_dict(inputsfile))

    ins = None
    if batchrun_options:
        ins = cascade_batchrun_inputs(wf, list_of_input_dicts,
                                      batchrun_options)
    else:
        ins = cascade_regular_inputs(list_of_input_dicts)

    if strict_inputs:
        # make all the inputs in "ins" required (make sure they don't override already required inputs)
        required_inputs = {**ins, **(required_inputs or {})}

    if required_inputs:
        if wf is None:
            raise Exception(
                "cascade_inputs requires 'wf' parameter if required_inputs is present"
            )
        if isinstance(wf, DynamicWorkflow):
            # a DynamicWorkflow only resolves its inputs at runtime, so its keys can't be validated here
            pass
        else:
            reqkeys = set(required_inputs.keys())
            inkeys = set(wf.all_input_keys())
            invalid_keys = reqkeys - inkeys
            if len(invalid_keys) > 0:
                raise Exception(
                    f"There were unrecognised inputs provided to the tool \"{wf.id()}\", keys: {', '.join(invalid_keys)}"
                )

        ins = {**ins, **(required_inputs or {})}

    return ins
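
A sketch of how this might be called (the workflow, input name and values are placeholders), assuming cascade_inputs and its helpers are importable and using janis_core's WorkflowBuilder to stand in for a real workflow:

import janis_core as j

wf = j.WorkflowBuilder("hello_wf")      # placeholder workflow
wf.input("name_of_person", j.String())

ins = cascade_inputs(
    wf,
    inputs=[{"name_of_person": "hello"}],  # would normally come from dicts and/or inputs files
    required_inputs=None,
    strict_inputs=True,  # promotes every cascaded key to required, so it is checked against wf.all_input_keys()
)
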
Example #3
def translate(
    tool: Union[str, j.CommandTool, Type[j.CommandTool], j.Workflow,
                Type[j.Workflow]],
    translation: str,
    name: str = None,
    hints: Optional[Dict[str, str]] = None,
    output_dir: Optional[str] = None,
    inputs: Union[str, dict] = None,
    allow_empty_container=False,
    container_override=None,
    **kwargs,
):

    toolref = resolve_tool(tool, name, from_toolshed=True)

    if not toolref:
        raise Exception(f"Couldn't find tool: '{tool}'")

    inputsdict = None
    if inputs:
        # 'inputs' may be an already-parsed dict, or a path to an inputs file on disk
        if isinstance(inputs, dict):
            inputsdict = inputs
        else:
            inputsfile = get_file_from_searchname(inputs, ".")
            inputsdict = parse_dict(inputsfile)

    if isinstance(toolref, j.Workflow):
        wfstr, _, _ = toolref.translate(
            translation,
            to_console=False,
            to_disk=bool(output_dir),
            export_path=output_dir or "./{language}",
            hints=hints,
            additional_inputs=inputsdict,
            allow_empty_container=allow_empty_container,
            container_override=container_override,
        )
    elif isinstance(toolref, (j.CommandTool, j.CodeTool)):
        wfstr = toolref.translate(
            translation=translation,
            to_console=False,
            to_disk=bool(output_dir),
            export_path=output_dir or "./{language}",
            allow_empty_container=allow_empty_container,
            container_override=container_override,
        )

    else:
        name = toolref.__name__ if isclass(
            toolref) else toolref.__class__.__name__
        raise Exception("Unsupported tool type: " + name)

    print(wfstr, file=sys.stdout)
    return wfstr
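
A minimal sketch (placeholder tool id), assuming translate is importable and "hello" resolves from the toolshed; "wdl" is one of the supported translation targets:

# Hypothetical call: render the "hello" workflow as WDL. The translation is
# printed to stdout and returned; nothing is written to disk because
# output_dir is not set.
wdl_text = translate("hello", "wdl")
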
def generate_inputs(
    jc: JanisConfiguration,
    tool: Union[str, j.CommandTool, j.Workflow],
    all=False,
    name=None,
    force=False,
    additional_inputs=None,
    with_resources=False,
    quality_type: List[InputQualityType] = None,
    recipes: List[str] = None,
    hints: dict = None,
):
    toolref, _ = resolve_tool(tool, name, from_toolshed=True, force=force)
    inputsdict = None
    if additional_inputs:
        inputsfile = get_file_from_searchname(additional_inputs, ".")
        inputsdict = parse_dict(inputsfile)

    values_to_ignore = set()
    if recipes:
        if not jc:
            Logger.warn(
                "An internal error occurred when getting recipes from your config, please raise an issue on GitHub."
            )
        else:
            for k in jc.recipes.get_recipe_for_keys(recipes):
                values_to_ignore.add(k)

    if not toolref:
        raise Exception("Couldn't find workflow with name: " + str(tool))

    d = toolref.generate_inputs_override(
        additional_inputs=inputsdict,
        with_resource_overrides=with_resources,
        include_defaults=all,
        values_to_ignore=values_to_ignore,
        quality_type=quality_type,
        hints=hints,
    )

    return d


def cascade_inputs(
    wf: Workflow,
    inputs: Optional[Union[Dict, List[Union[str, Dict]]]],
    required_inputs: Optional[Dict],
    batchrun_options: Optional[BatchRunRequirements],
):

    list_of_input_dicts: List[Dict] = []

    if inputs:
        if not isinstance(inputs, list):
            inputs = [inputs]
        for inp in inputs:
            if isinstance(inp, dict):
                list_of_input_dicts.append(inp)
            else:
                inputsfile = get_file_from_searchname(inp, ".")
                if inputsfile is None:
                    raise FileNotFoundError("Couldn't find inputs file: " + str(inp))
                list_of_input_dicts.append(parse_dict(inputsfile))

    if required_inputs:
        reqkeys = set(required_inputs.keys())
        inkeys = set(wf.all_input_keys())
        invalid_keys = reqkeys - inkeys
        if len(invalid_keys) > 0:
            raise Exception(
                f"There were unrecognised inputs provided to the tool \"{wf.id()}\", keys: {', '.join(invalid_keys)}"
            )

        list_of_input_dicts.append(parse_known_inputs(wf, required_inputs))

    ins = None
    if batchrun_options:
        ins = cascade_batchrun_inputs(wf, list_of_input_dicts, batchrun_options)
    else:
        ins = cascade_regular_inputs(list_of_input_dicts)

    return ins
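
A sketch of the validation path (placeholder workflow and keys), again using janis_core's WorkflowBuilder as a stand-in; the unknown key below triggers the unrecognised-inputs exception shown above:

import janis_core as j

wf = j.WorkflowBuilder("hello_wf")
wf.input("name_of_person", j.String())

try:
    cascade_inputs(
        wf,
        inputs=None,
        required_inputs={"not_an_input": "x"},  # key not declared on the workflow
        batchrun_options=None,
    )
except Exception as err:
    print(err)  # names the tool id "hello_wf" and the offending key
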
def do_run(args):

    if args.job:
        from os import getcwd

        workflow, workflow_ref = resolve_tool(
            tool=args.workflow,
            name=args.name,
            from_toolshed=True,
            only_toolbox=args.toolbox,
            force=args.no_cache,
        )
        # parse and load the job file
        Logger.info("Specified job file, ignoring all other parameters")
        d = parse_dict(get_file_from_searchname(args.job, getcwd()))
        job = PreparedJob(**d, workflow_reference=workflow_ref)

    else:
        # no job file was given: build the PreparedJob from the CLI arguments instead
        job, workflow = prepare_from_args(args, run_prepare_processing=False)

    run_from_jobfile(workflow, jobfile=job, wait=args.wait)

    Logger.info("Exiting")
    raise SystemExit
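
A sketch of the job-file path (the job file path and workflow id are placeholders); do_run is normally driven by the janis CLI's argument parser, so the Namespace below only sets the attributes this snippet actually reads:

from argparse import Namespace

args = Namespace(
    job="job.yaml",    # placeholder path to a previously prepared job/config file
    workflow="hello",  # placeholder workflow reference
    name=None,
    toolbox=False,
    no_cache=False,
    wait=True,
)
do_run(args)  # resolves the workflow, builds a PreparedJob, submits the run, then raises SystemExit
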