def do_translate(args):
    jc = JanisConfiguration.initial_configuration(args.config)

    container_override = parse_container_override_format(
        args.container_override)

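    # gather any hint_* CLI flags into a hints dict, stripping the "hint_" prefix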
    hints = {
        k[5:]: v
        for k, v in vars(args).items()
        if k.startswith("hint_") and v is not None
    }

    inputs = args.inputs or []
    # the args.extra_inputs parameter contains inputs that we MUST match;
    # we'd need to parse them manually and then pass them to fromjanis as requiring a match
    # required_inputs = parse_additional_arguments(args.extra_inputs)

    translate(
        config=jc,
        tool=args.workflow,
        translation=args.translation,
        name=args.name,
        output_dir=args.output_dir,
        force=args.no_cache,
        allow_empty_container=args.allow_empty_container,
        container_override=container_override,
        skip_digest_lookup=args.skip_digest_lookup,
        skip_digest_cache=args.skip_digest_cache,
        inputs=inputs,
        recipes=args.recipe,
        hints=hints,
    )
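
# Usage sketch (hypothetical invocation): the CLI dispatches here with an argparse
# namespace, e.g. something like `janis translate workflow.py wdl` arriving as
# args.workflow="workflow.py", args.translation="wdl".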
def do_inputs(args):

    if args.config or args.recipes:
        JanisConfiguration.initial_configuration(args.config)

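    # restrict the generated inputs to a quality tier when args.user or args.static is set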
    quality_type = None

    if args.user:
        quality_type = [InputQualityType.user]
    elif args.static:
        quality_type = [
            InputQualityType.static, InputQualityType.configuration
        ]

    hints = {
        k[5:]: v
        for k, v in vars(args).items()
        if k.startswith("hint_") and v is not None
    }

    outd = generate_inputs(
        args.workflow,
        all=args.all,
        name=args.name,
        force=args.no_cache,
        additional_inputs=args.inputs,
        with_resources=args.resources,
        quality_type=quality_type,
        recipes=args.recipes,
        hints=hints,
    )

    if args.json:
        outs = json.dumps(outd,
                          sort_keys=True,
                          indent=4,
                          separators=(",", ": "))
    else:
        outs = ruamel.yaml.dump(outd, default_flow_style=False)

    if args.output:
        with open(args.output, "w+") as out:
            out.write(str(outs))
    else:
        print(outs, file=sys.stdout)
    def from_path_with_wid(path, wid, readonly=False):
        """
        :param wid: Workflow ID
        :param path: Path to workflow
        :return: TaskManager after resuming (might include a wait)
        """
        # get everything and pass to constructor
        # database path

        path = WorkflowManager.get_task_path_for(path)
        if not os.path.exists(path):
            raise FileNotFoundError(f"Couldn't find path '{path}'")

        db = WorkflowDbManager.get_workflow_metadatadb(path, wid, readonly=readonly)

        if not wid:
            wid = db.wid  # .get_meta_info(InfoKeys.taskId)

        if not wid:
            raise Exception(f"Couldn't find a workflow id for path '{path}'")

        envid = db.environment  # .get_meta_info(InfoKeys.environment)
        eng = db.engine
        fs = db.filescheme
        env = Environment(envid, eng, fs)

        try:
            JanisConfiguration._managed = db.configuration
        except Exception as e:
            Logger.critical(
                "The JanisConfiguration could not be loaded from the DB, this might be due to an older version, we'll load your current config instead. Error: "
                + str(e)
            )
            JanisConfiguration.initial_configuration(None)

        db.close()

        tm = WorkflowManager(outdir=path, wid=wid, environment=env, readonly=readonly)
        return tm
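
    # Usage sketch (hypothetical path and wid; assumes a run directory created by Janis):
    #   tm = WorkflowManager.from_path_with_wid("/data/janis/runs/wgs", wid="a1b2c3", readonly=True)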
    def config(self):
        if self._config is None:

            from janis_assistant.management.configuration import JanisConfiguration

            if not self._raw_config:
                pass
            elif isinstance(self._raw_config, JanisConfiguration):
                self._config = self._raw_config
            elif isinstance(self._raw_config, str):
                self._config = JanisConfiguration.initial_configuration(
                    path=self._raw_config
                )
            else:
                raise ValueError(
                    f"Unrecognised type for janis configuration {self._raw_config} (type: {type(self._raw_config)})"
                )
        return self._config
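
    # Lazy resolution: self._raw_config may be None (leave unset), an existing
    # JanisConfiguration, or a path string; the parsed result is cached on first access.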
def do_runtest(args):
    config = None
    if args.config:
        config = JanisConfiguration.initial_configuration(path=args.config)

    runner_path = test_runner.__file__

    cli_args = sys.argv[2:]
    run_test_commands = ["python", runner_path] + cli_args

    if config:
        commands = config.template.template.prepare_run_test_command(
            run_test_commands)
    else:
        commands = run_test_commands

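    # joined purely for logging; subprocess.run receives the list form below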
    joined_command = "' '".join(commands)
    Logger.info(f"Deploying test with command: '{joined_command}'")
    subprocess.run(commands)
def run_with_outputs(
    tool: Union[j.CommandTool, j.Workflow],
    inputs: Dict[str, Any],
    output_dir: str,
    config: JanisConfiguration = None,
    engine: Optional[str] = None,
    workflow_reference: Optional[str] = None,
):
    """
    Run and WAIT for a Janis workflow to complete. This helper method runs a workflow,
    and returns a dictionary of output values to their output tag. This method MAY throw,
    so ensure it's try-catch wrapped.
    :param tool: An INSTANTIATED tool definition. Obtain this from 'get_janis_workflow_from_searchname' earlier
    :param inputs: A dictionary of pure input values, not file paths.
    :param output_dir: Where to run the execution
    :param config: Optional config, else choose the default at $HOME/.janis/janis.conf
    :param workflow_reference: A reference to the workflow being run, this gets used to write a run.sh file
    :return: A dictionary of output values by the output tag
    """

    job = prepare_job(
        tool=tool,
        output_dir=output_dir,
        required_inputs=inputs,
        jc=config or JanisConfiguration.initial_configuration(None),
        # params to be automatically evaluated
        execution_dir=None,
        inputs={},
        allow_empty_container=False,
        check_files=True,
        container_override={},
        skip_digest_cache=False,
        skip_digest_lookup=False,
        batchrun_reqs=None,
        validation_reqs=None,
        engine=engine,
        hints={},
        keep_intermediate_files=False,
        max_cores=None,
        max_memory=None,
        max_duration=None,
        no_store=True,
        recipes=[],
        run_in_background=None,
        run_in_foreground=None,
        strict_inputs=False,
        watch=False,
        workflow_reference=workflow_reference,
        # don't do extra preprocessing steps
        run_prepare_processing=False,
        localise_all_files=True,
    )

    wm = run_from_jobfile(tool, jobfile=job, wait=True)
    if not wm:
        Logger.critical(f"An error occurred when running workflow {tool.id()}")
        return None

    if not wm.database:
        Logger.critical(
            f"An error occurred when getting the outputs for workflow {tool.id()}"
        )
        return None

    status = wm.database.get_uncached_status()
    if status != TaskStatus.COMPLETED:
        error = ""
        if wm.database.submission_metadata and wm.database.submission_metadata.metadata:
            error = wm.database.submission_metadata.metadata.error or ""
        Logger.critical(
            f"The workflow {tool.id()} ended with status {status}, and hence won't return outputs. Error: {error}"
        )
        return None

    outs = wm.database.outputsDB.get()
    return {
        o.id_: o.value or o.new_path
        for o in outs if o.value or o.new_path
    }
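
# Usage sketch, following the docstring's advice to try/except-wrap
# (tool, inputs and output_dir below are hypothetical):
#   try:
#       outs = run_with_outputs(my_tool, {"sample_name": "NA12878"}, "/tmp/run1")
#       if outs:
#           print(outs)
#   except Exception as e:
#       Logger.critical(f"run_with_outputs failed: {e}")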
def do_run(args):
    jc = JanisConfiguration.initial_configuration(args.config)

    validation_reqs, batchrun_reqs = None, None

    if args.validation_fields:
        Logger.info("Will prepare validation")
        validation_reqs = ValidationRequirements(
            truthVCF=args.validation_truth_vcf,
            reference=args.validation_reference,
            fields=args.validation_fields,
            intervals=args.validation_intervals,
        )

    if args.batchrun:
        Logger.info("Will prepare batch run")
        batchrun_reqs = BatchRunRequirements(fields=args.batchrun_fields,
                                             groupby=args.batchrun_groupby)

    hints = {
        k[5:]: v
        for k, v in vars(args).items()
        if k.startswith("hint_") and v is not None
    }

    # the args.extra_inputs parameter contains inputs that we MUST match;
    # we'll need to parse them manually and then pass them to fromjanis as requiring a match
    required_inputs = parse_additional_arguments(args.extra_inputs)

    inputs = args.inputs or []
    # we'll manually pull "inputs" out of the extra params, otherwise it's really
    # annoying if you forget to put the inputs before the workflow positional argument.
    # TBH, we could automatically do this for all params, but it's a little trickier

    if "inputs" in required_inputs:
        ins = required_inputs.pop("inputs")
        inputs.extend(ins if isinstance(ins, list) else [ins])
    if "i" in required_inputs:
        ins = required_inputs.pop("i")
        inputs.extend(ins if isinstance(ins, list) else [ins])

    keep_intermediate_files = args.keep_intermediate_files is True

    db_config = jc.cromwell.get_database_config_helper()

    if args.mysql:
        db_config.should_manage_mysql = True

    if args.no_database:
        db_config.skip_database = True

    if args.development:
        # no change needed for --mysql, since a database is already the default
        keep_intermediate_files = True
        JanisConfiguration.manager().cromwell.call_caching_enabled = True

    wid = fromjanis(
        args.workflow,
        name=args.name,
        validation_reqs=validation_reqs,
        batchrun_reqs=batchrun_reqs,
        engine=args.engine,
        filescheme=args.filescheme,
        hints=hints,
        output_dir=args.output_dir,
        inputs=inputs,
        required_inputs=required_inputs,
        filescheme_ssh_binding=args.filescheme_ssh_binding,
        cromwell_url=args.cromwell_url,
        watch=args.progress,
        max_cores=args.max_cores,
        max_mem=args.max_memory,
        force=args.no_cache,
        recipes=args.recipe,
        keep_intermediate_files=keep_intermediate_files,
        run_in_background=(args.background is True),
        run_in_foreground=(args.foreground is True),
        dbconfig=db_config,
        only_toolbox=args.toolbox,
        no_store=args.no_store,
        allow_empty_container=args.allow_empty_container,
        check_files=not args.skip_file_check,
        container_override=parse_container_override_format(
            args.container_override),
    )

    Logger.info("Exiting")
    raise SystemExit
def prepare_from_args(
        args, run_prepare_processing: bool) -> Tuple[PreparedJob, Tool]:
    jc = JanisConfiguration.initial_configuration(path=args.config)

    # the args.extra_inputs parameter contains inputs that we MUST match;
    # we'll need to parse them manually and then pass them to fromjanis as requiring a match
    required_inputs = parse_additional_arguments(args.extra_inputs)

    inputs = args.inputs or []
    # we'll manually pull "inputs" out of the extra params, otherwise it's really
    # annoying if you forget to put the inputs before the workflow positional argument.
    # TBH, we could automatically do this for all params, but it's a little trickier

    if "inputs" in required_inputs:
        ins = required_inputs.pop("inputs")
        inputs.extend(ins if isinstance(ins, list) else [ins])
    if "i" in required_inputs:
        ins = required_inputs.pop("i")
        inputs.extend(ins if isinstance(ins, list) else [ins])

    validation, batchrun = None, None
    if args.validation_fields:
        Logger.info("Will prepare validation")
        validation = ValidationRequirements(
            truthVCF=args.validation_truth_vcf,
            reference=args.validation_reference,
            fields=args.validation_fields,
            intervals=args.validation_intervals,
        )

    if args.batchrun:
        Logger.info("Will prepare batch run")
        batchrun = BatchRunRequirements(fields=args.batchrun_fields,
                                        groupby=args.batchrun_groupby)

    db_type: Optional[DatabaseTypeToUse] = None
    if args.no_database:
        db_type = DatabaseTypeToUse.none
    elif args.mysql:
        db_type = DatabaseTypeToUse.managed

    # args.source_hint may be absent on older CLI parsers
    source_hints = getattr(args, "source_hint", [])

    wf, wf_reference = resolve_tool(
        tool=args.workflow,
        name=args.name,
        from_toolshed=True,
        only_toolbox=args.toolbox,
        force=args.no_cache,
    )

    job = prepare_job(
        run_prepare_processing=run_prepare_processing,
        tool=wf,
        workflow_reference=wf_reference,
        jc=jc,
        engine=args.engine,
        output_dir=args.output_dir,
        execution_dir=args.execution_dir,
        max_cores=args.max_cores,
        max_memory=args.max_memory,
        max_duration=args.max_duration,
        run_in_foreground=args.foreground is True,
        run_in_background=args.background is True,
        check_files=not args.skip_file_check,
        container_override=parse_container_override_format(
            args.container_override),
        skip_digest_lookup=args.skip_digest_lookup,
        skip_digest_cache=args.skip_digest_cache,
        hints={
            k[5:]: v
            for k, v in vars(args).items()
            if k.startswith("hint_") and v is not None
        },
        validation_reqs=validation,
        batchrun_reqs=batchrun,
        # inputs
        inputs=inputs,
        required_inputs=required_inputs,
        watch=args.progress,
        recipes=args.recipe,
        keep_intermediate_files=args.keep_intermediate_files is True,
        no_store=args.no_store,
        allow_empty_container=args.allow_empty_container,
        strict_inputs=args.strict_inputs,
        db_type=db_type,
        source_hints=source_hints,
        post_run_script=args.post_run_script,
        localise_all_files=args.localise_all_files,
    )

    return job, wf
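
# Usage sketch (hypothetical argparse namespace from the CLI parser):
#   job, tool = prepare_from_args(args, run_prepare_processing=True)
#   wm = run_from_jobfile(tool, jobfile=job, wait=False)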