    def execute(self) -> Tuple[UUID, Dict, Optional[Exception]]:
        """
        :return: Run ID, a dict of newly produced output files, and the exception if one was raised
        """
        run_id = uuid4()

        runtime_context = RuntimeContext()
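        # Resolve all paths against the managed root directory and send default
        # stdio for child processes to /dev/null.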
        runtime_context.outdir = self.file_manager.ROOT_DIRECTORY
        runtime_context.basedir = self.file_manager.ROOT_DIRECTORY
        runtime_context.default_stdin = subprocess.DEVNULL
        runtime_context.default_stdout = subprocess.DEVNULL
        runtime_context.default_stderr = subprocess.DEVNULL
        os.chdir(self.file_manager.ROOT_DIRECTORY)
        factory = Factory(runtime_context=runtime_context)
        executable = factory.make(self._workflow_path)
        data = {}
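        # Merge the job-order YAML files; on duplicate keys, values from files listed earlier take precedence.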
        for data_file in self._data_paths:
            with open(data_file) as f:
                new_data = yaml.load(f, Loader=yaml.Loader)
                data = {**new_data, **data}
        try:
            result: Dict = executable(**data)
            return run_id, result, None
        except Exception as e:
            traceback.print_exc(file=sys.stderr)
            return run_id, {}, e
Example #2
def test_value_from_two_concatenated_expressions() -> None:
    sandboxjs.have_node_slim = False
    sandboxjs.localdata = threading.local()
    factory = Factory()
    echo = factory.make(get_data("tests/wf/vf-concat.cwl"))
    file = {"class": "File", "location": get_data("tests/wf/whale.txt")}

    assert echo(file1=file) == {"out": "a string\n"}
Example #3
def test_scattered_workflow() -> None:
    test_file = "tests/wf/scatter-wf4.cwl"
    job_file = "tests/wf/scatter-job2.json"
    factory = Factory(MultithreadedJobExecutor())
    echo = factory.make(get_data(test_file))
    with open(get_data(job_file)) as job:
        assert echo(**json.load(job)) == {
            "out": ["foo one three", "foo two four"]
        }
Example #4
def get_windows_safe_factory(**execkwargs):
    if onWindows():
        makekwargs = {
            'find_default_container': functools.partial(
                force_default_container, windows_default_container_id),
            'use_container': True,
        }
        execkwargs['default_container'] = windows_default_container_id
    else:
        makekwargs = {}
    return Factory(makekwargs=makekwargs, **execkwargs)
Example #5
def get_windows_safe_factory(runtime_context=None,  # type: RuntimeContext
                             loading_context=None,  # type: LoadingContext
                             executor=None          # type: Any
                            ):  # type: (...) -> Factory
    if onWindows():
        if not runtime_context:
            runtime_context = RuntimeContext()
        runtime_context.find_default_container = functools.partial(
            force_default_container, windows_default_container_id)
        runtime_context.use_container = True
        runtime_context.default_container = windows_default_container_id
    return Factory(executor, loading_context, runtime_context)
Example #6
def get_windows_safe_factory(
    runtime_context: Optional[RuntimeContext] = None,
    loading_context: Optional[LoadingContext] = None,
    executor: Optional[JobExecutor] = None,
) -> Factory:
    if onWindows():
        if not runtime_context:
            runtime_context = RuntimeContext()
        runtime_context.find_default_container = functools.partial(
            force_default_container, windows_default_container_id)
        runtime_context.use_container = True
        runtime_context.default_container = windows_default_container_id
    return Factory(executor, loading_context, runtime_context)
Example #7
def test_sequential_workflow(tmp_path: Path) -> None:
    test_file = "tests/wf/count-lines1-wf.cwl"
    executor = MultithreadedJobExecutor()
    runtime_context = RuntimeContext()
    runtime_context.outdir = str(tmp_path)
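    # Let the multithreaded executor decide the core/RAM allocation for each step.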
    runtime_context.select_resources = executor.select_resources
    factory = Factory(executor, None, runtime_context)
    echo = factory.make(get_data(test_file))
    file_contents = {
        "class": "File",
        "location": get_data("tests/wf/whale.txt")
    }
    assert echo(file1=file_contents) == {"count_output": 16}
Example #8
def test_value_from_two_concatenated_expressions_singularity(
    tmp_path: Path, monkeypatch: pytest.MonkeyPatch
) -> None:
    """Javascript test using Singularity."""
    sandboxjs.have_node_slim = False
    sandboxjs.localdata = threading.local()
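    # hide_nodejs returns a PATH with any local node/nodejs binaries hidden,
    # so JavaScript expressions must be evaluated inside the container.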
    new_paths = hide_nodejs(tmp_path)
    factory = Factory()
    factory.loading_context.singularity = True
    factory.loading_context.debug = True
    factory.runtime_context.debug = True
    with monkeypatch.context() as m:
        m.setenv("PATH", new_paths)
        echo = factory.make(get_data("tests/wf/vf-concat.cwl"))
        file = {"class": "File", "location": get_data("tests/wf/whale.txt")}
        assert echo(file1=file) == {"out": "a string\n"}
Example #9
    def __init__(
        self,
        tool: dict,
        inputs: dict,
        workdir=None,  # contains the tmp and out dirs if tmpdir is not specified separately
        tmpdir=None,
        use_container=True,
        user_space_docker_cmd="",
        force_docker_pull=False,
        singularity=False,
        debug=False,
        default_container="",
    ):
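        # Random suffix used to build a unique working directory when the requested workdir already exists.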
        random_string = "".join(random_choice(ascii_letters + digits) for _ in range(14))
        self.tool = tool
        self.inputs = inputs
        if workdir is None:
            workdir = os.getcwd()

        assert (not os.path.exists(workdir)) or os.path.isdir(workdir), \
            error_message(
                "task_executor",
                f"workdir exists but is not a directory: {workdir}",
                is_known=True
            )

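        # Use workdir as-is only if it does not exist yet; otherwise create a uniquely named subdirectory inside it.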
        self.workdir = workdir if not os.path.exists(workdir) \
            else os.path.join(workdir, random_string)
        os.makedirs(self.workdir)
        
        self.inputs_file = os.path.join(self.workdir, "inputs.json")
        with open(self.inputs_file, "w") as inp_file:
            inp_file.write(json.dumps(self.inputs, indent=2))

        self.cwl_file = os.path.join(self.workdir, "tool.cwl")
        with open(self.cwl_file, "w") as cwl_file:
            cwl_file.write(json.dumps(self.tool, indent=2))

        self.tmpdir = tmpdir if tmpdir \
            else os.path.join(self.workdir, "tmp")
        os.mkdir(self.tmpdir)
        self.outdir = os.path.join(self.workdir, "out")
        os.mkdir(self.outdir)

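        # Only outdir is overridden here; the container-related options accepted by
        # __init__ are left at cwltool defaults (their assignments are commented out).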
        self.loading_context = LoadingContext()
        self.runtime_context = RuntimeContext()

        # self.runtime_context.tmpdir = self.tmpdir
        self.runtime_context.outdir = self.outdir
        # self.runtime_context.use_container = use_container
        # self.runtime_context.user_space_docker_cmd = user_space_docker_cmd
        # self.runtime_context.force_docker_pull = force_docker_pull
        # self.runtime_context.singularity = singularity

        print(self.runtime_context.outdir)

        fac = Factory(
            # loading_context=self.loading_context, 
            runtime_context=self.runtime_context
        )
        self.callable_tool = fac.make(self.cwl_file)

        self.success = None
        self.out = {}
Example #10
def test_newline_in_entry() -> None:
    """Files in a InitialWorkingDirectory are created with a newline character."""
    factory = Factory()
    echo = factory.make(get_data("tests/wf/iwdr-entry.cwl"))
    assert echo(message="hello") == {"out": "CONFIGVAR=hello\n"}
Example #11
def test_value_from_two_concatenated_expressions() -> None:
    factory = Factory()
    echo = factory.make(get_data("tests/wf/vf-concat.cwl"))
    file = {"class": "File", "location": get_data("tests/wf/whale.txt")}

    assert echo(file1=file) == {"out": "a string\n"}
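
All of the examples above follow the same pattern: build a cwltool Factory (optionally passing an executor, a LoadingContext, and a RuntimeContext), call make() on a CWL document to obtain a callable, then invoke that callable with the job inputs as keyword arguments and read the outputs from the returned dict. A minimal sketch of that pattern, assuming a local echo.cwl tool with a single message input (the file name and input id are placeholders, not taken from any of the repositories above):

from cwltool.context import RuntimeContext
from cwltool.factory import Factory

def run_echo_tool() -> dict:
    # Any RuntimeContext field used in the examples above (outdir, use_container,
    # select_resources, ...) can be set here before the Factory is created.
    runtime_context = RuntimeContext()
    factory = Factory(runtime_context=runtime_context)
    # make() loads and validates the CWL document and returns a callable proxy.
    echo = factory.make("echo.cwl")  # placeholder path
    # Inputs are passed as keyword arguments; outputs come back as a plain dict.
    return echo(message="hello world")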