Example #1
    def with_workflow_storage(self):
        """Yield a workflow storage."""
        # ``uuid``, ``yaml`` and ``secure_filename`` are module-level imports
        # in the original source; ``secure_filename`` is assumed to come from
        # ``werkzeug.utils``.
        import uuid

        import yaml
        from werkzeug.utils import secure_filename

        from renku.core.models.cwl.ascwl import ascwl
        from renku.core.models.cwl.workflow import Workflow

        workflow = Workflow()
        yield workflow

        for step in workflow.steps:
            step_name = '{0}_{1}.cwl'.format(
                uuid.uuid4().hex,
                secure_filename('_'.join(step.run.baseCommand)),
            )

            workflow_path = self.workflow_path
            if not workflow_path.exists():
                workflow_path.mkdir()

            step_path = workflow_path / step_name
            with step_path.open('w') as step_file:
                yaml.dump(
                    ascwl(
                        # filter=lambda _, x: not (x is False or bool(x)),
                        step.run,
                        filter=lambda _, x: x is not None,
                        basedir=workflow_path,
                    ),
                    stream=step_file,
                    default_flow_style=False
                )
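
A minimal usage sketch, not verbatim Renku code: the yield-then-cleanup structure above implies that the original class wraps this method with ``contextlib.contextmanager``, and the constructor argument to ``LocalClient`` is an assumption.

from renku.core.management import LocalClient

client = LocalClient('.')  # assumed: path of an existing Renku project
with client.with_workflow_storage() as workflow:
    # Add steps to ``workflow`` here; when the block exits, each step is
    # serialized to <workflow_path>/<uuid>_<command>.cwl as shown above.
    ...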
Example #2
def create(client, output_file, revision, paths):
    """Create a workflow description for a file."""
    # ``os``, ``yaml`` and ``ascwl`` are module-level imports in the original
    # source; ``Graph`` comes from Renku's internal graph module.
    import os

    import yaml
    from renku.core.models.cwl.ascwl import ascwl

    graph = Graph(client)
    outputs = graph.build(paths=paths, revision=revision)

    output_file.write(
        yaml.dump(
            ascwl(
                graph.ascwl(outputs=outputs),
                filter=lambda _, x: x is not None and x != [],
                basedir=os.path.dirname(getattr(output_file, 'name', '.'))
                or '.',
            ),
            default_flow_style=False,
        ))
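
All of these examples share one serialization idiom: ``ascwl`` turns a CWL object into plain dictionaries, a ``filter`` drops attributes that are ``None`` (and, here, empty lists), and ``yaml.dump`` writes the result as block-style YAML (``default_flow_style=False``). A sketch of that idiom as a standalone helper; ``dump_cwl`` is a hypothetical name, not part of Renku:

import yaml

from renku.core.models.cwl.ascwl import ascwl


def dump_cwl(cwl_object, stream, basedir='.'):
    """Serialize ``cwl_object`` as YAML on ``stream``, dropping empty values."""
    yaml.dump(
        ascwl(
            cwl_object,
            filter=lambda _, x: x is not None and x != [],
            basedir=basedir,
        ),
        stream=stream,
        default_flow_style=False,
    )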
Example #3
def rerun(client, revision, roots, siblings, inputs, paths):
    """Recreate files generated by a sequence of ``run`` commands."""
    graph = Graph(client)
    outputs = graph.build(paths=paths, revision=revision)

    # Check or extend siblings of outputs.
    outputs = siblings(graph, outputs)
    output_paths = {node.path for node in outputs}

    # Normalize and check all starting paths.
    roots = {graph.normalize_path(root) for root in roots}
    assert not roots & output_paths, '--from collides with output paths'

    # Generate workflow and check inputs.
    # NOTE The workflow creation is done before opening a new file.
    workflow = inputs(
        client,
        graph.ascwl(
            input_paths=roots,
            output_paths=output_paths,
            outputs=outputs,
        ))

    # Don't compute paths if storage is disabled.
    if client.has_external_storage:
        # Make sure all inputs are pulled from a storage.
        paths_ = (
            path
            for _, path in workflow.iter_input_files(client.workflow_path))
        client.pull_paths_from_storage(*paths_)

    # Store the generated workflow used for updating paths.
    import uuid
    import yaml

    output_file = client.workflow_path / '{0}.cwl'.format(uuid.uuid4().hex)
    with output_file.open('w') as f:
        f.write(
            yaml.dump(
                ascwl(
                    workflow,
                    filter=lambda _, x: x is not None,
                    basedir=client.workflow_path,
                ),
                default_flow_style=False,
            ))

    # Execute the workflow and relocate all output files.
    # FIXME get new output paths for edited tools
    # output_paths = {path for _, path in workflow.iter_output_files()}
    execute(
        client,
        output_file,
        output_paths=output_paths,
    )
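
In outline, ``rerun`` rebuilds the dependency graph for the requested paths at the given revision, lets the ``siblings`` callable extend the chosen outputs with their sibling files, checks that the start paths given via ``--from`` (the ``roots`` argument) do not collide with the outputs, asks the graph for a CWL workflow connecting those roots to the outputs, pulls workflow inputs from external storage when it is enabled, writes the workflow to a uniquely named ``.cwl`` file under ``client.workflow_path``, and finally re-runs it through ``execute``. ``Graph``, ``ascwl`` and ``execute`` are module-level names in the original Renku source that this snippet does not show.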
Example #4
def test_modified_tool(runner, project, run):
    """Test detection of modified tool."""
    import yaml

    from renku.cli import cli
    from renku.core.management import LocalClient
    from renku.core.models.cwl import CWLClass  # import path assumed
    from renku.core.models.cwl.ascwl import ascwl

    client = LocalClient(project)
    repo = client.repo
    greeting = client.path / 'greeting.txt'

    assert 0 == run(args=('run', 'echo', 'hello'), stdout=greeting)

    cmd = ['status']
    result = runner.invoke(cli, cmd)
    assert 0 == result.exit_code

    # There should be only one command line tool.
    tools = list(client.workflow_path.glob('*_echo.cwl'))
    assert 1 == len(tools)

    tool_path = tools[0]
    with tool_path.open('r') as f:
        command_line_tool = CWLClass.from_cwl(yaml.safe_load(f))

    # Simulate a manual edit.
    command_line_tool.inputs[0].default = 'ahoj'
    command_line_tool.stdout = 'pozdrav.txt'

    with tool_path.open('w') as f:
        yaml.dump(
            ascwl(
                command_line_tool,
                filter=lambda _, x: x is not None,
                basedir=client.workflow_path,
            ),
            stream=f,
            default_flow_style=False,
        )

    repo.git.add('--all')
    repo.index.commit('Modified tool', skip_hooks=True)

    assert 0 == run()

    output = client.path / 'pozdrav.txt'
    assert output.exists()
    with output.open('r') as f:
        assert 'ahoj\n' == f.read()

    cmd = ['status']
    result = runner.invoke(cli, cmd)
    assert 0 == result.exit_code
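
``runner``, ``project`` and ``run`` appear to be pytest fixtures from Renku's test suite (a click CLI runner, a prepared project path, and a helper that invokes the renku CLI), so the test only runs inside that suite. The read side of the round-trip it performs can be distilled into a small helper; the name ``load_tool`` and the ``CWLClass`` import path are assumptions, while ``yaml.safe_load`` and ``CWLClass.from_cwl`` are the same calls the test uses. Writing the edited object back uses the ``ascwl``/``yaml.dump`` idiom sketched after Example #2.

import yaml

from renku.core.models.cwl import CWLClass  # import path assumed


def load_tool(tool_path):
    """Parse a generated .cwl file back into a ``CWLClass`` instance."""
    with tool_path.open('r') as f:
        return CWLClass.from_cwl(yaml.safe_load(f))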
Example #5
def update(client, revision, no_output, siblings, paths):
    """Update existing files by rerunning their outdated workflow."""
    # ``click`` and ``sys`` are module-level imports in the original source.
    import sys

    import click

    graph = Graph(client)
    outputs = graph.build(revision=revision, can_be_cwl=no_output, paths=paths)
    outputs = {node for node in outputs if graph.need_update(node)}

    if not outputs:
        click.secho('All files were generated from the latest inputs.',
                    fg='green')
        sys.exit(0)

    # Check or extend siblings of outputs.
    outputs = siblings(graph, outputs)
    output_paths = {node.path for node in outputs if _safe_path(node.path)}

    # Get all clean nodes.
    input_paths = {node.path for node in graph.nodes} - output_paths

    # Store the generated workflow used for updating paths.
    import uuid
    import yaml

    output_file = client.workflow_path / '{0}.cwl'.format(uuid.uuid4().hex)
    workflow = graph.ascwl(
        input_paths=input_paths,
        output_paths=output_paths,
        outputs=outputs,
    )

    # Don't compute paths if storage is disabled.
    if client.has_external_storage:
        # Make sure all inputs are pulled from a storage.
        paths_ = (
            path
            for _, path in workflow.iter_input_files(client.workflow_path))
        client.pull_paths_from_storage(*paths_)

    with output_file.open('w') as f:
        f.write(
            yaml.dump(
                ascwl(
                    workflow,
                    filter=lambda _, x: x is not None,
                    basedir=client.workflow_path,
                ),
                default_flow_style=False,
            ))

    execute(client, output_file, output_paths=output_paths)
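
``update`` shares its tail with ``rerun`` from Example #3: it keeps only the nodes that ``graph.need_update`` reports as outdated, exits early with a green status message when everything is current, derives the output paths from the (possibly extended) siblings, treats every remaining clean node as an input, serializes the generated workflow with the same ``ascwl``/``yaml.dump`` idiom, pulls inputs from external storage when it is enabled, and re-executes the workflow via ``execute``. ``Graph``, ``execute`` and ``_safe_path`` are module-level names from the original Renku source that the snippet does not show.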