def test_pipeline_get_envs():
    """Verify _get_envs converts each operation's env list into a dictionary.

    Every operation's resulting env dict must expose OP_NAME mapped to the
    operation's own name.
    """
    definitions = _read_pipeline_resource(
        'pipeline_dependency_complex.json')
    parsed_pipeline = PipelineParser().parse(definitions)

    processor = LocalPipelineProcessor()
    for operation in parsed_pipeline.operations.values():
        env_dict = processor._get_envs(operation)
        assert env_dict['OP_NAME'] == operation.name
# Example 2
def test_pipeline_execution_bad_notebook(pipeline_dir):
    """Run a 4-node pipeline where node3 (a notebook) is configured to fail.

    Nodes that complete before the failure must have written their outputs;
    the failing node and its downstream dependent must not have.
    """
    n1 = NotebookNode("node1", num_outputs=2)
    n2 = PythonNode("node2", num_outputs=2, input_nodes=[n1])
    n3 = NotebookNode("node3",
                      num_outputs=2,
                      input_nodes=[n1],
                      fail=True)
    n4 = NotebookNode("node4", num_outputs=2, input_nodes=[n2, n3])

    completed = [n1, n2]
    skipped = [n3, n4]

    pipeline = construct_pipeline("p1", nodes=[n1, n2, n3, n4],
                                  location=pipeline_dir)

    # Processing must surface the notebook failure as a RuntimeError.
    with pytest.raises(RuntimeError) as exc_info:
        LocalPipelineProcessor(pipeline_dir).process(pipeline)
    assert 'Error processing operation node3' in str(exc_info.value)

    # Nodes that ran before the failure left their outputs behind...
    for node in completed:
        for output in node.outputs:
            assert os.path.exists(os.path.join(pipeline_dir, output))

    # ...while the failed node and its dependent produced nothing.
    for node in skipped:
        for output in node.outputs:
            assert not os.path.exists(os.path.join(pipeline_dir, output))
# Example 3
def test_pipeline_execution(pipeline_dir):
    """Execute a diamond-shaped pipeline and check every declared output.

    Three notebooks and one python script: node1 feeds node2 and node3,
    which both feed node4.
    """
    n1 = NotebookNode("node1", num_outputs=2)
    n2 = PythonNode("node2", num_outputs=2, input_nodes=[n1])
    n3 = NotebookNode("node3", num_outputs=2, input_nodes=[n1])
    n4 = NotebookNode("node4", num_outputs=2, input_nodes=[n2, n3])
    all_nodes = [n1, n2, n3, n4]

    pipeline = construct_pipeline("p1", nodes=all_nodes,
                                  location=pipeline_dir)

    LocalPipelineProcessor(pipeline_dir).process(pipeline)

    # Every node ran successfully, so every declared output must exist.
    for node in all_nodes:
        for output in node.outputs:
            assert os.path.exists(os.path.join(pipeline_dir, output))
# Example 4
def test_pipeline_execution_missing_kernelspec(pipeline_dir):
    """Processing must fail clearly when a notebook has no kernelspec.

    Builds the same diamond-shaped 4-node pipeline, strips the kernelspec
    from node1's notebook on disk, and asserts the processor raises a
    RuntimeError naming the offending node and reason.
    """
    n1 = NotebookNode("node1", num_outputs=2)
    n2 = PythonNode("node2", num_outputs=2, input_nodes=[n1])
    n3 = NotebookNode("node3", num_outputs=2, input_nodes=[n1])
    n4 = NotebookNode("node4", num_outputs=2, input_nodes=[n2, n3])

    pipeline = construct_pipeline("p1", nodes=[n1, n2, n3, n4],
                                  location=pipeline_dir)

    # Remove the kernelspec metadata from node1's notebook to trigger the error.
    nb_path = os.path.join(pipeline_dir,
                           pipeline.operations[n1.id].filename)
    notebook = nbformat.read(nb_path, as_version=4)
    notebook['metadata'].pop('kernelspec')
    nbformat.write(notebook, nb_path)

    with pytest.raises(RuntimeError) as exc_info:
        LocalPipelineProcessor(pipeline_dir).process(pipeline)
    assert 'Error processing operation node1 (node1.ipynb): No kernel ' \
           'name found in notebook and no override provided.' in str(exc_info.value)