Code example #1
def test_pipeline_with_comments():
    pipeline_json = _read_pipeline_resource("resources/sample_pipelines/pipeline_3_node_sample_with_comments.json")
    pipeline = PipelineParser().parse(pipeline_json)
    assert (
        pipeline.operations["d52ddfb4-dd0e-47ac-abc7-fa30bb95d45c"].doc
        == "Generate community stats and then aggregate them on an overview dashboard"
    )
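All of these snippets come from Elyra's pipeline parser and pipeline-definition tests, so they rely on shared imports and a _read_pipeline_resource helper that the excerpts do not show. A minimal sketch of that shared setup, assuming Elyra-style module paths (the import paths and the helper body are assumptions, not the project's actual test code):

import json
import os
from typing import Any
from unittest import mock

import pytest

# Assumed import paths; the real Elyra modules may be organized differently.
from elyra.pipeline import pipeline_constants
from elyra.pipeline.parser import PipelineParser
from elyra.pipeline.pipeline import GenericOperation, KeyValueList
from elyra.pipeline.pipeline_constants import ENV_VARIABLES, KUBERNETES_SECRETS
from elyra.pipeline.pipeline_definition import PipelineDefinition
from elyra.pipeline.processor_local import LocalPipelineProcessor


def _read_pipeline_resource(pipeline_filename):
    # Hypothetical helper: load a pipeline JSON resource stored relative to the test module.
    resource_path = os.path.join(os.path.dirname(__file__), pipeline_filename)
    with open(resource_path) as f:
        return json.load(f)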
Code example #2
def test_supernode_pipeline():
    pipeline_json = _read_pipeline_resource("resources/sample_pipelines/pipeline_with_supernode.json")

    pipeline = PipelineParser().parse(pipeline_json)

    assert len(pipeline.operations) == 4

    # Confirm the structure of the pipeline:
    # Two execution nodes feed their outputs into a supernode that contains one execution node.
    # The supernode's execution node then sends its output to an external execution node,
    # for 4 nodes total. The supernode's execution node should have two parent operations
    # pointing at the first two nodes, and the final node should have one parent pointing
    # at the execution node WITHIN the supernode.

    external_input_node_ids = ["db9f3f5b-b2e3-4824-aadd-c1c6bf652534", "f6584209-6f22-434f-9820-41327b6c749d"]
    supernode_execution_node_id = "079c0e12-eb5f-4fcc-983b-09e011869fee"
    external_node_id = "7628306d-2cc2-405c-94a1-fe42c95567a1"

    for node_id in pipeline.operations:
        # Validate operations list
        if node_id in external_input_node_ids:
            # These are input nodes, ensure parent_operation_ids are empty
            assert len(pipeline.operations[node_id].parent_operation_ids) == 0
            continue
        if node_id == supernode_execution_node_id:
            # Node within supernode, should have two parent_ops matching external_input_node_ids
            assert len(pipeline.operations[node_id].parent_operation_ids) == 2
            assert set(pipeline.operations[node_id].parent_operation_ids) == set(external_input_node_ids)
            continue
        if node_id == external_node_id:
            # Final external node; its single parent op should be the execution node embedded in the supernode.
            assert len(pipeline.operations[node_id].parent_operation_ids) == 1
            assert pipeline.operations[node_id].parent_operation_ids[0] == supernode_execution_node_id
            continue
        assert False, "Invalid node_id encountered in pipeline operations!"
Code example #3
def test_missing_pipeline_name_should_default_to_untitled():
    pipeline_json = _read_pipeline_resource("resources/sample_pipelines/pipeline_valid.json")
    pipeline_json["pipelines"][0]["app_data"]["properties"].pop("name")

    pipeline = PipelineParser().parse(pipeline_json)

    assert pipeline.name == "untitled"
Code example #4
def test_valid_pipeline():
    pipeline_json = _read_pipeline_resource(
        "resources/sample_pipelines/pipeline_valid.json")

    pipeline_definition = PipelineDefinition(pipeline_definition=pipeline_json)

    assert pipeline_definition.is_valid()
Code example #5
def test_convert_kv_properties(monkeypatch):
    kv_test_property_name = "kv_test_property"
    pipeline_json = _read_pipeline_resource(
        "resources/sample_pipelines/pipeline_valid_with_pipeline_default.json")

    # Mock get_kv_properties() to ensure the "kv_test_property" variable is included in the list
    mock_kv_property_list = [
        pipeline_constants.ENV_VARIABLES, kv_test_property_name
    ]
    monkeypatch.setattr(PipelineDefinition, "get_kv_properties",
                        mock.Mock(return_value=mock_kv_property_list))

    pipeline_definition = PipelineDefinition(pipeline_definition=pipeline_json)

    node = pipeline_definition.primary_pipeline.nodes.pop()
    pipeline_defaults = pipeline_definition.primary_pipeline.get_property(
        pipeline_constants.PIPELINE_DEFAULTS)

    for kv_property in mock_kv_property_list:
        assert isinstance(node.get_component_parameter(kv_property),
                          KeyValueList)
        assert isinstance(pipeline_defaults[kv_property], KeyValueList)

    # Ensure a non-list property is not converted to a KeyValueList
    assert not isinstance(
        pipeline_definition.primary_pipeline.get_property(
            pipeline_constants.RUNTIME_IMAGE), KeyValueList)

    # Ensure plain list property is not converted to a KeyValueList
    assert not isinstance(node.get_component_parameter("outputs"),
                          KeyValueList)
Code example #6
def _check_pipeline_correct_pipeline_alternative_name():
    pipeline_json = _read_pipeline_resource(
        "resources/sample_pipelines/pipeline_valid_alternative_name.json")
    pipeline_definition = PipelineDefinition(pipeline_definition=pipeline_json)

    primary_pipeline = pipeline_definition.primary_pipeline

    assert primary_pipeline.name == "{{alternative_name}}"
Code example #7
def test_pipeline_with_dependencies():
    pipeline_json = _read_pipeline_resource(
        "resources/sample_pipelines/pipeline_3_node_sample_with_dependencies.json"
    )

    pipeline = PipelineParser().parse(pipeline_json)

    assert len(pipeline.operations["acc4527d-7cc8-4c16-b520-5aa0f50a2e34"].parent_operation_ids) == 2
Code example #8
def test_missing_pipeline_runtime_configuration():
    pipeline_json = _read_pipeline_resource("resources/sample_pipelines/pipeline_valid.json")
    pipeline_json["pipelines"][0]["app_data"].pop("runtime_config")

    with pytest.raises(ValueError) as e:
        PipelineParser().parse(pipeline_json)

    assert "Invalid pipeline: Missing runtime configuration" in str(e.value)
Code example #9
def test_missing_operation_id():
    pipeline_json = _read_pipeline_resource("resources/sample_pipelines/pipeline_valid.json")
    pipeline_json["pipelines"][0]["nodes"][0].pop("id")

    with pytest.raises(ValueError) as e:
        PipelineParser().parse(pipeline_json)

    assert "Missing field 'operation id'" in str(e.value)
Code example #10
def test_pipeline_global_attributes():
    pipeline_json = _read_pipeline_resource("resources/sample_pipelines/pipeline_valid.json")

    pipeline = PipelineParser().parse(pipeline_json)

    assert pipeline.name == "{{name}}"
    assert pipeline.runtime == "{{runtime}}"
    assert pipeline.runtime_config == "{{runtime-config}}"
Code example #11
def test_missing_operation_type():
    pipeline_json = _read_pipeline_resource("resources/sample_pipelines/pipeline_valid.json")
    pipeline_json["pipelines"][0]["nodes"][0].pop("type")

    with pytest.raises(ValueError) as e:
        PipelineParser().parse(pipeline_json)

    assert "Node type 'None' is invalid!" in str(e.value)
Code example #12
def test_invalid_node_type():
    pipeline_json = _read_pipeline_resource("resources/sample_pipelines/pipeline_valid.json")
    pipeline_json["pipelines"][0]["nodes"][0]["type"] = "foo"

    with pytest.raises(ValueError) as e:
        PipelineParser().parse(pipeline_json)

    assert "Node type 'foo' is invalid!" in str(e.value)
Code example #13
def test_missing_operation_image():
    pipeline_json = _read_pipeline_resource("resources/sample_pipelines/pipeline_valid.json")
    pipeline_json["pipelines"][0]["nodes"][0]["app_data"]["component_parameters"].pop("runtime_image")

    with pytest.raises(ValueError) as e:
        PipelineParser().parse(pipeline_json)

    assert "Missing field 'operation runtime image'" in str(e.value)
Code example #14
def test_pipeline_operations_and_handle_artifact_file_details():
    pipeline_json = _read_pipeline_resource("resources/sample_pipelines/pipeline_3_node_sample.json")

    pipeline = PipelineParser().parse(pipeline_json)

    assert len(pipeline.operations) == 3

    for op in pipeline.operations.values():
        assert "." not in op.name
Code example #15
def _check_missing_primary_pipeline_field(field: str, error_msg: str):
    pipeline_json = _read_pipeline_resource(
        "resources/sample_pipelines/pipeline_valid.json")
    pipeline_json["pipelines"][0].pop(field)

    pipeline_definition = PipelineDefinition(pipeline_definition=pipeline_json)

    assert pipeline_definition.is_valid() is False
    assert error_msg in pipeline_definition.validate()
Code example #16
def test_updates_to_nodes_updates_pipeline_definition():
    pipeline_json = _read_pipeline_resource(
        "resources/sample_pipelines/pipeline_valid.json")

    pipeline_definition = PipelineDefinition(pipeline_definition=pipeline_json)
    for node in pipeline_definition.primary_pipeline.nodes:
        node.set_component_parameter("filename", "foo")

    for node in pipeline_definition.to_dict()["pipelines"][0]["nodes"]:
        assert node["app_data"]["component_parameters"]["filename"] == "foo"
Code example #17
def test_updates_to_primary_pipeline_updates_pipeline_definition():
    pipeline_json = _read_pipeline_resource(
        "resources/sample_pipelines/pipeline_valid.json")

    pipeline_definition = PipelineDefinition(pipeline_definition=pipeline_json)
    pipeline_definition.primary_pipeline.set("version", 3)

    assert pipeline_definition.primary_pipeline.version == 3
    assert pipeline_definition.to_dict()["pipelines"][0]["app_data"]["version"] == 3
Code example #18
def test_pipeline_with_dirty_list_values(valid_operation):
    pipeline_json = _read_pipeline_resource("resources/sample_pipelines/pipeline_with_invalid_list_values.json")

    pipeline = PipelineParser().parse(pipeline_json)

    assert pipeline.name == "{{name}}"
    assert pipeline.runtime == "{{runtime}}"
    assert pipeline.runtime_config == "{{runtime-config}}"
    assert len(pipeline.operations) == 1
    assert pipeline.operations["{{uuid}}"] == valid_operation
Code example #19
def test_validation_flags_missing_version_field():
    pipeline_json = _read_pipeline_resource(
        "resources/sample_pipelines/pipeline_valid.json")
    pipeline_json["pipelines"][0]["app_data"].pop("version")

    pipeline_definition = PipelineDefinition(pipeline_definition=pipeline_json)

    assert pipeline_definition.is_valid() is False
    assert "Primary pipeline is missing the 'version' field." in pipeline_definition.validate(
    )
Code example #20
def _check_pipeline_field_type(field: str, wrong_type_value: Any, error_msg: str):
    pipeline_json = _read_pipeline_resource(
        "resources/sample_pipelines/pipeline_valid.json")
    pipeline_json.pop(field)
    pipeline_json[field] = wrong_type_value

    pipeline_definition = PipelineDefinition(pipeline_definition=pipeline_json)

    assert pipeline_definition.is_valid() is False
    assert error_msg in pipeline_definition.validate()
Code example #21
def test_pipeline_get_envs():
    # Ensure pipeline operation env lists are properly converted to dictionaries.
    pipeline_json = _read_pipeline_resource(
        "resources/sample_pipelines/pipeline_dependency_complex.json")

    pipeline = PipelineParser().parse(pipeline_json)

    for op in pipeline.operations.values():
        assert isinstance(op, GenericOperation)
        op_envs = op.env_vars.to_dict()
        assert op_envs["OP_NAME"] == op.name
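Code example #21 relies on KeyValueList.to_dict() to turn "KEY=value" entries into a dictionary. A tiny standalone illustration of that conversion (the variable values are made up, and the import path is the same assumption as in the sketch above):

from elyra.pipeline.pipeline import KeyValueList  # assumed import path

# Each entry is a "KEY=value" string; to_dict() splits it into a key/value pair.
env_vars = KeyValueList(["OP_NAME=load-data", "LOG_LEVEL=debug"])
assert env_vars.to_dict() == {"OP_NAME": "load-data", "LOG_LEVEL": "debug"}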
Code example #22
def test_remove_env_vars_with_matching_secrets():
    pipeline_json = _read_pipeline_resource(
        "resources/sample_pipelines/pipeline_valid_with_pipeline_default.json")
    pipeline_definition = PipelineDefinition(pipeline_definition=pipeline_json)
    node = pipeline_definition.primary_pipeline.nodes.pop()

    # Set kubernetes_secret property to have all the same keys as those in the env_vars property
    kubernetes_secrets = KeyValueList(
        ["var1=name1:key1", "var2=name2:key2", "var3=name3:key3"])
    node.set_component_parameter(KUBERNETES_SECRETS, kubernetes_secrets)

    node.remove_env_vars_with_matching_secrets()
    assert node.get_component_parameter(ENV_VARIABLES) == []
Code example #23
def test_pipeline_execution_order_in_simple_pipeline():
    expected_operation_names = ["f", "a", "c", "g"]
    pipeline_json = _read_pipeline_resource(
        "resources/sample_pipelines/pipeline_dependency_simple.json")

    pipeline = PipelineParser().parse(pipeline_json)
    current_ordered_operation_names = _get_operation_names(
        pipeline.operations.values())
    assert current_ordered_operation_names != expected_operation_names

    operations = LocalPipelineProcessor._sort_operations(
        operations_by_id=pipeline.operations)

    ordered_operation_names = _get_operation_names(operations)

    assert ordered_operation_names == expected_operation_names
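Code example #23 calls a _get_operation_names helper that the excerpt does not define. A plausible sketch is simply collecting each operation's name in iteration order (an assumption, not Elyra's actual helper):

def _get_operation_names(operations):
    # Hypothetical helper: return the operation names in the order the operations are given.
    return [operation.name for operation in operations]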
Code example #24
def test_propagate_pipeline_default_properties(monkeypatch):
    kv_list_correct = ["var1=var1", "var2=var2", "var3=var_three"]
    kv_test_property_name = "kv_test_property"
    pipeline_json = _read_pipeline_resource(
        "resources/sample_pipelines/pipeline_valid_with_pipeline_default.json")

    # Mock get_kv_properties() to ensure the "kv_test_property" variable is included in the list
    mock_kv_property_list = [
        pipeline_constants.ENV_VARIABLES, kv_test_property_name
    ]
    monkeypatch.setattr(PipelineDefinition, "get_kv_properties",
                        mock.Mock(return_value=mock_kv_property_list))

    pipeline_definition = PipelineDefinition(pipeline_definition=pipeline_json)
    node = pipeline_definition.primary_pipeline.nodes.pop()
    assert node.get_component_parameter(
        pipeline_constants.ENV_VARIABLES) == kv_list_correct
    assert node.get_component_parameter(
        kv_test_property_name) == kv_list_correct
Code example #25
def test_multinode_pipeline():
    pipeline_json = _read_pipeline_resource("resources/sample_pipelines/pipeline_3_node_sample.json")

    pipeline = PipelineParser().parse(pipeline_json)

    assert len(pipeline.operations) == 3
Code example #26
def test_multiple_pipeline_definition():
    pipeline_json = _read_pipeline_resource("resources/sample_pipelines/pipeline_multiple_pipeline_definitions.json")

    with pytest.raises(ValueError):
        PipelineParser().parse(pipeline_json)
Code example #27
File: test_validation.py  Project: marthacryan/elyra
    def _function(pipeline_filepath):
        response = ValidationResponse()

        pipeline = _read_pipeline_resource(
            f"resources/validation_pipelines/{pipeline_filepath}")
        return pipeline, response
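The snippet above is only the inner factory of a pytest fixture in test_validation.py. A hedged sketch of how such a fixture would typically be declared and consumed (the fixture name load_pipeline, the sample test, and the pipeline file name are hypothetical; ValidationResponse is assumed to come from elyra.pipeline.validation):

import pytest

from elyra.pipeline.validation import ValidationResponse  # assumed import path


@pytest.fixture
def load_pipeline():
    def _function(pipeline_filepath):
        response = ValidationResponse()

        pipeline = _read_pipeline_resource(f"resources/validation_pipelines/{pipeline_filepath}")
        return pipeline, response

    return _function


def test_pipeline_resource_loads(load_pipeline):
    # Hypothetical usage: the fixture returns the parsed pipeline JSON plus a fresh response object.
    pipeline, response = load_pipeline("some_pipeline.pipeline")
    assert pipeline is not None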