Example no. 1
def test_load_yaml():
    assert load_yaml_from_path(
        file_relative_path(__file__, 'yamls/yaml_one.yaml')) == {
            'key_one': {
                'key_one_one': 'value_one'
            }
        }
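For this assertion to hold, the yamls/yaml_one.yaml fixture presumably contains a single nested mapping. A minimal sketch of that content, checked here with PyYAML for illustration (load_yaml_from_path is assumed to return the same plain dict that yaml.safe_load would):

import yaml  # PyYAML

# Presumed content of yamls/yaml_one.yaml, inferred from the expected dict above.
YAML_ONE = """
key_one:
  key_one_one: value_one
"""

assert yaml.safe_load(YAML_ONE) == {'key_one': {'key_one_one': 'value_one'}}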
Example no. 2
def test_pipelines_success(file_path, run_config_path):

    with pushd(
            file_relative_path(__file__,
                               "../../../docs_snippets/legacy/data_science/")):
        with instance_for_test() as instance:
            run_config = load_yaml_from_path(
                run_config_path) if run_config_path else {}
            recon_pipeline = ReconstructablePipeline.for_file(
                file_path, "iris_classify")

            with tempfile.TemporaryDirectory() as temp_dir:
                run_config["resources"] = {
                    "io_manager": {
                        "config": {
                            "base_dir": temp_dir
                        }
                    }
                }
                pipeline_result = execute_pipeline(
                    recon_pipeline,
                    run_config=run_config,
                    instance=instance,
                    solid_selection=["k_means_iris"],  # skip download_file in tests
                )
                assert pipeline_result.success
Example no. 3
def test_load_yaml():
    assert load_yaml_from_path(
        script_relative_path('yamls/yaml_one.yaml')) == {
            'key_one': {
                'key_one_one': 'value_one'
            }
        }
Example no. 4
def test_load_yaml():
    assert load_yaml_from_path(
        file_relative_path(__file__, "yamls/yaml_one.yaml")) == {
            "key_one": {
                "key_one_one": "value_one"
            }
        }
Example no. 5
def test_demo_configuration_schema_pipeline_runtime_error():
    with pytest.raises(DagsterExecutionStepExecutionError) as e_info:
        execute_pipeline(
            configuration_schema_pipeline,
            load_yaml_from_path(intro_tutorial_path('configuration_schemas_bad_config.yaml')),
        )

    assert isinstance(e_info.value.__cause__, TypeError)
Example no. 6
def test_demo_configuration_schema_pipeline_wrong_field():
    with pytest.raises(
        PipelineConfigEvaluationError,
        match=('Undefined field "multiply_the_word_with_typed_config" at path ' 'root:solids'),
    ):
        execute_pipeline(
            define_demo_configuration_schema_pipeline(),
            load_yaml_from_path(intro_tutorial_path('configuration_schemas_wrong_field.yml')),
        )
Example no. 7
def test_demo_configuration_schema_pipeline_runtime_error():
    with pytest.raises(TypeError):
        execute_pipeline(
            define_demo_configuration_schema_pipeline(),
            load_yaml_from_path(
                script_relative_path(
                    '../../../dagster/tutorials/intro_tutorial/configuration_schemas_runtime_error.yml'
                )),
        )
Example no. 8
def test_typed_demo_configuration_schema_pipeline_correct_yaml():
    result = execute_pipeline(
        define_typed_demo_configuration_schema_pipeline(),
        load_yaml_from_path(intro_tutorial_path('configuration_schemas_typed.yml')),
    )
    assert result.success
    assert len(result.solid_result_list) == 2
    count_letters_result = result.result_for_solid('count_letters').transformed_value()
    expected_value = {'q': 2, 'u': 4, 'x': 2}
    assert set(count_letters_result.keys()) == set(expected_value.keys())
    for key, value in expected_value.items():
        assert count_letters_result[key] == value
    assert result.result_for_solid('typed_multiply_the_word').transformed_value() == 'quuxquux'
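The expected counts follow from the asserted multiplied word: presumably configuration_schemas_typed.yml sets the word to 'quux' and the factor to 2, so typed_multiply_the_word yields 'quuxquux' and count_letters tallies its characters. A quick check of that arithmetic (the config values are an assumption inferred from the assertions above):

from collections import Counter

# 'quux' * 2 == 'quuxquux'; counting its letters reproduces expected_value.
assert Counter('quux' * 2) == {'q': 2, 'u': 4, 'x': 2}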
Example no. 9
def test_typed_demo_configuration_schema_type_mismatch_error():
    with pytest.raises(
            PipelineConfigEvaluationError,
            match=('Type failure at path "root:solids:multiply_the_word:config:factor" on type '
                   '"Int"'),
    ):
        execute_pipeline(
            typed_configuration_schema_pipeline,
            load_yaml_from_path(
                script_relative_path(
                    ('../../dagster_examples/intro_tutorial/'
                     'configuration_schemas_bad_config.yaml'))),
        )
Example no. 10
def test_typed_demo_configuration_schema_type_mismatch_error():
    with pytest.raises(
            PipelineConfigEvaluationError,
            match=('Type failure at path "root:solids:typed_multiply_the_word:config:factor" on type '
                   '"Int". Got "\'not_a_number\'"'),
    ):
        execute_pipeline(
            define_typed_demo_configuration_schema_pipeline(),
            load_yaml_from_path(
                script_relative_path(
                    ('../../../dagster/tutorials/intro_tutorial/'
                     'configuration_schemas_type_mismatch_error.yml'))),
        )
Example no. 11
def test_pipelines_success(file_path, run_config_path):

    with pushd(
            file_relative_path(__file__,
                               '../../../docs_snippets/legacy/data_science/')):
        instance = DagsterInstance.local_temp()
        run_config = load_yaml_from_path(
            run_config_path) if run_config_path else None
        recon_pipeline = ReconstructablePipeline.for_file(
            file_path, 'iris_pipeline')

        pipeline_result = execute_pipeline(recon_pipeline,
                                           run_config=run_config,
                                           instance=instance)
        assert pipeline_result.success
Example no. 12
def test_pipelines_success(file_path, run_config_path):

    with pushd(
            file_relative_path(__file__,
                               "../../../docs_snippets/legacy/data_science/")):
        instance = DagsterInstance.local_temp()
        run_config = load_yaml_from_path(
            run_config_path) if run_config_path else None
        recon_pipeline = ReconstructablePipeline.for_file(
            file_path, "iris_pipeline")

        pipeline_result = execute_pipeline(
            recon_pipeline,
            run_config=run_config,
            instance=instance,
            solid_selection=["k_means_iris"],  # skip download_file in tests
        )
        assert pipeline_result.success
Example no. 13
def construct_graph_with_yaml(yaml_file, op_defs) -> GraphDefinition:
    yaml_data = load_yaml_from_path(yaml_file)

    deps = {}

    for op_yaml_data in yaml_data["ops"]:
        def_name = op_yaml_data["def"]
        alias = op_yaml_data.get("alias", def_name)
        op_deps_entry = {}
        for input_name, input_data in op_yaml_data.get("deps", {}).items():
            op_deps_entry[input_name] = DependencyDefinition(
                solid=input_data["op"],
                output=input_data.get("output", "result"))
        deps[NodeInvocation(name=def_name, alias=alias)] = op_deps_entry

    return GraphDefinition(
        name=yaml_data["name"],
        description=yaml_data.get("description"),
        node_defs=op_defs,
        dependencies=deps,
    )
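Judging from the keys this helper reads (a top-level name and description, plus an ops list whose entries carry def, an optional alias, and a deps mapping from input name to upstream op and output), a YAML file it accepts presumably looks like the sketch below; the graph and op names are made up for illustration:

import yaml  # PyYAML

# Hypothetical YAML in the shape construct_graph_with_yaml expects.
EXAMPLE_GRAPH_YAML = """
name: example_graph
description: Hypothetical graph wiring two ops together.
ops:
  - def: return_one
  - def: add_one
    alias: add_one_aliased
    deps:
      num:
        op: return_one
        output: result
"""

# Parsing it back shows the structure the helper walks over.
yaml_data = yaml.safe_load(EXAMPLE_GRAPH_YAML)
assert yaml_data["name"] == "example_graph"
assert yaml_data["ops"][1]["deps"]["num"]["op"] == "return_one"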
Example no. 14
def test_docker_monitoring():
    docker_image = get_test_project_docker_image()

    launcher_config = {
        "env_vars": [
            "AWS_ACCESS_KEY_ID",
            "AWS_SECRET_ACCESS_KEY",
        ],
        "networks": ["container:test-postgres-db-docker"],
        "container_kwargs": {
            # "auto_remove": True,
            "volumes": ["/var/run/docker.sock:/var/run/docker.sock"],
        },
    }

    if IS_BUILDKITE:
        launcher_config["registry"] = get_buildkite_registry_config()
    else:
        find_local_test_image(docker_image)

    run_config = merge_dicts(
        load_yaml_from_path(
            os.path.join(get_test_project_environments_path(), "env_s3.yaml")),
        {
            "solids": {
                "multiply_the_word_slow": {
                    "inputs": {
                        "word": "bar"
                    },
                    "config": {
                        "factor": 2,
                        "sleep_time": 20
                    },
                }
            },
            "execution": {
                "docker": {
                    "config": {}
                }
            },
        },
    )

    with docker_postgres_instance({
            "run_monitoring": {
                "enabled": True
            },
            "run_launcher": {
                "class": "DockerRunLauncher",
                "module": "dagster_docker",
                "config": launcher_config,
            },
    }) as instance:
        recon_pipeline = get_test_project_recon_pipeline(
            "demo_pipeline_docker_slow", docker_image)
        with get_test_project_workspace_and_external_pipeline(
                instance,
                "demo_pipeline_docker_slow",
                container_image=docker_image) as (
                    workspace,
                    orig_pipeline,
                ):
            with start_daemon():
                external_pipeline = ReOriginatedExternalPipelineForTest(
                    orig_pipeline, container_image=docker_image)

                run = instance.create_run_for_pipeline(
                    pipeline_def=recon_pipeline.get_definition(),
                    run_config=run_config,
                    external_pipeline_origin=external_pipeline.get_external_origin(),
                    pipeline_code_origin=external_pipeline.get_python_origin(),
                )

                with log_run_events(instance, run.run_id):

                    instance.launch_run(run.run_id, workspace)

                    start_time = time.time()
                    while time.time() - start_time < 60:
                        run = instance.get_run_by_id(run.run_id)
                        if run.status == PipelineRunStatus.STARTED:
                            break
                        assert run.status == PipelineRunStatus.STARTING
                        time.sleep(1)

                    time.sleep(3)

                    instance.run_launcher._get_container(  # pylint:disable=protected-access
                        instance.get_run_by_id(run.run_id)).stop()

                    # daemon resumes the run
                    poll_for_finished_run(instance, run.run_id, timeout=90)
                    assert instance.get_run_by_id(
                        run.run_id).status == PipelineRunStatus.SUCCESS
Example no. 15
def test_demo_configuration_schema_pipeline_runtime_error():
    with pytest.raises(TypeError):
        execute_pipeline(
            define_demo_configuration_schema_pipeline(),
            load_yaml_from_path(intro_tutorial_path('configuration_schemas_runtime_error.yml')),
        )