Example No. 1
def test_error_dag_k8s():
    pipeline_name = 'demo_error_pipeline'
    handle = ExecutionTargetHandle.for_pipeline_module(
        'dagster_airflow_tests.test_project.dagster_airflow_demo',
        pipeline_name)
    environment_yaml = [
        script_relative_path('test_project/env_s3.yaml'),
    ]
    environment_dict = load_yaml_from_glob_list(environment_yaml)

    run_id = str(uuid.uuid4())
    execution_date = timezone.utcnow()

    dag, tasks = make_airflow_dag_kubernetized_for_handle(
        handle=handle,
        pipeline_name=pipeline_name,
        image=IMAGE,
        namespace='default',
        environment_dict=environment_dict,
    )

    with pytest.raises(AirflowException) as exc_info:
        execute_tasks_in_dag(dag, tasks, run_id, execution_date)

    assert 'Exception: Unusual error' in str(exc_info.value)
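
These listings omit their import headers. A plausible header for Example No. 1, assuming the legacy dagster / dagster-airflow APIs its identifiers point to (module paths are inferred, not verified against a specific release; IMAGE and execute_tasks_in_dag are test-suite locals, a Docker image constant and a helper that runs each Airflow task and collects its Dagster events):

# Inferred imports for Example No. 1; module paths are assumptions based
# on the identifiers used above, not a verified header.
import uuid

import pytest
from airflow.exceptions import AirflowException
from airflow.utils import timezone

from dagster import ExecutionTargetHandle
from dagster.utils import load_yaml_from_glob_list, script_relative_path
from dagster_airflow.factory import make_airflow_dag_kubernetized_for_handle
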
Example No. 2
def test_error_dag_k8s(dagster_docker_image, cluster_provider):
    print('--- :airflow: test_kubernetes.test_error_dag_k8s')
    _check_aws_creds_available()

    pipeline_name = 'demo_error_pipeline'
    handle = ReconstructableRepository.for_module('test_pipelines.repo',
                                                  pipeline_name)
    environments_path = test_project_environments_path()
    environment_yaml = [
        os.path.join(environments_path, 'env_s3.yaml'),
    ]
    environment_dict = load_yaml_from_glob_list(environment_yaml)

    run_id = make_new_run_id()
    execution_date = timezone.utcnow()

    dag, tasks = make_airflow_dag_kubernetized_for_handle(
        handle=handle,
        pipeline_name=pipeline_name,
        image=dagster_docker_image,
        namespace='default',
        environment_dict=environment_dict,
        op_kwargs={
            'config_file': os.environ['KUBECONFIG'],
            'env_vars': {
                'AWS_ACCESS_KEY_ID': os.environ['AWS_ACCESS_KEY_ID'],
                'AWS_SECRET_ACCESS_KEY': os.environ['AWS_SECRET_ACCESS_KEY'],
            },
        },
    )

    with pytest.raises(AirflowException) as exc_info:
        execute_tasks_in_dag(dag, tasks, run_id, execution_date)

    assert 'Exception: Unusual error' in str(exc_info.value)
Example No. 3
def test_error_dag_k8s(dagster_docker_image, cluster_provider):
    _check_aws_creds_available()

    pipeline_name = "demo_error_pipeline"
    recon_repo = ReconstructableRepository.for_module(
        "dagster_test.test_project.test_pipelines.repo",
        "define_demo_execution_repo")
    environments_path = test_project_environments_path()
    environment_yaml = [
        os.path.join(environments_path, "env_s3.yaml"),
    ]
    run_config = load_yaml_from_glob_list(environment_yaml)

    run_id = make_new_run_id()
    execution_date = timezone.utcnow()

    dag, tasks = make_airflow_dag_kubernetized_for_recon_repo(
        recon_repo=recon_repo,
        pipeline_name=pipeline_name,
        image=dagster_docker_image,
        namespace="default",
        run_config=run_config,
        op_kwargs={
            "config_file": os.environ["KUBECONFIG"],
            "env_vars": {
                "AWS_ACCESS_KEY_ID": os.environ["AWS_ACCESS_KEY_ID"],
                "AWS_SECRET_ACCESS_KEY": os.environ["AWS_SECRET_ACCESS_KEY"],
            },
        },
    )

    with pytest.raises(AirflowException) as exc_info:
        execute_tasks_in_dag(dag, tasks, run_id, execution_date)

    assert "Exception: Unusual error" in str(exc_info.value)
Example No. 4
def test_error_dag_k8s(
    dagster_docker_image, environments_path
):  # pylint: disable=redefined-outer-name
    pipeline_name = 'demo_error_pipeline'
    handle = ExecutionTargetHandle.for_pipeline_module('test_pipelines', pipeline_name)
    environment_yaml = [
        os.path.join(environments_path, 'env_s3.yaml'),
    ]
    environment_dict = load_yaml_from_glob_list(environment_yaml)

    run_id = make_new_run_id()
    execution_date = timezone.utcnow()

    dag, tasks = make_airflow_dag_kubernetized_for_handle(
        handle=handle,
        pipeline_name=pipeline_name,
        image=dagster_docker_image,
        namespace='default',
        environment_dict=environment_dict,
    )

    with pytest.raises(AirflowException) as exc_info:
        execute_tasks_in_dag(dag, tasks, run_id, execution_date)

    assert 'Exception: Unusual error' in str(exc_info.value)
Example No. 5
def test_error_dag_python(environments_path):  # pylint: disable=redefined-outer-name
    pipeline_name = 'demo_error_pipeline'
    handle = ExecutionTargetHandle.for_pipeline_module('test_pipelines', pipeline_name)
    environment_yaml = [
        os.path.join(environments_path, 'env_filesystem.yaml'),
    ]
    environment_dict = load_yaml_from_glob_list(environment_yaml)
    execution_date = timezone.utcnow()

    dag, tasks = make_airflow_dag_for_handle(handle, pipeline_name, environment_dict)

    with pytest.raises(AirflowException) as exc_info:
        execute_tasks_in_dag(dag, tasks, run_id=str(uuid.uuid4()), execution_date=execution_date)

    assert 'Exception: Unusual error' in str(exc_info.value)
Example No. 6
def test_error_dag_python():  # pylint: disable=redefined-outer-name
    pipeline_name = 'demo_error_pipeline'
    recon_repo = ReconstructableRepository.for_module('test_pipelines.repo', pipeline_name)
    environments_path = test_project_environments_path()
    environment_yaml = [
        os.path.join(environments_path, 'env_filesystem.yaml'),
    ]
    run_config = load_yaml_from_glob_list(environment_yaml)
    execution_date = timezone.utcnow()

    dag, tasks = make_airflow_dag_for_recon_repo(recon_repo, pipeline_name, run_config)

    with pytest.raises(AirflowException) as exc_info:
        execute_tasks_in_dag(dag, tasks, run_id=make_new_run_id(), execution_date=execution_date)

    assert 'Exception: Unusual error' in str(exc_info.value)
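
Every error-DAG example above asserts on the message 'Exception: Unusual error', so demo_error_pipeline presumably consists of a single solid that raises exactly that exception. A minimal sketch under that assumption, using the legacy solid/pipeline API these examples imply (not the verified test-project source):

# Hedged sketch of demo_error_pipeline, inferred from the assertion
# 'Exception: Unusual error' in the tests above.
from dagster import pipeline, solid


@solid
def error_solid(_context):
    raise Exception('Unusual error')


@pipeline
def demo_error_pipeline():
    error_solid()
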
Example No. 7
def test_airflow_execution_date_tags_containerized(dagster_docker_image,):  # pylint: disable=redefined-outer-name, unused-argument
    pipeline_name = "demo_airflow_execution_date_pipeline"
    recon_repo = ReconstructableRepository.for_module(
        "dagster_test.test_project.test_pipelines.repo",
        "define_demo_execution_repo")
    environments_path = test_project_environments_path()
    environment_yaml = [
        os.path.join(environments_path, "env_s3.yaml"),
    ]
    run_config = load_yaml_from_glob_list(environment_yaml)

    execution_date = timezone.utcnow()

    dag, tasks = make_airflow_dag_containerized_for_recon_repo(
        recon_repo, pipeline_name, dagster_docker_image, run_config)

    results = execute_tasks_in_dag(dag,
                                   tasks,
                                   run_id=make_new_run_id(),
                                   execution_date=execution_date)

    materialized_airflow_execution_date = None
    for result in results.values():
        for event in result:
            if event.event_type_value == "STEP_MATERIALIZATION":
                materialization = event.event_specific_data.materialization
                materialization_entry = materialization.metadata_entries[0]
                materialized_airflow_execution_date = materialization_entry.entry_data.text

    assert execution_date.isoformat() == materialized_airflow_execution_date
Example No. 8
def test_airflow_execution_date_tags():
    pipeline_name = 'demo_airflow_execution_date_pipeline'
    recon_repo = ReconstructableRepository.for_module(
        'dagster_test.test_project.test_pipelines.repo', pipeline_name)
    environments_path = test_project_environments_path()
    environment_yaml = [
        os.path.join(environments_path, 'env_filesystem.yaml'),
    ]
    run_config = load_yaml_from_glob_list(environment_yaml)
    execution_date = timezone.utcnow()

    dag, tasks = make_airflow_dag_for_recon_repo(recon_repo, pipeline_name,
                                                 run_config)

    results = execute_tasks_in_dag(dag,
                                   tasks,
                                   run_id=make_new_run_id(),
                                   execution_date=execution_date)

    materialized_airflow_execution_date = None
    for result in results.values():
        for event in result:
            if event.event_type_value == 'STEP_MATERIALIZATION':
                materialization = event.event_specific_data.materialization
                materialization_entry = materialization.metadata_entries[0]
                materialized_airflow_execution_date = materialization_entry.entry_data.text

    assert execution_date.isoformat() == materialized_airflow_execution_date
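
Examples No. 7 and No. 8 (and No. 11 below) read the first metadata entry of a STEP_MATERIALIZATION event and compare its text to execution_date.isoformat(), which implies a pipeline whose single solid echoes the Airflow execution date back as materialization metadata. A minimal sketch, assuming dagster-airflow injects the date as a run tag (the 'airflow_execution_date' tag key and the legacy Materialization / EventMetadataEntry API are assumptions, not verified source):

# Hedged sketch of demo_airflow_execution_date_pipeline; the tag key and
# event APIs are inferred from the assertions in the tests above.
from dagster import EventMetadataEntry, Materialization, Output, pipeline, solid


@solid
def materialize_execution_date(context):
    airflow_execution_date = context.pipeline_run.tags['airflow_execution_date']
    yield Materialization(
        label='airflow_execution_date',
        metadata_entries=[
            EventMetadataEntry.text(airflow_execution_date, 'airflow_execution_date'),
        ],
    )
    yield Output(None)


@pipeline
def demo_airflow_execution_date_pipeline():
    materialize_execution_date()
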
Example No. 9
def test_error_dag_containerized(dagster_docker_image):  # pylint: disable=redefined-outer-name
    pipeline_name = 'demo_error_pipeline'
    handle = ReconstructableRepository.for_module('test_pipelines.repo',
                                                  pipeline_name)
    environments_path = test_project_environments_path()
    environment_yaml = [
        os.path.join(environments_path, 'env_s3.yaml'),
    ]
    environment_dict = load_yaml_from_glob_list(environment_yaml)

    run_id = make_new_run_id()
    execution_date = timezone.utcnow()

    dag, tasks = make_airflow_dag_containerized_for_handle(
        handle, pipeline_name, dagster_docker_image, environment_dict)

    with pytest.raises(AirflowException) as exc_info:
        execute_tasks_in_dag(dag, tasks, run_id, execution_date)

    assert 'Exception: Unusual error' in str(exc_info.value)
Example No. 10
def test_error_dag_containerized(dagster_docker_image):  # pylint: disable=redefined-outer-name
    pipeline_name = "demo_error_pipeline"
    recon_repo = ReconstructableRepository.for_module(
        "dagster_test.test_project.test_pipelines.repo",
        "define_demo_execution_repo")
    environments_path = test_project_environments_path()
    environment_yaml = [
        os.path.join(environments_path, "env_s3.yaml"),
    ]
    run_config = load_yaml_from_glob_list(environment_yaml)

    run_id = make_new_run_id()
    execution_date = timezone.utcnow()

    dag, tasks = make_airflow_dag_containerized_for_recon_repo(
        recon_repo, pipeline_name, dagster_docker_image, run_config)

    with pytest.raises(AirflowException) as exc_info:
        execute_tasks_in_dag(dag, tasks, run_id, execution_date)

    assert "Exception: Unusual error" in str(exc_info.value)
Example No. 11
def test_airflow_execution_date_tags_k8s(dagster_docker_image,
                                         cluster_provider):
    _check_aws_creds_available()

    pipeline_name = "demo_airflow_execution_date_pipeline"
    recon_repo = ReconstructableRepository.for_module(
        "dagster_test.test_project.test_pipelines.repo",
        "define_demo_execution_repo")
    environments_path = test_project_environments_path()
    environment_yaml = [
        os.path.join(environments_path, "env_s3.yaml"),
    ]
    run_config = load_yaml_from_glob_list(environment_yaml)

    execution_date = timezone.utcnow()

    dag, tasks = make_airflow_dag_kubernetized_for_recon_repo(
        recon_repo=recon_repo,
        pipeline_name=pipeline_name,
        image=dagster_docker_image,
        namespace="default",
        run_config=run_config,
        op_kwargs={
            "config_file": os.environ["KUBECONFIG"],
            "env_vars": {
                "AWS_ACCESS_KEY_ID": os.environ["AWS_ACCESS_KEY_ID"],
                "AWS_SECRET_ACCESS_KEY": os.environ["AWS_SECRET_ACCESS_KEY"],
            },
        },
    )

    results = execute_tasks_in_dag(dag,
                                   tasks,
                                   run_id=make_new_run_id(),
                                   execution_date=execution_date)

    materialized_airflow_execution_date = None
    for result in results.values():
        for event in result:
            if event.event_type_value == "STEP_MATERIALIZATION":
                materialization = event.event_specific_data.materialization
                materialization_entry = materialization.metadata_entries[0]
                materialized_airflow_execution_date = materialization_entry.entry_data.text

    assert execution_date.isoformat() == materialized_airflow_execution_date
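
The nested loops in Examples No. 7, No. 8, and No. 11 keep the last matching materialization; with a single materialization per run, taking the first match is equivalent. Assuming results maps Airflow task names to lists of Dagster events, as those loops imply, the extraction collapses to one expression:

# Equivalent one-expression extraction; assumes `results` maps task names
# to lists of Dagster events, as the loops above imply.
materialized_airflow_execution_date = next(
    (
        event.event_specific_data.materialization.metadata_entries[0].entry_data.text
        for result in results.values()
        for event in result
        if event.event_type_value == "STEP_MATERIALIZATION"
    ),
    None,
)
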