Example #1
def test_dag_import():
    """Test that the DAG file can be successfully imported.
    This tests that the DAG can be parsed, but does not run it in an Airflow
    environment. This is a recommended confidence check by the official Airflow
    docs: https://airflow.incubator.apache.org/tutorial.html#testing
    """
    from . import simple as module
    internal_unit_testing.assert_has_valid_dag(module)
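All of these examples call a shared local helper, internal_unit_testing, which is not shown in this listing. As a rough orientation only, here is a minimal sketch of what its assert_has_valid_dag function could look like, assuming it simply collects airflow.models.DAG objects from the imported module and runs Airflow 2.x's cycle check (the real helper may differ):

# Hypothetical sketch of the internal_unit_testing helper used above.
from airflow import models
from airflow.utils.dag_cycle_tester import check_cycle  # Airflow 2.x


def assert_has_valid_dag(module):
    """Assert that the given module defines at least one cycle-free DAG."""
    dag_found = False
    for value in vars(module).values():
        if isinstance(value, models.DAG):
            dag_found = True
            check_cycle(value)  # Raises AirflowDagCycleException on a cycle.
    assert dag_found, f"{module.__name__} does not define an Airflow DAG."

This behavior is consistent with Examples #8 and #9 below, where a cyclic DAG is expected to raise AirflowDagCycleException and a module with no DAG is expected to raise AssertionError.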
Example #2
def test_dag_with_variables(set_variables):

    # Importing the module verifies that there are no syntax errors.
    from . import unit_testing_variables as module

    # The assert_has_valid_dag verifies that the module contains an Airflow DAG
    # and that the DAG contains no cycles.
    internal_unit_testing.assert_has_valid_dag(module)
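Example #2 receives a set_variables pytest fixture that is not shown here; it would normally live in the test package's conftest.py. A plausible sketch, with hypothetical variable names and values:

# Hypothetical conftest.py fixture assumed by Example #2.
import pytest
from airflow import models


@pytest.fixture
def set_variables():
    # Set whichever Variables the DAG under test reads at parse time.
    models.Variable.set("gcp_project", "example-project")
    yield
    # Clean up so no state is left behind in the Airflow metadata database.
    models.Variable.delete("gcp_project")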
Example #3
def test_dag_import():
    """Test that the DAG file can be successfully imported.

    This tests that the DAG can be parsed, but does not run it in an Airflow
    environment. This is a recommended confidence check by the official Airflow
    docs: https://airflow.incubator.apache.org/tutorial.html#testing
    """
    from airflow import models

    models.Variable.set("project_id", "example-project")
    from . import dataproc_workflow_template_instantiate_operator_tutorial as module

    internal_unit_testing.assert_has_valid_dag(module)
Example #4
def test_dag_import():
    """Test that the DAG file can be successfully imported.

    This tests that the DAG can be parsed, but does not run it in an Airflow
    environment. This is a recommended confidence check by the official Airflow
    docs: https://airflow.incubator.apache.org/tutorial.html#testing
    """
    from airflow import models

    models.Variable.set('gcs_bucket', 'example_bucket')
    models.Variable.set('gcp_project', 'example-project')
    models.Variable.set('gce_zone', 'us-central1-f')
    from . import hadoop_tutorial as module

    internal_unit_testing.assert_has_valid_dag(module)
Example #5
def test_dag_with_variables():
    from airflow import models

    # Set any Airflow variables before importing the DAG module.
    models.Variable.set('gcp_project', 'example-project')

    # Importing the module verifies that there are no syntax errors.
    from . import unit_testing_variables as module

    # The assert_has_valid_dag verifies that the module contains an Airflow DAG
    # and that the DAG contains no cycles.
    internal_unit_testing.assert_has_valid_dag(module)
Example #6
def test_dag_import():
    """Test that the DAG file can be successfully imported.

    This tests that the DAG can be parsed, but does not run it in an Airflow
    environment. This is a recommended confidence check by the official Airflow
    docs: https://airflow.incubator.apache.org/tutorial.html#testing
    """
    from airflow import models

    models.Variable.set("bucket_path", "gs://example_bucket")
    models.Variable.set("project_id", "example-project")
    models.Variable.set("gce_zone", "us-central1-f")
    models.Variable.set("gce_region", "us-central1-f")
    from . import dataflowtemplateoperator_tutorial as module

    internal_unit_testing.assert_has_valid_dag(module)
Example #7
def test_dag_import():
    from . import example_dag

    internal_unit_testing.assert_has_valid_dag(example_dag)
Example #8
def test_dag_has_cycle():
    from . import unit_testing_cycle as module
    with pytest.raises(exceptions.AirflowDagCycleException):
        internal_unit_testing.assert_has_valid_dag(module)
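Example #8 imports a unit_testing_cycle module that is expected to define a DAG whose tasks form a cycle. That module is not shown; a minimal sketch of such a deliberately broken DAG (operator choice, task names, and dates are assumptions; Airflow 2.2+ assumed for EmptyOperator):

# Hypothetical unit_testing_cycle.py: two tasks that depend on each other.
import datetime

from airflow import models
from airflow.operators.empty import EmptyOperator

with models.DAG(
    "unit_testing_cycle",
    start_date=datetime.datetime(2024, 1, 1),
    schedule_interval=None,
) as dag:
    first = EmptyOperator(task_id="first")
    second = EmptyOperator(task_id="second")

    # first >> second >> first closes the loop; the cycle check should fail.
    first >> second >> first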
Example #9
def test_dag_no_dag():
    import internal_unit_testing as module  # Does not contain a DAG.
    with pytest.raises(AssertionError):
        internal_unit_testing.assert_has_valid_dag(module)
Example #10
def test_dag_import():
    from . import data_orchestration_blog_sample_dag

    internal_unit_testing.assert_has_valid_dag(
        data_orchestration_blog_sample_dag)
Example #11
def test_dag_import():
    from airflow import models

    models.Variable.set('gcp_project', PROJECT_ID)
    from . import example_dag

    internal_unit_testing.assert_has_valid_dag(example_dag)
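Example #11 relies on a module-level PROJECT_ID constant that is not shown. How it is defined is an assumption; in tests like these it is typically resolved from the environment, for example:

# Hypothetical: resolve the project ID from an environment variable, with a
# dummy fallback so the parse-only test still runs outside Google Cloud.
import os

PROJECT_ID = os.environ.get("GOOGLE_CLOUD_PROJECT", "example-project")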