import os

import pytest
from airflow.exceptions import AirflowException
from airflow.utils import timezone

# NOTE: the dagster import paths below are assumed from the dagster test
# project layout; adjust them to the dagster/dagster-airflow version in use.
# `postgres_instance` and the `dagster_docker_image` fixture are expected to
# come from the surrounding test harness (e.g. a shared conftest).
from dagster.core.definitions.reconstructable import ReconstructableRepository
from dagster.core.utils import make_new_run_id
from dagster.utils import load_yaml_from_glob_list
from dagster_airflow.factory import (
    make_airflow_dag_containerized_for_recon_repo,
    make_airflow_dag_for_recon_repo,
)
from dagster_airflow.test_fixtures import execute_tasks_in_dag
from dagster_test.test_project import get_test_project_environments_path


def test_error_dag_containerized(dagster_docker_image):  # pylint: disable=redefined-outer-name
    pipeline_name = "demo_error_pipeline_s3"
    recon_repo = ReconstructableRepository.for_module(
        "dagster_test.test_project.test_pipelines.repo", "define_demo_execution_repo"
    )
    environments_path = get_test_project_environments_path()
    environment_yaml = [
        os.path.join(environments_path, "env_s3.yaml"),
    ]
    run_config = load_yaml_from_glob_list(environment_yaml)
    run_id = make_new_run_id()
    execution_date = timezone.utcnow()

    with postgres_instance() as instance:
        dag, tasks = make_airflow_dag_containerized_for_recon_repo(
            recon_repo,
            pipeline_name,
            dagster_docker_image,
            run_config,
            instance=instance,
            # Share the network namespace with the test Postgres container so
            # the containerized step can reach the run storage.
            op_kwargs={"network_mode": "container:test-postgres-db-airflow"},
        )

        # The pipeline raises deliberately; the failure should surface to
        # Airflow as an AirflowException wrapping the original error.
        with pytest.raises(AirflowException) as exc_info:
            execute_tasks_in_dag(dag, tasks, run_id, execution_date)

        assert "Exception: Unusual error" in str(exc_info.value)


def test_airflow_execution_date_tags_job():
    job_name = "demo_airflow_execution_date_job"
    recon_repo = ReconstructableRepository.for_module(
        "dagster_test.test_project.test_pipelines.repo", job_name
    )
    environments_path = get_test_project_environments_path()
    environment_yaml = [
        os.path.join(environments_path, "env_filesystem.yaml"),
    ]
    run_config = load_yaml_from_glob_list(environment_yaml)
    execution_date = timezone.utcnow()

    dag, tasks = make_airflow_dag_for_recon_repo(recon_repo, job_name, run_config)

    results = execute_tasks_in_dag(
        dag, tasks, run_id=make_new_run_id(), execution_date=execution_date
    )

    # The job materializes an asset whose first metadata entry records the
    # Airflow execution_date tag; pull it back out of the event stream.
    materialized_airflow_execution_date = None
    for result in results.values():
        for event in result:
            if event.event_type_value == "ASSET_MATERIALIZATION":
                materialization = event.event_specific_data.materialization
                materialization_entry = materialization.metadata_entries[0]
                materialized_airflow_execution_date = materialization_entry.entry_data.text

    assert execution_date.isoformat() == materialized_airflow_execution_date
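

# For reference, a minimal sketch (an assumption; the real definition lives in
# dagster_test.test_project.test_pipelines.repo) of the kind of solid the
# execution-date tests exercise: dagster-airflow stamps the Airflow
# execution_date on the run as a tag, and the solid re-emits it as
# asset-materialization metadata, which the assertions above read back out.
#
#     from dagster import AssetMaterialization, EventMetadataEntry, Output, solid
#
#     @solid
#     def materialize_execution_date(context):
#         execution_date = context.pipeline_run.tags["airflow_execution_date"]
#         yield AssetMaterialization(
#             asset_key="airflow_execution_date",
#             metadata_entries=[
#                 EventMetadataEntry.text(execution_date, "airflow_execution_date")
#             ],
#         )
#         yield Output(execution_date)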


def test_error_dag_python_job():
    job_name = "demo_error_job"
    recon_repo = ReconstructableRepository.for_module(
        "dagster_test.test_project.test_pipelines.repo", job_name
    )
    environments_path = get_test_project_environments_path()
    environment_yaml = [
        os.path.join(environments_path, "env_filesystem.yaml"),
    ]
    run_config = load_yaml_from_glob_list(environment_yaml)
    execution_date = timezone.utcnow()

    dag, tasks = make_airflow_dag_for_recon_repo(recon_repo, job_name, run_config)

    with pytest.raises(AirflowException) as exc_info:
        execute_tasks_in_dag(dag, tasks, run_id=make_new_run_id(), execution_date=execution_date)

    assert "Exception: Unusual error" in str(exc_info.value)


def test_airflow_execution_date_tags_containerized(
    dagster_docker_image,
):  # pylint: disable=redefined-outer-name
    pipeline_name = "demo_airflow_execution_date_pipeline_s3"
    recon_repo = ReconstructableRepository.for_module(
        "dagster_test.test_project.test_pipelines.repo", "define_demo_execution_repo"
    )
    environments_path = get_test_project_environments_path()
    environment_yaml = [
        os.path.join(environments_path, "env_s3.yaml"),
    ]
    run_config = load_yaml_from_glob_list(environment_yaml)
    execution_date = timezone.utcnow()

    with postgres_instance() as instance:
        dag, tasks = make_airflow_dag_containerized_for_recon_repo(
            recon_repo,
            pipeline_name,
            dagster_docker_image,
            run_config,
            instance=instance,
            op_kwargs={"network_mode": "container:test-postgres-db-airflow"},
        )

        results = execute_tasks_in_dag(
            dag, tasks, run_id=make_new_run_id(), execution_date=execution_date
        )

        # Same check as the non-containerized variant: the materialization's
        # first metadata entry must round-trip the Airflow execution_date.
        materialized_airflow_execution_date = None
        for result in results.values():
            for event in result:
                if event.event_type_value == "ASSET_MATERIALIZATION":
                    materialization = event.event_specific_data.materialization
                    materialization_entry = materialization.metadata_entries[0]
                    materialized_airflow_execution_date = materialization_entry.entry_data.text

        assert execution_date.isoformat() == materialized_airflow_execution_date