def test_airflow_execution_date_tags_job():
    """The Airflow execution_date should surface in the job's asset materialization metadata."""
    job_name = "demo_airflow_execution_date_job"
    recon_repo = ReconstructableRepository.for_module(
        "dagster_test.test_project.test_pipelines.repo", job_name
    )
    run_config = load_yaml_from_glob_list(
        [os.path.join(get_test_project_environments_path(), "env_filesystem.yaml")]
    )
    execution_date = timezone.utcnow()

    dag, tasks = make_airflow_dag_for_recon_repo(recon_repo, job_name, run_config)
    results = execute_tasks_in_dag(
        dag, tasks, run_id=make_new_run_id(), execution_date=execution_date
    )

    # Scan every task's event stream for ASSET_MATERIALIZATION events and pull
    # the text of the first metadata entry (the recorded execution date).
    materialized_airflow_execution_date = None
    for task_events in results.values():
        for event in task_events:
            if event.event_type_value != "ASSET_MATERIALIZATION":
                continue
            materialization = event.event_specific_data.materialization
            first_entry = materialization.metadata_entries[0]
            materialized_airflow_execution_date = first_entry.entry_data.text

    assert execution_date.isoformat() == materialized_airflow_execution_date
def _pipeline_fn(
    recon_repo,
    pipeline_name,
    run_config=None,
    environment_yaml=None,
    op_kwargs=None,
    mode=None,
    execution_date=None,
):
    """Build an Airflow DAG for a reconstructable pipeline and execute all of its tasks.

    Args:
        recon_repo: ReconstructableRepository containing the pipeline.
        pipeline_name: Name of the pipeline to build the DAG from.
        run_config: Run config dict; if None and environment_yaml is given, it is
            loaded from those YAML files instead.
        environment_yaml: List of YAML glob paths used to build run_config when
            run_config is None.
        op_kwargs: Extra kwargs forwarded to the generated Airflow operators.
        mode: Pipeline mode to execute.
        execution_date: Airflow execution date; defaults to the current UTC time
            *at call time*.

    Returns:
        The per-task event results from execute_tasks_in_dag.
    """
    # BUG FIX: the original signature used `execution_date=timezone.utcnow()`,
    # which is evaluated once at import time — every call relying on the default
    # shared the same stale timestamp. Use a None sentinel and compute per call.
    if execution_date is None:
        execution_date = timezone.utcnow()
    if run_config is None and environment_yaml is not None:
        run_config = load_yaml_from_glob_list(environment_yaml)
    dag, tasks = make_airflow_dag_for_recon_repo(
        recon_repo, pipeline_name, run_config, mode=mode, op_kwargs=op_kwargs
    )
    assert isinstance(dag, DAG)
    for task in tasks:
        assert isinstance(task, PythonOperator)
    return execute_tasks_in_dag(
        dag, tasks, run_id=make_new_run_id(), execution_date=execution_date
    )
def test_error_dag_python():  # pylint: disable=redefined-outer-name
    """A deliberately erroring pipeline should surface its failure as an AirflowException."""
    pipeline_name = 'demo_error_pipeline'
    recon_repo = ReconstructableRepository.for_module('test_pipelines.repo', pipeline_name)
    environment_yaml = [
        os.path.join(test_project_environments_path(), 'env_filesystem.yaml'),
    ]
    run_config = load_yaml_from_glob_list(environment_yaml)

    dag, tasks = make_airflow_dag_for_recon_repo(recon_repo, pipeline_name, run_config)

    # Running the DAG's tasks must raise, and the underlying pipeline error
    # must be present in the exception message.
    with pytest.raises(AirflowException) as exc_info:
        execute_tasks_in_dag(
            dag, tasks, run_id=make_new_run_id(), execution_date=timezone.utcnow()
        )
    assert 'Exception: Unusual error' in str(exc_info.value)
def test_error_dag_python_job():
    """A deliberately erroring job should surface its failure as an AirflowException."""
    job_name = "demo_error_job"
    recon_repo = ReconstructableRepository.for_module(
        "dagster_test.test_project.test_pipelines.repo", job_name
    )
    run_config = load_yaml_from_glob_list(
        [os.path.join(get_test_project_environments_path(), "env_filesystem.yaml")]
    )

    dag, tasks = make_airflow_dag_for_recon_repo(recon_repo, job_name, run_config)

    # Executing the generated tasks must raise, carrying the job's error text.
    with pytest.raises(AirflowException) as exc_info:
        execute_tasks_in_dag(
            dag, tasks, run_id=make_new_run_id(), execution_date=timezone.utcnow()
        )
    assert "Exception: Unusual error" in str(exc_info.value)