def test_s3_storage(dagster_airflow_k8s_operator_pipeline, dagster_docker_image, cluster_provider):
    """Run the demo pipeline via the k8s-operator Airflow fixture with S3 storage.

    Requires AWS credentials and KUBECONFIG in the environment; validates the
    resulting pipeline execution events.
    """
    # Buildkite section header for the CI log.
    print('--- :airflow: test_kubernetes.test_s3_storage')
    _check_aws_creds_available()

    env_dir = test_project_environments_path()
    # Base run config plus the S3 storage overlay.
    yaml_files = [
        os.path.join(env_dir, 'env.yaml'),
        os.path.join(env_dir, 'env_s3.yaml'),
    ]
    # Credentials forwarded into the launched container so S3 storage works.
    aws_env = {
        'AWS_ACCESS_KEY_ID': os.environ['AWS_ACCESS_KEY_ID'],
        'AWS_SECRET_ACCESS_KEY': os.environ['AWS_SECRET_ACCESS_KEY'],
    }
    repo = ReconstructableRepository.for_module(
        'dagster_test.test_project.test_pipelines.repo',
        'define_demo_execution_repo',
    )

    results = dagster_airflow_k8s_operator_pipeline(
        pipeline_name='demo_pipeline',
        recon_repo=repo,
        environment_yaml=yaml_files,
        image=dagster_docker_image,
        op_kwargs={
            'config_file': os.environ['KUBECONFIG'],
            'env_vars': aws_env,
        },
    )
    validate_pipeline_execution(results)
def test_my_custom_operator(
    dagster_airflow_custom_operator_pipeline, caplog,
):  # pylint: disable=redefined-outer-name
    """Execute the demo pipeline through CustomOperator and verify its logging.

    Captures records from 'CustomOperatorLogger' and asserts the operator
    announced itself exactly twice with the expected message.
    """
    caplog.set_level(logging.INFO, logger='CustomOperatorLogger')

    env_dir = test_project_environments_path()
    results = dagster_airflow_custom_operator_pipeline(
        pipeline_name='demo_pipeline',
        handle=ReconstructableRepository.for_module('test_pipelines.repo', 'demo_pipeline'),
        operator=CustomOperator,
        environment_yaml=[
            os.path.join(env_dir, 'env.yaml'),
            os.path.join(env_dir, 'env_filesystem_no_explicit_base_dir.yaml'),
        ],
    )
    validate_pipeline_execution(results)

    # Count the custom operator's log records, checking each message as we go.
    seen = 0
    for rec in caplog.records:
        if rec.name == 'CustomOperatorLogger':
            assert rec.message == 'CustomOperator is called'
            seen += 1
    assert seen == 2
def test_s3_storage(dagster_airflow_k8s_operator_pipeline, dagster_docker_image, cluster_provider):
    """Launch the demo pipeline with S3 storage through the k8s-operator fixture.

    AWS credentials and KUBECONFIG must be set; the run's events are validated
    at the end.
    """
    _check_aws_creds_available()

    env_dir = test_project_environments_path()
    run = dagster_airflow_k8s_operator_pipeline(
        pipeline_name="demo_pipeline",
        recon_repo=ReconstructableRepository.for_module(
            "dagster_test.test_project.test_pipelines.repo",
            "define_demo_execution_repo",
        ),
        # Base config followed by the S3 storage overlay.
        environment_yaml=[
            os.path.join(env_dir, name) for name in ("env.yaml", "env_s3.yaml")
        ],
        image=dagster_docker_image,
        op_kwargs={
            "config_file": os.environ["KUBECONFIG"],
            # Forward AWS credentials into the launched container.
            "env_vars": {
                key: os.environ[key]
                for key in ("AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY")
            },
        },
    )
    validate_pipeline_execution(run)
def test_my_custom_operator(
    dagster_airflow_custom_operator_pipeline, caplog,
):  # pylint: disable=redefined-outer-name
    """Run demo_pipeline_s3 via CustomOperator and check the operator's logging.

    Asserts that 'CustomOperatorLogger' emitted exactly two records, each with
    the expected call message.
    """
    caplog.set_level(logging.INFO, logger="CustomOperatorLogger")

    env_dir = get_test_project_environments_path()
    run = dagster_airflow_custom_operator_pipeline(
        pipeline_name="demo_pipeline_s3",
        recon_repo=ReconstructableRepository.for_module(
            "dagster_test.test_project.test_pipelines.repo", "demo_pipeline_s3"
        ),
        operator=CustomOperator,
        environment_yaml=[
            os.path.join(env_dir, "env.yaml"),
            os.path.join(env_dir, "env_s3.yaml"),
        ],
    )
    validate_pipeline_execution(run)

    # Tally records from the custom operator, validating each message inline.
    seen = 0
    for rec in caplog.records:
        if rec.name == "CustomOperatorLogger":
            assert rec.message == "CustomOperator is called"
            seen += 1
    assert seen == 2
def test_gcs_storage(
    dagster_airflow_k8s_operator_pipeline, dagster_docker_image, cluster_provider,
):
    """Run the GCS-storage demo pipeline via the k8s-operator Airflow fixture.

    Requires KUBECONFIG in the environment; validates the resulting pipeline
    execution events.
    """
    env_dir = test_project_environments_path()
    repo = ReconstructableRepository.for_module(
        'dagster_test.test_project.test_pipelines.repo',
        'define_demo_execution_repo',
    )

    run = dagster_airflow_k8s_operator_pipeline(
        pipeline_name='demo_pipeline_gcs',
        recon_repo=repo,
        # Base config followed by the GCS storage overlay.
        environment_yaml=[
            os.path.join(env_dir, 'env.yaml'),
            os.path.join(env_dir, 'env_gcs.yaml'),
        ],
        image=dagster_docker_image,
        op_kwargs={'config_file': os.environ['KUBECONFIG']},
    )
    validate_pipeline_execution(run)