def test_s3_storage(dagster_airflow_k8s_operator_pipeline, dagster_docker_image, cluster_provider):
    """Run demo_pipeline through the Airflow k8s-operator fixture with S3 storage.

    Requires AWS credentials and a KUBECONFIG in the environment; the AWS
    keys are forwarded into the operator pods via ``op_kwargs['env_vars']``.
    """
    # Buildkite log-group banner for this test.
    print('--- :airflow: test_kubernetes.test_s3_storage')
    _check_aws_creds_available()

    env_dir = test_project_environments_path()
    results = dagster_airflow_k8s_operator_pipeline(
        pipeline_name='demo_pipeline',
        recon_repo=ReconstructableRepository.for_module(
            'dagster_test.test_project.test_pipelines.repo',
            'define_demo_execution_repo',
        ),
        environment_yaml=[
            os.path.join(env_dir, 'env.yaml'),
            os.path.join(env_dir, 'env_s3.yaml'),
        ],
        image=dagster_docker_image,
        op_kwargs={
            'config_file': os.environ['KUBECONFIG'],
            # Pods need AWS credentials to reach the S3 intermediate store.
            'env_vars': {
                'AWS_ACCESS_KEY_ID': os.environ['AWS_ACCESS_KEY_ID'],
                'AWS_SECRET_ACCESS_KEY': os.environ['AWS_SECRET_ACCESS_KEY'],
            },
        },
    )
    validate_pipeline_execution(results)
def test_s3_storage(dagster_airflow_k8s_operator_pipeline, dagster_docker_image, cluster_provider):
    """Execute demo_pipeline via the Airflow k8s operator using S3 storage.

    AWS credentials and KUBECONFIG must be present in the environment; the
    credentials are injected into the operator pods as environment variables.
    """
    _check_aws_creds_available()

    # Credentials the S3 intermediate store needs inside the pods.
    aws_env = {
        "AWS_ACCESS_KEY_ID": os.environ["AWS_ACCESS_KEY_ID"],
        "AWS_SECRET_ACCESS_KEY": os.environ["AWS_SECRET_ACCESS_KEY"],
    }

    environments_path = test_project_environments_path()
    yaml_files = [
        os.path.join(environments_path, filename)
        for filename in ("env.yaml", "env_s3.yaml")
    ]

    results = dagster_airflow_k8s_operator_pipeline(
        pipeline_name="demo_pipeline",
        recon_repo=ReconstructableRepository.for_module(
            "dagster_test.test_project.test_pipelines.repo",
            "define_demo_execution_repo",
        ),
        environment_yaml=yaml_files,
        image=dagster_docker_image,
        op_kwargs={"config_file": os.environ["KUBECONFIG"], "env_vars": aws_env},
    )
    validate_pipeline_execution(results)
def test_gcs_storage(
    dagster_airflow_k8s_operator_pipeline,
    dagster_docker_image,
    environments_path,
):  # pylint: disable=redefined-outer-name
    """Run demo_pipeline_gcs through the Airflow k8s-operator fixture with GCS storage.

    Fix: this variant passed ``handle=ExecutionTargetHandle.for_pipeline_module(...)``
    while every other call to the same fixture in this file passes
    ``recon_repo=ReconstructableRepository.for_module(...)`` pointing at the
    repo-defining function; the old kwarg/API would not match the fixture
    interface used elsewhere. Made consistent with the sibling tests.
    """
    pipeline_name = 'demo_pipeline_gcs'
    results = dagster_airflow_k8s_operator_pipeline(
        pipeline_name=pipeline_name,
        recon_repo=ReconstructableRepository.for_module(
            'dagster_test.test_project.test_pipelines.repo',
            'define_demo_execution_repo',
        ),
        environment_yaml=[
            os.path.join(environments_path, 'env.yaml'),
            os.path.join(environments_path, 'env_gcs.yaml'),
        ],
        image=dagster_docker_image,
    )
    validate_pipeline_execution(results)
def test_gcs_storage(
    dagster_airflow_k8s_operator_pipeline,
    dagster_docker_image,
    cluster_provider,
):
    """Run demo_pipeline_gcs through the Airflow k8s-operator fixture with GCS storage.

    Fix: the fixture was called with ``handle=ReconstructableRepository.for_module(
    'test_pipelines.repo', pipeline_name)`` — wrong kwarg name, truncated module
    path, and the pipeline name where the repo-defining function belongs. Every
    other call site in this file uses ``recon_repo=ReconstructableRepository.for_module(
    'dagster_test.test_project.test_pipelines.repo', 'define_demo_execution_repo')``;
    made consistent with that convention.
    """
    # Buildkite log-group banner for this test.
    print('--- :airflow: test_kubernetes.test_gcs_storage')
    environments_path = test_project_environments_path()
    pipeline_name = 'demo_pipeline_gcs'
    results = dagster_airflow_k8s_operator_pipeline(
        pipeline_name=pipeline_name,
        recon_repo=ReconstructableRepository.for_module(
            'dagster_test.test_project.test_pipelines.repo',
            'define_demo_execution_repo',
        ),
        environment_yaml=[
            os.path.join(environments_path, 'env.yaml'),
            os.path.join(environments_path, 'env_gcs.yaml'),
        ],
        image=dagster_docker_image,
        op_kwargs={'config_file': os.environ['KUBECONFIG']},
    )
    validate_pipeline_execution(results)
def test_gcs_storage(
    dagster_airflow_k8s_operator_pipeline,
    dagster_docker_image,
    cluster_provider,
):
    """Execute demo_pipeline_gcs via the Airflow k8s operator using GCS storage.

    KUBECONFIG must be set in the environment; it is handed to the operator
    so the task pods can be scheduled against the test cluster.
    """
    env_dir = test_project_environments_path()
    yaml_files = [
        os.path.join(env_dir, filename)
        for filename in ("env.yaml", "env_gcs.yaml")
    ]

    results = dagster_airflow_k8s_operator_pipeline(
        pipeline_name="demo_pipeline_gcs",
        recon_repo=ReconstructableRepository.for_module(
            "dagster_test.test_project.test_pipelines.repo",
            "define_demo_execution_repo",
        ),
        environment_yaml=yaml_files,
        image=dagster_docker_image,
        op_kwargs={"config_file": os.environ["KUBECONFIG"]},
    )
    validate_pipeline_execution(results)