def test_k8s_run_launcher_default(
    dagster_instance_for_k8s_run_launcher, helm_namespace_for_k8s_run_launcher, dagster_docker_image
):
    """Launch a run through the K8sRunLauncher using the k8s executor and verify success.

    Also sanity-checks the deployment shape: the instance must be configured with a
    K8sRunLauncher and the helm namespace must not be running any celery workers.
    """
    # Sanity check that we have a K8sRunLauncher configured on the instance.
    check.inst(dagster_instance_for_k8s_run_launcher.run_launcher, K8sRunLauncher)

    # The k8s-executor deployment should have no celery worker pods at all.
    pod_list = DagsterKubernetesClient.production_client().core_api.list_namespaced_pod(
        namespace=helm_namespace_for_k8s_run_launcher
    )
    celery_worker_pods = [
        pod.metadata.name for pod in pod_list.items if "celery-workers" in pod.metadata.name
    ]
    check.invariant(not celery_worker_pods)

    # On buildkite the AWS configmap is not available; only include it locally.
    env_config_maps = ["dagster-pipeline-env"]
    if not IS_BUILDKITE:
        env_config_maps.append(TEST_AWS_CONFIGMAP_NAME)

    run_config = merge_dicts(
        load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env.yaml")),
        load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")),
        {
            "execution": {
                "k8s": {
                    "config": {
                        "job_namespace": helm_namespace_for_k8s_run_launcher,
                        "job_image": dagster_docker_image,
                        "image_pull_policy": image_pull_policy(),
                        "env_config_maps": env_config_maps,
                    }
                }
            },
        },
    )

    pipeline_name = "demo_k8s_executor_pipeline"
    tags = {"key": "value"}

    with get_test_project_location_and_external_pipeline(pipeline_name) as (
        location,
        external_pipeline,
    ):
        run = create_run_for_test(
            dagster_instance_for_k8s_run_launcher,
            pipeline_name=pipeline_name,
            run_config=run_config,
            tags=tags,
            mode="default",
            pipeline_snapshot=external_pipeline.pipeline_snapshot,
            execution_plan_snapshot=location.get_external_execution_plan(
                external_pipeline, run_config, "default", None, None
            ).execution_plan_snapshot,
        )
        dagster_instance_for_k8s_run_launcher.launch_run(
            run.run_id,
            ReOriginatedExternalPipelineForTest(external_pipeline),
        )

        raw_logs = wait_for_job_and_get_raw_logs(
            job_name="dagster-run-%s" % run.run_id, namespace=helm_namespace_for_k8s_run_launcher
        )
        assert "PIPELINE_SUCCESS" in raw_logs, "no match, result: {}".format(raw_logs)

        # The launcher tags the run with the docker image it actually used.
        finished_run = dagster_instance_for_k8s_run_launcher.get_run_by_id(run.run_id)
        assert finished_run.tags[DOCKER_IMAGE_TAG] == get_test_project_docker_image()
def test_k8s_run_launcher_with_celery_executor_fails(
    dagster_docker_image, dagster_instance_for_k8s_run_launcher, helm_namespace_for_k8s_run_launcher
):
    """Launching a celery-executor run on a launcher-only deployment should fail at init.

    The helm namespace for the K8sRunLauncher deploys no celery workers, so a run
    configured with the celery engine is expected to hit PIPELINE_INIT_FAILURE and
    end in FAILURE status.
    """
    run_config = merge_dicts(
        merge_yamls(
            [
                os.path.join(get_test_project_environments_path(), "env.yaml"),
                os.path.join(get_test_project_environments_path(), "env_s3.yaml"),
            ]
        ),
        get_celery_engine_config(
            dagster_docker_image=dagster_docker_image,
            job_namespace=helm_namespace_for_k8s_run_launcher,
        ),
    )

    pipeline_name = "demo_pipeline_celery"
    with get_test_project_location_and_external_pipeline(pipeline_name) as (
        location,
        external_pipeline,
    ):
        run = create_run_for_test(
            dagster_instance_for_k8s_run_launcher,
            pipeline_name=pipeline_name,
            run_config=run_config,
            mode="default",
            pipeline_snapshot=external_pipeline.pipeline_snapshot,
            execution_plan_snapshot=location.get_external_execution_plan(
                external_pipeline, run_config, "default", None, None
            ).execution_plan_snapshot,
        )
        dagster_instance_for_k8s_run_launcher.launch_run(
            run.run_id,
            ReOriginatedExternalPipelineForTest(external_pipeline),
        )

        # Poll the event log until the init failure shows up, or give up after 2 minutes.
        deadline = datetime.datetime.now() + datetime.timedelta(seconds=120)
        found_pipeline_failure = False
        while not found_pipeline_failure and datetime.datetime.now() < deadline:
            found_pipeline_failure = any(
                record.dagster_event
                and record.dagster_event.event_type == DagsterEventType.PIPELINE_INIT_FAILURE
                for record in dagster_instance_for_k8s_run_launcher.all_logs(run.run_id)
            )
            if not found_pipeline_failure:
                time.sleep(5)

        assert found_pipeline_failure
        assert (
            dagster_instance_for_k8s_run_launcher.get_run_by_id(run.run_id).status
            == PipelineRunStatus.FAILURE
        )
def test_k8s_run_launcher_terminate(
    dagster_instance_for_k8s_run_launcher, helm_namespace_for_k8s_run_launcher
):
    """Launch a slow run, terminate it mid-flight, and verify it lands in CANCELED.

    Also checks that terminating an already-finished run reports failure.
    """
    pipeline_name = "slow_pipeline"
    tags = {"key": "value"}

    with get_test_project_location_and_external_pipeline(pipeline_name) as (
        location,
        external_pipeline,
    ):
        run = create_run_for_test(
            dagster_instance_for_k8s_run_launcher,
            pipeline_name=pipeline_name,
            run_config=None,
            tags=tags,
            mode="default",
            pipeline_snapshot=external_pipeline.pipeline_snapshot,
            execution_plan_snapshot=location.get_external_execution_plan(
                external_pipeline, {}, "default", None, None
            ).execution_plan_snapshot,
        )
        dagster_instance_for_k8s_run_launcher.launch_run(
            run.run_id,
            ReOriginatedExternalPipelineForTest(external_pipeline),
        )

        wait_for_job(
            job_name="dagster-run-%s" % run.run_id, namespace=helm_namespace_for_k8s_run_launcher
        )

        launcher = dagster_instance_for_k8s_run_launcher.run_launcher

        # Poll (up to 30s) until the launcher reports the run as terminable.
        terminate_deadline = datetime.datetime.now() + datetime.timedelta(seconds=30)
        while (
            not launcher.can_terminate(run_id=run.run_id)
            and datetime.datetime.now() < terminate_deadline
        ):
            time.sleep(5)

        assert launcher.can_terminate(run_id=run.run_id)
        assert launcher.terminate(run_id=run.run_id)

        # Poll (up to 30s) until the run transitions to CANCELED.
        cancel_deadline = datetime.datetime.now() + datetime.timedelta(seconds=30)
        pipeline_run = dagster_instance_for_k8s_run_launcher.get_run_by_id(run.run_id)
        while (
            pipeline_run.status != PipelineRunStatus.CANCELED
            and datetime.datetime.now() < cancel_deadline
        ):
            time.sleep(5)
            pipeline_run = dagster_instance_for_k8s_run_launcher.get_run_by_id(run.run_id)

        assert pipeline_run.status == PipelineRunStatus.CANCELED

        # A run that has already finished can no longer be terminated.
        assert not launcher.terminate(run_id=run.run_id)
def test_failing_k8s_run_launcher(
    dagster_instance_for_k8s_run_launcher, helm_namespace_for_k8s_run_launcher
):
    """Launch a run with invalid run config and verify it fails with config errors.

    The run's run_config is intentionally bogus; the launched job must not report
    PIPELINE_SUCCESS, and the event log must contain both the unexpected-entry and
    missing-entry config validation errors.
    """
    run_config = {"blah blah this is wrong": {}}
    pipeline_name = "demo_pipeline"

    with get_test_project_location_and_external_pipeline(pipeline_name) as (
        location,
        external_pipeline,
    ):
        # NOTE: the execution plan snapshot is built from *valid* config so the run
        # can be created; only the run itself carries the invalid run_config.
        run = create_run_for_test(
            dagster_instance_for_k8s_run_launcher,
            pipeline_name=pipeline_name,
            run_config=run_config,
            pipeline_snapshot=external_pipeline.pipeline_snapshot,
            execution_plan_snapshot=location.get_external_execution_plan(
                external_pipeline,
                {
                    "solids": {
                        "multiply_the_word": {
                            "config": {"factor": 0},
                            "inputs": {"word": "..."},
                        }
                    }
                },
                "default",
                None,
                None,
            ).execution_plan_snapshot,
        )
        dagster_instance_for_k8s_run_launcher.launch_run(
            run.run_id,
            ReOriginatedExternalPipelineForTest(external_pipeline),
        )

        result = wait_for_job_and_get_raw_logs(
            job_name="dagster-run-%s" % run.run_id, namespace=helm_namespace_for_k8s_run_launcher
        )

        # Fixed assertion message: this assertion fires precisely when a match WAS
        # found, so the old "no match" wording was misleading.
        assert "PIPELINE_SUCCESS" not in result, "unexpected match, result: {}".format(result)

        event_records = dagster_instance_for_k8s_run_launcher.all_logs(run.run_id)

        # Use generator expressions with any() instead of materializing throwaway lists.
        assert any(
            'Received unexpected config entry "blah blah this is wrong"' in str(event)
            for event in event_records
        )
        assert any(
            'Missing required config entry "solids"' in str(event) for event in event_records
        )