Code example #1
def test_run_always_finishes():  # pylint: disable=redefined-outer-name
    with seven.TemporaryDirectory() as temp_dir:
        instance = DagsterInstance.local_temp(
            temp_dir,
            # Override the default run launcher so that launched runs go
            # through the gRPC run launcher.
            overrides={
                "run_launcher": {
                    "module": "dagster.core.launcher.grpc_run_launcher",
                    "class": "GrpcRunLauncher",
                }
            },
        )

        pipeline_run = instance.create_run_for_pipeline(
            pipeline_def=slow_pipeline, run_config=None)
        run_id = pipeline_run.run_id

        recon_repo = ReconstructableRepository.for_file(__file__, "nope")
        loadable_target_origin = LoadableTargetOrigin.from_python_origin(
            recon_repo.get_origin())

        server_process = GrpcServerProcess(
            loadable_target_origin=loadable_target_origin, max_workers=4)
        with server_process.create_ephemeral_client() as api_client:
            repository_location = GrpcServerRepositoryLocation(
                RepositoryLocationHandle.create_grpc_server_location(
                    location_name="test",
                    port=api_client.port,
                    socket=api_client.socket,
                    host=api_client.host,
                ))

            external_pipeline = repository_location.get_repository(
                "nope").get_full_external_pipeline("slow_pipeline")

            assert instance.get_run_by_id(
                run_id).status == PipelineRunStatus.NOT_STARTED

            launcher = instance.run_launcher
            launcher.launch_run(instance=instance,
                                run=pipeline_run,
                                external_pipeline=external_pipeline)

        # Server process now receives shutdown event, run has not finished yet
        pipeline_run = instance.get_run_by_id(run_id)
        assert not pipeline_run.is_finished
        assert server_process.server_process.poll() is None

        # Server should wait until run finishes, then shutdown
        pipeline_run = poll_for_run(instance, run_id)
        assert pipeline_run.status == PipelineRunStatus.SUCCESS

        start_time = time.time()
        while server_process.server_process.poll() is None:
            time.sleep(0.05)
            # Verify server process cleans up eventually
            assert time.time() - start_time < 5

        server_process.wait()
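
The test above relies on a slow_pipeline definition and a poll_for_run helper that are not part of the snippet. The following is only a minimal sketch of what such fixtures might look like; the solid name, sleep duration, and timeout are assumptions, not taken from the original module.

import time

from dagster import pipeline, solid


@solid
def slow_solid(_):
    # Hypothetical: sleep long enough that the run is still in flight when the
    # ephemeral client block exits and the server receives its shutdown event.
    time.sleep(2)


@pipeline
def slow_pipeline():
    slow_solid()


def poll_for_run(instance, run_id, timeout=20):
    # Hypothetical helper: poll the instance until the run reaches a terminal state.
    start = time.time()
    while True:
        run = instance.get_run_by_id(run_id)
        if run.is_finished:
            return run
        if time.time() - start > timeout:
            raise Exception("Timed out waiting for run {} to finish".format(run_id))
        time.sleep(0.1)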
Code example #2
def test_execute_run_api_grpc_python_handle(pipeline_handle):
    with seven.TemporaryDirectory() as temp_dir:
        instance = DagsterInstance.local_temp(temp_dir)
        pipeline_run = instance.create_run(
            pipeline_name="foo",
            run_id=None,
            run_config={},
            mode="default",
            solids_to_execute=None,
            step_keys_to_execute=None,
            status=None,
            tags=None,
            root_run_id=None,
            parent_run_id=None,
            pipeline_snapshot=None,
            execution_plan_snapshot=None,
            parent_pipeline_snapshot=None,
        )

        loadable_target_origin = LoadableTargetOrigin.from_python_origin(
            pipeline_handle.get_origin().repository_origin
        )

        with GrpcServerProcess(
            loadable_target_origin, max_workers=2
        ).create_ephemeral_client() as api_client:
            # Execute the run over gRPC and collect the streamed events.
            events = [
                event
                for event in sync_execute_run_grpc(
                    api_client=api_client,
                    instance_ref=instance.get_ref(),
                    pipeline_origin=pipeline_handle.get_origin(),
                    pipeline_run=pipeline_run,
                )
            ]

            assert len(events) == 14
            assert [event.event_type_value for event in events] == [
                "ENGINE_EVENT",
                "ENGINE_EVENT",
                "PIPELINE_START",
                "ENGINE_EVENT",
                "STEP_START",
                "STEP_OUTPUT",
                "STEP_SUCCESS",
                "STEP_START",
                "STEP_INPUT",
                "STEP_OUTPUT",
                "STEP_SUCCESS",
                "ENGINE_EVENT",
                "PIPELINE_SUCCESS",
                "ENGINE_EVENT",
            ]
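
The pipeline_handle fixture and the "foo" pipeline it points at are defined elsewhere. Judging from the asserted event sequence (two STEP_START/STEP_SUCCESS pairs and a single STEP_INPUT), the pipeline presumably consists of two solids where the second consumes the first's output. A hypothetical definition of that shape, with placeholder solid names:

from dagster import pipeline, solid


@solid
def emit_value(_):
    return 1


@solid
def pass_through(_, value):
    return value


@pipeline
def foo():
    pass_through(emit_value())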
Code example #3
        def _mgr_fn(recon_repo):
            """Goes out of process via grpc"""
            check.inst_param(recon_repo, "recon_repo",
                             ReconstructableRepository)

            loadable_target_origin = LoadableTargetOrigin.from_python_origin(
                recon_repo.get_origin())
            with Workspace([
                RepositoryLocationHandle.create_process_bound_grpc_server_location(
                    loadable_target_origin=loadable_target_origin,
                    location_name="test",
                )
            ]) as workspace:
                yield workspace
Code example #4
        def _mgr_fn(recon_repo):
            """Goes out of process but same process as host process"""
            check.inst_param(recon_repo, "recon_repo",
                             ReconstructableRepository)

            # this is "ok" because we know the test host process contains the user code
            loadable_target_origin = LoadableTargetOrigin.from_python_origin(
                recon_repo.get_origin())
            with Workspace([
                    RepositoryLocationHandle.create_python_env_location(
                        loadable_target_origin=loadable_target_origin,
                        location_name="test",
                    )
            ]) as workspace:
                yield workspace
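
Both _mgr_fn variants above (examples #3 and #4) are generator functions that yield a Workspace, so the surrounding test harness presumably turns them into context managers before use. A hypothetical usage sketch, assuming contextlib.contextmanager (or an equivalent helper) does that wrapping:

from contextlib import contextmanager

# Hypothetical: wrap the generator and open a workspace for the duration of a test.
with contextmanager(_mgr_fn)(recon_repo) as workspace:
    ...  # resolve repository locations and pipelines against the workspace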
Code example #5
def get_external_pipeline_from_grpc_server_repository(pipeline_name):
    repo_yaml = file_relative_path(__file__, "repo.yaml")
    recon_repo = ReconstructableRepository.from_legacy_repository_yaml(
        repo_yaml)
    loadable_target_origin = LoadableTargetOrigin.from_python_origin(
        recon_repo.get_origin())
    server_process = GrpcServerProcess(
        loadable_target_origin=loadable_target_origin)

    try:
        with server_process.create_ephemeral_client() as api_client:
            repository_location = GrpcServerRepositoryLocation(
                RepositoryLocationHandle.create_grpc_server_location(
                    location_name="test",
                    port=api_client.port,
                    socket=api_client.socket,
                    host=api_client.host,
                ))

            yield repository_location.get_repository(
                "nope").get_full_external_pipeline(pipeline_name)
    finally:
        # Wait for the spawned gRPC server process to exit before returning.
        server_process.wait()
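
Examples #1 and #5 both look up a repository named "nope" from the gRPC server, but the repository definition itself is not shown. A hypothetical repository of roughly that shape, purely for illustration:

from dagster import pipeline, repository, solid


@solid
def noop_solid(_):
    pass


@pipeline
def noop_pipeline():
    noop_solid()


@repository
def nope():
    # Hypothetical contents: the real repository would also define
    # slow_pipeline and whatever else the snippets request by name.
    return [noop_pipeline]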
Code example #6
        def _mgr_fn(recon_repo):
            check.inst_param(recon_repo, "recon_repo",
                             ReconstructableRepository)

            loadable_target_origin = LoadableTargetOrigin.from_python_origin(
                recon_repo.get_origin())

            server_process = GrpcServerProcess(
                loadable_target_origin=loadable_target_origin)
            try:
                with server_process.create_ephemeral_client() as api_client:
                    with Workspace([
                        RepositoryLocationHandle.create_grpc_server_location(
                            port=api_client.port,
                            socket=api_client.socket,
                            host=api_client.host,
                            location_name="test",
                        )
                    ]) as workspace:
                        yield workspace
            finally:
                server_process.wait()
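
Examples #1, #5, and #6 share the same shutdown pattern: leaving the create_ephemeral_client() block appears to signal the server to shut down once in-flight work completes (example #1's comments rely on exactly that), and server_process.wait(), kept in a finally block in #5 and #6, presumably blocks until the spawned server process has actually exited, so the tests do not leak gRPC server processes.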