Example #1
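In-process repository location constructor: validates the InProcessRepositoryLocationOrigin, loads its repositories through LoadedRepositories, and builds an external repository for each loaded repository via external_repo_from_def, keyed by repository name.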
    def __init__(self, origin: InProcessRepositoryLocationOrigin):
        from dagster.grpc.server import LoadedRepositories

        self._origin = check.inst_param(origin, "origin",
                                        InProcessRepositoryLocationOrigin)

        loadable_target_origin = self._origin.loadable_target_origin
        self._loaded_repositories = LoadedRepositories(
            loadable_target_origin, self._origin.entry_point)

        self._repository_code_pointer_dict = self._loaded_repositories.code_pointers_by_repo_name

        self._recon_repos = {
            repo_name: self._loaded_repositories.get_recon_repo(repo_name)
            for repo_name in self._repository_code_pointer_dict
        }

        self._repositories = {}
        for repo_name in self._repository_code_pointer_dict:
            recon_repo = self._loaded_repositories.get_recon_repo(repo_name)
            repo_def = recon_repo.get_definition()
            self._repositories[repo_name] = external_repo_from_def(
                repo_def,
                RepositoryHandle(repository_name=repo_name,
                                 repository_location=self),
            )
Example #2
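Fetches schedule evaluation results over gRPC: sends ExternalScheduleExecutionArgs, deserializes the response into either ScheduleExecutionData or an error payload, and raises DagsterUserCodeProcessError if the user-code server reported an error.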
def sync_get_external_schedule_execution_data_grpc(
    api_client: "DagsterGrpcClient",
    instance: DagsterInstance,
    repository_handle: RepositoryHandle,
    schedule_name: str,
    scheduled_execution_time: Any,
) -> ScheduleExecutionData:
    check.inst_param(repository_handle, "repository_handle", RepositoryHandle)
    check.str_param(schedule_name, "schedule_name")
    check.opt_inst_param(scheduled_execution_time, "scheduled_execution_time",
                         PendulumDateTime)

    origin = repository_handle.get_external_origin()
    result = deserialize_as(
        api_client.external_schedule_execution(
            external_schedule_execution_args=ExternalScheduleExecutionArgs(
                repository_origin=origin,
                instance_ref=instance.get_ref(),
                schedule_name=schedule_name,
                scheduled_execution_timestamp=scheduled_execution_time.timestamp()
                if scheduled_execution_time
                else None,
                scheduled_execution_timezone=scheduled_execution_time.timezone.name
                if scheduled_execution_time
                else None,
            )),
        (ScheduleExecutionData, ExternalScheduleExecutionErrorData),
    )
    if isinstance(result, ExternalScheduleExecutionErrorData):
        raise DagsterUserCodeProcessError.from_error_info(result.error)

    return result
Example #3
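The same request/deserialize/raise pattern for partition sets: asks the gRPC server for ExternalPartitionSetExecutionParamData and raises DagsterUserCodeProcessError on an ExternalPartitionExecutionErrorData result.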
def sync_get_external_partition_set_execution_param_data_grpc(
    api_client: "DagsterGrpcClient",
    repository_handle: RepositoryHandle,
    partition_set_name: str,
    partition_names: List[str],
) -> ExternalPartitionSetExecutionParamData:
    from dagster.grpc.client import DagsterGrpcClient

    check.inst_param(api_client, "api_client", DagsterGrpcClient)
    check.inst_param(repository_handle, "repository_handle", RepositoryHandle)
    check.str_param(partition_set_name, "partition_set_name")
    check.list_param(partition_names, "partition_names", of_type=str)

    repository_origin = repository_handle.get_external_origin()

    result = deserialize_as(
        api_client.external_partition_set_execution_params(
            partition_set_execution_param_args=PartitionSetExecutionParamArgs(
                repository_origin=repository_origin,
                partition_set_name=partition_set_name,
                partition_names=partition_names,
            ),
        ),
        (ExternalPartitionSetExecutionParamData,
         ExternalPartitionExecutionErrorData),
    )
    if isinstance(result, ExternalPartitionExecutionErrorData):
        raise DagsterUserCodeProcessError.from_error_info(result.error)

    return result
Example #4
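The same pattern for sensors: requests a sensor evaluation over gRPC with SensorExecutionArgs and returns SensorExecutionData, raising on an error payload.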
def sync_get_external_sensor_execution_data_grpc(
    api_client: "DagsterGrpcClient",
    instance: "DagsterInstance",
    repository_handle: RepositoryHandle,
    sensor_name: str,
    last_completion_time: Optional[float],
    last_run_key: Optional[str],
    cursor: Optional[str],
) -> SensorExecutionData:
    check.inst_param(repository_handle, "repository_handle", RepositoryHandle)
    check.str_param(sensor_name, "sensor_name")
    check.opt_float_param(last_completion_time, "last_completion_time")
    check.opt_str_param(last_run_key, "last_run_key")
    check.opt_str_param(cursor, "cursor")

    origin = repository_handle.get_external_origin()

    result = deserialize_as(
        api_client.external_sensor_execution(
            sensor_execution_args=SensorExecutionArgs(
                repository_origin=origin,
                instance_ref=instance.get_ref(),
                sensor_name=sensor_name,
                last_completion_time=last_completion_time,
                last_run_key=last_run_key,
                cursor=cursor,
            )
        ),
        (SensorExecutionData, ExternalSensorExecutionErrorData),
    )

    if isinstance(result, ExternalSensorExecutionErrorData):
        raise DagsterUserCodeProcessError.from_error_info(result.error)

    return result
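
Examples #2 through #4 share one shape: the gRPC call returns either a data payload or an error payload, and the helper converts the error payload into an exception before returning. Below is a minimal, self-contained sketch of that shape; ExecutionData, ExecutionErrorData, UserCodeProcessError, and unpack_result are illustrative stand-ins, not Dagster APIs.

from dataclasses import dataclass
from typing import Union


@dataclass
class ExecutionData:
    value: str


@dataclass
class ExecutionErrorData:
    error: str


class UserCodeProcessError(Exception):
    pass


def unpack_result(result: Union[ExecutionData, ExecutionErrorData]) -> ExecutionData:
    # Mirror the helpers above: surface an error payload as an exception,
    # otherwise pass the data payload through to the caller.
    if isinstance(result, ExecutionErrorData):
        raise UserCodeProcessError(result.error)
    return result


print(unpack_result(ExecutionData(value="ok")))  # ExecutionData(value='ok')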
Example #5
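Helper that wraps a pipeline definition as an external pipeline: reconstructs the pipeline, derives a RepositoryHandle from a LocationHandle named 'test', and delegates to external_pipeline_from_recon_pipeline (older solid_subset API).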
def _external_pipeline_from_def(pipeline_def, solid_subset=None):
    recon_pipeline = reconstructable(pipeline_def)
    recon_repo = recon_pipeline.repository
    repo_def = recon_repo.get_definition()
    location_handle = LocationHandle('test', recon_repo.pointer)
    repository_handle = RepositoryHandle(repo_def.name, location_handle)
    return external_pipeline_from_recon_pipeline(
        reconstructable(pipeline_def),
        solid_subset=solid_subset,
        repository_handle=repository_handle,
    )
Example #6
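A variant of example #5 that uses RepositoryLocationHandle.create_in_process_location and a solid_selection parameter instead of solid_subset.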
def _external_pipeline_from_def(pipeline_def, solid_selection=None):
    recon_pipeline = reconstructable(pipeline_def)
    recon_repo = recon_pipeline.repository
    repo_def = recon_repo.get_definition()
    location_handle = RepositoryLocationHandle.create_in_process_location(recon_repo.pointer)
    repository_handle = RepositoryHandle(
        repository_name=repo_def.name, repository_location_handle=location_handle,
    )
    return external_pipeline_from_recon_pipeline(
        reconstructable(pipeline_def),
        solid_selection=solid_selection,
        repository_handle=repository_handle,
    )
Example #7
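A simpler in-process repository location constructor that builds a single external repository directly from the origin's reconstructable repository.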
    def __init__(self, origin: InProcessRepositoryLocationOrigin):
        self._origin = check.inst_param(origin, "origin", InProcessRepositoryLocationOrigin)

        self._recon_repo = self._origin.recon_repo

        repo_def = self._recon_repo.get_definition()
        pointer = self._recon_repo.pointer

        self._repository_code_pointer_dict = {repo_def.name: pointer}

        def_name = repo_def.name

        self._external_repo = external_repo_from_def(
            repo_def,
            RepositoryHandle(repository_name=def_name, repository_location=self),
        )
        self._repositories = {self._external_repo.name: self._external_repo}
Example #8
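Ephemeral variant of example #2: opens a temporary gRPC API client from the repository origin's loadable target and delegates to sync_get_external_schedule_execution_data_grpc.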
def sync_get_external_schedule_execution_data_ephemeral_grpc(
    instance: DagsterInstance,
    repository_handle: RepositoryHandle,
    schedule_name: str,
    scheduled_execution_time: Any,
):
    from dagster.grpc.client import ephemeral_grpc_api_client

    origin = repository_handle.get_external_origin()
    with ephemeral_grpc_api_client(
        origin.repository_location_origin.loadable_target_origin
    ) as api_client:
        return sync_get_external_schedule_execution_data_grpc(
            api_client,
            instance,
            repository_handle,
            schedule_name,
            scheduled_execution_time,
        )
Example #9
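Constructor for a gRPC-server-backed repository location: connects a DagsterGrpcClient, lists the server's repositories, optionally starts a heartbeat thread, streams external repository data into ExternalRepository objects, and cleans up if any step fails.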
    def __init__(
        self,
        origin: RepositoryLocationOrigin,
        host: Optional[str] = None,
        port: Optional[int] = None,
        socket: Optional[str] = None,
        server_id: Optional[str] = None,
        heartbeat: Optional[bool] = False,
        watch_server: Optional[bool] = True,
        grpc_server_registry: Optional[GrpcServerRegistry] = None,
    ):
        from dagster.grpc.client import DagsterGrpcClient, client_heartbeat_thread

        self._origin = check.inst_param(origin, "origin",
                                        RepositoryLocationOrigin)

        self.grpc_server_registry = check.opt_inst_param(
            grpc_server_registry, "grpc_server_registry", GrpcServerRegistry)

        if isinstance(self.origin, GrpcServerRepositoryLocationOrigin):
            self._port = self.origin.port
            self._socket = self.origin.socket
            self._host = self.origin.host
            self._use_ssl = bool(self.origin.use_ssl)
        else:
            self._port = check.opt_int_param(port, "port")
            self._socket = check.opt_str_param(socket, "socket")
            self._host = check.str_param(host, "host")
            self._use_ssl = False

        self._watch_thread_shutdown_event = None
        self._watch_thread = None

        self._heartbeat_shutdown_event = None
        self._heartbeat_thread = None

        self._heartbeat = check.bool_param(heartbeat, "heartbeat")
        self._watch_server = check.bool_param(watch_server, "watch_server")

        self.server_id = None
        self._external_repositories_data = None

        self._executable_path = None
        self._container_image = None
        self._repository_code_pointer_dict = None

        try:
            self.client = DagsterGrpcClient(
                port=self._port,
                socket=self._socket,
                host=self._host,
                use_ssl=self._use_ssl,
            )
            list_repositories_response = sync_list_repositories_grpc(
                self.client)

            self.server_id = server_id if server_id else sync_get_server_id(
                self.client)
            self.repository_names = set(
                symbol.repository_name
                for symbol in list_repositories_response.repository_symbols)

            if self._heartbeat:
                self._heartbeat_shutdown_event = threading.Event()

                self._heartbeat_thread = threading.Thread(
                    target=client_heartbeat_thread,
                    args=(
                        self.client,
                        self._heartbeat_shutdown_event,
                    ),
                    name="grpc-client-heartbeat",
                )
                self._heartbeat_thread.daemon = True
                self._heartbeat_thread.start()

            self._executable_path = list_repositories_response.executable_path
            self._repository_code_pointer_dict = (
                list_repositories_response.repository_code_pointer_dict)

            self._container_image = self._reload_current_image()

            self._external_repositories_data = sync_get_streaming_external_repositories_data_grpc(
                self.client,
                self,
            )

            self.external_repositories = {
                repo_name: ExternalRepository(
                    repo_data,
                    RepositoryHandle(
                        repository_name=repo_name,
                        repository_location=self,
                    ),
                )
                for repo_name, repo_data in
                self._external_repositories_data.items()
            }
        except:
            self.cleanup()
            raise
Example #10
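A variant of example #9 that additionally starts a watch thread (create_grpc_watch_thread) to notify LocationStateSubscribers when the server is updated or becomes unreachable.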
    def __init__(
        self,
        origin: RepositoryLocationOrigin,
        host: Optional[str] = None,
        port: Optional[int] = None,
        socket: Optional[str] = None,
        server_id: Optional[str] = None,
        heartbeat: Optional[bool] = False,
        watch_server: Optional[bool] = True,
        grpc_server_registry: Optional[GrpcServerRegistry] = None,
    ):
        from dagster.grpc.client import DagsterGrpcClient, client_heartbeat_thread
        from dagster.grpc.server_watcher import create_grpc_watch_thread

        self._origin = check.inst_param(origin, "origin", RepositoryLocationOrigin)

        self.grpc_server_registry = check.opt_inst_param(
            grpc_server_registry, "grpc_server_registry", GrpcServerRegistry
        )

        if isinstance(self.origin, GrpcServerRepositoryLocationOrigin):
            self._port = self.origin.port
            self._socket = self.origin.socket
            self._host = self.origin.host
            self._use_ssl = bool(self.origin.use_ssl)
        else:
            self._port = check.opt_int_param(port, "port")
            self._socket = check.opt_str_param(socket, "socket")
            self._host = check.str_param(host, "host")
            self._use_ssl = False

        self._watch_thread_shutdown_event = None
        self._watch_thread = None

        self._heartbeat_shutdown_event = None
        self._heartbeat_thread = None

        self._heartbeat = check.bool_param(heartbeat, "heartbeat")
        self._watch_server = check.bool_param(watch_server, "watch_server")

        self.server_id = None
        self._external_repositories_data = None

        self._executable_path = None
        self._container_image = None
        self._repository_code_pointer_dict = None

        try:
            self.client = DagsterGrpcClient(
                port=self._port,
                socket=self._socket,
                host=self._host,
                use_ssl=self._use_ssl,
            )
            list_repositories_response = sync_list_repositories_grpc(self.client)

            self.server_id = server_id if server_id else sync_get_server_id(self.client)
            self.repository_names = set(
                symbol.repository_name for symbol in list_repositories_response.repository_symbols
            )

            if self._heartbeat:
                self._heartbeat_shutdown_event = threading.Event()

                self._heartbeat_thread = threading.Thread(
                    target=client_heartbeat_thread,
                    args=(
                        self.client,
                        self._heartbeat_shutdown_event,
                    ),
                    name="grpc-client-heartbeat",
                )
                self._heartbeat_thread.daemon = True
                self._heartbeat_thread.start()

            if self._watch_server:
                self._state_subscribers: List[LocationStateSubscriber] = []
                self._watch_thread_shutdown_event, self._watch_thread = create_grpc_watch_thread(
                    self.client,
                    on_updated=lambda new_server_id: self._send_state_event_to_subscribers(
                        LocationStateChangeEvent(
                            LocationStateChangeEventType.LOCATION_UPDATED,
                            location_name=self.name,
                            message="Server has been updated.",
                            server_id=new_server_id,
                        )
                    ),
                    on_error=lambda: self._send_state_event_to_subscribers(
                        LocationStateChangeEvent(
                            LocationStateChangeEventType.LOCATION_ERROR,
                            location_name=self.name,
                            message="Unable to reconnect to server. You can reload the server once it is "
                            "reachable again",
                        )
                    ),
                )

                self._watch_thread.start()

            self._executable_path = list_repositories_response.executable_path
            self._repository_code_pointer_dict = (
                list_repositories_response.repository_code_pointer_dict
            )

            self._container_image = self._reload_current_image()

            self._external_repositories_data = sync_get_streaming_external_repositories_data_grpc(
                self.client,
                self,
            )

            self.external_repositories = {
                repo_name: ExternalRepository(
                    repo_data,
                    RepositoryHandle(
                        repository_name=repo_name,
                        repository_location=self,
                    ),
                )
                for repo_name, repo_data in self._external_repositories_data.items()
            }
        except:
            self.cleanup()
            raise
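
Both gRPC location constructors start background threads keyed off threading.Event objects (heartbeat and server watch) and rely on the event for shutdown. The sketch below shows that generic pattern in isolation; heartbeat_loop and send_heartbeat are illustrative names, not Dagster's client_heartbeat_thread.

import threading
import time


def heartbeat_loop(send_heartbeat, shutdown_event, interval=1.0):
    # Fire a heartbeat every `interval` seconds until the shutdown event is set.
    # Event.wait returns True as soon as the event is set, which ends the loop.
    while not shutdown_event.wait(timeout=interval):
        send_heartbeat()


shutdown_event = threading.Event()
heartbeat_thread = threading.Thread(
    target=heartbeat_loop,
    args=(lambda: print("heartbeat"), shutdown_event),
    name="grpc-client-heartbeat",
)
heartbeat_thread.daemon = True
heartbeat_thread.start()

time.sleep(3)         # let a few heartbeats fire
shutdown_event.set()  # signal the loop to stop
heartbeat_thread.join()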