def export_actor_class(self, Class, actor_creation_function_descriptor,
                       actor_method_names):
    """Serialize an actor class and publish it to the GCS function table.

    Args:
        Class: the actor class object to export.
        actor_creation_function_descriptor: descriptor carrying the
            module name, class name and function id of the actor class.
        actor_method_names: iterable of the actor's method names.

    Raises:
        TypeError: if the actor class cannot be pickled.
    """
    fd = actor_creation_function_descriptor
    if self._worker.load_code_from_local:
        # A class that is importable locally needs no export. Dynamic
        # classes are not importable, so they are still pushed to GCS
        # even when load_code_from_local is set True.
        found = self.load_function_or_class_from_local(
            fd.module_name, fd.class_name)
        if found is not None:
            return

    # `current_job_id` shouldn't be NIL, unless:
    # 1) This worker isn't an actor;
    # 2) And a previous task started a background thread, which didn't
    #    finish before the task finished, and still uses Ray API
    #    after that.
    assert not self._worker.current_job_id.is_nil(), (
        "You might have started a background thread in a non-actor "
        "task, please make sure the thread finishes before the "
        "task finishes.")

    job_id = self._worker.current_job_id
    key = make_function_table_key(
        b"ActorClass",
        job_id,
        fd.function_id.binary(),
    )

    # Surface a readable, actionable error instead of the raw pickling
    # TypeError when the class is not serializable.
    try:
        serialized_actor_class = pickle.dumps(Class)
    except TypeError as e:
        msg = (
            "Could not serialize the actor class "
            f"{fd.repr}. "
            "Check https://docs.ray.io/en/master/serialization.html#troubleshooting "  # noqa
            "for more information.")
        raise TypeError(msg) from e

    actor_class_info = {
        "class_name": fd.class_name.split(".")[-1],
        "module": fd.module_name,
        "class": serialized_actor_class,
        "job_id": job_id.binary(),
        "collision_identifier": self.compute_collision_identifier(Class),
        "actor_method_names": json.dumps(list(actor_method_names)),
    }

    check_oversized_function(
        actor_class_info["class"],
        actor_class_info["class_name"],
        "actor",
        self._worker,
    )
    self._publish_actor_class_to_key(key, actor_class_info)
def export(self, remote_function):
    """Pickle a remote function and export it via the GCS kv store.

    Args:
        remote_function: the RemoteFunction object.
    """
    fd = remote_function._function_descriptor
    if self._worker.load_code_from_local:
        # A function that is importable locally needs no export. Dynamic
        # functions are not importable, so they are still exported to
        # GCS even when load_code_from_local is set True.
        found = self.load_function_or_class_from_local(
            fd.module_name, fd.function_name)
        if found is not None:
            return

    function = remote_function._function
    pickled_function = remote_function._pickled_function
    check_oversized_function(
        pickled_function,
        remote_function._function_name,
        "remote function",
        self._worker,
    )
    key = make_function_table_key(
        b"RemoteFunction",
        self._worker.current_job_id,
        fd.function_id.binary(),
    )
    # Another worker may already have published this function; avoid a
    # redundant write.
    if self._worker.gcs_client.internal_kv_exists(
            key, KV_NAMESPACE_FUNCTION_TABLE):
        return
    val = pickle.dumps({
        "job_id": self._worker.current_job_id.binary(),
        "function_id": fd.function_id.binary(),
        "function_name": remote_function._function_name,
        "module": function.__module__,
        "function": pickled_function,
        "collision_identifier": self.compute_collision_identifier(function),
        "max_calls": remote_function._max_calls,
    })
    self._worker.gcs_client.internal_kv_put(key, val, True,
                                            KV_NAMESPACE_FUNCTION_TABLE)
    self.export_key(key)
def _ensure_ref(self):
    """Lazily upload the pickled actor class, caching the resulting ref."""
    with self._lock:
        if self._ref is not None:
            return
        # Publish a sentinel first: the encoding can detect this
        # in-progress self-reference value and handle it correctly.
        self._ref = InProgressSentinel()
        data = ray.worker._dumps_from_client(self.actor_cls)
        # Check the pickled size before sending to the server — cheaper,
        # and it can run synchronously inside the remote() call.
        check_oversized_function(data, self._name, "actor", None)
        self._ref = ray.worker._put_pickled(
            data, client_ref_id=self._client_side_ref.id)
def export(self, remote_function):
    """Pickle a remote function and export it to redis.

    Args:
        remote_function: the RemoteFunction object.
    """
    fd = remote_function._function_descriptor
    if self._worker.load_code_from_local:
        # A function that is importable locally needs no export. Dynamic
        # functions are not importable, so they are still exported to
        # GCS even when load_code_from_local is set True.
        found = self.load_function_or_class_from_local(
            fd.module_name, fd.function_name)
        if found is not None:
            return

    function = remote_function._function
    pickled_function = remote_function._pickled_function
    check_oversized_function(pickled_function,
                             remote_function._function_name,
                             "remote function", self._worker)
    key = (b"RemoteFunction:" + self._worker.current_job_id.binary() +
           b":" + fd.function_id.binary())
    # Another worker may already have published this function; avoid a
    # redundant write.
    if self._worker.redis_client.exists(key) == 1:
        return
    self._worker.redis_client.hset(
        key,
        mapping={
            "job_id": self._worker.current_job_id.binary(),
            "function_id": fd.function_id.binary(),
            "function_name": remote_function._function_name,
            "module": function.__module__,
            "function": pickled_function,
            "collision_identifier":
                self.compute_collision_identifier(function),
            "max_calls": remote_function._max_calls
        })
    self._worker.redis_client.rpush("Exports", key)
def export_actor_class(self, Class, actor_creation_function_descriptor,
                       actor_method_names):
    """Pickle an actor class and publish it under its redis-style key.

    Args:
        Class: the actor class object to export.
        actor_creation_function_descriptor: descriptor carrying the
            module name, class name and function id of the actor class.
        actor_method_names: iterable of the actor's method names.

    Raises:
        TypeError: if the actor class cannot be pickled.
    """
    if self._worker.load_code_from_local:
        module_name, class_name = (
            actor_creation_function_descriptor.module_name,
            actor_creation_function_descriptor.class_name)
        # If the class is dynamic, we still export it to GCS
        # even if load_code_from_local is set True.
        if self.load_function_or_class_from_local(module_name,
                                                  class_name) is not None:
            return

    # `current_job_id` shouldn't be NIL, unless:
    # 1) This worker isn't an actor;
    # 2) And a previous task started a background thread, which didn't
    #    finish before the task finished, and still uses Ray API
    #    after that.
    assert not self._worker.current_job_id.is_nil(), (
        "You might have started a background thread in a non-actor "
        "task, please make sure the thread finishes before the "
        "task finishes.")
    job_id = self._worker.current_job_id
    key = (b"ActorClass:" + job_id.binary() + b":" +
           actor_creation_function_descriptor.function_id.binary())
    # Wrap serialization failures with an actionable message instead of
    # letting the pickler's raw TypeError propagate; this mirrors the
    # error handling of the GCS-based export path.
    try:
        serialized_actor_class = pickle.dumps(Class)
    except TypeError as e:
        msg = (
            "Could not serialize the actor class "
            f"{actor_creation_function_descriptor.repr}. "
            "Check https://docs.ray.io/en/master/serialization.html#troubleshooting "  # noqa
            "for more information.")
        raise TypeError(msg) from e
    actor_class_info = {
        "class_name": actor_creation_function_descriptor.class_name.split(
            ".")[-1],
        "module": actor_creation_function_descriptor.module_name,
        "class": serialized_actor_class,
        "job_id": job_id.binary(),
        "collision_identifier": self.compute_collision_identifier(Class),
        "actor_method_names": json.dumps(list(actor_method_names))
    }

    check_oversized_function(actor_class_info["class"],
                             actor_class_info["class_name"], "actor",
                             self._worker)
    self._publish_actor_class_to_key(key, actor_class_info)
def _ensure_ref(self):
    """Lazily pickle and upload the function, caching the resulting ref."""
    with self._lock:
        if self._ref is not None:
            return
        # Calling ray.put() on a recursive function would attempt to
        # encode this ClientRemoteFunc -- itself -- and recurse on
        # _ensure_ref forever. Publishing a sentinel first lets the
        # encoding detect the in-progress self reference and handle it
        # correctly.
        self._ref = InProgressSentinel()
        data = ray.worker._dumps_from_client(self._func)
        # Check the pickled size before sending to the server — cheaper,
        # and it can run synchronously inside the remote() call.
        check_oversized_function(data, self._name, "remote function", None)
        self._ref = ray.worker._put_pickled(
            data, client_ref_id=self._client_side_ref.id)
def export(self, remote_function):
    """Pickle a remote function and export it to redis.

    Args:
        remote_function: the RemoteFunction object.
    """
    # Nothing to publish when workers load code from the local
    # filesystem.
    if self._worker.load_code_from_local:
        return

    function = remote_function._function
    pickled_function = pickle.dumps(function)
    check_oversized_function(pickled_function,
                             remote_function._function_name,
                             "remote function", self._worker)
    fd = remote_function._function_descriptor
    key = (b"RemoteFunction:" + self._worker.current_job_id.binary() +
           b":" + fd.function_id.binary())
    self._worker.redis_client.hset(
        key,
        mapping={
            "job_id": self._worker.current_job_id.binary(),
            "function_id": fd.function_id.binary(),
            "function_name": remote_function._function_name,
            "module": function.__module__,
            "function": pickled_function,
            "collision_identifier":
                self.compute_collision_identifier(function),
            "max_calls": remote_function._max_calls
        })
    self._worker.redis_client.rpush("Exports", key)