def export_actor_class(self, Class, actor_creation_function_descriptor, actor_method_names):
    """Pickle an actor class and publish it under a job-scoped key.

    Args:
        Class: the actor class to export.
        actor_creation_function_descriptor: descriptor carrying the class
            name, module name, and function id used to build the key.
        actor_method_names: iterable of the actor's method names.
    """
    if self._worker.load_code_from_local:
        return
    # `current_job_id` shouldn't be NIL, unless:
    # 1) This worker isn't an actor;
    # 2) And a previous task started a background thread, which didn't
    #    finish before the task finished, and still uses Ray API
    #    after that.
    assert not self._worker.current_job_id.is_nil(), (
        "You might have started a background thread in a non-actor "
        "task, please make sure the thread finishes before the "
        "task finishes.")
    job = self._worker.current_job_id
    descriptor = actor_creation_function_descriptor
    export_key = (b"ActorClass:" + job.binary() + b":" +
                  descriptor.function_id.binary())
    class_info = {
        "class_name": descriptor.class_name,
        "module": descriptor.module_name,
        "class": pickle.dumps(Class),
        "job_id": job.binary(),
        "collision_identifier": self.compute_collision_identifier(Class),
        "actor_method_names": json.dumps(list(actor_method_names))
    }
    # Fail early (with a readable error) if the pickled class is too big.
    check_oversized_pickle(class_info["class"], class_info["class_name"],
                           "actor", self._worker)
    self._publish_actor_class_to_key(export_key, class_info)
def _do_export(self, remote_function):
    """Serialize a remote function and publish it to redis.

    Args:
        remote_function: the RemoteFunction object.
    """
    if self._worker.load_code_from_local:
        return
    fn = remote_function._function
    serialized = pickle.dumps(fn)
    # Fail early (with a readable error) if the pickle is too big.
    check_oversized_pickle(serialized, remote_function._function_name,
                           "remote function", self._worker)
    job_binary = self._worker.current_job_id.binary()
    descriptor = remote_function._function_descriptor
    export_key = (b"RemoteFunction:" + job_binary + b":" +
                  descriptor.function_id.binary())
    # Store the function's metadata and notify listeners via the
    # "Exports" list.
    self._worker.redis_client.hmset(
        export_key, {
            "job_id": job_binary,
            "function_id": descriptor.function_id.binary(),
            "name": remote_function._function_name,
            "module": fn.__module__,
            "function": serialized,
            "max_calls": remote_function._max_calls
        })
    self._worker.redis_client.rpush("Exports", export_key)
def export_actor_class(self, Class, actor_method_names, checkpoint_interval):
    """Pickle an actor class and export it (or cache it pre-init).

    Args:
        Class: the actor class to export.
        actor_method_names: iterable of the actor's method names.
        checkpoint_interval: checkpointing interval stored alongside the
            class definition.
    """
    function_descriptor = FunctionDescriptor.from_class(Class)
    # Key the exported class by its function id.
    key = b"ActorClass:" + function_descriptor.function_id.id()
    actor_class_info = {
        "class_name": Class.__name__,
        "module": Class.__module__,
        "class": pickle.dumps(Class),
        "checkpoint_interval": checkpoint_interval,
        "actor_method_names": json.dumps(list(actor_method_names))
    }
    # Fail early (with a readable error) if the pickled class is too big.
    check_oversized_pickle(actor_class_info["class"],
                           actor_class_info["class_name"], "actor",
                           self._worker)
    if self._worker.mode is None:
        # This means that 'ray.init()' has not been called yet and so we
        # must cache the actor class definition and export it when
        # 'ray.init()' is called.
        assert self._actors_to_export is not None
        self._actors_to_export.append((key, actor_class_info))
        # This caching code path is currently not used because we only
        # export actor class definitions lazily when we instantiate the
        # actor for the first time.
        assert False, "This should be unreachable."
    else:
        self._publish_actor_class_to_key(key, actor_class_info)
def export_actor_class(class_id, Class, actor_method_names, checkpoint_interval, worker):
    """Pickle an actor class and export it (or cache it pre-init).

    Args:
        class_id: binary id used to build the redis key.
        Class: the actor class to export.
        actor_method_names: iterable of the actor's method names.
        checkpoint_interval: checkpointing interval stored alongside the
            class definition.
        worker: the worker performing the export.
    """
    key = b"ActorClass:" + class_id
    class_info = {
        "class_name": Class.__name__,
        "module": Class.__module__,
        "class": pickle.dumps(Class),
        "checkpoint_interval": checkpoint_interval,
        "actor_method_names": json.dumps(list(actor_method_names))
    }
    # Fail early (with a readable error) if the pickled class is too big.
    check_oversized_pickle(class_info["class"], class_info["class_name"],
                           "actor", worker)
    if worker.mode is not None:
        publish_actor_class_to_key(key, class_info, worker)
    else:
        # This means that 'ray.init()' has not been called yet and so we
        # must cache the actor class definition and export it when
        # 'ray.init()' is called.
        assert worker.cached_remote_functions_and_actors is not None
        worker.cached_remote_functions_and_actors.append(
            ("actor", (key, class_info)))
        # This caching code path is currently not used because we only
        # export actor class definitions lazily when we instantiate the
        # actor for the first time.
        assert False, "This should be unreachable."
def _do_export(self, remote_function):
    """Pickle a remote function and export it to redis.

    Args:
        remote_function: the RemoteFunction object.
    """
    if self._worker.load_code_from_local:
        return
    # Work around limitations of Python pickling: a function cannot
    # normally pickle a reference to itself.
    function = remote_function._function
    # Remember whether the function's name was already bound in its module
    # globals (and to what), so we can restore it after pickling.
    function_name_global_valid = function.__name__ in function.__globals__
    function_name_global_value = function.__globals__.get(
        function.__name__)
    # Allow the function to reference itself as a global variable
    if not is_cython(function):
        function.__globals__[function.__name__] = remote_function
    try:
        pickled_function = pickle.dumps(function)
    finally:
        # Undo our changes
        # NOTE(review): if is_cython(function) is True and the name was not
        # already a global, the `del` below would raise KeyError — looks
        # like this path assumes non-cython functions; confirm.
        if function_name_global_valid:
            function.__globals__[function.__name__] = (
                function_name_global_value)
        else:
            del function.__globals__[function.__name__]
    # Fail early (with a readable error) if the pickle is too big.
    check_oversized_pickle(pickled_function, remote_function._function_name,
                           "remote function", self._worker)
    key = (b"RemoteFunction:" + self._worker.current_job_id.binary() + b":" +
           remote_function._function_descriptor.function_id.binary())
    # Store the function's metadata and notify listeners via "Exports".
    self._worker.redis_client.hmset(
        key, {
            "job_id": self._worker.current_job_id.binary(),
            "function_id": remote_function._function_descriptor.function_id.binary(),
            "name": remote_function._function_name,
            "module": function.__module__,
            "function": pickled_function,
            "max_calls": remote_function._max_calls
        })
    self._worker.redis_client.rpush("Exports", key)
def _do_export(self, remote_function):
    """Pickle a remote function and export it to redis.

    Args:
        remote_function: the RemoteFunction object.
    """
    if self._worker.load_code_from_local:
        return
    # Work around limitations of Python pickling: temporarily bind the
    # function's own name in its module globals to the RemoteFunction so
    # the function can reference itself while being pickled.
    fn = remote_function._function
    fn_name = fn.__name__
    fn_globals = fn.__globals__
    name_was_bound = fn_name in fn_globals
    previous_binding = fn_globals.get(fn_name)
    if not is_cython(fn):
        fn_globals[fn_name] = remote_function
    try:
        pickled_function = pickle.dumps(fn)
    finally:
        # Restore the module globals to their original state.
        if name_was_bound:
            fn_globals[fn_name] = previous_binding
        else:
            del fn_globals[fn_name]
    # Fail early (with a readable error) if the pickle is too big.
    check_oversized_pickle(pickled_function, remote_function._function_name,
                           "remote function", self._worker)
    driver_binary = self._worker.task_driver_id.binary()
    descriptor = remote_function._function_descriptor
    key = (b"RemoteFunction:" + driver_binary + b":" +
           descriptor.function_id.binary())
    # Store the function's metadata and notify listeners via "Exports".
    self._worker.redis_client.hmset(
        key, {
            "driver_id": driver_binary,
            "function_id": descriptor.function_id.binary(),
            "name": remote_function._function_name,
            "module": fn.__module__,
            "function": pickled_function,
            "max_calls": remote_function._max_calls
        })
    self._worker.redis_client.rpush("Exports", key)
def export_actor_class(self, Class, actor_method_names):
    """Pickle an actor class and publish it under a job-scoped key.

    Args:
        Class: the actor class to export.
        actor_method_names: iterable of the actor's method names.
    """
    if self._worker.load_code_from_local:
        return
    descriptor = FunctionDescriptor.from_class(Class)
    # `current_job_id` shouldn't be NIL, unless:
    # 1) This worker isn't an actor;
    # 2) And a previous task started a background thread, which didn't
    #    finish before the task finished, and still uses Ray API
    #    after that.
    assert not self._worker.current_job_id.is_nil(), (
        "You might have started a background thread in a non-actor task, "
        "please make sure the thread finishes before the task finishes.")
    job = self._worker.current_job_id
    key = (b"ActorClass:" + job.binary() + b":" +
           descriptor.function_id.binary())
    class_info = {
        "class_name": Class.__name__,
        "module": Class.__module__,
        "class": pickle.dumps(Class),
        "job_id": job.binary(),
        "actor_method_names": json.dumps(list(actor_method_names))
    }
    # Fail early (with a readable error) if the pickled class is too big.
    check_oversized_pickle(class_info["class"], class_info["class_name"],
                           "actor", self._worker)
    if self._worker.mode is not None:
        self._publish_actor_class_to_key(key, class_info)
    else:
        # This means that 'ray.init()' has not been called yet and so we
        # must cache the actor class definition and export it when
        # 'ray.init()' is called.
        assert self._actors_to_export is not None
        self._actors_to_export.append((key, class_info))
        # This caching code path is currently not used because we only
        # export actor class definitions lazily when we instantiate the
        # actor for the first time.
        assert False, "This should be unreachable."
def export_actor_class(self, Class, actor_method_names):
    """Pickle an actor class and publish it under a driver-scoped key.

    Args:
        Class: the actor class to export.
        actor_method_names: iterable of the actor's method names.
    """
    if self._worker.load_code_from_local:
        return
    function_descriptor = FunctionDescriptor.from_class(Class)
    # `task_driver_id` shouldn't be NIL, unless:
    # 1) This worker isn't an actor;
    # 2) And a previous task started a background thread, which didn't
    #    finish before the task finished, and still uses Ray API
    #    after that.
    assert not self._worker.task_driver_id.is_nil(), (
        "You might have started a background thread in a non-actor task, "
        "please make sure the thread finishes before the task finishes.")
    driver_id = self._worker.task_driver_id
    key = (b"ActorClass:" + driver_id.binary() + b":" +
           function_descriptor.function_id.binary())
    actor_class_info = {
        "class_name": Class.__name__,
        "module": Class.__module__,
        "class": pickle.dumps(Class),
        "driver_id": driver_id.binary(),
        "actor_method_names": json.dumps(list(actor_method_names))
    }
    # Fail early (with a readable error) if the pickled class is too big.
    check_oversized_pickle(actor_class_info["class"],
                           actor_class_info["class_name"], "actor",
                           self._worker)
    if self._worker.mode is None:
        # This means that 'ray.init()' has not been called yet and so we
        # must cache the actor class definition and export it when
        # 'ray.init()' is called.
        assert self._actors_to_export is not None
        self._actors_to_export.append((key, actor_class_info))
        # This caching code path is currently not used because we only
        # export actor class definitions lazily when we instantiate the
        # actor for the first time.
        assert False, "This should be unreachable."
    else:
        self._publish_actor_class_to_key(key, actor_class_info)