def _deserialize_pickle5_data(self, data):
    try:
        in_band, buffers = unpack_pickle5_buffers(data)
        if len(buffers) > 0:
            obj = pickle.loads(in_band, buffers=buffers)
        else:
            obj = pickle.loads(in_band)
    # cloudpickle does not provide error types
    except pickle.pickle.PicklingError:
        raise DeserializationError()
    return obj
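
# For context, the (in_band, buffers) pair handled above mirrors pickle
# protocol 5 out-of-band buffers. Below is a minimal standard-library sketch
# (Python 3.8+, or the pickle5 backport), not Ray's unpack_pickle5_buffers
# helper; numpy is only assumed here to produce a buffer-backed object.
import pickle

import numpy as np

# Protocol 5 lets the pickler hand large buffers to a callback instead of
# copying them into the in-band byte stream.
example_payload = {"weights": np.arange(1_000_000, dtype=np.float64)}
out_of_band = []
in_band_example = pickle.dumps(
    example_payload, protocol=5, buffer_callback=out_of_band.append)

# Loading needs both pieces again, just like the branch above that passes
# buffers=buffers to pickle.loads.
restored = pickle.loads(in_band_example, buffers=out_of_band)
assert np.array_equal(restored["weights"], example_payload["weights"])
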
def _deserialize_object(self, data, metadata, object_id):
    if metadata:
        if metadata == ray_constants.PICKLE5_BUFFER_METADATA:
            if not self.use_pickle:
                raise ValueError("Receiving pickle5 serialized objects "
                                 "while the serialization context is "
                                 "using pyarrow as the backend.")
            try:
                in_band, buffers = unpack_pickle5_buffers(data)
                if len(buffers) > 0:
                    obj = pickle.loads(in_band, buffers=buffers)
                else:
                    obj = pickle.loads(in_band)
            # cloudpickle does not provide error types
            except pickle.pickle.PicklingError:
                raise DeserializationError()
            # Check that there are no ObjectIDs serialized in arguments
            # that are inlined.
            if object_id.is_nil():
                assert len(self.get_and_clear_contained_object_ids()) == 0
            else:
                worker = ray.worker.global_worker
                worker.core_worker.add_contained_object_ids(
                    object_id,
                    self.get_and_clear_contained_object_ids(),
                )
            return obj
        # Check if the object should be returned as raw bytes.
        if metadata == ray_constants.RAW_BUFFER_METADATA:
            if data is None:
                return b""
            return data.to_pybytes()
        # Otherwise, return an exception object based on
        # the error type.
        error_type = int(metadata)
        if error_type == ErrorType.Value("WORKER_DIED"):
            return RayWorkerError()
        elif error_type == ErrorType.Value("ACTOR_DIED"):
            return RayActorError()
        elif error_type == ErrorType.Value("OBJECT_UNRECONSTRUCTABLE"):
            return UnreconstructableError(ray.ObjectID(object_id.binary()))
        else:
            assert error_type != ErrorType.Value("OBJECT_IN_PLASMA"), \
                "Tried to get object that has been promoted to plasma."
            assert False, "Unrecognized error type " + str(error_type)
    elif data:
        raise ValueError("non-null object should always have metadata")
    else:
        # Object isn't available in plasma. This should never be returned
        # to the user. We should only reach this line if this object was
        # deserialized as part of a list, and another object in the list
        # throws an exception.
        return plasma.ObjectNotAvailable
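
# A user-level sketch of the RAW_BUFFER_METADATA branch above: bytes objects
# are stored raw and come back verbatim (with b"" standing in for an empty
# buffer). This assumes a default local ray.init() and the serialization
# defaults of this Ray release; the decision to take the raw path is made by
# the serializer, which is not shown in this excerpt.
import ray

ray.init()

raw_id = ray.put(b"\x00\x01 raw payload, no pickling involved")
assert ray.get(raw_id) == b"\x00\x01 raw payload, no pickling involved"
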
def _deserialize_object_from_arrow(self, data, metadata, object_id):
    if metadata:
        if metadata == ray_constants.PICKLE5_BUFFER_METADATA:
            if not self.use_pickle:
                raise ValueError("Receiving pickle5 serialized objects "
                                 "while the serialization context is "
                                 "using pyarrow as the backend.")
            try:
                in_band, buffers = unpack_pickle5_buffers(data)
                if len(buffers) > 0:
                    return pickle.loads(in_band, buffers=buffers)
                else:
                    return pickle.loads(in_band)
            # cloudpickle does not provide error types
            except pickle.pickle.PicklingError:
                raise DeserializationError()
        # Check if the object should be returned as raw bytes.
        if metadata == ray_constants.RAW_BUFFER_METADATA:
            if data is None:
                return b""
            return data.to_pybytes()
        # Otherwise, return an exception object based on
        # the error type.
        error_type = int(metadata)
        if error_type == ErrorType.Value("WORKER_DIED"):
            return RayWorkerError()
        elif error_type == ErrorType.Value("ACTOR_DIED"):
            return RayActorError()
        elif error_type == ErrorType.Value("OBJECT_UNRECONSTRUCTABLE"):
            return UnreconstructableError(ray.ObjectID(object_id.binary()))
        else:
            assert error_type != ErrorType.Value("OBJECT_IN_PLASMA"), \
                "Tried to get object that has been promoted to plasma."
            assert False, "Unrecognized error type " + str(error_type)
    elif data:
        if self.use_pickle:
            raise ValueError("Receiving plasma serialized objects "
                             "while the serialization context is "
                             "using pickle5 as the backend.")
        try:
            # If data is not empty, deserialize the object.
            return pyarrow.deserialize(data, self.pyarrow_context)
        except pyarrow.DeserializationCallbackError:
            raise DeserializationError()
    else:
        # Object isn't available in plasma. This should never be returned
        # to the user. We should only reach this line if this object was
        # deserialized as part of a list, and another object in the list
        # throws an exception.
        return plasma.ObjectNotAvailable
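
# The pyarrow fallback branch above relies on pyarrow.serialize/deserialize,
# which only exist in the older pyarrow releases Ray pinned at the time (they
# were deprecated and later removed upstream). A rough sketch of that round
# trip, using pyarrow's default serialization context as a stand-in for
# self.pyarrow_context; numpy is assumed only for illustration.
import numpy as np
import pyarrow

arrow_context = pyarrow.default_serialization_context()
buf = pyarrow.serialize(np.arange(10), context=arrow_context).to_buffer()
assert np.array_equal(pyarrow.deserialize(buf, arrow_context), np.arange(10))
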
def _deserialize_pickle5_data(self, data):
    if not self.use_pickle:
        raise ValueError("Receiving pickle5 serialized objects "
                         "while the serialization context is "
                         "using a custom raw backend.")
    try:
        in_band, buffers = unpack_pickle5_buffers(data)
        if len(buffers) > 0:
            obj = pickle.loads(in_band, buffers=buffers)
        else:
            obj = pickle.loads(in_band)
    # cloudpickle does not provide error types
    except pickle.pickle.PicklingError:
        raise DeserializationError()
    return obj
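
# The len(buffers) > 0 check above exists because dumping an object with no
# buffer-backed fields never invokes buffer_callback, so only the in-band
# stream is needed on load. A small standard-library check (not Ray code):
import pickle

no_buffers = []
in_band_only = pickle.dumps(
    {"answer": 42}, protocol=5, buffer_callback=no_buffers.append)
assert no_buffers == []
assert pickle.loads(in_band_only) == {"answer": 42}
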