Example #1
def serialize(obj):
    """This is the callback that will be used by numbuf.

  If numbuf does not know how to serialize an object, it will call this method.

  Args:
    obj (object): A Python object.

  Returns:
    A dictionary that has the key "_pyttype_" to identify the class, and
      contains all information needed to reconstruct the object.
  """
    class_id = class_identifier(type(obj))
    if class_id not in whitelisted_classes:
        raise Exception(
            "Ray does not know how to serialize objects of type {}. "
            "To fix this, call 'ray.register_class' with this class.".format(
                type(obj)))
    if class_id in classes_to_pickle:
        serialized_obj = {"data": pickling.dumps(obj)}
    elif class_id in custom_serializers.keys():
        serialized_obj = {"data": custom_serializers[class_id](obj)}
    else:
        # Handle the namedtuple case.
        if is_named_tuple(type(obj)):
            serialized_obj = {}
            serialized_obj["_ray_getnewargs_"] = obj.__getnewargs__()
        elif hasattr(obj, "__dict__"):
            serialized_obj = obj.__dict__
        else:
            raise Exception(
                "We do not know how to serialize the object '{}'".format(obj))
    result = dict(serialized_obj, **{"_pytype_": class_id})
    return result
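The dictionary produced above is only half of the round trip. The sketch below is a hypothetical counterpart for illustration, not code taken from Ray: it assumes the same whitelisted_classes, classes_to_pickle, and custom_serializers registries as the example, plus a custom_deserializers dict and a pickling.loads that inverts pickling.dumps.

def deserialize(serialized_obj):
    """Hypothetical inverse of serialize(), for illustration only."""
    class_id = serialized_obj["_pytype_"]
    cls = whitelisted_classes[class_id]
    if class_id in classes_to_pickle:
        return pickling.loads(serialized_obj["data"])
    if class_id in custom_deserializers:
        return custom_deserializers[class_id](serialized_obj["data"])
    if "_ray_getnewargs_" in serialized_obj:
        # Rebuild a namedtuple from the arguments saved by __getnewargs__.
        return cls(*serialized_obj["_ray_getnewargs_"])
    # Default case: restore the instance __dict__ captured by serialize().
    obj = cls.__new__(cls)
    obj.__dict__.update(
        {k: v for k, v in serialized_obj.items() if k != "_pytype_"})
    return obj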
Example #2
def export_actor_class(class_id, Class, actor_method_names, worker):
    if worker.mode is None:
        raise NotImplementedError("TODO(pcm): Cache actors")
    key = b"ActorClass:" + class_id
    d = {
        "driver_id": worker.task_driver_id.id(),
        "class_name": Class.__name__,
        "module": Class.__module__,
        "class": pickling.dumps(Class),
        "actor_method_names": json.dumps(list(actor_method_names))
    }
    worker.redis_client.hmset(key, d)
    worker.redis_client.rpush("Exports", key)
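Reading an exported class back out of Redis is not shown in these examples. The helper below is a minimal sketch, assuming a redis-py client (so hgetall returns byte keys and values) and that pickling.loads is the inverse of pickling.dumps; the name fetch_actor_class is made up here for illustration.

import json

def fetch_actor_class(redis_client, class_id):
    """Hypothetical reader for the hash written by export_actor_class."""
    d = redis_client.hgetall(b"ActorClass:" + class_id)
    return {
        "driver_id": d[b"driver_id"],
        "class_name": d[b"class_name"].decode("ascii"),
        "module": d[b"module"].decode("ascii"),
        # Assumes pickling.loads reverses the pickling.dumps call above.
        "class": pickling.loads(d[b"class"]),
        "actor_method_names": json.loads(d[b"actor_method_names"]),
    }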
Example #3
def export_actor(actor_id, Class, actor_method_names, num_cpus, num_gpus,
                 worker):
  """Export an actor to redis.

  Args:
    actor_id: The ID of the actor.
    Class: Name of the class to be exported as an actor.
    actor_method_names (list): A list of the names of this actor's methods.
    num_cpus (int): The number of CPUs that this actor requires.
    num_gpus (int): The number of GPUs that this actor requires.
  """
  ray.worker.check_main_thread()
  if worker.mode is None:
    raise NotImplementedError("TODO(pcm): Cache actors")
  key = "Actor:{}".format(actor_id.id())
  pickled_class = pickling.dumps(Class)

  # For now, all actor methods have 1 return value.
  driver_id = worker.task_driver_id.id()
  for actor_method_name in actor_method_names:
    function_id = get_actor_method_function_id(actor_method_name).id()
    worker.function_properties[driver_id][function_id] = (1, num_cpus,
                                                          num_gpus)

  # Get a list of the local schedulers from the client table.
  client_table = ray.global_state.client_table()
  local_schedulers = []
  for ip_address, clients in client_table.items():
    for client in clients:
      if client["ClientType"] == "local_scheduler":
        local_schedulers.append(client)
  # Select a local scheduler for the actor.
  local_scheduler_id, gpu_ids = select_local_scheduler(local_schedulers,
                                                       num_gpus, worker)

  # Really we should encode this message as a flatbuffer object. However, we're
  # having trouble getting that to work. It almost works, but in Python 2.7,
  # builder.CreateString fails on byte strings that contain characters outside
  # range(128).
  worker.redis_client.publish("actor_notifications",
                              actor_id.id() + driver_id + local_scheduler_id)

  d = {"driver_id": driver_id,
       "actor_id": actor_id.id(),
       "name": Class.__name__,
       "module": Class.__module__,
       "class": pickled_class,
       "gpu_ids": json.dumps(gpu_ids),
       "actor_method_names": json.dumps(list(actor_method_names))}
  worker.redis_client.hmset(key, d)
  worker.redis_client.rpush("Exports", key)
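The message published to "actor_notifications" in Example #3 is just three IDs concatenated into one byte string, so a subscriber has to know the ID length to split it back apart. The sketch below is hypothetical: it assumes fixed-length 20-byte IDs (not stated in the example) and a redis-py client.

ID_LENGTH = 20  # Assumed ID size; not taken from the examples.

def watch_actor_notifications(redis_client):
    """Hypothetical subscriber for the 'actor_notifications' channel."""
    pubsub = redis_client.pubsub()
    pubsub.subscribe("actor_notifications")
    for message in pubsub.listen():
        if message["type"] != "message":
            continue
        payload = message["data"]
        # Split the concatenated IDs using the assumed fixed length.
        actor_id = payload[:ID_LENGTH]
        driver_id = payload[ID_LENGTH:2 * ID_LENGTH]
        local_scheduler_id = payload[2 * ID_LENGTH:]
        print("actor", actor_id, "driver", driver_id,
              "scheduler", local_scheduler_id)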
Example #4
def export_actor(actor_id, Class, actor_method_names, num_cpus, num_gpus,
                 worker):
    """Export an actor to redis.

  Args:
    actor_id: The ID of the actor.
    Class: Name of the class to be exported as an actor.
    actor_method_names (list): A list of the names of this actor's methods.
    num_cpus (int): The number of CPUs that this actor requires.
    num_gpus (int): The number of GPUs that this actor requires.
  """
    ray.worker.check_main_thread()
    if worker.mode is None:
        raise NotImplementedError("TODO(pcm): Cache actors")
    key = "Actor:{}".format(actor_id.id())
    pickled_class = pickling.dumps(Class)

    # For now, all actor methods have 1 return value.
    driver_id = worker.task_driver_id.id()
    for actor_method_name in actor_method_names:
        function_id = get_actor_method_function_id(actor_method_name).id()
        worker.function_properties[driver_id][function_id] = (1, num_cpus,
                                                              num_gpus)

    # Select a local scheduler for the actor.
    local_schedulers = state.get_local_schedulers(worker)
    local_scheduler_id, gpu_ids = select_local_scheduler(
        local_schedulers, num_gpus, worker)

    worker.redis_client.publish("actor_notifications",
                                actor_id.id() + local_scheduler_id)

    d = {
        "driver_id": driver_id,
        "actor_id": actor_id.id(),
        "name": Class.__name__,
        "module": Class.__module__,
        "class": pickled_class,
        "gpu_ids": json.dumps(gpu_ids),
        "actor_method_names": json.dumps(list(actor_method_names))
    }
    worker.redis_client.hmset(key, d)
    worker.redis_client.rpush("Exports", key)
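select_local_scheduler is referenced in Examples #3 and #4 but never shown. The sketch below is purely a guess for illustration: the field names "NumGPUs" and "DBClientID" and the GPU-ID assignment are assumptions, not Ray's actual logic (Example #3 only shows that client-table entries carry a "ClientType" field).

import random

def select_local_scheduler_sketch(local_schedulers, num_gpus):
    """Hypothetical stand-in for select_local_scheduler (illustration only)."""
    # "NumGPUs" is an assumed field name for the scheduler's GPU capacity.
    candidates = [s for s in local_schedulers
                  if s.get("NumGPUs", 0) >= num_gpus]
    if not candidates:
        raise Exception(
            "No local scheduler has {} GPUs available.".format(num_gpus))
    chosen = random.choice(candidates)
    # Hand out the first num_gpus GPU indices; the real policy may differ.
    gpu_ids = list(range(num_gpus))
    # "DBClientID" is an assumed field name for the scheduler's ID.
    return chosen["DBClientID"], gpu_ids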
Example #5
def export_actor(actor_id, Class, worker):
    """Export an actor to redis.

  Args:
    actor_id: The ID of the actor.
    Class: Name of the class to be exported as an actor.
    worker: The worker class
  """
    ray.worker.check_main_thread()
    if worker.mode is None:
        raise NotImplementedError("TODO(pcm): Cache actors")
    key = "Actor:{}".format(actor_id.id())
    pickled_class = pickling.dumps(Class)

    # Select a local scheduler for the actor.
    local_schedulers = state.get_local_schedulers()
    local_scheduler_id = random.choice(local_schedulers)

    worker.redis_client.publish("actor_notifications",
                                actor_id.id() + local_scheduler_id)

    # The export counter is computed differently depending on whether we are
    # currently in a driver or a worker.
    if worker.mode in [ray.SCRIPT_MODE, ray.SILENT_MODE]:
        export_counter = worker.driver_export_counter
    elif worker.mode == ray.WORKER_MODE:
        # We don't actually need export counters for actors.
        export_counter = 0
    d = {
        "driver_id": worker.task_driver_id.id(),
        "actor_id": actor_id.id(),
        "name": Class.__name__,
        "module": Class.__module__,
        "class": pickled_class,
        "class_export_counter": export_counter
    }
    worker.redis_client.hmset(key, d)
    worker.redis_client.rpush("Exports", key)
    worker.driver_export_counter += 1
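Example #5 increments a driver-side export counter but never shows the consumer side. The loop below is a hypothetical sketch of how a worker might drain the "Exports" list while keeping its own counter; it only uses the hash fields written in Example #2 and standard redis-py calls, and is not code from Ray.

def process_new_exports(redis_client, export_counter):
    """Hypothetical worker-side pass over the 'Exports' list."""
    # Fetch every key pushed since the last pass.
    keys = redis_client.lrange("Exports", export_counter, -1)
    for key in keys:
        info = redis_client.hgetall(key)
        # A real worker would now unpickle the class and register the actor;
        # here we only show that the exported fields are available.
        print("new export:", key, sorted(info.keys()))
    return export_counter + len(keys)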