Example #1
    def __init__(self, original_class: type):
        actor_methods = inspect.getmembers(original_class,
                                           is_function_or_method)
        self.cls = original_class
        self.module = original_class.__module__
        self.name = original_class.__name__
        self.qualname = original_class.__qualname__
        self.methods = dict(actor_methods)

        # Extract the signatures of each of the methods. This will be used
        # to catch some errors if the methods are called with inappropriate
        # arguments.
        self.signatures = {}
        for method_name, method in actor_methods:
            # Whether or not this method requires binding of its first
            # argument. For class and static methods, we do not want to bind
            # the first argument, but we do for instance methods.
            method = inspect.unwrap(method)
            is_bound = (is_class_method(method)
                        or is_static_method(original_class, method_name))

            # Print a warning message if the method signature is not
            # supported. We don't raise an exception because if the actor
            # inherits from a class that has a method whose signature we
            # don't support, there may not be much the user can do about it.
            self.signatures[method_name] = signature.extract_signature(
                method, ignore_first=not is_bound)
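
The snippet above records one trimmed signature per actor method. Below is a minimal, standalone sketch of the same pattern using only the standard `inspect` module; `extract_method_signatures` and the `Counter` class are illustrative names, and this is not Ray's `signature.extract_signature`:

import inspect

def is_function_or_method(member) -> bool:
    return inspect.isfunction(member) or inspect.ismethod(member)

def extract_method_signatures(cls: type) -> dict:
    """Collect one inspect.Signature per method, dropping `self` for instance methods."""
    signatures = {}
    for name, member in inspect.getmembers(cls, is_function_or_method):
        func = inspect.unwrap(member)
        # Static methods have no implicit first argument, and class methods come
        # back already bound to the class, so only plain functions need trimming.
        raw_attr = inspect.getattr_static(cls, name)
        is_instance_method = not isinstance(raw_attr, (staticmethod, classmethod))
        params = list(inspect.signature(func).parameters.values())
        if is_instance_method and params:
            params = params[1:]  # drop `self`
        signatures[name] = inspect.Signature(parameters=params)
    return signatures

class Counter:
    def __init__(self, start: int = 0):
        self.value = start

    def add(self, delta: int) -> int:
        self.value += delta
        return self.value

    @staticmethod
    def describe(label: str) -> str:
        return f"counter:{label}"

# `__init__` and `add` lose their leading `self`; the static `describe` keeps `label`.
print(extract_method_signatures(Counter))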
Example #2
        async def batch_wrapper(*args, **kwargs):
            self = extract_self_if_method_call(args, _func)
            flattened_args: List = flatten_args(extract_signature(_func), args, kwargs)

            if self is None:
                # For functions, inject the batch queue as an
                # attribute of the function.
                batch_queue_object = _func
            else:
                # For methods, inject the batch queue as an
                # attribute of the object.
                batch_queue_object = self
                # Trim the self argument (and the DUMMY_TYPE placeholder that
                # flatten_args inserts before each positional arg) from methods.
                flattened_args = flattened_args[2:]

            # The first time the function runs, we lazily construct the batch
            # queue and inject it under a custom attribute name. On subsequent
            # runs, we just get a reference to the attribute.
            batch_queue_attr = f"__serve_batch_queue_{_func.__name__}"
            if not hasattr(batch_queue_object, batch_queue_attr):
                batch_queue = _BatchQueue(max_batch_size, batch_wait_timeout_s, _func)
                setattr(batch_queue_object, batch_queue_attr, batch_queue)
            else:
                batch_queue = getattr(batch_queue_object, batch_queue_attr)

            future = asyncio.get_event_loop().create_future()
            batch_queue.put(SingleRequest(self, flattened_args, future))

            # This will raise if the underlying call raised an exception.
            return await future
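
Example #2 lazily attaches a batch queue to the function (or bound object) on the first call and hands each caller an asyncio future that is resolved once its batch has run. The sketch below isolates that lazy-attribute-plus-future pattern; `batched`, `_drain_loop`, and the `__batch_queue_...` attribute names are invented for the sketch, there is no timeout handling, and this is not Serve's `_BatchQueue` implementation:

import asyncio
from typing import Any, Callable, List, Tuple

async def _drain_loop(queue: "asyncio.Queue", handler: Callable,
                      max_batch_size: int) -> None:
    """Background task: pull queued items and run the handler on whole batches."""
    while True:
        batch: List[Tuple[Any, asyncio.Future]] = [await queue.get()]
        while len(batch) < max_batch_size and not queue.empty():
            batch.append(queue.get_nowait())
        results = await handler([item for item, _ in batch])
        for (_, future), result in zip(batch, results):
            future.set_result(result)

def batched(handler: Callable, max_batch_size: int = 4) -> Callable:
    """Return a single-item entry point backed by a lazily created batch queue."""
    queue_attr = f"__batch_queue_{handler.__name__}"  # invented attribute name

    async def submit(item: Any) -> Any:
        # First call: create the queue and drain task, then stash both on the
        # handler itself so every later call reuses the same queue.
        if not hasattr(handler, queue_attr):
            queue: asyncio.Queue = asyncio.Queue()
            setattr(handler, queue_attr, queue)
            drain = asyncio.get_running_loop().create_task(
                _drain_loop(queue, handler, max_batch_size))
            setattr(handler, queue_attr + "_drain", drain)  # keep a strong reference
        queue = getattr(handler, queue_attr)
        future = asyncio.get_running_loop().create_future()
        await queue.put((item, future))
        return await future  # resolved by the drain task

    return submit

async def double_all(items: List[int]) -> List[int]:
    return [x * 2 for x in items]  # the handler always sees a whole batch

async def main() -> None:
    submit = batched(double_all)
    print(await asyncio.gather(*(submit(i) for i in range(5))))

asyncio.run(main())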
Example #3
    def __init__(self, original_class, method: Callable, method_name: str,
                 runtime_options: WorkflowStepRuntimeOptions):
        self._original_class = original_class
        self._original_method = method
        # Extract the signature of the method. This will be used
        # to catch some errors if the methods are called with inappropriate
        # arguments.

        # Whether or not this method requires binding of its first
        # argument. For class and static methods, we do not want to bind
        # the first argument, but we do for instance methods.
        method = inspect.unwrap(method)
        is_bound = (is_class_method(method)
                    or is_static_method(original_class, method_name))

        # Print a warning message if the method signature is not
        # supported. We don't raise an exception because if the actor
        # inherits from a class that has a method whose signature we
        # don't support, there may not be much the user can do about it.
        self._signature = signature.extract_signature(
            method, ignore_first=not is_bound)

        self._method = method
        self._method_name = method_name
        self._options = runtime_options
        self._name = None
        self._user_metadata = {}

        # Attach properties to the original function, so we can create a
        # workflow step with the original function inside a virtual actor.
        self._original_method.step = self.step
        self._original_method.options = self.options
Example #4
    def __init__(self,
                 func: Callable,
                 max_retries=1,
                 catch_exceptions=False,
                 ray_options=None):
        if not isinstance(max_retries, int) or max_retries < 1:
            raise ValueError("max_retries should be greater than or equal to 1.")
        if ray_options is not None and not isinstance(ray_options, dict):
            raise ValueError("ray_options must be a dict.")

        self._func = func
        self._max_retries = max_retries
        self._catch_exceptions = catch_exceptions
        self._ray_options = ray_options or {}
        self._func_signature = signature.extract_signature(func)

        # Override signature and docstring
        @functools.wraps(func)
        def _build_workflow(*args, **kwargs) -> Workflow:
            flattened_args = signature.flatten_args(self._func_signature, args,
                                                    kwargs)
            workflow_inputs = serialization_context.make_workflow_inputs(
                flattened_args)
            workflow_data = WorkflowData(
                func_body=self._func,
                inputs=workflow_inputs,
                max_retries=self._max_retries,
                catch_exceptions=self._catch_exceptions,
                ray_options=self._ray_options,
            )
            return Workflow(workflow_data)

        self.step = _build_workflow
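
Example #4 validates its options and then uses `functools.wraps` so that `.step` keeps the wrapped function's name and docstring while deferring the actual call into a `Workflow` object. Here is a self-contained sketch of that wrap-and-defer shape, with a hypothetical `DeferredCall` standing in for Ray's `Workflow`:

import functools
from dataclasses import dataclass
from typing import Any, Callable, Dict, Tuple

@dataclass
class DeferredCall:
    """Stand-in for the Workflow object: a call captured now, executed later."""
    func: Callable
    args: Tuple[Any, ...]
    kwargs: Dict[str, Any]
    max_retries: int = 1

    def run(self) -> Any:
        last_exc: Exception = RuntimeError("never attempted")
        for _ in range(self.max_retries):
            try:
                return self.func(*self.args, **self.kwargs)
            except Exception as exc:  # sketch only: retry on any error
                last_exc = exc
        raise last_exc

class StepFunction:
    """Minimal sketch of the wrapper: validate options, expose a `.step` builder."""

    def __init__(self, func: Callable, max_retries: int = 1):
        if not isinstance(max_retries, int) or max_retries < 1:
            raise ValueError("max_retries should be greater than or equal to 1.")
        self._func = func
        self._max_retries = max_retries

        @functools.wraps(func)  # keep the original name and docstring
        def _build(*args, **kwargs) -> DeferredCall:
            return DeferredCall(func, args, kwargs, max_retries=self._max_retries)

        self.step = _build

def add(a: int, b: int) -> int:
    return a + b

deferred = StepFunction(add, max_retries=2).step(1, 2)
print(deferred.run())  # -> 3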
Example #5
    def __init__(self, actor_cls, options=None):
        self.actor_cls = actor_cls
        self._lock = threading.Lock()
        self._name = actor_cls.__name__
        self._init_signature = inspect.Signature(parameters=extract_signature(
            actor_cls.__init__, ignore_first=True))
        self._ref = None
        self._client_side_ref = ClientSideRefID.generate_id()
        self._options = validate_options(options)
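
Example #5 keeps an `inspect.Signature` for `__init__` with `self` dropped (`ignore_first=True`). One typical use of such a pre-extracted signature is rejecting bad argument lists early via `Signature.bind`, sketched here with plain `inspect` and a made-up `Greeter` class rather than Ray's helpers:

import inspect

class Greeter:
    def __init__(self, name: str, punctuation: str = "!"):
        self.name = name
        self.punctuation = punctuation

# Build a Signature for __init__ with `self` removed, as ignore_first=True would.
params = list(inspect.signature(Greeter.__init__).parameters.values())[1:]
init_signature = inspect.Signature(parameters=params)

init_signature.bind("Ada")                   # ok
init_signature.bind("Ada", punctuation="?")  # ok
try:
    init_signature.bind()                    # missing the required 'name'
except TypeError as exc:
    print(f"rejected before any remote call: {exc}")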
Example #6
    def __init__(
        self,
        func: Callable,
        *,
        step_options: "WorkflowStepRuntimeOptions" = None,
        name: Optional[str] = None,
        metadata: Optional[Dict[str, Any]] = None,
    ):
        if metadata is not None:
            if not isinstance(metadata, dict):
                raise ValueError("metadata must be a dict.")
            for k, v in metadata.items():
                try:
                    json.dumps(v)
                except TypeError as e:
                    raise ValueError(
                        "metadata values must be JSON serializable, "
                        "however '{}' has a value whose {}.".format(k, e)
                    )
        self._func = func
        self._step_options = step_options
        self._func_signature = signature.extract_signature(func)
        self._name = name or ""
        self._user_metadata = metadata or {}

        # Override signature and docstring
        @functools.wraps(func)
        def _build_workflow(*args, **kwargs) -> Workflow:
            flattened_args = signature.flatten_args(self._func_signature, args, kwargs)

            def prepare_inputs():
                ensure_ray_initialized()
                return serialization_context.make_workflow_inputs(flattened_args)

            nonlocal step_options
            if step_options is None:
                step_options = WorkflowStepRuntimeOptions.make(
                    step_type=StepType.FUNCTION
                )
            # We could have "checkpoint=None" when we use @workflow.step
            # with arguments. Avoid this by updating it here.
            step_options.checkpoint = _inherit_checkpoint_option(
                step_options.checkpoint
            )

            workflow_data = WorkflowData(
                func_body=self._func,
                inputs=None,
                step_options=step_options,
                name=self._name,
                user_metadata=self._user_metadata,
            )
            return Workflow(workflow_data, prepare_inputs)

        self.step = _build_workflow
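
Examples #6 and #8 inline the same metadata check, and Example #9 delegates it to a `validate_user_metadata` helper. The sketch below shows what such a helper plausibly does, based only on the rule visible in these examples (every value must survive `json.dumps`); the error wording is my own, not Ray's:

import json
from typing import Any, Dict, Optional

def validate_user_metadata(metadata: Optional[Dict[str, Any]]) -> None:
    """Reject metadata that could not be stored as JSON alongside a step."""
    if metadata is None:
        return
    if not isinstance(metadata, dict):
        raise ValueError("metadata must be a dict.")
    for key, value in metadata.items():
        try:
            json.dumps(value)
        except TypeError as exc:
            raise ValueError(
                f"metadata value for key '{key}' is not JSON serializable: {exc}"
            ) from exc

validate_user_metadata({"owner": "alice", "priority": 3})  # passes silently
try:
    validate_user_metadata({"handle": object()})           # not serializable
except ValueError as exc:
    print(exc)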
Example #7
    def create(cls, modified_class, actor_creation_function_descriptor):
        # Try to create an instance from cache.
        cached_meta = cls._cache.get(actor_creation_function_descriptor)
        if cached_meta is not None:
            return cached_meta

        # Create an instance without __init__ called.
        self = cls.__new__(cls)

        actor_methods = inspect.getmembers(modified_class,
                                           is_function_or_method)
        self.methods = dict(actor_methods)

        # Extract the signatures of each of the methods. This will be used
        # to catch some errors if the methods are called with inappropriate
        # arguments.
        self.decorators = {}
        self.signatures = {}
        self.num_returns = {}
        self.concurrency_group_for_methods = {}

        for method_name, method in actor_methods:
            # Whether or not this method requires binding of its first
            # argument. For class and static methods, we do not want to bind
            # the first argument, but we do for instance methods.
            method = inspect.unwrap(method)
            is_bound = (is_class_method(method)
                        or is_static_method(modified_class, method_name))

            # Print a warning message if the method signature is not
            # supported. We don't raise an exception because if the actor
            # inherits from a class that has a method whose signature we
            # don't support, there may not be much the user can do about it.
            self.signatures[method_name] = signature.extract_signature(
                method, ignore_first=not is_bound)
            # Set the default number of return values for this method.
            if hasattr(method, "__ray_num_returns__"):
                self.num_returns[method_name] = (method.__ray_num_returns__)
            else:
                self.num_returns[method_name] = (
                    ray_constants.DEFAULT_ACTOR_METHOD_NUM_RETURN_VALS)

            if hasattr(method, "__ray_invocation_decorator__"):
                self.decorators[method_name] = (
                    method.__ray_invocation_decorator__)

            if hasattr(method, "__ray_concurrency_group__"):
                self.concurrency_group_for_methods[method_name] = (
                    method.__ray_concurrency_group__)

        # Update cache.
        cls._cache[actor_creation_function_descriptor] = self
        return self
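
Example #7 avoids repeated introspection by caching one metadata object per function descriptor and allocating it with `cls.__new__` so that `__init__` never runs for cached instances. A reduced sketch of that create-or-reuse pattern, with a plain string cache key and a hypothetical `ClassMetadata` class in place of the real descriptor-keyed cache:

import inspect
from typing import Dict, List

class ClassMetadata:
    """Sketch of the pattern: build the metadata once per key, then reuse it."""

    _cache: Dict[str, "ClassMetadata"] = {}

    @classmethod
    def create(cls, target_class: type, cache_key: str) -> "ClassMetadata":
        cached = cls._cache.get(cache_key)
        if cached is not None:
            return cached

        # Allocate without running __init__ so construction stays in one place.
        self = cls.__new__(cls)
        self.name = target_class.__name__
        self.method_names: List[str] = [
            name for name, _ in inspect.getmembers(target_class, inspect.isfunction)
        ]

        cls._cache[cache_key] = self
        return self

class Worker:
    def ping(self):
        return "pong"

meta_a = ClassMetadata.create(Worker, "worker-v1")
meta_b = ClassMetadata.create(Worker, "worker-v1")
print(meta_a is meta_b, meta_a.method_names)  # True ['ping']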
Example #8
    def __init__(self,
                 func: Callable,
                 max_retries=3,
                 catch_exceptions=False,
                 name=None,
                 metadata=None,
                 ray_options=None):
        if not isinstance(max_retries, int) or max_retries < 1:
            raise ValueError("max_retries should be greater than or equal to 1.")
        if ray_options is not None and not isinstance(ray_options, dict):
            raise ValueError("ray_options must be a dict.")
        if metadata is not None:
            if not isinstance(metadata, dict):
                raise ValueError("metadata must be a dict.")
            for k, v in metadata.items():
                try:
                    json.dumps(v)
                except TypeError as e:
                    raise ValueError(
                        "metadata values must be JSON serializable, "
                        "however '{}' has a value whose {}.".format(k, e))

        self._func = func
        self._max_retries = max_retries
        self._catch_exceptions = catch_exceptions
        self._ray_options = ray_options or {}
        self._func_signature = signature.extract_signature(func)
        self._name = name or ""
        self._user_metadata = metadata or {}

        # Override signature and docstring
        @functools.wraps(func)
        def _build_workflow(*args, **kwargs) -> Workflow:
            flattened_args = signature.flatten_args(self._func_signature, args,
                                                    kwargs)

            def prepare_inputs():
                ensure_ray_initialized()
                return serialization_context.make_workflow_inputs(
                    flattened_args)

            workflow_data = WorkflowData(
                func_body=self._func,
                step_type=StepType.FUNCTION,
                inputs=None,
                max_retries=self._max_retries,
                catch_exceptions=self._catch_exceptions,
                ray_options=self._ray_options,
                name=self._name,
                user_metadata=self._user_metadata)
            return Workflow(workflow_data, prepare_inputs)

        self.step = _build_workflow
Example #9
    def __init__(
        self,
        func: Callable,
        *,
        step_options: "WorkflowStepRuntimeOptions" = None,
        name: Optional[str] = None,
        metadata: Optional[Dict[str, Any]] = None,
    ):
        validate_user_metadata(metadata)
        self._func = func
        self._step_options = step_options
        self._func_signature = signature.extract_signature(func)
        self._name = name or ""
        self._user_metadata = metadata or {}

        # Override signature and docstring
        @functools.wraps(func)
        def _build_workflow(*args, **kwargs) -> Workflow:
            flattened_args = signature.flatten_args(self._func_signature, args,
                                                    kwargs)

            def prepare_inputs():
                from ray.workflow.api import _ensure_workflow_initialized

                _ensure_workflow_initialized()
                return serialization_context.make_workflow_inputs(
                    flattened_args)

            nonlocal step_options
            if step_options is None:
                step_options = WorkflowStepRuntimeOptions.make(
                    step_type=StepType.FUNCTION)
            # We could have "checkpoint=None" when we use @workflow.step
            # with arguments. Avoid this by updating it here.
            step_options.checkpoint = _inherit_checkpoint_option(
                step_options.checkpoint)

            workflow_data = WorkflowData(
                func_body=self._func,
                inputs=None,
                step_options=step_options,
                name=self._name,
                user_metadata=self._user_metadata,
            )
            return Workflow(workflow_data, prepare_inputs)

        self.step = _build_workflow
Example #10
    def __init__(self,
                 actor_ref: ClientActorRef,
                 actor_class: Optional[ClientActorClass] = None):
        self.actor_ref = actor_ref
        self._dir: Optional[List[str]] = None
        if actor_class is not None:
            self._method_num_returns = {}
            self._method_signatures = {}
            for method_name, method_obj in inspect.getmembers(
                    actor_class.actor_cls, is_function_or_method):
                self._method_num_returns[method_name] = getattr(
                    method_obj, "__ray_num_returns__", None)
                self._method_signatures[method_name] = inspect.Signature(
                    parameters=extract_signature(
                        method_obj,
                        ignore_first=(not (
                            is_class_method(method_obj) or is_static_method(
                                actor_class.actor_cls, method_name)))))
        else:
            self._method_num_returns = None
            self._method_signatures = None
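
Example #10 reads a per-method option with `getattr(method_obj, "__ray_num_returns__", None)`, i.e. a tag that a decorator may or may not have attached to the method. The sketch below reproduces that tag-then-collect pattern with an invented `num_returns` decorator and `__num_returns__` attribute, not Ray's actual names:

import inspect

def num_returns(n: int):
    """Invented decorator that tags a method with a return-count attribute."""
    def wrap(func):
        func.__num_returns__ = n
        return func
    return wrap

class Shard:
    @num_returns(2)
    def split(self, data):
        mid = len(data) // 2
        return data[:mid], data[mid:]

    def size(self, data):
        return len(data)

# Proxy-side bookkeeping: record the tag if present, otherwise None.
method_num_returns = {
    name: getattr(obj, "__num_returns__", None)
    for name, obj in inspect.getmembers(Shard, inspect.isfunction)
}
print(method_num_returns)  # {'size': None, 'split': 2}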
Example #11
    def _node_visitor(node: Any) -> Any:
        if isinstance(node, FunctionNode):
            bound_options = node._bound_options.copy()
            num_returns = bound_options.get("num_returns", 1)
            if num_returns is None:  # Ray may use `None` as the default value
                num_returns = 1
            if num_returns > 1:
                raise ValueError("Workflow steps can only have one return.")

            workflow_options = bound_options.pop("_metadata",
                                                 {}).get(WORKFLOW_OPTIONS, {})

            # If checkpoint option is not specified, inherit checkpoint
            # options from context (i.e. checkpoint options of the outer
            # step). If it is still not specified, it's True by default.
            checkpoint = workflow_options.get("checkpoint", None)
            if checkpoint is None:
                checkpoint = context.checkpoint if context is not None else True
            # When it returns a nested workflow, catch_exception
            # should be passed recursively.
            catch_exceptions = workflow_options.get("catch_exceptions", None)
            if catch_exceptions is None:
                # TODO(suquark): should we also handle exceptions from a "leaf node"
                #   in the continuation? For example, we have a workflow
                #   > @ray.remote
                #   > def A(): pass
                #   > @ray.remote
                #   > def B(x): return x
                #   > @ray.remote
                #   > def C(x): return workflow.continuation(B.bind(A.bind()))
                #   > dag = C.options(**workflow.options(catch_exceptions=True)).bind()
                #   Should C catch exceptions from A?
                if node.get_stable_uuid() == dag_node.get_stable_uuid():
                    # 'catch_exception' context should be passed down to
                    # its direct continuation task.
                    # In this case, the direct continuation is the output node.
                    catch_exceptions = (context.catch_exceptions
                                        if context is not None else False)
                else:
                    catch_exceptions = False

            max_retries = bound_options.get("max_retries", 3)
            if not isinstance(max_retries, int) or max_retries < -1:
                raise ValueError(
                    "'max_retries' only accepts 0, -1 or a positive integer.")

            step_options = WorkflowStepRuntimeOptions(
                step_type=StepType.FUNCTION,
                catch_exceptions=catch_exceptions,
                max_retries=max_retries,
                allow_inplace=False,
                checkpoint=checkpoint,
                ray_options=bound_options,
            )

            workflow_refs: List[WorkflowRef] = []
            with serialization_context.workflow_args_serialization_context(
                    workflow_refs):
                _func_signature = signature.extract_signature(node._body)
                flattened_args = signature.flatten_args(
                    _func_signature, node._bound_args, node._bound_kwargs)
                # NOTE: When calling 'ray.put', we trigger python object
                # serialization. Under our serialization context,
                # Workflows are separated from the arguments,
                # leaving a placeholder object with all other python objects.
                # Then we put the placeholder object to object store,
                # so it won't be mutated later. This guarantees correct
                # semantics. See "tests/test_variable_mutable.py" as
                # an example.
                input_placeholder: ray.ObjectRef = ray.put(flattened_args)

            name = workflow_options.get("name")
            if name is None:
                name = f"{get_module(node._body)}.{slugify(get_qualname(node._body))}"
            task_id = ray.get(mgr.gen_step_id.remote(workflow_id, name))
            state.add_dependencies(task_id, [s.task_id for s in workflow_refs])
            state.task_input_args[task_id] = input_placeholder

            user_metadata = workflow_options.pop("metadata", {})
            validate_user_metadata(user_metadata)
            state.tasks[task_id] = Task(
                name=name,
                options=step_options,
                user_metadata=user_metadata,
                func_body=node._body,
            )
            return WorkflowRef(task_id)

        if isinstance(node, InputAttributeNode):
            return node._execute_impl()  # get data from input node
        if isinstance(node, InputNode):
            return input_context  # replace input node with input data
        if not isinstance(node, DAGNode):
            return node  # return normal objects
        raise TypeError(f"Unsupported DAG node: {node}")
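
Example #11 resolves `checkpoint` (and `catch_exceptions`) by falling back from the explicit per-step option to the enclosing context and finally to a default. The same three-level resolution, reduced to a standalone function with a hypothetical `Context` dataclass:

from dataclasses import dataclass
from typing import Optional

@dataclass
class Context:
    checkpoint: bool
    catch_exceptions: bool

def resolve_checkpoint(explicit: Optional[bool], context: Optional[Context]) -> bool:
    """Unset -> inherit from the outer step's context; still unset -> default True."""
    if explicit is not None:
        return explicit
    if context is not None:
        return context.checkpoint
    return True

print(resolve_checkpoint(None, None))                   # True  (default)
print(resolve_checkpoint(None, Context(False, True)))   # False (inherited)
print(resolve_checkpoint(True, Context(False, True)))   # True  (explicit wins)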
Example #12
    def __init__(self, original_class: type):
        actor_methods = inspect.getmembers(original_class,
                                           is_function_or_method)

        self.cls = original_class
        self.module = original_class.__module__
        self.name = original_class.__name__
        self.qualname = original_class.__qualname__
        self.methods = dict(actor_methods)

        # Extract the signatures of each of the methods. This will be used
        # to catch some errors if the methods are called with inappropriate
        # arguments.
        self.signatures = {}
        for method_name, method in actor_methods:
            # Whether or not this method requires binding of its first
            # argument. For class and static methods, we do not want to bind
            # the first argument, but we do for instance methods.
            method = inspect.unwrap(method)
            is_bound = (is_class_method(method)
                        or is_static_method(original_class, method_name))

            # Print a warning message if the method signature is not
            # supported. We don't raise an exception because if the actor
            # inherits from a class that has a method whose signature we
            # don't support, there may not be much the user can do about it.
            self.signatures[method_name] = signature.extract_signature(
                method, ignore_first=not is_bound)

        for method_name, method in actor_methods:

            def step(method_name, method, *args, **kwargs):
                readonly = getattr(method, "__virtual_actor_readonly__", False)
                flattened_args = self.flatten_args(method_name, args, kwargs)
                actor_id = workflow_context.get_current_workflow_id()
                if not readonly:
                    if method_name == "__init__":
                        state_ref = None
                    else:
                        ws = WorkflowStorage(actor_id, get_global_storage())
                        state_ref = WorkflowRef(ws.get_entrypoint_step_id())
                    # This is a hack to insert a positional argument.
                    flattened_args = [signature.DUMMY_TYPE, state_ref
                                      ] + flattened_args
                workflow_inputs = serialization_context.make_workflow_inputs(
                    flattened_args)

                if readonly:
                    _actor_method = _wrap_readonly_actor_method(
                        actor_id, self.cls, method_name)
                    step_type = StepType.READONLY_ACTOR_METHOD
                else:
                    _actor_method = _wrap_actor_method(self.cls, method_name)
                    step_type = StepType.ACTOR_METHOD
                # TODO(suquark): Support actor options.
                workflow_data = WorkflowData(
                    func_body=_actor_method,
                    step_type=step_type,
                    inputs=workflow_inputs,
                    max_retries=1,
                    catch_exceptions=False,
                    ray_options={},
                    name=None,
                    user_metadata=None,
                )
                wf = Workflow(workflow_data)
                return wf

            method.step = functools.partial(step, method_name, method)
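
Example #12 binds `method_name` and `method` through `functools.partial` before attaching the builder to each method; a plain closure defined in the loop would capture only the last method. A compact illustration of that binding and of how the attached helper then behaves, with `Calculator` and `describe_call` as invented stand-ins:

import functools
import inspect

class Calculator:
    def add(self, a, b):
        return a + b

    def mul(self, a, b):
        return a * b

def describe_call(method_name, method, *args, **kwargs):
    """Stand-in for the `step` builder: just record what would be scheduled."""
    return {"method": method_name, "args": args, "kwargs": kwargs}

# functools.partial freezes the current loop values; a closure over the loop
# variables would see only the final method_name/method after the loop ends.
for method_name, method in inspect.getmembers(Calculator, inspect.isfunction):
    method.describe = functools.partial(describe_call, method_name, method)

print(Calculator.add.describe(1, 2))    # {'method': 'add', 'args': (1, 2), 'kwargs': {}}
print(Calculator.mul.describe(3, b=4))  # {'method': 'mul', 'args': (3,), 'kwargs': {'b': 4}}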