Code Example #1
def extract_deployments_from_serve_dag(
    serve_dag_root: DAGNode,
) -> List[Deployment]:
    """Extract deployment python objects from a transformed serve DAG. Should
    only be called after `transform_ray_dag_to_serve_dag`, otherwise nothing
    to return.

    Args:
        serve_dag_root (DAGNode): Transformed serve dag root node.
    Returns:
        deployments (List[Deployment]): List of deployment python objects
            fetched from serve dag.
    """
    deployments = OrderedDict()

    def extractor(dag_node):
        if isinstance(dag_node, (DeploymentNode, DeploymentFunctionNode)):
            deployment = dag_node._deployment
            # In case same deployment is used in multiple DAGNodes
            deployments[deployment.name] = deployment
        return dag_node

    serve_dag_root.apply_recursive(extractor)

    return list(deployments.values())
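
A hedged usage sketch of how this extraction step fits together with the
transform shown in Code Example #5 below; `ray_dag_root_node` and the
per-deployment `.deploy()` call are assumptions from this era of the Serve
API, not part of the snippet above:

serve_root_dag = ray_dag_root_node.apply_recursive(transform_ray_dag_to_serve_dag)
deployments = extract_deployments_from_serve_dag(serve_root_dag)
for deployment in deployments:
    deployment.deploy()  # assumed Deployment.deploy() API of this era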
Code Example #2
def get_pipeline_input_node(serve_dag_root_node: DAGNode):
    """Return the InputNode singleton node from serve dag, and throw
    exceptions if we didn't find any, or found more than one.

    Args:
        serve_dag_root_node: DAGNode acting as root of a Ray authored DAG. It
            should be executable via `serve_dag_root_node.execute(user_input)`
            and should have `InputNode` in it.
    Returns:
        pipeline_input_node: Singleton input node for the serve pipeline.
    """

    input_nodes = []

    def extractor(dag_node):
        if isinstance(dag_node, InputNode):
            input_nodes.append(dag_node)

    serve_dag_root_node.apply_recursive(extractor)
    assert len(input_nodes) == 1, (
        "There should be one and only one InputNode in the DAG. "
        f"Found {len(input_nodes)} InputNode(s) instead."
    )

    return input_nodes[0]
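
The collect-with-a-closure pattern above is easy to demonstrate in isolation.
A self-contained toy sketch (the `ToyNode` classes are invented for
illustration and are not Ray's implementation):

class ToyNode:
    def __init__(self, children=()):
        self.children = list(children)

    def apply_recursive(self, fn):
        # Visit children first, then the node itself, like DAGNode traversal.
        for child in self.children:
            child.apply_recursive(fn)
        return fn(self)

class ToyInputNode(ToyNode):
    pass

def find_single_input(root):
    found = []

    def extractor(node):
        if isinstance(node, ToyInputNode):
            found.append(node)
        return node

    root.apply_recursive(extractor)
    assert len(found) == 1, f"Expected exactly one input node, found {len(found)}."
    return found[0]

print(find_single_input(ToyNode([ToyInputNode(), ToyNode()])))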
Code Example #3
def get_pipeline_input_node(serve_dag_root_node: DAGNode):
    """Return the PipelineInputNode singleton node from serve dag, and throw
    exceptions if we didn't find any, or found more than one.

    Args:
        serve_dag_root_node: DAGNode acting as root of a Ray authored DAG. It
            should be executable via `serve_dag_root_node.execute(user_input)`
            and should have `PipelineInputNode` in it.
    Returns:
        pipeline_input_node: Singleton input node for the serve pipeline.
    """

    input_nodes = []

    def extractor(dag_node):
        if isinstance(dag_node, PipelineInputNode):
            input_nodes.append(dag_node)
        elif isinstance(dag_node, InputNode):
            raise ValueError(
                "Please change Ray DAG InputNode to PipelineInputNode in order "
                "to build serve application. See docstring of "
                "PipelineInputNode for examples."
            )

    serve_dag_root_node.apply_recursive(extractor)
    assert len(input_nodes) == 1, (
        "There should be one and only one PipelineInputNode in the DAG. "
        f"Found {len(input_nodes)} PipelineInputNode(s) instead."
    )

    return input_nodes[0]
Code Example #4
def transform_ray_dag_to_workflow(dag_node: DAGNode,
                                  input_context: DAGInputData):
    """
    Transform a Ray DAG to a workflow, mapping each FunctionNode to a workflow
    step via the workflow decorator.

    Args:
        dag_node: The DAG to be converted to a workflow.
        input_context: The input data that wraps variables for the input node
            of the DAG.
    """
    if not isinstance(dag_node, FunctionNode):
        raise TypeError(
            "Currently workflow does not support classes as DAG inputs.")

    def _node_visitor(node: Any) -> Any:
        if isinstance(node, FunctionNode):
            # "_resolve_like_object_ref_in_args" indicates we should resolve the
            # workflow like an ObjectRef, when included in the arguments of
            # another workflow.
            workflow_step = workflow.step(
                node._body).options(**node._bound_options,
                                    _resolve_like_object_ref_in_args=True)
            wf = workflow_step.step(*node._bound_args, **node._bound_kwargs)
            return wf
        if isinstance(node, InputAttributeNode):
            return node._execute_impl()  # get data from input node
        if isinstance(node, InputNode):
            return input_context  # replace input node with input data
        if not isinstance(node, DAGNode):
            return node  # return normal objects
        raise TypeError(f"Unsupported DAG node: {node}")

    return dag_node.apply_recursive(_node_visitor)
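
The children-first order of `apply_recursive` is what makes `_node_visitor`
work: by the time a node is visited, its bound arguments have already been
replaced. A self-contained toy sketch of the same bottom-up rewriting
(invented classes, not Ray code):

class Const:
    def __init__(self, value):
        self.value = value

    def apply_recursive(self, fn):
        return fn(self)

class Add:
    def __init__(self, left, right):
        self.left, self.right = left, right

    def apply_recursive(self, fn):
        # Children first: operands are rewritten before the parent is visited.
        self.left = self.left.apply_recursive(fn)
        self.right = self.right.apply_recursive(fn)
        return fn(self)

def evaluate(node):
    if isinstance(node, Const):
        return node.value
    if isinstance(node, Add):
        return node.left + node.right  # operands are already plain ints here
    return node  # non-node values pass through, as in _node_visitor

tree = Add(Const(1), Add(Const(2), Const(3)))
print(tree.apply_recursive(evaluate))  # 6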
Code Example #5
File: api.py Project: afzalmushtaque/ray
def build(ray_dag_root_node: DAGNode):
    """Do all the DAG transformation, extraction and generation needed to
    produce a runnable and deployable serve pipeline application from a valid
    DAG authored with Ray DAG API.

    This should be the only user-facing API that the user interacts with.

    Assumptions:
        The following enforcements are only applied when generating and
        applying the pipeline artifact; they are not blockers for local
        development and testing.

        - ALL args and kwargs used in DAG building should be JSON serializable.
            This ensures your pipeline application can run on a remote cluster,
            potentially with a different runtime environment. Among the
            options:

                1) Binding in-memory objects
                2) Relying on pickling
                3) Enforcing JSON serializability on all args used

            both 1) and 2) rely on unstable in-memory objects or cross-version
            pickling / closure capture, whereas JSON serialization provides the
            right contract needed for proper deployment.

        - ALL classes and methods used should be visible on top of the file and
            importable via a fully qualified name. Thus no inline class or
            function definitions should be used.

    Args:
        ray_dag_root_node: DAGNode acting as root of a Ray authored DAG. It
            should be executable via `ray_dag_root_node.execute(user_input)`
            and should have `PipelineInputNode` in it.

    Returns:
        app: The Ray Serve application object that wraps all deployments needed
            along with ingress deployment for an e2e runnable serve pipeline,
            accessible via python .remote() call and HTTP.

    Examples:
        >>> with ServeInputNode(preprocessor=request_to_data_int) as dag_input:
        ...    m1 = Model.bind(1)
        ...    m2 = Model.bind(2)
        ...    m1_output = m1.forward.bind(dag_input[0])
        ...    m2_output = m2.forward.bind(dag_input[1])
        ...    ray_dag = ensemble.bind(m1_output, m2_output)

        Assuming we have non-JSON serializable or inline defined class or
        function in local pipeline development.

        >>> app = serve.pipeline.build(ray_dag) # This works
        >>> handle = app.deploy()
        >>> # This also works, we're simply executing the transformed serve_dag.
        >>> ray.get(handle.remote(data))
        >>> # This will fail where enforcements are applied.
        >>> deployment_yaml = app.to_yaml()
    """
    serve_root_dag = ray_dag_root_node.apply_recursive(transform_ray_dag_to_serve_dag)
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    pipeline_input_node = get_pipeline_input_node(serve_root_dag)
    ingress_deployment = get_ingress_deployment(serve_root_dag, pipeline_input_node)
    deployments.insert(0, ingress_deployment)

    # TODO (jiaodong): Call into Application once Shreyas' PR is merged
    # TODO (jiaodong): Apply enforcements at serve app to_yaml level
    return deployments
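
Note the positional convention: this revision inserts the ingress deployment
at index 0, whereas the later revision in Code Example #7 keeps the driver
deployment last. A hedged sketch of consuming the return value (`ray_dag` as
in the docstring example; `.name` is a public Deployment property):

deployments = build(ray_dag)
ingress = deployments[0]  # inserted at the front by the code above
print("ingress:", ingress.name)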
Code Example #6
def ray_dag_to_serve_dag(dag: DAGNode):
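    """Transform a Ray DAG into a Serve DAG, assigning each deployment a
    unique name via DAGNodeNameGenerator."""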
    with DAGNodeNameGenerator() as deployment_name_generator:
        serve_dag = dag.apply_recursive(
            lambda node: transform_ray_dag_to_serve_dag(
                node, deployment_name_generator))
    return serve_dag
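
A hedged usage sketch (assuming a `Model` deployment class bound as in the
docstring examples elsewhere on this page):

with InputNode() as dag_input:
    model = Model.bind(1)
    ray_dag = model.forward.bind(dag_input)

serve_dag = ray_dag_to_serve_dag(ray_dag)
deployments = extract_deployments_from_serve_dag(serve_dag)  # Code Example #1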
Code Example #7
def build(ray_dag_root_node: DAGNode) -> List[Deployment]:
    """Do all the DAG transformation, extraction and generation needed to
    produce a runnable and deployable serve pipeline application from a valid
    DAG authored with Ray DAG API.

    This should be the only user-facing API that the user interacts with.

    Assumptions:
        The following enforcements are only applied when generating and
        applying the pipeline artifact; they are not blockers for local
        development and testing.

        - ALL args and kwargs used in DAG building should be JSON serializable.
            This ensures your pipeline application can run on a remote cluster,
            potentially with a different runtime environment. Among the
            options:

                1) Binding in-memory objects
                2) Relying on pickling
                3) Enforcing JSON serializability on all args used

            both 1) and 2) rely on unstable in-memory objects or cross-version
            pickling / closure capture, whereas JSON serialization provides the
            right contract needed for proper deployment.

        - ALL classes and methods used should be visible on top of the file and
            importable via a fully qualified name. Thus no inline class or
            function definitions should be used.

    Args:
        ray_dag_root_node: DAGNode acting as root of a Ray authored DAG. It
            should be executable via `ray_dag_root_node.execute(user_input)`
            and should have `InputNode` in it.

    Returns:
        deployments: All deployments needed for an e2e runnable serve pipeline,
            accessible via python .remote() call.

    Examples:
        >>> with InputNode() as dag_input:
        ...    m1 = Model.bind(1)
        ...    m2 = Model.bind(2)
        ...    m1_output = m1.forward.bind(dag_input[0])
        ...    m2_output = m2.forward.bind(dag_input[1])
        ...    ray_dag = ensemble.bind(m1_output, m2_output)

        Assuming we have non-JSON serializable or inline defined class or
        function in local pipeline development.

        >>> from ray.serve.api import build as build_app
        >>> deployments = build_app(ray_dag) # it can be method node
        >>> deployments = build_app(m1) # or just a regular node.
    """
    with DAGNodeNameGenerator() as node_name_generator:
        serve_root_dag = ray_dag_root_node.apply_recursive(
            lambda node: transform_ray_dag_to_serve_dag(node, node_name_generator)
        )
    deployments = extract_deployments_from_serve_dag(serve_root_dag)

    # After the Ray DAG is transformed to a Serve DAG with deployments and
    # their init args filled in, generate a minimal-weight executor serve DAG
    # for performance.
    serve_executor_root_dag = serve_root_dag.apply_recursive(
        transform_serve_dag_to_serve_executor_dag
    )
    root_driver_deployment = deployments[-1]
    new_driver_deployment = generate_executor_dag_driver_deployment(
        serve_executor_root_dag, root_driver_deployment
    )
    # Replace DAGDriver deployment with executor DAGDriver deployment
    deployments[-1] = new_driver_deployment
    # Validate and only expose HTTP for the endpoint
    deployments_with_http = process_ingress_deployment_in_serve_dag(deployments)

    return deployments_with_http
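
A hedged inspection sketch (`ray_dag` as in the docstring example; the
positional convention is taken from the body above, which replaces the last
entry with the executor DAGDriver deployment):

deployments = build(ray_dag)
driver = deployments[-1]  # executor DAGDriver, swapped in above
print("ingress driver:", driver.name)
print("all deployments:", [d.name for d in deployments])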