Example No. 1
@pytest.fixture
def serve_instance(_shared_serve_instance):
    yield _shared_serve_instance
    # Clear all state between tests to avoid naming collisions.
    _shared_serve_instance.delete_deployments(serve.list_deployments().keys())
    # Clear the ServeHandle cache between tests to avoid them piling up.
    _shared_serve_instance.handle_cache.clear()
    # Clear deployment name generation shared state between tests.
    DeploymentNameGenerator.reset()
Example No. 2
def _test_deployment_json_serde_helper(
    ray_dag: DAGNode, input=None, expected_num_deployments=None
):
    """Helper function for DeploymentNode and DeploymentMethodNode calls, checks
    the following:
        1) Transform ray dag to serve dag, and ensure serve dag is JSON
            serializable.
        2) Serve dag JSON and be deserialized back to serve dag.
        3) Deserialized serve dag can extract correct number and definition of
            serve deployments.
    """
    with DeploymentNameGenerator() as deployment_name_generator:
        serve_root_dag = ray_dag.apply_recursive(
            lambda node: transform_ray_dag_to_serve_dag(node, deployment_name_generator)
        )
    json_serialized = json.dumps(serve_root_dag, cls=DAGNodeEncoder)
    deserialized_serve_root_dag_node = json.loads(
        json_serialized, object_hook=dagnode_from_json
    )
    deserialized_deployments = extract_deployments_from_serve_dag(
        deserialized_serve_root_dag_node
    )
    assert len(deserialized_deployments) == expected_num_deployments
    # Deploy the deserialized version to ensure JSON serde correctness.
    for model in deserialized_deployments:
        model.deploy()
    if input is None:
        assert ray.get(ray_dag.execute()) == ray.get(serve_root_dag.execute())
    else:
        assert ray.get(ray_dag.execute(input)) == ray.get(serve_root_dag.execute(input))
    return serve_root_dag, deserialized_serve_root_dag_node
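
A minimal sketch (not from the source) of how this helper might be invoked from a test; the Model class, the InputNode pattern, and the argument values are assumptions borrowed from the later examples in this listing.

# Hypothetical invocation of the helper above; assumes a single Model deployment.
with InputNode() as dag_input:
    model = Model.bind(2, ratio=0.3)
    ray_dag = model.forward.bind(dag_input)
serve_root_dag, deserialized_dag = _test_deployment_json_serde_helper(
    ray_dag, input=1, expected_num_deployments=1
)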
Example No. 3
def test_multi_instantiation_class_nested_deployment_arg(serve_instance):
    """
    Test we can pass deployments with **nested** init_arg or init_kwarg,
    instantiated multiple times for the same class, and we can still correctly
    replace args with deployment handle and parse correct deployment instances.
    """
    ray_dag, _ = get_multi_instantiation_class_nested_deployment_arg_dag()

    with DeploymentNameGenerator() as deployment_name_generator:
        serve_root_dag = ray_dag.apply_recursive(
            lambda node: transform_ray_dag_to_serve_dag(
                node, deployment_name_generator))
    print(f"Serve DAG: \n{serve_root_dag}")
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 3
    # Ensure deployments with other deployment nodes in their init args are
    # replaced with the correct handles.
    combine_deployment = deployments[2]
    init_arg_handle = combine_deployment.init_args[0]
    assert isinstance(init_arg_handle, RayServeLazySyncHandle)
    assert init_arg_handle.deployment_name == "Model"
    init_kwarg_handle = combine_deployment.init_kwargs["m2"][NESTED_HANDLE_KEY]
    assert isinstance(init_kwarg_handle, RayServeLazySyncHandle)
    assert init_kwarg_handle.deployment_name == "Model_1"

    for deployment in deployments:
        deployment.deploy()

    _validate_consistent_python_output(deployments[2],
                                       ray_dag,
                                       "Combine",
                                       input=1,
                                       output=5)
Example No. 4
def test_func_class_with_class_method_dag(serve_instance):
    ray_dag, _ = get_func_class_with_class_method_dag()

    with DeploymentNameGenerator() as deployment_name_generator:
        serve_root_dag = ray_dag.apply_recursive(
            lambda node: transform_ray_dag_to_serve_dag(
                node, deployment_name_generator))
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 2
    for deployment in deployments:
        deployment.deploy()

    assert ray.get(ray_dag.execute(1, 2, 3)) == 8
    assert ray.get(serve_root_dag.execute(1, 2, 3)) == 8
Example No. 5
def test_simple_single_class(serve_instance):
    ray_dag, _ = get_simple_class_with_class_method_dag()

    with DeploymentNameGenerator() as deployment_name_generator:
        serve_root_dag = ray_dag.apply_recursive(
            lambda node: transform_ray_dag_to_serve_dag(
                node, deployment_name_generator))
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 1
    deployments[0].deploy()
    _validate_consistent_python_output(deployments[0],
                                       ray_dag,
                                       "Model",
                                       input=1,
                                       output=0.6)
Example No. 6
def test_get_pipeline_input_node():
    # 1) No InputNode found
    ray_dag = combine.bind(1, 2)
    with DeploymentNameGenerator() as deployment_name_generator:
        serve_dag = ray_dag.apply_recursive(
            lambda node: transform_ray_dag_to_serve_dag(
                node, deployment_name_generator))
    with pytest.raises(AssertionError,
                       match="There should be one and only one InputNode"):
        get_pipeline_input_node(serve_dag)

    # 2) More than one InputNode found
    with InputNode() as dag_input:
        a = combine.bind(dag_input[0], dag_input[1])
    with InputNode() as dag_input_2:
        b = combine.bind(dag_input_2[0], dag_input_2[1])
        ray_dag = combine.bind(a, b)
    with pytest.raises(AssertionError,
                       match="Each DAG should only have one unique InputNode"):
        with DeploymentNameGenerator() as deployment_name_generator:
            serve_dag = ray_dag.apply_recursive(
                lambda node: transform_ray_dag_to_serve_dag(
                    node, deployment_name_generator))
        get_pipeline_input_node(serve_dag)
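
For contrast, a minimal sketch (not part of the original test) of the success path, reusing the combine task bound above: with exactly one InputNode in the DAG, get_pipeline_input_node is expected to return that node instead of raising.

# Hypothetical success case: exactly one InputNode in the DAG.
with InputNode() as dag_input:
    ray_dag = combine.bind(dag_input[0], dag_input[1])
with DeploymentNameGenerator() as deployment_name_generator:
    serve_dag = ray_dag.apply_recursive(
        lambda node: transform_ray_dag_to_serve_dag(
            node, deployment_name_generator))
input_node = get_pipeline_input_node(serve_dag)  # Should not raise.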
Example No. 7
def test_single_class_with_invalid_deployment_options(serve_instance):
    with InputNode() as dag_input:
        model = Model.options(name="my_deployment").bind(2, ratio=0.3)
        ray_dag = model.forward.bind(dag_input)

    with DeploymentNameGenerator() as deployment_name_generator:
        serve_root_dag = ray_dag.apply_recursive(
            lambda node: transform_ray_dag_to_serve_dag(
                node, deployment_name_generator))
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 1
    with pytest.raises(
            ValueError,
            match="Specifying name in ray_actor_options is not allowed"):
        deployments[0].deploy()
Example No. 8
def test_build_simple_func_dag(serve_instance):
    ray_dag, _ = get_simple_func_dag()
    with DeploymentNameGenerator() as deployment_name_generator:
        serve_root_dag = ray_dag.apply_recursive(
            lambda node: transform_ray_dag_to_serve_dag(
                node, deployment_name_generator))

    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 1
    deployments[0].deploy()

    deployment_handle = deployments[0].get_handle()
    # The bound kwarg is stored in the DAG, so it has to be passed in explicitly.
    assert ray.get(deployment_handle.remote(1, 2, kwargs_output=1)) == 4
    assert ray.get(ray_dag.execute([1, 2])) == 4
Example No. 9
def test_single_class_with_valid_ray_options(serve_instance):
    with InputNode() as dag_input:
        model = Model.options(num_cpus=1, memory=1000).bind(2, ratio=0.3)
        ray_dag = model.forward.bind(dag_input)

    with DeploymentNameGenerator() as deployment_name_generator:
        serve_root_dag = ray_dag.apply_recursive(
            lambda node: transform_ray_dag_to_serve_dag(
                node, deployment_name_generator))
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 1
    deployments[0].deploy()
    _validate_consistent_python_output(deployments[0],
                                       ray_dag,
                                       deployments[0].name,
                                       input=1,
                                       output=0.6)

    deployment = serve.get_deployment(deployments[0].name)
    assert deployment.ray_actor_options.get("num_cpus") == 1
    assert deployment.ray_actor_options.get("memory") == 1000
    assert deployment.ray_actor_options.get("runtime_env") == {}
Example No. 10
def test_shared_deployment_handle(serve_instance):
    """
    Test we can re-use the same deployment handle multiple times or in
    multiple places, without incorrectly parsing duplicated deployments.
    """
    ray_dag, _ = get_shared_deployment_handle_dag()

    with DeploymentNameGenerator() as deployment_name_generator:
        serve_root_dag = ray_dag.apply_recursive(
            lambda node: transform_ray_dag_to_serve_dag(
                node, deployment_name_generator))
    print(f"Serve DAG: \n{serve_root_dag}")
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 2
    for deployment in deployments:
        deployment.deploy()

    _validate_consistent_python_output(deployments[1],
                                       ray_dag,
                                       "Combine",
                                       input=1,
                                       output=4)
Example No. 11
def test_multi_instantiation_class_deployment_in_init_args(serve_instance):
    """
    Test we can pass deployments as init_arg or init_kwarg, instantiated
    multiple times for the same class, and we can still correctly replace
    args with deployment handle and parse correct deployment instances.
    """
    ray_dag, _ = get_multi_instantiation_class_deployment_in_init_args_dag()

    with DeploymentNameGenerator() as deployment_name_generator:
        serve_root_dag = ray_dag.apply_recursive(
            lambda node: transform_ray_dag_to_serve_dag(
                node, deployment_name_generator))
    print(f"Serve DAG: \n{serve_root_dag}")
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 3
    for deployment in deployments:
        deployment.deploy()

    _validate_consistent_python_output(deployments[2],
                                       ray_dag,
                                       "Combine",
                                       input=1,
                                       output=5)
Example No. 12
def build(ray_dag_root_node: DAGNode) -> List[Deployment]:
    """Do all the DAG transformation, extraction and generation needed to
    produce a runnable and deployable serve pipeline application from a valid
    DAG authored with Ray DAG API.

    This should be the only user facing API that user interacts with.

    Assumptions:
        Following enforcements are only applied at generating and applying
        pipeline artifact, but not blockers for local development and testing.

        - ALL args and kwargs used in DAG building should be JSON serializable.
            This ensures that your pipeline application can run on a remote
            cluster, potentially with a different runtime environment. Among
            the options:

                1) Bind in-memory objects.
                2) Rely on pickling.
                3) Enforce JSON serializability on all args used.

            Options 1) and 2) rely on unstable in-memory objects or
            cross-version pickling / closure capture, whereas JSON
            serialization provides the right contract needed for proper
            deployment.

        - ALL classes and methods used should be visible at the top level of
            the file and importable via a fully qualified name, so no inline
            class or function definitions should be used.

    Args:
        ray_dag_root_node: DAGNode acting as root of a Ray authored DAG. It
            should be executable via `ray_dag_root_node.execute(user_input)`
            and should have `InputNode` in it.

    Returns:
        deployments: All deployments needed for an end-to-end runnable Serve
            pipeline, accessible via Python `.remote()` calls.

    Examples:
        >>> with InputNode() as dag_input:
        ...    m1 = Model.bind(1)
        ...    m2 = Model.bind(2)
        ...    m1_output = m1.forward.bind(dag_input[0])
        ...    m2_output = m2.forward.bind(dag_input[1])
        ...    ray_dag = ensemble.bind(m1_output, m2_output)

        This assumes we may have classes or functions that are not JSON
        serializable or are defined inline during local pipeline development.

        >>> from ray.serve.api import build as build_app
        >>> deployments = build_app(ray_dag) # it can be a method node
        >>> deployments = build_app(m1) # or just a regular node.
    """
    with DeploymentNameGenerator() as deployment_name_generator:
        serve_root_dag = ray_dag_root_node.apply_recursive(
            lambda node: transform_ray_dag_to_serve_dag(
                node, deployment_name_generator))
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    deployments_with_http = process_ingress_deployment_in_serve_dag(
        deployments)

    return deployments_with_http
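
To make the return value concrete, here is a minimal usage sketch, assuming the `ray_dag` constructed in the docstring example above and the deploy()/get_handle() pattern from the earlier test examples; it is illustrative, not part of the source.

# Hypothetical end-to-end usage of build().
deployments = build(ray_dag)
for deployment in deployments:
    deployment.deploy()
# Deployments are then callable through their handles, e.g.:
handle = deployments[-1].get_handle()
# ray.get(handle.remote(...))  # invoke with the arguments the deployment expects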