def _test_deployment_json_serde_helper(
    ray_dag: DAGNode, input=None, expected_num_deployments=None
):
    """Helper function for DeploymentNode and DeploymentMethodNode calls.

    Checks the following:
        1) The Ray DAG can be transformed to a serve DAG, and the serve DAG
           is JSON serializable.
        2) The serve DAG JSON can be deserialized back to a serve DAG.
        3) The deserialized serve DAG can extract the correct number and
           definition of serve deployments.
    """
    serve_root_dag = ray_dag.apply_recursive(transform_ray_dag_to_serve_dag)
    json_serialized = json.dumps(serve_root_dag, cls=DAGNodeEncoder)
    deserialized_serve_root_dag_node = json.loads(
        json_serialized, object_hook=dagnode_from_json
    )
    deserialized_deployments = extract_deployments_from_serve_dag(
        deserialized_serve_root_dag_node
    )
    assert len(deserialized_deployments) == expected_num_deployments
    # Deploy the deserialized version to ensure JSON serde correctness.
    for model in deserialized_deployments:
        model.deploy()
    if input is None:
        assert ray.get(ray_dag.execute()) == ray.get(serve_root_dag.execute())
    else:
        assert ray.get(ray_dag.execute(input)) == ray.get(
            serve_root_dag.execute(input)
        )
    return serve_root_dag, deserialized_serve_root_dag_node

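# A minimal usage sketch of the helper above, assuming the same `Model` class
# and `_bind` / `InputNode` API used by the other tests in this file. This is
# an illustrative example, not a test from the original suite.
def test_simple_deployment_json_serde_example(serve_instance):
    model = Model._bind(2, ratio=0.3)
    ray_dag = model.forward._bind(InputNode())
    # Round-trips the serve DAG through JSON, verifies exactly one deployment
    # is extracted from the deserialized DAG, and checks that the Ray DAG and
    # serve DAG produce the same output for input=1.
    _test_deployment_json_serde_helper(
        ray_dag, input=1, expected_num_deployments=1
    )
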
def test_multi_instantiation_class_deployment_in_init_args(serve_instance):
    """
    Test we can pass deployments as init_arg or init_kwarg, instantiated
    multiple times for the same class, and we can still correctly replace
    args with deployment handles and parse the correct deployment instances.
    """
    ray_dag, dag_input = get_multi_instantiation_class_deployment_in_init_args_dag()
    serve_root_dag = ray_dag.apply_recursive(transform_ray_dag_to_serve_dag)
    print(f"Serve DAG: \n{serve_root_dag}")
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 3
    for deployment in deployments:
        deployment.deploy()
    ingress_deployment = get_ingress_deployment(serve_root_dag, dag_input)
    ingress_deployment.deploy()
    _validate_consistent_python_output(
        deployments[2], ray_dag, "Combine", input=1, output=5
    )
    for _ in range(5):
        resp = requests.get("http://127.0.0.1:8000/ingress", data="1")
        assert resp.text == "5"

def test_shared_deployment_handle(serve_instance):
    """
    Test we can re-use the same deployment handle multiple times or in
    multiple places, without incorrectly parsing duplicated deployments.
    """
    ray_dag, dag_input = get_shared_deployment_handle_dag()
    serve_root_dag = ray_dag.apply_recursive(transform_ray_dag_to_serve_dag)
    print(f"Serve DAG: \n{serve_root_dag}")
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 2
    for deployment in deployments:
        deployment.deploy()
    ingress_deployment = get_ingress_deployment(serve_root_dag, dag_input)
    ingress_deployment.deploy()
    _validate_consistent_python_output(
        deployments[1], ray_dag, "Combine", input=1, output=4
    )
    for _ in range(5):
        resp = requests.get("http://127.0.0.1:8000/ingress", data="1")
        assert resp.text == "4"

def test_multi_instantiation_class_nested_deployment_arg(serve_instance):
    """
    Test we can pass deployments with **nested** init_arg or init_kwarg,
    instantiated multiple times for the same class, and we can still
    correctly replace args with deployment handles and parse the correct
    deployment instances.
    """
    m1 = Model._bind(2)
    m2 = Model._bind(3)
    combine = Combine._bind(m1, m2={NESTED_HANDLE_KEY: m2}, m2_nested=True)
    ray_dag = combine.__call__._bind(InputNode())
    print(f"Ray DAG: \n{ray_dag}")

    serve_root_dag = ray_dag._apply_recursive(transform_ray_dag_to_serve_dag)
    print(f"Serve DAG: \n{serve_root_dag}")
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 3
    # Ensure deployments with other deployment nodes in their init args are
    # replaced with the correct handle.
    combine_deployment = deployments[2]
    init_arg_handle = combine_deployment.init_args[0]
    assert isinstance(init_arg_handle, RayServeSyncHandle)
    assert init_arg_handle.deployment_name == "Model"
    init_kwarg_handle = combine_deployment.init_kwargs["m2"][NESTED_HANDLE_KEY]
    assert isinstance(init_kwarg_handle, RayServeSyncHandle)
    assert init_kwarg_handle.deployment_name == "Model_1"

    for deployment in deployments:
        deployment.deploy()
    _validate_consistent_output(
        deployments[2], ray_dag, "Combine", input=1, output=5
    )

def test_multi_instantiation_class_nested_deployment_arg(serve_instance):
    """
    Test we can pass deployments with **nested** init_arg or init_kwarg,
    instantiated multiple times for the same class, and we can still
    correctly replace args with deployment handles and parse the correct
    deployment instances.
    """
    ray_dag, _ = get_multi_instantiation_class_nested_deployment_arg_dag()

    with DAGNodeNameGenerator() as node_name_generator:
        serve_root_dag = ray_dag.apply_recursive(
            lambda node: transform_ray_dag_to_serve_dag(node, node_name_generator)
        )
    print(f"Serve DAG: \n{serve_root_dag}")
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 3
    # Ensure deployments with other deployment nodes in their init args are
    # replaced with the correct handle.
    combine_deployment = deployments[2]
    init_arg_handle = combine_deployment.init_args[0]
    assert isinstance(init_arg_handle, RayServeLazySyncHandle)
    assert init_arg_handle.deployment_name == "Model"
    init_kwarg_handle = combine_deployment.init_kwargs["m2"][NESTED_HANDLE_KEY]
    assert isinstance(init_kwarg_handle, RayServeLazySyncHandle)
    assert init_kwarg_handle.deployment_name == "Model_1"

    for deployment in deployments:
        deployment.deploy()
    _validate_consistent_python_output(
        deployments[2], ray_dag, "Combine", input=1, output=5
    )

def build(ray_dag_root_node: DAGNode) -> List[Deployment]:
    """Do all the DAG transformation, extraction and generation needed to
    produce a runnable and deployable serve pipeline application from a
    valid DAG authored with the Ray DAG API.

    This should be the only user-facing API that the user interacts with.

    Assumptions:
        The following enforcements are only applied when generating and
        applying the pipeline artifact; they are not blockers for local
        development and testing.

        - ALL args and kwargs used in DAG building should be JSON
          serializable. To ensure your pipeline application can run on a
          remote cluster, potentially with a different runtime environment,
          among all options listed:
              1) binding in-memory objects
              2) relying on pickling
              3) enforcing JSON serializability on all args used
          we believe both 1) and 2) rely on unstable in-memory objects or
          cross-version pickling / closure capture, whereas JSON
          serialization provides the right contract needed for proper
          deployment.

        - ALL classes and methods used should be visible at the top of the
          file and importable via a fully qualified name. Thus no inline
          class or function definitions should be used.

    Args:
        ray_dag_root_node: DAGNode acting as root of a Ray authored DAG. It
            should be executable via `ray_dag_root_node.execute(user_input)`
            and should have an `InputNode` in it.

    Returns:
        deployments: All deployments needed for an e2e runnable serve
            pipeline, accessible via python .remote() call.

    Examples:
        >>> with InputNode() as dag_input:
        ...     m1 = Model.bind(1)
        ...     m2 = Model.bind(2)
        ...     m1_output = m1.forward.bind(dag_input[0])
        ...     m2_output = m2.forward.bind(dag_input[1])
        ...     ray_dag = ensemble.bind(m1_output, m2_output)

        Assuming we have non-JSON-serializable or inline-defined classes or
        functions in local pipeline development:

        >>> from ray.serve.api import build as build_app
        >>> deployments = build_app(ray_dag)  # it can be a method node
        >>> deployments = build_app(m1)       # or just a regular node
    """
    serve_root_dag = ray_dag_root_node.apply_recursive(
        transform_ray_dag_to_serve_dag
    )
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    deployments_with_http = process_ingress_deployment_in_serve_dag(deployments)
    return deployments_with_http

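# A minimal end-to-end sketch of calling `build`, reusing the `Model` class
# and the DAG-building pattern from the tests in this file; the assumption
# that deploying every returned deployment is sufficient to serve the DAG is
# illustrative, not a guarantee of the `build` contract.
def example_build_and_deploy():
    with InputNode() as dag_input:
        model = Model.bind(2, ratio=0.3)
        ray_dag = model.forward.bind(dag_input)
    # `build` returns every deployment needed for the pipeline, including any
    # HTTP ingress produced by process_ingress_deployment_in_serve_dag.
    for deployment in build(ray_dag):
        deployment.deploy()
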
def test_build_simple_func_dag(serve_instance):
    ray_dag, _ = get_simple_func_dag()
    serve_root_dag = ray_dag.apply_recursive(transform_ray_dag_to_serve_dag)
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 1

    deployments[0].deploy()
    deployment_handle = deployments[0].get_handle()
    # Because the bound kwarg is stored in the DAG, it has to be explicitly
    # passed in.
    assert ray.get(deployment_handle.remote(1, 2, kwargs_output=1)) == 4
    assert ray.get(ray_dag.execute([1, 2])) == 4

def test_func_class_with_class_method_dag(serve_instance):
    ray_dag, _ = get_func_class_with_class_method_dag()
    serve_root_dag = ray_dag.apply_recursive(transform_ray_dag_to_serve_dag)
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 2
    for deployment in deployments:
        deployment.deploy()

    assert ray.get(ray_dag.execute(1, 2, 3)) == 8
    assert ray.get(serve_root_dag.execute(1, 2, 3)) == 8

def test_single_class_with_invalid_deployment_options(serve_instance):
    model = Model.options(name="my_deployment")._bind(2, ratio=0.3)
    ray_dag = model.forward._bind(InputNode())

    serve_root_dag = ray_dag._apply_recursive(transform_ray_dag_to_serve_dag)
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 1
    with pytest.raises(
        ValueError, match="Specifying name in ray_actor_options is not allowed"
    ):
        deployments[0].deploy()

def test_simple_single_class(serve_instance):
    ray_dag, _ = get_simple_class_with_class_method_dag()
    serve_root_dag = ray_dag.apply_recursive(transform_ray_dag_to_serve_dag)
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 1
    deployments[0].deploy()
    _validate_consistent_python_output(
        deployments[0], ray_dag, "Model", input=1, output=0.6
    )

def test_func_class_with_class_method_dag(serve_instance):
    ray_dag, _ = get_func_class_with_class_method_dag()

    with DAGNodeNameGenerator() as node_name_generator:
        serve_root_dag = ray_dag.apply_recursive(
            lambda node: transform_ray_dag_to_serve_dag(node, node_name_generator)
        )
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 2
    for deployment in deployments:
        deployment.deploy()

    assert ray.get(ray_dag.execute(1, 2, 3)) == 8
    assert ray.get(serve_root_dag.execute(1, 2, 3)) == 8

def test_simple_single_class(serve_instance):
    # Assert converting both arg and kwarg
    model = Model._bind(2, ratio=0.3)
    ray_dag = model.forward._bind(InputNode())

    serve_root_dag = ray_dag._apply_recursive(transform_ray_dag_to_serve_dag)
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 1
    deployments[0].deploy()
    _validate_consistent_output(
        deployments[0], ray_dag, "Model", input=1, output=0.6
    )

def test_simple_single_class(serve_instance):
    ray_dag, _ = get_simple_class_with_class_method_dag()

    with DAGNodeNameGenerator() as node_name_generator:
        serve_root_dag = ray_dag.apply_recursive(
            lambda node: transform_ray_dag_to_serve_dag(node, node_name_generator)
        )
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 1
    deployments[0].deploy()
    _validate_consistent_python_output(
        deployments[0], ray_dag, "Model", input=1, output=0.6
    )

def test_single_class_with_invalid_deployment_options(serve_instance):
    with InputNode() as dag_input:
        model = Model.options(name="my_deployment").bind(2, ratio=0.3)
        ray_dag = model.forward.bind(dag_input)

    with DAGNodeNameGenerator() as node_name_generator:
        serve_root_dag = ray_dag.apply_recursive(
            lambda node: transform_ray_dag_to_serve_dag(node, node_name_generator)
        )
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 1
    with pytest.raises(
        ValueError, match="Specifying 'name' in ray_actor_options is not allowed"
    ):
        deployments[0].deploy()

def test_func_class_with_class_method_dag(serve_instance):
    ray_dag, dag_input = get_func_class_with_class_method_dag()
    serve_root_dag = ray_dag.apply_recursive(transform_ray_dag_to_serve_dag)
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    ingress_deployment = get_ingress_deployment(serve_root_dag, dag_input)
    assert len(deployments) == 2
    for deployment in deployments:
        deployment.deploy()
    ingress_deployment.deploy()

    assert ray.get(ray_dag.execute(1, 2, 3)) == 8
    assert ray.get(serve_root_dag.execute(1, 2, 3)) == 8
    for _ in range(5):
        resp = requests.get(
            "http://127.0.0.1:8000/ingress", data=json.dumps([1, 2, 3])
        )
        assert resp.text == "8"

def test_single_class_with_valid_ray_options(serve_instance):
    model = Model.options(num_cpus=1, memory=1000)._bind(2, ratio=0.3)
    ray_dag = model.forward._bind(InputNode())

    serve_root_dag = ray_dag._apply_recursive(transform_ray_dag_to_serve_dag)
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 1
    deployments[0].deploy()
    _validate_consistent_output(
        deployments[0], ray_dag, deployments[0].name, input=1, output=0.6
    )

    deployment = serve.get_deployment(deployments[0].name)
    assert deployment.ray_actor_options.get("num_cpus") == 1
    assert deployment.ray_actor_options.get("memory") == 1000
    assert deployment.ray_actor_options.get("runtime_env") == {}

def test_simple_single_class(serve_instance):
    ray_dag, dag_input = get_simple_class_with_class_method_dag()
    serve_root_dag = ray_dag.apply_recursive(transform_ray_dag_to_serve_dag)
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    ingress_deployment = get_ingress_deployment(serve_root_dag, dag_input)
    assert len(deployments) == 1
    deployments[0].deploy()
    ingress_deployment.deploy()

    _validate_consistent_python_output(
        deployments[0], ray_dag, "Model", input=1, output=0.6
    )
    for _ in range(5):
        resp = requests.get("http://127.0.0.1:8000/ingress", data="1")
        assert resp.text == "0.6"

def test_shared_deployment_handle(serve_instance):
    """
    Test we can re-use the same deployment handle multiple times or in
    multiple places, without incorrectly parsing duplicated deployments.
    """
    ray_dag, _ = get_shared_deployment_handle_dag()
    serve_root_dag = ray_dag.apply_recursive(transform_ray_dag_to_serve_dag)
    print(f"Serve DAG: \n{serve_root_dag}")
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 2
    for deployment in deployments:
        deployment.deploy()
    _validate_consistent_python_output(
        deployments[1], ray_dag, "Combine", input=1, output=4
    )

def test_multi_instantiation_class_deployment_in_init_args(serve_instance):
    """
    Test we can pass deployments as init_arg or init_kwarg, instantiated
    multiple times for the same class, and we can still correctly replace
    args with deployment handles and parse the correct deployment instances.
    """
    ray_dag, _ = get_multi_instantiation_class_deployment_in_init_args_dag()
    serve_root_dag = ray_dag.apply_recursive(transform_ray_dag_to_serve_dag)
    print(f"Serve DAG: \n{serve_root_dag}")
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 3
    for deployment in deployments:
        deployment.deploy()
    _validate_consistent_python_output(
        deployments[2], ray_dag, "Combine", input=1, output=5
    )

def test_shared_deployment_handle(serve_instance):
    """
    Test we can re-use the same deployment handle multiple times or in
    multiple places, without incorrectly parsing duplicated deployments.
    """
    m = Model._bind(2)
    combine = Combine._bind(m, m2=m)
    ray_dag = combine.__call__._bind(InputNode())
    print(f"Ray DAG: \n{ray_dag}")

    serve_root_dag = ray_dag._apply_recursive(transform_ray_dag_to_serve_dag)
    print(f"Serve DAG: \n{serve_root_dag}")
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 2
    for deployment in deployments:
        deployment.deploy()
    _validate_consistent_output(
        deployments[1], ray_dag, "Combine", input=1, output=4
    )

def test_single_class_with_valid_ray_options(serve_instance):
    with InputNode() as dag_input:
        model = Model.options(num_cpus=1, memory=1000).bind(2, ratio=0.3)
        ray_dag = model.forward.bind(dag_input)

    with DAGNodeNameGenerator() as node_name_generator:
        serve_root_dag = ray_dag.apply_recursive(
            lambda node: transform_ray_dag_to_serve_dag(node, node_name_generator)
        )
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 1
    deployments[0].deploy()
    _validate_consistent_python_output(
        deployments[0], ray_dag, deployments[0].name, input=1, output=0.6
    )

    deployment = serve.get_deployment(deployments[0].name)
    assert deployment.ray_actor_options.get("num_cpus") == 1
    assert deployment.ray_actor_options.get("memory") == 1000
    assert deployment.ray_actor_options.get("runtime_env") == {}

def test_multi_instantiation_class_nested_deployment_arg(serve_instance):
    """
    Test we can pass deployments with **nested** init_arg or init_kwarg,
    instantiated multiple times for the same class, and we can still
    correctly replace args with deployment handles and parse the correct
    deployment instances.
    """
    ray_dag, dag_input = get_multi_instantiation_class_nested_deployment_arg_dag()
    serve_root_dag = ray_dag.apply_recursive(transform_ray_dag_to_serve_dag)
    print(f"Serve DAG: \n{serve_root_dag}")
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 3
    # Ensure deployments with other deployment nodes in their init args are
    # replaced with the correct handle.
    combine_deployment = deployments[2]
    init_arg_handle = combine_deployment.init_args[0]
    assert isinstance(init_arg_handle, RayServeSyncHandle)
    assert init_arg_handle.deployment_name == "Model"
    init_kwarg_handle = combine_deployment.init_kwargs["m2"][NESTED_HANDLE_KEY]
    assert isinstance(init_kwarg_handle, RayServeSyncHandle)
    assert init_kwarg_handle.deployment_name == "Model_1"

    for deployment in deployments:
        deployment.deploy()
    ingress_deployment = get_ingress_deployment(serve_root_dag, dag_input)
    ingress_deployment.deploy()
    _validate_consistent_python_output(
        deployments[2], ray_dag, "Combine", input=1, output=5
    )
    for _ in range(5):
        resp = requests.get("http://127.0.0.1:8000/ingress", data="1")
        assert resp.text == "5"

def test_multi_instantiation_class_deployment_in_init_args(serve_instance):
    """
    Test we can pass deployments as init_arg or init_kwarg, instantiated
    multiple times for the same class, and we can still correctly replace
    args with deployment handles and parse the correct deployment instances.
    """
    m1 = Model._bind(2)
    m2 = Model._bind(3)
    combine = Combine._bind(m1, m2=m2)
    ray_dag = combine.__call__._bind(InputNode())
    print(f"Ray DAG: \n{ray_dag}")

    serve_root_dag = ray_dag._apply_recursive(transform_ray_dag_to_serve_dag)
    print(f"Serve DAG: \n{serve_root_dag}")
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 3
    for deployment in deployments:
        deployment.deploy()
    _validate_consistent_output(
        deployments[2], ray_dag, "Combine", input=1, output=5
    )

def build(ray_dag_root_node: DAGNode):
    """Do all the DAG transformation, extraction and generation needed to
    produce a runnable and deployable serve pipeline application from a
    valid DAG authored with the Ray DAG API.

    This should be the only user-facing API that the user interacts with.

    Assumptions:
        The following enforcements are only applied when generating and
        applying the pipeline artifact; they are not blockers for local
        development and testing.

        - ALL args and kwargs used in DAG building should be JSON
          serializable. To ensure your pipeline application can run on a
          remote cluster, potentially with a different runtime environment,
          among all options listed:
              1) binding in-memory objects
              2) relying on pickling
              3) enforcing JSON serializability on all args used
          we believe both 1) and 2) rely on unstable in-memory objects or
          cross-version pickling / closure capture, whereas JSON
          serialization provides the right contract needed for proper
          deployment.

        - ALL classes and methods used should be visible at the top of the
          file and importable via a fully qualified name. Thus no inline
          class or function definitions should be used.

    Args:
        ray_dag_root_node: DAGNode acting as root of a Ray authored DAG. It
            should be executable via `ray_dag_root_node.execute(user_input)`
            and should have a `PipelineInputNode` in it.

    Returns:
        app: The Ray Serve application object that wraps all deployments
            needed, along with the ingress deployment, for an e2e runnable
            serve pipeline, accessible via python .remote() call and HTTP.

    Examples:
        >>> with ServeInputNode(preprocessor=request_to_data_int) as dag_input:
        ...     m1 = Model.bind(1)
        ...     m2 = Model.bind(2)
        ...     m1_output = m1.forward.bind(dag_input[0])
        ...     m2_output = m2.forward.bind(dag_input[1])
        ...     ray_dag = ensemble.bind(m1_output, m2_output)

        Assuming we have non-JSON-serializable or inline-defined classes or
        functions in local pipeline development:

        >>> app = serve.pipeline.build(ray_dag)  # This works
        >>> handle = app.deploy()
        >>> # This also works, we're simply executing the transformed serve_dag.
        >>> ray.get(handle.remote(data))
        >>> # This will fail where enforcements are applied.
        >>> deployment_yaml = app.to_yaml()
    """
    serve_root_dag = ray_dag_root_node.apply_recursive(
        transform_ray_dag_to_serve_dag
    )
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    pipeline_input_node = get_pipeline_input_node(serve_root_dag)
    ingress_deployment = get_ingress_deployment(serve_root_dag, pipeline_input_node)
    deployments.insert(0, ingress_deployment)

    # TODO (jiaodong): Call into Application once Shreyas' PR is merged
    # TODO (jiaodong): Apply enforcements at serve app to_yaml level
    return deployments

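# A minimal sketch of consuming the current return value of `build` (a list of
# deployments with the ingress inserted at index 0), following the HTTP access
# pattern used in the tests above. The "/ingress" route and request payload
# are assumptions borrowed from those tests, not part of this API.
def example_deploy_pipeline(ray_dag):
    deployments = build(ray_dag)
    for deployment in deployments:
        deployment.deploy()
    # Once deployed, the pipeline should be reachable over HTTP via the
    # ingress deployment's route.
    resp = requests.get("http://127.0.0.1:8000/ingress", data="1")
    return resp.text
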