Example #1
def test_multi_instantiation_class_nested_deployment_arg(serve_instance):
    """
    Test that we can pass deployments with **nested** init_arg or init_kwarg,
    instantiated multiple times for the same class, and still correctly replace
    the args with deployment handles and parse the correct deployment instances.
    """
    m1 = Model._bind(2)
    m2 = Model._bind(3)
    combine = Combine._bind(m1, m2={NESTED_HANDLE_KEY: m2}, m2_nested=True)
    ray_dag = combine.__call__._bind(InputNode())
    print(f"Ray DAG: \n{ray_dag}")

    serve_root_dag = ray_dag._apply_recursive(transform_ray_dag_to_serve_dag)
    print(f"Serve DAG: \n{serve_root_dag}")
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 3
    # Ensure Deployments with other deployment nodes in init arg are replaced
    # with correct handle
    combine_deployment = deployments[2]
    init_arg_handle = combine_deployment.init_args[0]
    assert isinstance(init_arg_handle, RayServeSyncHandle)
    assert init_arg_handle.deployment_name == "Model"
    init_kwarg_handle = combine_deployment.init_kwargs["m2"][NESTED_HANDLE_KEY]
    assert isinstance(init_kwarg_handle, RayServeSyncHandle)
    assert init_kwarg_handle.deployment_name == "Model_1"

    for deployment in deployments:
        deployment.deploy()

    _validate_consistent_output(deployments[2],
                                ray_dag,
                                "Combine",
                                input=1,
                                output=5)
Example #2
def test_wide_fanout_deployment_graph(fanout_degree,
                                      init_delay_secs=0,
                                      compute_delay_secs=0):
    """
    Test that focuses on a wide fanout of the deployment graph
        -> Node_1
        /          \
    INPUT --> Node_2  --> combine -> OUTPUT
        \    ...   /
        -> Node_10

    1) Intermediate blob size can be large / small
    2) Compute time of each node can be long / short
    3) Init time can be long / short
    """
    nodes = [
        Node.bind(i, init_delay_secs=init_delay_secs)
        for i in range(0, fanout_degree)
    ]
    outputs = []
    with InputNode() as user_input:
        for i in range(0, fanout_degree):
            outputs.append(nodes[i].compute.bind(
                user_input, compute_delay_secs=compute_delay_secs))

        dag = combine.bind(outputs)

        serve_dag = DAGDriver.bind(dag)

    return serve_dag
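The Node and combine deployments used by this fanout builder (and by the chain builder in Example #3) come from the benchmark script and are not reproduced here. A rough, hypothetical stand-in, assuming each node simply sleeps for the configured delays and combine reduces the fan-in, might look like this:

import time

from ray import serve


@serve.deployment
class Node:
    # Hypothetical stand-in: pay the init delay once, then pay the compute
    # delay on every call and pass the data through.
    def __init__(self, node_id, init_delay_secs=0):
        time.sleep(init_delay_secs)
        self.node_id = node_id

    def compute(self, data, compute_delay_secs=0):
        time.sleep(compute_delay_secs)
        return data + 1


@serve.deployment
def combine(results):
    # Hypothetical fan-in reducer: collapse the list of upstream outputs.
    return sum(results)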
Example #3
def test_long_chain_deployment_graph(
    chain_length, init_delay_secs=0, compute_delay_secs=0
):
    """
    Test that focuses on a long chain in the deployment graph
    INPUT -> Node_1 -> Node_2 -> ... -> Node_10 -> OUTPUT
    1) Intermediate blob size can be large / small
    2) Compute time of each node can be long / short
    3) Init time can be long / short
    """

    nodes = [Node.bind(i, init_delay_secs=init_delay_secs) for i in range(chain_length)]
    prev_outputs = [None for _ in range(chain_length)]

    with InputNode() as user_input:
        for i in range(chain_length):
            if i == 0:
                prev_outputs[i] = nodes[i].compute.bind(
                    user_input, compute_delay_secs=compute_delay_secs
                )
            else:
                prev_outputs[i] = nodes[i].compute.bind(
                    prev_outputs[i - 1], compute_delay_secs=compute_delay_secs
                )

        serve_dag = DAGDriver.bind(prev_outputs[-1])

    return serve_dag
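Both builders return a Serve DAG rooted at a DAGDriver. A rough smoke test of the returned graph (hedged, since the exact driver API has shifted across Ray versions) could look like:

import ray
from ray import serve

serve_dag = test_long_chain_deployment_graph(chain_length=10)
handle = serve.run(serve_dag)               # deploys every Node plus the driver
result = ray.get(handle.predict.remote(0))  # pushes 0 through the 10-node chain
print(result)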
Example #4
def test_dag_to_workflow_execution(workflow_start_regular_shared):
    """This test constructs a DAG with complex dependencies
    and turns it into a workflow."""

    @ray.remote
    def begin(x, pos, a):
        return x * a + pos  # 23.14

    @ray.remote
    def left(x, c, a):
        return f"left({x}, {c}, {a})"

    @ray.remote
    def right(x, b, pos):
        return f"right({x}, {b}, {pos})"

    @ray.remote
    def end(lf, rt, b):
        return f"{lf},{rt};{b}"

    with pytest.raises(TypeError):
        workflow.create(begin.remote(1, 2, 3))

    with InputNode() as dag_input:
        f = begin.bind(2, dag_input[1], a=dag_input.a)
        lf = left.bind(f, "hello", dag_input.a)
        rt = right.bind(f, b=dag_input.b, pos=dag_input[0])
        b = end.bind(lf, rt, b=dag_input.b)

    wf = workflow.create(b, 2, 3.14, a=10, b="ok")
    assert len(list(wf._iter_workflows_in_dag())) == 4, "incorrect number of steps"
    assert wf.run() == "left(23.14, hello, 10),right(23.14, ok, 2);ok"
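The extra arguments passed to workflow.create after the DAG node feed the InputNode: positional arguments populate dag_input[i] and keyword arguments populate dag_input.<name>. Worked through by hand, the data flow in this test matches the asserted result:

# dag_input[0] = 2, dag_input[1] = 3.14, dag_input.a = 10, dag_input.b = "ok"
# begin(2, 3.14, a=10)        -> 2 * 10 + 3.14 == 23.14
# left(23.14, "hello", 10)    -> "left(23.14, hello, 10)"
# right(23.14, b="ok", pos=2) -> "right(23.14, ok, 2)"
# end(lf, rt, b="ok")         -> "left(23.14, hello, 10),right(23.14, ok, 2);ok"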
Example #5
def test_get_pipeline_input_node():
    # 1) No PipelineInputNode found
    ray_dag = combine.bind(1, 2)
    serve_dag = ray_dag.apply_recursive(transform_ray_dag_to_serve_dag)
    with pytest.raises(
            AssertionError,
            match="There should be one and only one PipelineInputNode"):
        get_pipeline_input_node(serve_dag)

    # 2) More than one PipelineInputNode found
    with PipelineInputNode(preprocessor=request_to_data_int) as dag_input:
        a = combine.bind(dag_input[0], dag_input[1])
    with PipelineInputNode(preprocessor=request_to_data_int) as dag_input_2:
        b = combine.bind(dag_input_2[0], dag_input_2[1])
        ray_dag = combine.bind(a, b)
    serve_dag = ray_dag.apply_recursive(transform_ray_dag_to_serve_dag)
    with pytest.raises(
            AssertionError,
            match="There should be one and only one PipelineInputNode"):
        get_pipeline_input_node(serve_dag)

    # 3) User forgot to change InputNode to PipelineInputNode
    with InputNode() as dag_input:
        ray_dag = combine.bind(dag_input[0], dag_input[1])
    serve_dag = ray_dag.apply_recursive(transform_ray_dag_to_serve_dag)
    with pytest.raises(
            ValueError,
            match="Please change Ray DAG InputNode to PipelineInputNode"):
        get_pipeline_input_node(serve_dag)
Example #6
def test_get_pipeline_input_node():
    # 1) No InputNode found
    ray_dag = combine.bind(1, 2)
    serve_dag = ray_dag.apply_recursive(transform_ray_dag_to_serve_dag)
    with pytest.raises(AssertionError,
                       match="There should be one and only one InputNode"):
        get_pipeline_input_node(serve_dag)

    # 2) More than one InputNode found
    with InputNode() as dag_input:
        a = combine.bind(dag_input[0], dag_input[1])
    with InputNode() as dag_input_2:
        b = combine.bind(dag_input_2[0], dag_input_2[1])
        ray_dag = combine.bind(a, b)
    with pytest.raises(AssertionError,
                       match="Each DAG should only have one unique InputNode"):
        serve_dag = ray_dag.apply_recursive(transform_ray_dag_to_serve_dag)
        get_pipeline_input_node(serve_dag)
Example #7
File: json_serde.py Project: smorad/ray
def dagnode_from_json(input_json: Any) -> Union[DAGNode, RayServeHandle, Any]:
    """
    Decode a DAGNode from given input json dictionary. JSON serialization is
    only used and enforced in ray serve from ray core API authored DAGNode(s).

    Covers both RayServeHandle and DAGNode types.

    Assumptions:
        - User object's JSON dict does not have keys that collide with our
            reserved DAGNODE_TYPE_KEY
        - RayServeHandle and Deployment can be re-constructed without losing
            states needed for their functionality or correctness.
        - DAGNode type can be re-constructed with new stable_uuid upon each
            deserialization without effective correctness of execution.
            - Only exception is ClassNode used as parent of ClassMethodNode
                that we perserve the same parent node.
        - .options() does not contain any DAGNode type
    """
    # Deserialize RayServeHandle type
    if SERVE_HANDLE_JSON_KEY in input_json:
        return serve_handle_from_json_dict(input_json)
    # Base case for plain objects
    elif DAGNODE_TYPE_KEY not in input_json:
        return input_json
    elif input_json[DAGNODE_TYPE_KEY] == RayServeDAGHandle.__name__:
        return RayServeDAGHandle(input_json["dag_node_json"])
    elif input_json[DAGNODE_TYPE_KEY] == "DeploymentSchema":
        return DeploymentSchema.parse_obj(input_json["schema"])
    elif input_json[DAGNODE_TYPE_KEY] == RayServeLazySyncHandle.__name__:
        return RayServeLazySyncHandle(
            input_json["deployment_name"],
            HandleOptions(input_json["handle_options_method_name"]),
        )
    # Deserialize DAGNode type
    elif input_json[DAGNODE_TYPE_KEY] == InputNode.__name__:
        return InputNode.from_json(input_json)
    elif input_json[DAGNODE_TYPE_KEY] == InputAttributeNode.__name__:
        return InputAttributeNode.from_json(input_json)
    elif input_json[DAGNODE_TYPE_KEY] == ClassMethodNode.__name__:
        return ClassMethodNode.from_json(input_json)
    elif input_json[DAGNODE_TYPE_KEY] == DeploymentNode.__name__:
        return DeploymentNode.from_json(input_json)
    elif input_json[DAGNODE_TYPE_KEY] == DeploymentMethodNode.__name__:
        return DeploymentMethodNode.from_json(input_json)
    elif input_json[DAGNODE_TYPE_KEY] == DeploymentFunctionNode.__name__:
        return DeploymentFunctionNode.from_json(input_json)
    else:
        # Class and Function nodes require original module as body.
        module_name, attr_name = parse_import_path(input_json["import_path"])
        module = getattr(import_module(module_name), attr_name)
        if input_json[DAGNODE_TYPE_KEY] == FunctionNode.__name__:
            return FunctionNode.from_json(input_json, module)
        elif input_json[DAGNODE_TYPE_KEY] == ClassNode.__name__:
            return ClassNode.from_json(input_json, module)
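dagnode_from_json is the decoding half of the serialization pair; it is meant to be plugged into json.loads as an object_hook so that nested DAGNode dictionaries are rebuilt bottom-up. A hedged usage sketch, assuming DAGNodeEncoder is the matching encoder in the same module (names may vary by Ray version):

import json

serialized = json.dumps(ray_dag, cls=DAGNodeEncoder)
recovered_dag = json.loads(serialized, object_hook=dagnode_from_json)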
Example #8
def test_single_class_with_invalid_deployment_options(serve_instance):
    model = Model.options(name="my_deployment")._bind(2, ratio=0.3)
    ray_dag = model.forward._bind(InputNode())

    serve_root_dag = ray_dag._apply_recursive(transform_ray_dag_to_serve_dag)
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 1
    with pytest.raises(
            ValueError,
            match="Specifying name in ray_actor_options is not allowed"):
        deployments[0].deploy()
Example #9
def test_simple_single_class(serve_instance):
    # Assert converting both arg and kwarg
    model = Model._bind(2, ratio=0.3)
    ray_dag = model.forward._bind(InputNode())

    serve_root_dag = ray_dag._apply_recursive(transform_ray_dag_to_serve_dag)
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 1
    deployments[0].deploy()
    _validate_consistent_output(deployments[0],
                                ray_dag,
                                "Model",
                                input=1,
                                output=0.6)
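_validate_consistent_output is a helper from the same test module and is not shown on this page. Conceptually it asserts that the deployed graph and the original Ray DAG agree on the same input; a rough, hypothetical reconstruction follows (the real helper likely also uses the deployment name passed as the third argument, e.g. for an HTTP or handle lookup).

import ray


def _validate_consistent_output(deployment, ray_dag, name, input=None, output=None):
    # Hypothetical reconstruction: both execution paths should yield `output`.
    handle = deployment.get_handle(sync=True)
    assert ray.get(handle.remote(input)) == output
    assert ray.get(ray_dag.execute(input)) == output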
Example #10
File: test_generate.py Project: smorad/ray
def test_single_class_with_invalid_deployment_options(serve_instance):
    with InputNode() as dag_input:
        model = Model.options(name="my_deployment").bind(2, ratio=0.3)
        ray_dag = model.forward.bind(dag_input)

    with DAGNodeNameGenerator() as node_name_generator:
        serve_root_dag = ray_dag.apply_recursive(
            lambda node: transform_ray_dag_to_serve_dag(
                node, node_name_generator))
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 1
    with pytest.raises(
            ValueError,
            match="Specifying 'name' in ray_actor_options is not allowed"):
        deployments[0].deploy()
Example #11
File: test_generate.py Project: alipay/ray
def test_get_pipeline_input_node():
    # 1) No InputNode found
    ray_dag = combine.bind(1, 2)
    with DeploymentNameGenerator() as deployment_name_generator:
        serve_dag = ray_dag.apply_recursive(
            lambda node: transform_ray_dag_to_serve_dag(
                node, deployment_name_generator))
    with pytest.raises(AssertionError,
                       match="There should be one and only one InputNode"):
        get_pipeline_input_node(serve_dag)

    # 2) More than one InputNode found
    with InputNode() as dag_input:
        a = combine.bind(dag_input[0], dag_input[1])
    with InputNode() as dag_input_2:
        b = combine.bind(dag_input_2[0], dag_input_2[1])
        ray_dag = combine.bind(a, b)
    with pytest.raises(AssertionError,
                       match="Each DAG should only have one unique InputNode"):
        with DeploymentNameGenerator() as deployment_name_generator:
            serve_dag = ray_dag.apply_recursive(
                lambda node: transform_ray_dag_to_serve_dag(
                    node, deployment_name_generator))
        get_pipeline_input_node(serve_dag)
Example #12
def test_single_class_with_valid_ray_options(serve_instance):
    model = Model.options(num_cpus=1, memory=1000)._bind(2, ratio=0.3)
    ray_dag = model.forward._bind(InputNode())

    serve_root_dag = ray_dag._apply_recursive(transform_ray_dag_to_serve_dag)
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 1
    deployments[0].deploy()
    _validate_consistent_output(deployments[0],
                                ray_dag,
                                deployments[0].name,
                                input=1,
                                output=0.6)

    deployment = serve.get_deployment(deployments[0].name)
    assert deployment.ray_actor_options.get("num_cpus") == 1
    assert deployment.ray_actor_options.get("memory") == 1000
    assert deployment.ray_actor_options.get("runtime_env") == {}
Example #13
def test_same_object_many_dags(workflow_start_regular_shared):
    """Ensure that when we dedupe uploads, we upload the object once per DAG,
    since different DAGs shouldn't look in each other's object directories.
    """
    @ray.remote
    def f(a):
        return [a[0]]

    x = {0: ray.put(10)}

    result1 = workflow.create(f.bind(x)).run()
    result2 = workflow.create(f.bind(x)).run()
    with InputNode() as dag_input:
        result3 = workflow.create(f.bind(dag_input.x), x=x).run()

    assert ray.get(*result1) == 10
    assert ray.get(*result2) == 10
    assert ray.get(*result3) == 10
Example #14
def test_shared_deployment_handle(serve_instance):
    """
    Test that we can re-use the same deployment handle multiple times or in
    multiple places without incorrectly parsing duplicated deployments.
    """
    m = Model._bind(2)
    combine = Combine._bind(m, m2=m)
    ray_dag = combine.__call__._bind(InputNode())
    print(f"Ray DAG: \n{ray_dag}")

    serve_root_dag = ray_dag._apply_recursive(transform_ray_dag_to_serve_dag)
    print(f"Serve DAG: \n{serve_root_dag}")
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 2
    for deployment in deployments:
        deployment.deploy()

    _validate_consistent_output(deployments[1],
                                ray_dag,
                                "Combine",
                                input=1,
                                output=4)
Example #15
File: test_generate.py Project: smorad/ray
def test_single_class_with_valid_ray_options(serve_instance):
    with InputNode() as dag_input:
        model = Model.options(num_cpus=1, memory=1000).bind(2, ratio=0.3)
        ray_dag = model.forward.bind(dag_input)

    with DAGNodeNameGenerator() as node_name_generator:
        serve_root_dag = ray_dag.apply_recursive(
            lambda node: transform_ray_dag_to_serve_dag(
                node, node_name_generator))
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 1
    deployments[0].deploy()
    _validate_consistent_python_output(deployments[0],
                                       ray_dag,
                                       deployments[0].name,
                                       input=1,
                                       output=0.6)

    deployment = serve.get_deployment(deployments[0].name)
    assert deployment.ray_actor_options.get("num_cpus") == 1
    assert deployment.ray_actor_options.get("memory") == 1000
    assert deployment.ray_actor_options.get("runtime_env") == {}
Example #16
def test_multi_instantiation_class_deployment_in_init_args(serve_instance):
    """
    Test that we can pass deployments as init_arg or init_kwarg, instantiated
    multiple times for the same class, and still correctly replace the args
    with deployment handles and parse the correct deployment instances.
    """
    m1 = Model._bind(2)
    m2 = Model._bind(3)
    combine = Combine._bind(m1, m2=m2)
    ray_dag = combine.__call__._bind(InputNode())
    print(f"Ray DAG: \n{ray_dag}")

    serve_root_dag = ray_dag._apply_recursive(transform_ray_dag_to_serve_dag)
    print(f"Serve DAG: \n{serve_root_dag}")
    deployments = extract_deployments_from_serve_dag(serve_root_dag)
    assert len(deployments) == 3
    for deployment in deployments:
        deployment.deploy()

    _validate_consistent_output(deployments[2],
                                ray_dag,
                                "Combine",
                                input=1,
                                output=5)