def test_extra_fields_invalid_deployment_schema(self):
    """Fields not declared on the schema must be rejected."""
    schema_dict = self.get_minimal_deployment_schema()

    # A dict containing only declared fields parses cleanly.
    DeploymentSchema.parse_obj(schema_dict)

    # Injecting an undeclared key must trigger a validation failure.
    schema_dict["fake_field"] = None
    with pytest.raises(ValidationError):
        DeploymentSchema.parse_obj(schema_dict)
def deployment_to_schema(d: Deployment) -> DeploymentSchema:
    """Translate a live Deployment object into its structured schema form.

    A class or function held by the deployment is converted, where
    possible, to a valid corresponding import path. init_args and
    init_kwargs must be JSON-serializable or this call will fail.
    """
    if d.ray_actor_options is None:
        ray_actor_options_schema = None
    else:
        ray_actor_options_schema = RayActorOptionsSchema.parse_obj(
            d.ray_actor_options)

    # TODO(Sihan): num_replicas and autoscaling_config cannot both be set on
    # the schema, but internally both fields drive autoscale and deploy.
    # Clean this up once the user-facing deployment config is separated from
    # the internal one.
    num_replicas = None if d._config.autoscaling_config else d.num_replicas

    return DeploymentSchema(
        name=d.name,
        num_replicas=num_replicas,
        route_prefix=d.route_prefix,
        max_concurrent_queries=d.max_concurrent_queries,
        user_config=d.user_config,
        autoscaling_config=d._config.autoscaling_config,
        graceful_shutdown_wait_loop_s=d._config.graceful_shutdown_wait_loop_s,
        graceful_shutdown_timeout_s=d._config.graceful_shutdown_timeout_s,
        health_check_period_s=d._config.health_check_period_s,
        health_check_timeout_s=d._config.health_check_timeout_s,
        ray_actor_options=ray_actor_options_schema,
    )
def test_ge_zero_deployment_schema(self):
    """Fields constrained to be >= 0 must reject a value of -1."""
    schema_dict = self.get_minimal_deployment_schema()

    for field in (
        "graceful_shutdown_wait_loop_s",
        "graceful_shutdown_timeout_s",
    ):
        schema_dict[field] = -1
        with pytest.raises(ValidationError):
            DeploymentSchema.parse_obj(schema_dict)
        # Reset so each iteration flips exactly one field.
        schema_dict[field] = None
def deployment_to_schema(d: Deployment) -> DeploymentSchema:
    """Build a structured DeploymentSchema from a live Deployment.

    A class or function held by the deployment is converted, where
    possible, to a valid corresponding import path. init_args and
    init_kwargs must be JSON-serializable or this call will fail.
    """
    ray_actor_options_schema = (
        RayActorOptionsSchema.parse_obj(d.ray_actor_options)
        if d.ray_actor_options is not None
        else None
    )

    return DeploymentSchema(
        name=d.name,
        import_path=get_deployment_import_path(
            d, enforce_importable=True, replace_main=True),
        init_args=(),
        init_kwargs={},
        num_replicas=d.num_replicas,
        route_prefix=d.route_prefix,
        max_concurrent_queries=d.max_concurrent_queries,
        user_config=d.user_config,
        autoscaling_config=d._config.autoscaling_config,
        graceful_shutdown_wait_loop_s=d._config.graceful_shutdown_wait_loop_s,
        graceful_shutdown_timeout_s=d._config.graceful_shutdown_timeout_s,
        health_check_period_s=d._config.health_check_period_s,
        health_check_timeout_s=d._config.health_check_timeout_s,
        ray_actor_options=ray_actor_options_schema,
    )
def test_gt_zero_deployment_schema(self):
    """Fields constrained to be strictly > 0 must reject zero."""
    schema_dict = self.get_minimal_deployment_schema()

    for field in (
        "num_replicas",
        "max_concurrent_queries",
        "health_check_period_s",
        "health_check_timeout_s",
    ):
        schema_dict[field] = 0
        with pytest.raises(ValidationError):
            DeploymentSchema.parse_obj(schema_dict)
        # Reset so each iteration flips exactly one field.
        schema_dict[field] = None
def test_invalid_python_attributes(self):
    """Invalid Python-specific attributes should raise a validation or
    value error.
    """
    # Python deployments require an import path.
    schema_dict = self.get_minimal_deployment_schema()

    # Every well-formed import path should parse successfully.
    for path in get_valid_import_paths():
        schema_dict["import_path"] = path
        DeploymentSchema.parse_obj(schema_dict)

    # Malformed import-path syntax must raise a ValidationError.
    for path in get_invalid_import_paths():
        schema_dict["import_path"] = path
        with pytest.raises(ValidationError):
            DeploymentSchema.parse_obj(schema_dict)
def test_valid_deployment_schema(self):
    """A fully-populated, well-formed schema dict should parse cleanly."""
    runtime_env = {
        "working_dir": (
            "https://github.com/shrekris-anyscale/"
            "test_module/archive/HEAD.zip"
        ),
        "py_modules": [
            (
                "https://github.com/shrekris-anyscale/"
                "test_deploy_group/archive/HEAD.zip"
            ),
        ],
    }
    ray_actor_options = {
        "runtime_env": runtime_env,
        "num_cpus": 3,
        "num_gpus": 4.2,
        "memory": 5,
        "object_store_memory": 3,
        "resources": {"custom_asic": 8},
        "accelerator_type": NVIDIA_TESLA_P4,
    }
    schema_dict = {
        "name": "shallow",
        "init_args": [4, "glue"],
        "init_kwargs": {"fuel": "diesel"},
        "import_path": "test_env.shallow_import.ShallowClass",
        "num_replicas": 2,
        "route_prefix": "/shallow",
        "max_concurrent_queries": 32,
        "user_config": {"threshold": 0.2, "pattern": "rainbow"},
        "autoscaling_config": None,
        "graceful_shutdown_wait_loop_s": 17,
        "graceful_shutdown_timeout_s": 49,
        "health_check_period_s": 11,
        "health_check_timeout_s": 11,
        "ray_actor_options": ray_actor_options,
    }
    DeploymentSchema.parse_obj(schema_dict)
def dagnode_from_json(input_json: Any) -> Union[DAGNode, RayServeHandle, Any]:
    """Decode a DAGNode from the given input JSON dictionary.

    JSON serialization is only used and enforced in Ray Serve from Ray core
    API authored DAGNode(s). Covers both RayServeHandle and DAGNode types.

    Assumptions:
        - User object's JSON dict does not have keys that collide with our
          reserved DAGNODE_TYPE_KEY.
        - RayServeHandle and Deployment can be re-constructed without losing
          states needed for their functionality or correctness.
        - DAGNode type can be re-constructed with a new stable_uuid upon each
          deserialization without affecting correctness of execution.
            - The only exception is a ClassNode used as the parent of a
              ClassMethodNode, where we preserve the same parent node.
        - .options() does not contain any DAGNode type.

    Raises:
        ValueError: if DAGNODE_TYPE_KEY holds a node type this decoder does
            not recognize.
    """
    node_type_to_cls = {
        # Ray DAG Inputs
        InputNode.__name__: InputNode,
        InputAttributeNode.__name__: InputAttributeNode,
        # Deployment graph execution nodes
        DeploymentExecutorNode.__name__: DeploymentExecutorNode,
        DeploymentMethodExecutorNode.__name__: DeploymentMethodExecutorNode,
        DeploymentFunctionExecutorNode.__name__: DeploymentFunctionExecutorNode,
    }
    # Deserialize RayServeHandle type
    if SERVE_HANDLE_JSON_KEY in input_json:
        return serve_handle_from_json_dict(input_json)
    # Base case for plain objects
    elif DAGNODE_TYPE_KEY not in input_json:
        return input_json
    elif input_json[DAGNODE_TYPE_KEY] == RayServeDAGHandle.__name__:
        return RayServeDAGHandle(input_json["dag_node_json"])
    elif input_json[DAGNODE_TYPE_KEY] == "DeploymentSchema":
        return DeploymentSchema.parse_obj(input_json["schema"])
    elif input_json[DAGNODE_TYPE_KEY] == RayServeLazySyncHandle.__name__:
        return RayServeLazySyncHandle(
            input_json["deployment_name"],
            HandleOptions(input_json["handle_options_method_name"]),
        )
    # Deserialize DAGNode type
    elif input_json[DAGNODE_TYPE_KEY] in node_type_to_cls:
        return node_type_to_cls[input_json[DAGNODE_TYPE_KEY]].from_json(
            input_json)
    else:
        # Class and Function nodes require original module as body.
        module_name, attr_name = parse_import_path(input_json["import_path"])
        module = getattr(import_module(module_name), attr_name)
        if input_json[DAGNODE_TYPE_KEY] == FunctionNode.__name__:
            return FunctionNode.from_json(input_json, module)
        elif input_json[DAGNODE_TYPE_KEY] == ClassNode.__name__:
            return ClassNode.from_json(input_json, module)
        # Previously an unrecognized node type fell through and silently
        # returned None; fail loudly instead so corrupt or unsupported
        # payloads are surfaced at deserialization time.
        raise ValueError(
            f"Unsupported DAGNode type: {input_json[DAGNODE_TYPE_KEY]}."
        )
def test_mutually_exclusive_num_replicas_and_autoscaling_config(self):
    """num_replicas and autoscaling_config cannot be set at the same time."""
    schema_dict = self.get_minimal_deployment_schema()

    # num_replicas alone is valid.
    schema_dict["num_replicas"] = 5
    schema_dict["autoscaling_config"] = None
    DeploymentSchema.parse_obj(schema_dict)

    # autoscaling_config alone is valid.
    schema_dict["num_replicas"] = None
    schema_dict["autoscaling_config"] = AutoscalingConfig().dict()
    DeploymentSchema.parse_obj(schema_dict)

    # Supplying both must be rejected.
    schema_dict["num_replicas"] = 5
    schema_dict["autoscaling_config"] = AutoscalingConfig().dict()
    with pytest.raises(ValueError):
        DeploymentSchema.parse_obj(schema_dict)
def test_invalid_python_attributes(self):
    """Invalid Python-specific attributes should raise a validation or
    value error.
    """
    # Python deployments require an import path.
    schema_dict = self.get_minimal_deployment_schema()
    schema_dict["init_args"] = [1, 2]
    schema_dict["init_kwargs"] = {"threshold": 0.5}
    del schema_dict["import_path"]

    # Without an import path, validation must fail.
    with pytest.raises(ValueError, match="must be specified"):
        DeploymentSchema.parse_obj(schema_dict)

    # Once an import path is supplied, the schema parses cleanly.
    schema_dict["import_path"] = "my_module.MyClass"
    DeploymentSchema.parse_obj(schema_dict)

    # Malformed import-path syntax must raise a ValidationError.
    for path in ["", "MyClass", ".", "hello,world"]:
        schema_dict["import_path"] = path
        with pytest.raises(ValidationError):
            DeploymentSchema.parse_obj(schema_dict)
def test_route_prefix(self):
    """route_prefix values must be validated by the schema."""
    schema_dict = self.get_minimal_deployment_schema()

    # Missing the leading "/" is invalid.
    schema_dict["route_prefix"] = "hello/world"
    with pytest.raises(ValueError):
        DeploymentSchema.parse_obj(schema_dict)

    # A trailing "/" is invalid.
    schema_dict["route_prefix"] = "/hello/world/"
    with pytest.raises(ValueError):
        DeploymentSchema.parse_obj(schema_dict)

    # Wildcards ("{" or "}") are not allowed in the prefix.
    schema_dict["route_prefix"] = "/hello/{adjective}/world/"
    with pytest.raises(ValueError):
        DeploymentSchema.parse_obj(schema_dict)

    # A well-formed prefix parses cleanly.
    schema_dict["route_prefix"] = "/hello/wonderful/world"
    DeploymentSchema.parse_obj(schema_dict)

    # The bare root prefix "/" is accepted.
    schema_dict["route_prefix"] = "/"
    DeploymentSchema.parse_obj(schema_dict)

    # None (no HTTP route) is accepted.
    schema_dict["route_prefix"] = None
    DeploymentSchema.parse_obj(schema_dict)