def run_graph(import_path: str, deployment_override_options: List[Dict]):
    """Deploys a Serve application to the controller's Ray cluster.

    Args:
        import_path: Dotted module path of the deployment graph node to
            import (resolved via ``import_attr``).
        deployment_override_options: One dict per deployment to override;
            each dict must contain a ``"name"`` key naming the deployment,
            and its remaining keys are passed to ``set_options``.
    """
    from ray import serve

    # Bug fix: ``import_attr`` was used below but never imported. Import it
    # locally, matching the style of the other local ray imports here.
    from ray._private.utils import import_attr
    from ray.serve.api import build

    # Import and build the graph
    graph = import_attr(import_path)
    app = build(graph)

    # Override options for each deployment. Note ``options_dict`` still
    # contains the "name" key, which set_options accepts as the new name.
    for options_dict in deployment_override_options:
        name = options_dict["name"]
        app.deployments[name].set_options(**options_dict)

    # Run the graph locally on the cluster
    serve.start(_override_controller_namespace="serve")
    serve.run(app)
def run_graph():
    """Deploys a Serve application to the controller's Ray cluster.

    Imports the test pizza deployment graph, builds it into an application,
    shrinks every deployment to 0.1 CPUs, and runs it on a detached Serve
    instance.
    """
    from ray import serve
    from ray._private.utils import import_attr
    from ray.serve.api import build

    # Import and build the graph
    graph = import_attr("test_config_files.pizza.serve_dag")
    app = build(graph)

    # Override options for each deployment
    for name in app.deployments:
        app.deployments[name].set_options(ray_actor_options={"num_cpus": 0.1})

    # Run the graph locally on the cluster.
    serve.start(detached=True)
    # Bug fix: the original called ``serve.run(graph)``, which ran the raw
    # graph and silently discarded every option set on ``app`` above. Run
    # the built application instead.
    serve.run(app)
def run_graph(import_path: str, graph_env: dict,
              deployment_override_options: List[Dict]):
    """Deploys a Serve application to the controller's Ray cluster.

    Args:
        import_path: Dotted module path of the deployment graph node to
            import (resolved via ``import_attr``).
        graph_env: Graph-level runtime_env, merged into each deployment's
            runtime_env (deployment env wins, except env_vars — see
            ``override_runtime_envs_except_env_vars``).
        deployment_override_options: One dict per deployment to override;
            each must contain a ``"name"`` key naming the deployment.
    """
    try:
        from ray import serve

        # Bug fix: ``import_attr`` was used below but never imported. Import
        # it locally, alongside the other local ray imports.
        from ray._private.utils import import_attr
        from ray.serve.api import build

        # Import and build the graph
        graph = import_attr(import_path)
        app = build(graph)

        # Override options for each deployment
        for options in deployment_override_options:
            name = options["name"]

            # Merge graph-level and deployment-level runtime_envs
            if "ray_actor_options" in options:
                # If specified, get ray_actor_options from config
                ray_actor_options = options["ray_actor_options"]
            else:
                # Otherwise, get options from graph code (and default to {}
                # if code sets options to None)
                ray_actor_options = app.deployments[
                    name].ray_actor_options or {}
            deployment_env = ray_actor_options.get("runtime_env", {})
            merged_env = override_runtime_envs_except_env_vars(
                graph_env, deployment_env)
            ray_actor_options.update({"runtime_env": merged_env})
            options["ray_actor_options"] = ray_actor_options

            # Update the deployment's options
            app.deployments[name].set_options(**options)

        # Run the graph locally on the cluster
        serve.start(_override_controller_namespace="serve")
        serve.run(app)
    except KeyboardInterrupt:
        # Error is raised when this task is canceled with ray.cancel(),
        # which happens when deploy_app() is called.
        logger.debug("Existing config deployment request terminated.")
def check_fruit_deployment_graph_updates():
    """Checks the graph after updating all prices to 0."""
    # Each POST routes [fruit, amount] through the running graph; after the
    # user_config price updates below, every total should be 0.
    assert requests.post("http://localhost:8000/",
                         json=["MANGO", 1]).json() == 0
    assert requests.post("http://localhost:8000/",
                         json=["ORANGE", 1]).json() == 0
    assert requests.post("http://localhost:8000/",
                         json=["PEAR", 1]).json() == 0


# Test behavior from this documentation example
serve.start(detached=True)
app = build(deployment_graph)
# Shrink every deployment so the example fits on a small test cluster.
for deployment in app.deployments.values():
    deployment.set_options(ray_actor_options={"num_cpus": 0.1})
serve.run(app)
check_fruit_deployment_graph()
# Push price=0 to each fruit stand via user_config and re-check the graph.
MangoStand.options(name="MangoStand", user_config={"price": 0}).deploy()
OrangeStand.options(user_config={"price": 0}).deploy()
PearStand.options(user_config={"price": 0}).deploy()
check_fruit_deployment_graph_updates()
print("Example ran successfully from the file.")
serve.shutdown()

# Check that deployments have been torn down
try:
    # After shutdown this request should fail to connect; reaching the
    # raise means the deployments were not actually torn down.
    requests.post("http://localhost:8000/", json=["MANGO", 1]).json()
    raise ValueError("Deployments should have been torn down!")
    # NOTE(review): the matching ``except`` clause (presumably catching the
    # connection error) continues beyond this chunk — confirm it exists.