import logging import aiohttp.web import ray.dashboard.utils as dashboard_utils import ray.dashboard.optional_utils as dashboard_optional_utils import ray.dashboard.modules.test.test_utils as test_utils import ray.dashboard.modules.test.test_consts as test_consts from ray.dashboard.datacenter import DataSource from ray.ray_constants import env_bool logger = logging.getLogger(__name__) routes = dashboard_optional_utils.ClassMethodRouteTable @dashboard_utils.dashboard_module(enable=env_bool( test_consts.TEST_MODULE_ENVIRONMENT_KEY, False)) class TestHead(dashboard_utils.DashboardHeadModule): def __init__(self, dashboard_head): super().__init__(dashboard_head) self._notified_agents = {} DataSource.agents.signal.append(self._update_notified_agents) async def _update_notified_agents(self, change): if change.old: ip, port = change.old self._notified_agents.pop(ip) if change.new: ip, ports = change.new self._notified_agents[ip] = ports @staticmethod
def aiohttp_cache(
        ttl_seconds=dashboard_consts.AIOHTTP_CACHE_TTL_SECONDS,
        maxsize=dashboard_consts.AIOHTTP_CACHE_MAX_SIZE,
        enable=not env_bool(dashboard_consts.AIOHTTP_CACHE_DISABLE_ENVIRONMENT_KEY,
                            False),
):
    """TTL cache decorator for aiohttp route handlers.

    Usable both with arguments (``@aiohttp_cache(ttl_seconds=...)``) and
    bare (``@aiohttp_cache``): in the bare form the decorated coroutine
    arrives as ``ttl_seconds``, which the ``inspect.iscoroutinefunction``
    check at the bottom detects, restoring the default TTL before wrapping.

    Cached entries are keyed by request path + query string (plus request
    body for methods not in ``_AIOHTTP_CACHE_NOBODY_METHODS``) and stored
    in an LRU-ordered ``OrderedDict`` bounded by ``maxsize``. When an entry
    exists but has expired, a refresh task is started and the *stale* data
    is returned immediately (stale-while-revalidate behavior).
    """
    assert maxsize > 0
    cache = collections.OrderedDict()

    def _wrapper(handler):
        if enable:

            @functools.wraps(handler)
            async def _cache_handler(*args) -> aiohttp.web.Response:
                # Make the route handler as a bound method.
                # The args may be:
                # * (Request, )
                # * (self, Request)
                req = args[-1]
                # Make key. Bodyless methods are keyed on path+query only;
                # others also include the (awaited) request body.
                if req.method in _AIOHTTP_CACHE_NOBODY_METHODS:
                    key = req.path_qs
                else:
                    key = (req.path_qs, await req.read())
                # Query cache.
                value = cache.get(key)
                if value is not None:
                    # Touch the entry so LRU eviction keeps hot keys.
                    cache.move_to_end(key)
                    if not value.task.done(
                    ) or value.expiration >= time.time():
                        # Update task not done or the data is not expired.
                        return aiohttp.web.Response(**value.data)

                def _update_cache(task):
                    # Done-callback: store the handler's response (or an
                    # error response built from the traceback) in the cache
                    # and evict the least-recently-used entry if over size.
                    try:
                        response = task.result()
                    except Exception:
                        response = rest_response(
                            success=False, message=traceback.format_exc())
                    data = {
                        "status": response.status,
                        "headers": dict(response.headers),
                        "body": response.body,
                    }
                    cache[key] = _AiohttpCacheValue(data,
                                                    time.time() + ttl_seconds,
                                                    task)
                    cache.move_to_end(key)
                    if len(cache) > maxsize:
                        cache.popitem(last=False)
                    return response

                task = create_task(handler(*args))
                task.add_done_callback(_update_cache)
                if value is None:
                    # Cache miss: wait for the fresh response.
                    return await task
                else:
                    # Expired entry: serve stale data while the refresh
                    # task completes in the background.
                    return aiohttp.web.Response(**value.data)

            # Tag the wrapper name so cached handlers are identifiable
            # (e.g. in logs and route tables).
            suffix = f"[cache ttl={ttl_seconds}, max_size={maxsize}]"
            _cache_handler.__name__ += suffix
            _cache_handler.__qualname__ += suffix
            return _cache_handler
        else:
            return handler

    if inspect.iscoroutinefunction(ttl_seconds):
        # Bare-decorator form: ttl_seconds is actually the handler.
        target_func = ttl_seconds
        ttl_seconds = dashboard_consts.AIOHTTP_CACHE_TTL_SECONDS
        return _wrapper(target_func)
    else:
        return _wrapper
parse_event_strings,
    monitor_events,
)
from ray.core.generated import event_pb2
from ray.core.generated import event_pb2_grpc
from ray.dashboard.datacenter import DataSource

logger = logging.getLogger(__name__)
routes = dashboard_utils.ClassMethodRouteTable
# Per-job event container: insertion-ordered {event_id: event}.
JobEvents = OrderedDict
dashboard_utils._json_compatible_types.add(JobEvents)


@dashboard_utils.dashboard_module(
    enable=env_bool(event_consts.EVENT_MODULE_ENVIRONMENT_KEY, False))
class EventHead(dashboard_utils.DashboardHeadModule,
                event_pb2_grpc.ReportEventServiceServicer):
    """Dashboard head module that receives events reported by agents.

    Implements the gRPC ``ReportEventService`` servicer interface and
    persists/monitors events under ``<log_dir>/events``.
    """

    def __init__(self, dashboard_head):
        super().__init__(dashboard_head)
        # Directory where event files are written/monitored.
        self._event_dir = os.path.join(self._dashboard_head.log_dir, "events")
        os.makedirs(self._event_dir, exist_ok=True)
        # Background monitoring task; started later (not in __init__).
        self._monitor: Union[asyncio.Task, None] = None

    @staticmethod
    def _update_events(event_list):
        # Group incoming events by job:
        # {job_id: {event_id: event}}
        all_job_events = defaultdict(JobEvents)
        for event in event_list:
            event_id = event["event_id"]
            # custom_fields may be absent; presumably carries job/node
            # routing info — TODO confirm against the event schema.
            custom_fields = event.get("custom_fields")
from grpc.experimental import aio as aiogrpc

import ray.dashboard.utils as dashboard_utils
import ray.dashboard.consts as dashboard_consts
from ray.ray_constants import env_bool
from ray.dashboard.utils import async_loop_forever, create_task
from ray.dashboard.modules.event import event_consts
from ray.dashboard.modules.event.event_utils import monitor_events
from ray.core.generated import event_pb2
from ray.core.generated import event_pb2_grpc

logger = logging.getLogger(__name__)
routes = dashboard_utils.ClassMethodRouteTable


@dashboard_utils.dashboard_module(enable=env_bool(
    event_consts.EVENT_MODULE_ENVIRONMENT_KEY, False))
class EventAgent(dashboard_utils.DashboardAgentModule):
    """Dashboard agent module that buffers local events and reports them
    to the head node's ``ReportEventService`` over gRPC.
    """

    def __init__(self, dashboard_agent):
        super().__init__(dashboard_agent)
        # Directory where local event files are monitored.
        self._event_dir = os.path.join(self._dashboard_agent.log_dir, "events")
        os.makedirs(self._event_dir, exist_ok=True)
        # Background monitoring task; created later, not in __init__.
        self._monitor: Union[asyncio.Task, None] = None
        # gRPC stub to the head's ReportEventService; set after connecting.
        self._stub: Union[event_pb2_grpc.ReportEventServiceStub, None] = None
        # Bounded buffer of events awaiting report to the head.
        self._cached_events = asyncio.Queue(
            event_consts.EVENT_AGENT_CACHE_SIZE)
        logger.info("Event agent cache buffer size: %s",
                    self._cached_events.maxsize)

    async def _connect_to_dashboard(self):
        """
        Connect to the dashboard. If the dashboard is not started, then
        this method will never return.
import logging import aiohttp.web import ray.new_dashboard.utils as dashboard_utils import ray.new_dashboard.modules.test.test_utils as test_utils import ray.new_dashboard.modules.test.test_consts as test_consts from ray.new_dashboard.datacenter import DataSource from ray.ray_constants import env_bool logger = logging.getLogger(__name__) routes = dashboard_utils.ClassMethodRouteTable @dashboard_utils.dashboard_module( enable=env_bool(test_consts.TEST_MODULE_ENVIRONMENT_KEY, False)) class TestHead(dashboard_utils.DashboardHeadModule): def __init__(self, dashboard_head): super().__init__(dashboard_head) self._notified_agents = {} DataSource.agents.signal.append(self._update_notified_agents) async def _update_notified_agents(self, change): if change.old: ip, port = change.old self._notified_agents.pop(ip) if change.new: ip, ports = change.new self._notified_agents[ip] = ports @routes.get("/test/route_get")