def stop_leader_server():
    """Shut down the jobs running on this leader node and stop the leader server.

    Stops all managed Docker containers first, then stops the application
    layer; the application stop is expected to broadcast a shutdown message
    to all follower workers.

    NOTE(review): the original docstring also claimed a "screenshot" is
    saved on shutdown — nothing in this function does that; presumably it
    meant a state snapshot taken elsewhere. Confirm before relying on it.
    """
    docker_mgr = DockerContainerManager()
    docker_mgr.stop()
    app_stop()
def start_leader_server(gpu=False):
    """Start the system on a leader server in a cluster.

    Brings up the supporting services (e.g. database and monitor) via
    Docker, then starts the application layer.

    Args:
        gpu (bool, optional): Whether to enable GPU support in the managed
            containers. Defaults to False.
    """
    # TODO: lazy docker start
    docker_mgr = DockerContainerManager(enable_gpu=gpu)
    started = docker_mgr.start()
    if not started:
        # Containers are presumably already running — attach to them instead.
        docker_mgr.connect()
    app_start()
def start_leader_server(gpu=False):
    """Start the system on a leader server in a cluster.

    Brings up the supporting services (e.g. database and monitor) via
    Docker, then starts the application layer.

    NOTE(review): this redefines ``start_leader_server`` — an earlier
    definition with the same name exists in this file and is shadowed by
    this one at import time. Verify which variant is intended to survive.

    Args:
        gpu (bool, optional): Whether to enable GPU support in the managed
            containers. Defaults to False.
    """
    # TODO: lazy docker start
    docker_mgr = DockerContainerManager(enable_gpu=gpu)
    started = docker_mgr.start()
    if not started:
        # Containers are presumably already running — attach to them instead.
        docker_mgr.connect()
    # FIXME: app not started because PyTorch is imported when this function is
    # called; PyTorch and subprocess have conflicts.
    app_start()
def remove_services():
    """Stop every service and remove the downloaded Docker images.

    Shuts down the managed containers and the application layer first, then
    removes all associated Docker images/containers via the manager.
    """
    docker_mgr = DockerContainerManager()
    docker_mgr.stop()
    app_stop()
    # Only remove images after both the containers and the app are down.
    docker_mgr.remove_all()
def clean():
    """Stop the ModelCI service and remove all of its containers.

    Equivalent to a full teardown: stop the managed Docker containers,
    stop the application layer, then remove everything the container
    manager created.
    """
    docker_mgr = DockerContainerManager()
    docker_mgr.stop()
    app_stop()
    docker_mgr.remove_all()
def stop():
    """Stop the ModelCI service.

    Stops the managed Docker containers first, then the application layer.
    Containers and images are left in place (see ``clean`` for removal).
    """
    docker_mgr = DockerContainerManager()
    docker_mgr.stop()
    app_stop()
def start(gpu=False):
    """Start the ModelCI service.

    Brings up the managed Docker containers (attaching to them if they are
    already running), then starts the application layer.

    Args:
        gpu (bool, optional): Whether to enable GPU support in the managed
            containers. Defaults to False.
    """
    docker_mgr = DockerContainerManager(enable_gpu=gpu)
    started = docker_mgr.start()
    if not started:
        # Presumably already running — attach to the existing containers.
        docker_mgr.connect()
    app_start()