def do_load_model():
    """Handle a model-load request.

    Reads the JSON request body, attaches the party's serving-server
    addresses (resolved through the configuration center) under the
    'servings' key, and forwards the config to publish_model.load_model.

    Returns a JSON result with retcode 0 on success, 101 on failure.
    """
    config = request.json
    # Resolve serving endpoints from ZooKeeper (or the local server conf
    # file when the configuration center is disabled).
    config["servings"] = CenterConfig.get_settings(
        path=SERVING_PATH,
        servings_zk_path=SERVINGS_ZK_PATH,
        use_zk=USE_CONFIGURATION_CENTER,
        hosts=ZOOKEEPER_HOSTS,
        server_conf_path=SERVER_CONF_PATH)
    if publish_model.load_model(config_data=config):
        return get_json_result(retcode=0)
    return get_json_result(retcode=101)
def bind_model_service():
    """Handle a model-service binding request.

    Validates that the request carries a 'service_id', fills in the
    party's serving endpoints when the caller did not supply any, and
    delegates to publish_model.bind_model_service.

    Returns a JSON result: retcode 0 on success, 101 on failure or when
    no service id was provided.
    """
    request_config = request.json
    # Fix: validate the request before fetching servings settings — the
    # original fetched them (possibly a ZooKeeper round-trip) even for
    # requests that were about to be rejected for a missing service id.
    if not request_config.get('service_id'):
        return get_json_result(retcode=101, retmsg='no service id')
    if not request_config.get('servings'):
        # get my party all servings
        request_config['servings'] = CenterConfig.get_settings(
            path=SERVING_PATH,
            servings_zk_path=SERVINGS_ZK_PATH,
            use_zk=USE_CONFIGURATION_CENTER,
            hosts=ZOOKEEPER_HOSTS,
            server_conf_path=SERVER_CONF_PATH)
    bind_status, service_id = publish_model.bind_model_service(
        config_data=request_config)
    return get_json_result(retcode=(0 if bind_status else 101),
                           retmsg='service id is {}'.format(service_id))
# NOTE(review): this span begins mid-call — the parser.add_argument(...) it
# completes starts before the visible region; only its trailing keyword
# arguments are shown here.
help="if standalone node mode or not ", action='store_true')
args = parser.parse_args()
if args.standalone_node:
    # Standalone mode: force STANDALONE work mode and use the dedicated
    # standalone job-server HTTP port instead of the cluster one.
    RuntimeConfig.init_config(WORK_MODE=WorkMode.STANDALONE)
    RuntimeConfig.init_config(HTTP_PORT=CLUSTER_STANDALONE_JOB_SERVER_PORT)
# One-time process initialization, in dependency order: session layer,
# runtime environment, process role, job queue, job controller.
session_utils.init_session_for_flow_server()
RuntimeConfig.init_env()
RuntimeConfig.set_process_role(ProcessRole.SERVER)
queue_manager.init_job_queue()
job_controller.JobController.init()
# Background thread that cleans up historical jobs (presumably periodic —
# semantics live in job_controller.JobClean; confirm there).
history_job_clean = job_controller.JobClean()
history_job_clean.start()
PrivilegeAuth.init()
# Register this flow server with the configuration center (ZooKeeper when
# enabled), exposing its HTTP port and model-transfer path.
CenterConfig.init(ZOOKEEPER_HOSTS, USE_CONFIGURATION_CENTER, FATE_FLOW_ZK_PATH, HTTP_PORT, FATE_FLOW_MODEL_TRANSFER_PATH)
# start job detector
# NOTE(review): interval unit looks like milliseconds (5 * 1000) — confirm
# against JobDetector's contract.
job_detector.JobDetector(interval=5 * 1000).start()
# start scheduler
scheduler = dag_scheduler.DAGScheduler(
    queue=RuntimeConfig.JOB_QUEUE, concurrent_num=MAX_CONCURRENT_JOB_RUN)
scheduler.start()
# start grpc server
# Unlimited message sizes (-1) on both send and receive channels.
server = grpc.server(
    futures.ThreadPoolExecutor(max_workers=10),
    options=[(cygrpc.ChannelArgKey.max_send_message_length, -1),
             (cygrpc.ChannelArgKey.max_receive_message_length, -1)])
proxy_pb2_grpc.add_DataTransferServiceServicer_to_server(
    UnaryServicer(), server)
CLUSTER_STANDALONE_JOB_SERVER_PORT = 9381 # services ip and port SERVER_CONF_PATH = 'arch/conf/server_conf.json' SERVING_PATH = '/servers/servings' server_conf = file_utils.load_json_conf(SERVER_CONF_PATH) PROXY_HOST = server_conf.get(SERVERS).get('proxy').get('host') PROXY_PORT = server_conf.get(SERVERS).get('proxy').get('port') BOARD_HOST = server_conf.get(SERVERS).get('fateboard').get('host') if BOARD_HOST == 'localhost': BOARD_HOST = get_lan_ip() BOARD_PORT = server_conf.get(SERVERS).get('fateboard').get('port') MANAGER_HOST = server_conf.get(SERVERS).get('fatemanager', {}).get('host') MANAGER_PORT = server_conf.get(SERVERS).get('fatemanager', {}).get('port') SERVINGS = CenterConfig.get_settings(path=SERVING_PATH, servings_zk_path=SERVINGS_ZK_PATH, use_zk=USE_CONFIGURATION_CENTER, hosts=ZOOKEEPER_HOSTS, server_conf_path=SERVER_CONF_PATH) BOARD_DASHBOARD_URL = 'http://%s:%d/index.html#/dashboard?job_id={}&role={}&party_id={}' % ( BOARD_HOST, BOARD_PORT) # switch SAVE_AS_TASK_INPUT_DATA_SWITCH = True SAVE_AS_TASK_INPUT_DATA_IN_MEMORY = True # init RuntimeConfig.init_config(WORK_MODE=WORK_MODE) RuntimeConfig.init_config(HTTP_PORT=HTTP_PORT) RuntimeConfig.init_config(BACKEND=BACKEND)