def _init_argument(self):
    """Parse command-line arguments, validate the runtime config, and
    bring up eggroll, federation, and the pipeline.

    Expects two required CLI flags:
        -c / --config  : path to the runtime config JSON file
        -j / --job_id  : identifier for this job

    Side effects: sets ``self.config_path`` and ``self.job_id``, runs the
    config checker, calls ``self._initialize``, initializes eggroll and
    federation, and finally calls ``self._init_pipeline``.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config', required=True, type=str, help="Specify a config json file path")
    parser.add_argument('-j', '--job_id', type=str, required=True, help="Specify the job id")
    args = parser.parse_args()
    # argparse enforces required=True and exits with its own error message
    # if --config is missing, so no manual "if not args.config" guard is
    # needed here (the original one was unreachable dead code).
    config_path = args.config
    self.config_path = config_path
    self.job_id = args.job_id

    # Validate the config file before doing any initialization work.
    all_checker = AllChecker(config_path)
    all_checker.check_all()

    self._initialize(config_path)
    with open(config_path) as conf_f:
        runtime_json = json.load(conf_f)

    # work_mode is populated by self._initialize via the workflow params —
    # presumably standalone vs. cluster; confirm against WorkflowParam.
    eggroll.init(self.job_id, self.workflow_param.work_mode)
    LOGGER.debug("The job id is {}".format(self.job_id))
    federation.init(self.job_id, runtime_json)
    LOGGER.debug("Finish eggroll and federation init")
    self._init_pipeline()
def init(job_id, runtime_conf, mode, server_conf_path="arch/conf/server_conf.json"):
    """Initialize eggroll and the federation layer for fate_script.

    Args:
        job_id: identifier of the running job.
        runtime_conf: path to the runtime config JSON file; it is checked
            by AllChecker and then loaded.
        mode: work mode; must not be None. WorkMode.STANDALONE selects the
            standalone federation backend, anything else the cluster one.
        server_conf_path: server config path forwarded to the cluster
            backend only.

    Raises:
        EnvironmentError: if ``mode`` is None.

    Side effects: sets ``RuntimeInstance.FEDERATION``.
    """
    # Bug fix: validate mode BEFORE using it. The original called
    # eggroll.init(job_id, mode) first, so a None mode slipped through
    # to eggroll before this guard could ever fire.
    if mode is None:
        raise EnvironmentError("eggroll should be initialized before fate_script")
    eggroll.init(job_id, mode)
    print("runtime_conf:{}".format(runtime_conf))

    # Validate the config file before loading and using it.
    all_checker = AllChecker(runtime_conf)
    all_checker.check_all()
    with open(runtime_conf) as conf_p:
        runtime_json = json.load(conf_p)

    if mode == WorkMode.STANDALONE:
        RuntimeInstance.FEDERATION = standalone_fate_script.init(job_id=job_id, runtime_conf=runtime_json)
    else:
        RuntimeInstance.FEDERATION = cluster_fate_script.init(job_id=job_id,
                                                              runtime_conf=runtime_json,
                                                              server_conf_path=server_conf_path)