def modify_config(self, model_name):
    """Generate model and service config files from the algorithm templates.

    Copies the LR/DNN template configs, substitutes the placeholder tokens
    ('model_demo', 'demo_version', 'svc_name'), and writes the zookeeper
    monitor settings into the resulting service.conf.

    :param str model_name: name substituted for the 'model_demo' placeholder.
    :returns: True on success, False on any copy/replace failure or if the
        algorithm is unsupported.
    """
    algo = self.algorithm.upper()
    if algo == 'LR':
        template_dir = 'lr_templates'
    elif algo == 'DNN':
        template_dir = 'dnn_templates'
    else:
        # Bug fix: an unsupported algorithm previously left the src_*
        # variables unbound and raised NameError below; fail explicitly.
        self.logger.error("unsupported algorithm: %s", self.algorithm)
        return False

    # model config
    src_models_conf = os.path.join(self.path, template_dir, 'models.conf')
    dst_models_conf = os.path.join(self.data_path, './models.conf')
    if util.copy_file(src_models_conf, dst_models_conf, self.logger) is False:
        return False
    if util.text_replace(dst_models_conf, 'model_demo', model_name, self.logger) is False:
        return False

    # service config
    src_service_conf = os.path.join(self.path, template_dir, 'service.conf')
    dst_service_conf = os.path.join(self.service_conf_path, 'service.conf')
    if util.copy_file(src_service_conf, dst_service_conf, self.logger) is False:
        return False
    for token, value in (('model_demo', model_name),
                         ('demo_version', self.version),
                         ('svc_name', self.service_name)):
        if util.text_replace(dst_service_conf, token, value, self.logger) is False:
            return False

    # DNN additionally ships a per-model model.conf
    if algo == 'DNN':
        src_model_conf = os.path.join(self.path, template_dir, 'model.conf')
        # NOTE(review): uses self.model_name here while the rest of the
        # method uses the model_name parameter — confirm they always agree.
        dst_model_conf = os.path.join(self.data_path, self.model_name, './model.conf')
        if util.copy_file(src_model_conf, dst_model_conf, self.logger) is False:
            print_log("copy dnn model conf error")
            return False
        if util.text_replace(dst_model_conf, 'model_demo', model_name, self.logger) is False:
            return False

    # Write zookeeper monitor settings into the dispatched service.conf.
    fp = dst_service_conf
    conf = ConfigParser.SafeConfigParser()
    conf.read(fp)
    # NOTE(review): each iteration overwrites the previous values, so only
    # the last entry of self.zklist takes effect — confirm this is intended.
    for item in self.zklist:
        conf.set('monitor', 'zk_addr', item['cluster_id'])
        conf.set('monitor', 'zk_group_id', item['group_id'])
        conf.set('monitor', 'zk_service_id', item['service_id'])
        conf.set('monitor', 'zk_type_id', item['type_id'])
    with open(fp, 'w') as fw:
        # 'with' closes the file; the explicit fw.close() was redundant.
        conf.write(fw)
    return True
def restore_env_conf(self):
    """Restore env.conf from the backup copy bakenv.conf.

    :returns: True on success, False if the copy fails.
    """
    src_path = os.path.join(self.path, "./bakenv.conf")
    dst_path = os.path.join(self.path, "./env.conf")
    rc = util.copy_file(src_path, dst_path, self.logger)
    if rc is False:
        print_log("restore env conf error")
        return False
    # Bug fix: the original fell off the end and returned None on success;
    # sibling methods use True/False, so return True explicitly.
    return True
def get_release_so(self):
    """Copy the LR template shared object into the release lib directory
    as <model_name>.so and make it readable/writable/executable by all.

    :returns: True on success, False if the copy fails.
    """
    src_release_lib = os.path.join(self.path, './lr_templates/model_demo.so')
    dst_release_lib = os.path.join(self.release_lib_path, self.model_name + '.so')
    rc = util.copy_file(src_release_lib, dst_release_lib, self.logger)
    if rc is False:
        return False
    # 0777 — presumably the serving process runs under another user; confirm.
    os.chmod(dst_release_lib, stat.S_IRWXG | stat.S_IRWXU | stat.S_IRWXO)
    # Bug fix: the original returned None on success; be explicit.
    return True
def reloadModel(self, model_name, model_version, model_port):
    """Dispatch the service.conf for *model_port* and ask the running
    service instance to reload its model.

    :param str model_name: model whose plugin config is dispatched.
    :param str model_version: version subdirectory under the plugin path.
    :param str model_port: port the service listens on; also replaces the
        'maxport' placeholder in the dispatched config.
    :returns: (ok, message) tuple.
    """
    src_service_conf = os.path.join(self.service_plugin_path, model_name,
                                    model_version, './config/service.conf')
    dst_service_conf = os.path.join(self.service_config_path, model_port + '.conf')
    if not os.path.isfile(src_service_conf):
        # (typo fix: "exsit" -> "exist")
        return False, src_service_conf + ' not exist,try another version'
    # Bug fix: copy first, then substitute the port in the *destination*.
    # The original ran text_replace on the shared source template, so the
    # 'maxport' placeholder was destroyed after the first reload and any
    # later reload with a different port silently kept the old one.
    rc = util.copy_file(src_service_conf, dst_service_conf, self.logger)
    if rc is False:
        return False, 'Fail to dispatch service.conf'
    rc = util.text_replace(dst_service_conf, 'maxport', model_port, self.logger)
    if rc is False:
        return False, 'Fail to change model_port in service.conf'
    client = ServiceClient()
    rc, stderr = client.init('127.0.0.1', model_port)
    if rc is False:
        return False, stderr
    result = client.control('ControlReloadModule', 'ControlReloadModule')
    reload_status = result['status']
    result = client.control('ControlModuleVersion', 'ControlModuleVersion')
    cur_version = result['value']
    if reload_status == 0:
        return True, 'reload model success,current version is ' + cur_version
    return False, 'reload model failed,current version is ' + cur_version
def _write_conf_policies(self, topo_dicts):
    """Write the AS configuration and path-policy files into every
    service directory, validating each as it is written.
    """
    dumped_confs = {}
    for topo_id, as_topo, base in srv_iter(
            topo_dicts, self.args.output_dir, common=True):
        # Cache the YAML dump per topo_id; every service dir of the same
        # AS gets an identical as.yml.
        dumped_confs.setdefault(topo_id, yaml.dump(
            self._gen_as_conf(as_topo), default_flow_style=False))
        conf_file = os.path.join(base, AS_CONF_FILE)
        write_file(conf_file, dumped_confs[topo_id])
        # Sanity check: the config we just wrote must parse cleanly.
        Config.from_file(conf_file)
        copy_file(self.args.path_policy,
                  os.path.join(base, PATH_POLICY_FILE))
        # Sanity check: the path policy must also parse.
        PathPolicy.from_file(self.args.path_policy)
def write_as_conf_and_path_policy(isd_as, as_obj, instance_path):
    """
    Writes AS configuration (i.e. as.yml) and path policy files.

    :param ISD_AS isd_as: ISD-AS for which the config will be written.
    :param str instance_path: Location (in the file system) to write
        the configuration into.
    """
    as_config = {
        'MasterASKey': as_obj.master_as_key,
        'RegisterTime': 5,
        'PropagateTime': 5,
        'CertChainVersion': 0,
        'RegisterPath': True,
    }
    write_file(os.path.join(instance_path, AS_CONF_FILE),
               yaml.dump(as_config, default_flow_style=False))
    src_policy = os.path.join(PROJECT_ROOT, DEFAULT_PATH_POLICY_FILE)
    dst_policy = os.path.join(instance_path, PATH_POLICY_FILE)
    copy_file(src_policy, dst_policy)
def get_release_so(self):
    """Copy the algorithm's template shared object into the release lib
    directory as <model_name>.so and make it rwx for user/group/other.

    :returns: True on success, False if the copy fails or the algorithm
        is unsupported.
    """
    algo = self.algorithm.upper()
    if algo == 'LR':
        src_release_lib = os.path.join(self.path, './lr_templates/model_demo.so')
    elif algo == 'DNN':
        src_release_lib = os.path.join(self.path, './dnn_templates/model_demo.so')
    else:
        # Bug fix: an unsupported algorithm previously left src_release_lib
        # unbound and raised NameError at the copy below.
        self.logger.error("unsupported algorithm: %s", self.algorithm)
        return False
    dst_release_lib = os.path.join(self.release_lib_path, self.model_name + '.so')
    if util.copy_file(src_release_lib, dst_release_lib, self.logger) is False:
        return False
    # 0777 — presumably the serving process runs under another user; confirm.
    os.chmod(dst_release_lib, stat.S_IRWXG | stat.S_IRWXU | stat.S_IRWXO)
    # Bug fix: the original returned None on success; be explicit.
    return True
def _write_as_zk_configs(self, topo_id, zks):
    """Write zoo.cfg, myid and log4j config for every zookeeper instance
    of the AS identified by *topo_id*.
    """
    # One "server.N=host:leader:election" entry per instance; the sorted
    # block is shared by every instance's zoo.cfg.
    entries = [
        "server.%s=%s:%d:%d" % (zk_id, zk.addr.ip, zk.leaderPort, zk.electionPort)
        for zk_id, zk in zks.values()
    ]
    server_block = "\n".join(sorted(entries))
    base_dir = os.path.join(self.out_dir, topo_id.ISD(), topo_id.AS())
    for name, (zk_id, zk) in zks.items():
        inst_dir = os.path.join(base_dir, name)
        copy_file(DEFAULT_ZK_LOG4J,
                  os.path.join(inst_dir, "log4j.properties"))
        datalog_dir = os.path.join(ZOOKEEPER_TMPFS_DIR, name)
        cfg = StringIO()
        cfg.write("%s\n\n" % zk.zk_conf(
            os.path.join(inst_dir, "data"), datalog_dir))
        cfg.write("%s\n" % server_block)
        write_file(os.path.join(inst_dir, 'zoo.cfg'), cfg.getvalue())
        # Each instance's identity file holds its numeric id.
        write_file(os.path.join(inst_dir, "data", "myid"), "%s\n" % zk_id)
        self.datalog_dirs.append(datalog_dir)
def write_as_conf_and_path_policy(isd_as, instance_path):
    """
    Writes AS configuration (i.e. as.yml) and path policy files.

    :param ISD_AS isd_as: ISD-AS for which the config will be written.
    :param str instance_path: Location (in the file system) to write
        the configuration into.
    """
    # Look up the AS record; bail out (best-effort) if it does not exist.
    try:
        as_obj = AD.objects.get(isd_id=isd_as[0], as_id=isd_as[1])
    except AD.DoesNotExist:
        logger.error("AS %s-%s was not found." % (isd_as[0], isd_as[1]))
        return
    as_config = {
        'MasterASKey': as_obj.master_as_key,
        'RegisterTime': 5,
        'PropagateTime': 5,
        'CertChainVersion': 0,
        'RegisterPath': True,
    }
    write_file(os.path.join(instance_path, AS_CONF_FILE),
               yaml.dump(as_config, default_flow_style=False))
    src_policy = os.path.join(PROJECT_ROOT, DEFAULT_PATH_POLICY_FILE)
    dst_policy = os.path.join(instance_path, PATH_POLICY_FILE)
    copy_file(src_policy, dst_policy)
def get_model_from_hdfs(self, model_path, feature_path, model_name):
    """Fetch feature and model files from HDFS, assemble the predict
    model and place it in the model's data directory.

    :param str model_path: HDFS path of the raw model.
    :param str feature_path: HDFS path of the feature file.
    :param str model_name: model name used for the local directories.
    :returns: True on success, False on any copy failure.
    """
    dst_path = os.path.join(self.data_path, model_name)
    util.mkdir(dst_path, self.logger)
    rc = util.hdfs_file_copy(feature_path, dst_path, False, "hadoop", self.logger)
    if rc is False:
        self.logger.error("Fail to get %s from hdfs", feature_path)
        return False
    local_models_path = os.path.join(self.path, "models")
    if not os.path.isdir(local_models_path):
        util.mkdir(local_models_path, self.logger)
    rc = util.hdfs_file_copy(model_path, os.path.join(local_models_path, model_name),
                             False, "hadoop", self.logger)
    if rc is False:
        self.logger.error("Fail to get %s from hdfs", model_path)
        return False
    predict_model_path = self.assemble_model_file(
        os.path.join(local_models_path, model_name))
    print_log("assemble predict model path:%s\n" % predict_model_path)
    rc = util.copy_file(predict_model_path,
                        os.path.join(dst_path, "predict.model"), self.logger)
    if rc is False:
        # Bug fix: the message had a dangling %s placeholder that was
        # never filled; include the offending path instead.
        print_log("copy predict model %s to data dir error\n" % predict_model_path)
        return False
    return True
def test_basic(self, dirname, mkdirs, cpfile):
    """copy_file("a", "b") must create the destination directory (with
    exist_ok=True) and then copy the file.

    dirname, mkdirs and cpfile are mock objects — presumably injected by
    patch decorators outside this view; confirm against the test class.
    """
    # Call
    copy_file("a", "b")
    # Tests
    mkdirs.assert_called_once_with(dirname.return_value, exist_ok=True)
    cpfile.assert_called_once_with("a", "b")