def do_POST(self, args):
    """Create a resource at the current context path.

    `args` is the raw command string; the literal "help" prints the
    supported query parameters and attributes instead of posting.
    A pending default action (module-global `execute_action`) overrides
    the plain 'POST' action and is appended to the URL.
    """
    logger.info("CMD: POST %s" % args)
    global execute_action
    try:
        # POST is only valid when the current context exposes actions.
        if ACTIONS_KEY not in curr_context:
            print('POST is not available')
            return
        actions_dict = curr_context[ACTIONS_KEY]
        # this is used when any default action is called
        if execute_action:
            curr_action = execute_action
        else:
            curr_action = 'POST'
        if 'help' == args:
            if actions_dict[curr_action]:
                CommonUtil.print_query_params(actions_dict[curr_action].query_params)
                CommonUtil.print_attributes(actions_dict[curr_action].method_name)
            return
        post_payload, query_str, content_type = self.__process_args(args, actions_dict[curr_action])
        cookie = self.__get_cookie()
        # Default actions post to <path>/<action>; plain POST uses the path.
        if execute_action:
            post_url = curr_path + '/' + execute_action
        else:
            post_url = curr_path
        response = ViPRConnection.submitHttpRequest('POST', post_url+query_str, cookie, payload=post_payload, contentType=content_type)
        if response:
            self.__print_response(response.text)
    except Exception as e:
        # Broad catch keeps the interactive shell alive on any failure.
        print(str(e))
        logger.error(str(e))
def main():
    """Entry point: split the dataset into train/test, open a TF session
    and launch training.

    Side effects: creates ./results and ./debug, plus time-stamped
    results/graph directories under ./results.
    """
    util.safe_mkdir('./results')
    util.safe_mkdir('./debug')
    # please define your own split here
    img_total_num = 28000
    split = 0.8
    indices = np.asarray(range(img_total_num))
    # FIX: the original compared against an undefined name `max_idx`
    # (NameError at runtime); the dataset size is img_total_num.
    testing_flag = (indices > split * img_total_num)
    testing_inds = indices[testing_flag]
    training_inds = indices[np.logical_not(testing_flag)]
    testing_inds = testing_inds.tolist()
    training_inds = training_inds.tolist()
    np.random.shuffle(testing_inds)
    np.random.shuffle(training_inds)
    # Cap GPU memory so other processes on the card keep working.
    gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.8)
    sess = tf.Session(config=tf.ConfigProto(gpu_options=gpu_options))
    time_str = time.strftime('%y_%m_%d_%H_%M_%S')
    trainer = Trainer(sess)
    trainer.train('./TrainingDataPreparation/synthetic_dataset_final',
                  training_inds, testing_inds,
                  results_dir='./results/results_final_' + time_str,  # directory to stored the results
                  graph_dir='./results/graph_final_' + time_str,  # directory as tensorboard working space
                  batch_size=4,  # batch size
                  epoch_num=12,  # epoch number
                  first_channel=8,
                  bottle_width=4,
                  dis_reps=1,
                  mode='retrain',
                  pre_model_dir=None)
def do_GET(self, args):
    """Fetch the resource at the current context path.

    `args` is the raw command string; the literal "help" prints the
    supported query parameters instead of issuing the request.
    """
    logger.info("CMD: GET %s" % args)
    try:
        # GET is only valid when the current context exposes actions.
        if ACTIONS_KEY not in curr_context:
            print('GET is not available')
            return
        actions_dict = curr_context[ACTIONS_KEY]
        if 'help' == args:
            if actions_dict['GET']:
                CommonUtil.print_query_params(actions_dict['GET'].query_params)
            return
        cookie = self.__get_cookie()
        args_dict = self.__convert_args_to_dict(args)
        query_str = ''
        if actions_dict['GET'] and actions_dict['GET'].query_params:
            query_str = self.__process_return_query_params(args_dict, actions_dict['GET'].query_params)
        # Response format defaults to JSON unless the user asked for XML.
        accept_type = args_dict[RESPONSE_TYPE_KEY] if RESPONSE_TYPE_KEY in args_dict else ''
        response = ViPRConnection.submitHttpRequest('GET', curr_path+query_str, cookie, xml=True if accept_type == RESPONSE_TYPE_XML else False)
        if response:
            self.__print_response(response.text, accept_type)
    except Exception as e:
        # Broad catch keeps the interactive shell alive on any failure.
        print(str(e))
        logger.error(str(e))
def do_PUT(self, args):
    """Update the resource at the current context path.

    `args` is the raw command string; the literal "help" prints the
    supported query parameters and attributes instead of updating.
    """
    logger.info("CMD: PUT %s" % args)
    try:
        # PUT is only valid when the current context exposes actions.
        if ACTIONS_KEY not in curr_context:
            print('PUT is not available')
            return
        actions_dict = curr_context[ACTIONS_KEY]
        if 'help' == args:
            if actions_dict['PUT']:
                CommonUtil.print_query_params(
                    actions_dict['PUT'].query_params)
                CommonUtil.print_attributes(
                    actions_dict['PUT'].method_name)
            return
        put_payload, query_str, content_type = self.__process_args(
            args, actions_dict['PUT'])
        cookie = self.__get_cookie()
        response = ViPRConnection.submitHttpRequest(
            'PUT', curr_path + query_str, cookie,
            payload=put_payload, contentType=content_type)
        if response:
            self.__print_response(response.text)
    except Exception as e:
        # Broad catch keeps the interactive shell alive on any failure.
        print(str(e))
        logger.error(str(e))
def main(vol_dir, nml_map_dir):
    """Reconstruct a detailed mesh from a volume plus a frontal normal map.

    vol_dir: path to the volume file (output .obj names are derived
    from it by stripping the 4-character extension).
    nml_map_dir: path to the normal map applied within mask0.
    Writes '<base>_out.obj' (coarse) and '<base>_out_detailed.obj'.
    """
    vol = load_volume(vol_dir)
    mask0 = proj_frontal_mask(vol)
    dpt0 = proj_frontal_depth(vol)
    nml0 = load_normal_map(nml_map_dir, mask0)
    mesh = extract_orig_mesh(vol)
    # dpt0 = cv.resize(dpt0, (dpt0.shape[1]*2, dpt0.shape[0]*2))
    # mask0 = cv.resize(mask0, (mask0.shape[1]*2, mask0.shape[0]*2))
    mesh_ = dict()  # extract_hd_mesh(vol)
    mesh_['v'] = np.copy(mesh['v'])
    mesh_['f'] = np.copy(mesh['f'])
    mesh_['vn'] = np.copy(mesh['vn'])
    # Save the coarse mesh before smoothing/upsampling.
    ObjIO.save_obj_data_binary(mesh_, vol_dir[:-4] + '_out.obj')
    nml_smooth_iter_num = 2
    for _ in range(nml_smooth_iter_num):
        smooth_surface_normal(mesh_)
    mesh_upsampled = upsample_mesh(mesh_)
    # Transfer detail from the 2D depth/normal maps onto the upsampled mesh.
    assigned_normal(mesh_upsampled, dpt0, nml0, mask0)
    # NOTE(review): rotation/flip appear to convert to the output coordinate
    # convention -- confirm against util/ObjIO conventions.
    util.rotate_model_in_place(mesh_upsampled, 0, 0, np.pi)
    util.flip_axis_in_place(mesh_upsampled, -1, 1, 1)
    ObjIO.save_obj_data_binary(mesh_upsampled, vol_dir[:-4] + '_out_detailed.obj')
def writeSqlCore(self,aReaderAppEnv):
    """Render one SQL text file per class read from the Excel input.

    Python 2 code (print statement at the end).  sys.argv[1] names the
    Excel file inside CONS.INPUT_DATA_DIR.
    """
    inFile = CONS.INPUT_DATA_DIR / sys.argv[1]
    aExcelReader = ExcelReader(inFile, CONS)
    self.aKlassInfoList = KlassInfoList()
    xx = aExcelReader.getKlassListFromExcel()
    self.aKlassInfoList.setKlassList(xx)
    self.aKlassInfoList.setReaderAppEnv(aReaderAppEnv)
    aCommonUtil = CommonUtil()
    for aKlass in self.aKlassInfoList.getKlassList():
        # Each action derives one SQL statement for the class itself.
        aKlass.setInsertSqlAction(aKlass)
        aKlass.setUpdateSqlAction(aKlass)
        aKlass.setDeleteSqlAction(aKlass)
        aKlass.setSelectSqlAction(aKlass)
        aKlass.setSelectSqlTbAliasAction(aKlass)
        ## print 'aKlass.getInsertSql\n', aKlass.getUpdateSql()
        ## print 'aKlass.getInsertSql\n', aKlass.getInsertSql()
        outSource = aCommonUtil.generateCode(aKlass, str(CONS.SQL_TEMPLATE))
        # NOTE(review): '/' binds tighter than '+', so this is
        # (OUT_DIR / name) + '.txt' -- relies on the path type supporting + str.
        fileName = CONS.OUT_DIR / aKlass.name + '.txt'
        aCommonUtil.writeFile(fileName, outSource)
    log.info('---def writeSqlCore---')
    log.info("(MSG) Ok: write Sql")
    print "(MSG)write Sql statement: Ok"
def do_GET(self, args):
    """Fetch the resource at the current context path.

    "help" prints the supported query parameters instead of issuing
    the request; otherwise the response is printed in the requested
    accept type (JSON by default, XML on request).
    """
    logger.info("CMD: GET %s" % args)
    try:
        # GET is only valid when the current context exposes actions.
        if ACTIONS_KEY not in curr_context:
            print('GET is not available')
            return
        actions_dict = curr_context[ACTIONS_KEY]
        if 'help' == args:
            if actions_dict['GET']:
                CommonUtil.print_query_params(
                    actions_dict['GET'].query_params)
            return
        cookie = self.__get_cookie()
        args_dict = self.__convert_args_to_dict(args)
        query_str = ''
        if actions_dict['GET'] and actions_dict['GET'].query_params:
            query_str = self.__process_return_query_params(
                args_dict, actions_dict['GET'].query_params)
        accept_type = args_dict[
            RESPONSE_TYPE_KEY] if RESPONSE_TYPE_KEY in args_dict else ''
        response = ViPRConnection.submitHttpRequest(
            'GET', curr_path + query_str, cookie,
            xml=True if accept_type == RESPONSE_TYPE_XML else False)
        if response:
            self.__print_response(response.text, accept_type)
    except Exception as e:
        # Broad catch keeps the interactive shell alive on any failure.
        print(str(e))
        logger.error(str(e))
def setAttributes(self, fieldVisibility, fieldType, fieldName):
    """Record a field's visibility, type and name, and derive helper values.

    Also resolves the Java type for `fieldType` and caches
    `upperNameIndex0` (presumably the name with its first character
    upper-cased, for getter/setter generation -- confirm in CommonUtil).
    """
    self.fieldVisibility = fieldVisibility
    self.fieldType = fieldType
    self.fieldName = fieldName
    self.setJavaType(fieldType)
    aCommonUtil = CommonUtil()
    self.upperNameIndex0 = aCommonUtil.getUpperNameIndex0(fieldName)
def __init__(self):
    """Initialise the server state: message queues, channels and users."""
    # Outbound messages, keyed per destination (presumably per
    # connection -- confirm against PortHandler usage).
    self.Outbound = {}
    self.Inbound = CommonUtil.Queue()
    self.Error = CommonUtil.Queue()
    self.Channels = []
    # The default 'General' channel always exists with a fixed id.
    self.Channels.append(Channel('General', '011', '0000000000000000'))
    self.handler = self.PortHandler()
    self.users = []
def do_find(self, args):
    """Search the context tree for the first search term and print all
    matching paths, one per line."""
    logger.info("CMD: find %s" % args)
    if not args:
        return
    # Only the first whitespace-separated token is used as the needle.
    needle = args.split()[0]
    matches = list()
    CommonUtil.find_paths(matches, self._context, needle)
    print('\n'.join(matches))
def saveAppEnvInfo(self, inAppEnvXml):
    """Copy the <appEnv> element's attributes from the XML into this mapping.

    Each attribute value is re-encoded to cp949 before being stored
    under its attribute name via __setitem__.
    """
    doc = ComUtil1.getDomEncodeUtf8(inAppEnvXml)
    for attrName in ['writer','writeDate','subSystemName', \
            'isClassList','isClassDefinition',\
            'isInterfaceList', 'isInterfaceDefinition', \
            'isUseCaseList',\
            'isClassExport', 'isInterfaceExport', 'isUmlCaseInput']:
        outValue = doc.getElementsByTagName("appEnv")[0].getAttribute(attrName)
        value1 = ComUtil1.encodeCp949(outValue)
        self.__setitem__(attrName, value1)
def SendStatistic(self, download_speed, upload_speed):
    """Send a human-readable download/upload speed notification.

    Speeds are raw byte counts; CommonUtil.hbytes renders them.
    Does nothing when the bot singleton is not initialised.
    """
    log.info("Statistic Noti")
    bot = bothandler.BotHandler().instance().bot
    # FIX: identity comparison with None instead of `== None` (PEP 8).
    if bot is None:
        log.info("Bot instance is none")
        return
    log.info('Statistic : %s, %s' % (
        CommonUtil.hbytes(download_speed),
        CommonUtil.hbytes(upload_speed)))
    # Message template comes from the localized string table.
    msg = self.lang.GetBotHandlerLang('noti_statistic') % (
        CommonUtil.hbytes(download_speed),
        CommonUtil.hbytes(upload_speed))
    self.SendNotifyMessage(msg)
def saveWriterInfo(self, aClassInfo, inAppEnvXml): doc = ComUtil1.getDomEncodeUtf8(inAppEnvXml) #메소드를 만든다. objectName='aClassInfo' for attrName in ['writer','writeDate','subSystemName']: exec getStringDefineMethod(objectName, attrName) # writer = doc.getElementsByTagName("appEnv")[0].getAttribute('writer') outValue = doc.getElementsByTagName("appEnv")[0].getAttribute(attrName) value1 = ComUtil1.encodeCp949(outValue) exec getStringSetterMethod(objectName, attrName, value1)
def genDomainObject(self):
    """Generate one Java domain class per table from the domain template."""
    aTableInfoDao = TableInfoDao(self.dataSource, self.CONS)
    cl = aTableInfoDao.getKlassListAction()
    aKlassInfoList = KlassInfoList()
    aKlassInfoList.setKlassList(cl)
    aCommonUtil = CommonUtil()
    for aKlass in aKlassInfoList.klassList:
        ##JAVA_DOMAIN
        outSource = aCommonUtil.generateCode(aKlass, str(self.CONS.JAVA_DOMAIN_TEMPLATE))
        # '/' binds tighter than '+': (DOMAIN_OUT_DIR / name) + '.java'
        fileName = self.CONS.DOMAIN_OUT_DIR / aKlass.klassName + '.java'
        aCommonUtil.writeFile(fileName, outSource)
def Run(self):
    """Generate one Java analysis file per class read from the Excel input.

    sys.argv[0] (the program path) seeds Constants; sys.argv[1] names
    the Excel workbook inside CONS.INPUT_DATA_DIR.
    """
    inPath = sys.argv[0]
    CONS = Constants(inPath)
    inFile = CONS.INPUT_DATA_DIR / sys.argv[1]
    aExcelReader = ExcelReader(inFile)
    aKlassInfoList = KlassInfoList()
    aKlassInfoList.setKlassList(aExcelReader.getKlassListFromExcel())
    aCommonUtil = CommonUtil()
    for aKlass in aKlassInfoList.klassList:
        outSource = aCommonUtil.generateCode(aKlass, str(CONS.JAVA_ANALYSIS_TEMPLATE))
        # '/' binds tighter than '+': (OUT_DIR / name) + '.java'
        fileName = CONS.OUT_DIR / aKlass.klassName + '.java'
        aCommonUtil.writeFile(fileName, outSource)
def __print_response(self, response_text, accept_type=''):
    """Pretty-print an HTTP response body.

    JSON and XML accept types are pretty-printed verbatim; the default
    branch renders the JSON payload as a two-column Name/Value table.
    """
    if not response_text:
        return
    if accept_type == RESPONSE_TYPE_JSON:
        print(json.dumps(json.loads(response_text), indent=4))
    elif accept_type == RESPONSE_TYPE_XML:
        response_xml = xml.dom.minidom.parseString(response_text)
        print(response_xml.toprettyxml())
    else:
        response_json = json.loads(response_text)
        if response_json:
            table = [('Name', 'Value')]
            self.__prepare_response_Table(response_json, table)
            CommonUtil.print_table(table)
def TaskNotiCallback(self, task_id, title, size, user, status):
    """Notify the chat about a download-task status change.

    `size` is a raw byte count rendered via CommonUtil.hbytes.
    Silently returns when the bot singleton is not initialised.
    """
    log.info("Task Noti")
    bot = bothandler.BotHandler().instance().bot
    if bot == None:
        log.info("Bot instance is none")
        return
    log.info('Task Monitor : %s, %s, %s, %s, %s' % (task_id, title, CommonUtil.hbytes(size), user, status))
    #msg = '*상태* : %s\n*이름* : %s\n*크기* : %s\n*사용자* : %s' % ( self.StatusTranslate(status), title, CommonUtil.hbytes(size), user)
    # Message template comes from the localized string table.
    msg = self.lang.GetBotHandlerLang('noti_torrent_status') % (
        self.StatusTranslate(status), title, CommonUtil.hbytes(size), user)
    #self.SendNotifyMessage(msg, ParseMode = 'mark')
    self.SendNotifyMessage(msg)
def genServiceXmlInfo(self):
    """Generate one '-service.xml' file per service from the XML template."""
    aTableInfoDao = TableInfoDao(self.mdbFilePath, self.CONS)
    rs = aTableInfoDao.getKlassListForServiceXmlInfoAction()
    aServiceXmlApp = ServiceXmlApp(self.CONS)
    cl = aServiceXmlApp.getKlassList(rs)
    aKlassInfoList = KlassInfoList()
    aKlassInfoList.setKlassList(cl)
    aCommonUtil = CommonUtil()
    for aServiceXmlInfo in aKlassInfoList.klassList:
        outSource = aCommonUtil.generateCode(aServiceXmlInfo, str(self.CONS.SERVICE_XML_TEMPLATE))
        # '/' binds tighter than '+': (OUT_DIR / id) + '-service.xml'
        fileName = self.CONS.SERVICE_XML_OUT_DIR / aServiceXmlInfo.service_Xml_Id + '-service.xml'
        aCommonUtil.writeFile(fileName, outSource)
def saveWriterInfo(self, aClassInfo, inAppEnvXml):
    """Transfer writer, write-date and sub-system attributes from the
    appEnv XML element onto aClassInfo via its setters."""
    doc = comUtil.getDomEncodeUtf8(inAppEnvXml)
    appEnvNode = doc.getElementsByTagName("appEnv")[0]
    aClassInfo.setWriter(appEnvNode.getAttribute('writer'))
    aClassInfo.setWriteDate(appEnvNode.getAttribute('writeDate'))
    aClassInfo.setSubSystemName(appEnvNode.getAttribute('subSystemName'))
def create_inputs(pickle_file_path):
    """Parse the WADL/XSD descriptors and pickle the resulting CLI inputs.

    Several objects are dumped sequentially into one pickle file; any
    reader must load them back in the same order.  Failures are only
    logged, never raised.
    """
    try:
        parse_wadl(CommonUtil.get_file_location("descriptors", "application.xml"))
        parse_wadl(CommonUtil.get_file_location("descriptors", "syssvc-application.xml"))
        post_process_context()
        XSDParser.parse_xsd(CommonUtil.get_file_location("descriptors", "xsd0.xsd"), cli_inputs)
        XSDParser.parse_xsd(CommonUtil.get_file_location("descriptors", "syssvc-xsd0.xsd"), cli_inputs)
        with open(pickle_file_path, "wb") as f:
            # Cannot dump class, so dumping all its objects
            pickle.dump(cli_inputs.wadl_context, f)
            pickle.dump(cli_inputs.xsd_elements_dict, f)
            pickle.dump(cli_inputs.unknown_xsd_elements_dict, f)
            pickle.dump(cli_inputs.name_type_dict, f)
    except Exception as e:
        logger.error(str(e))
def get_input(self, p):
    """Block until the GUI queue yields a message and return it encoded.

    Returns an encoded 'command' Message when the text is a recognised
    command, an encoded 'message' otherwise; an unrecognised "/..."
    input only prints help and keeps polling.
    NOTE(review): tight polling loop -- burns CPU while the queue is
    empty; confirm whether Pop blocks.
    """
    while True:
        msg = None
        if self.gui:
            msg = self.gui.messageQueue.Pop()
        if msg:
            if self.command(msg):
                return Message(CommonUtil.createID(), self.id, self.alias, self.currentChannel, msg, 'command').encode()
            elif re.match(r'\/.+', msg):
                print('commands info:' )
                # add a print out of all commands info
            else:
                return Message(CommonUtil.createID(), self.id, self.alias, self.currentChannel, msg, 'message').encode()
def do_POST(self, args):
    """Create a resource at the current context path.

    "help" prints the supported query parameters and attributes
    instead of posting.  A pending default action (module-global
    `execute_action`) overrides the plain 'POST' action and is
    appended to the URL.
    """
    logger.info("CMD: POST %s" % args)
    global execute_action
    try:
        # POST is only valid when the current context exposes actions.
        if ACTIONS_KEY not in curr_context:
            print('POST is not available')
            return
        actions_dict = curr_context[ACTIONS_KEY]
        # this is used when any default action is called
        if execute_action:
            curr_action = execute_action
        else:
            curr_action = 'POST'
        if 'help' == args:
            if actions_dict[curr_action]:
                CommonUtil.print_query_params(
                    actions_dict[curr_action].query_params)
                CommonUtil.print_attributes(
                    actions_dict[curr_action].method_name)
            return
        post_payload, query_str, content_type = self.__process_args(
            args, actions_dict[curr_action])
        cookie = self.__get_cookie()
        # Default actions post to <path>/<action>; plain POST uses the path.
        if execute_action:
            post_url = curr_path + '/' + execute_action
        else:
            post_url = curr_path
        response = ViPRConnection.submitHttpRequest(
            'POST', post_url + query_str, cookie,
            payload=post_payload, contentType=content_type)
        if response:
            self.__print_response(response.text)
    except Exception as e:
        # Broad catch keeps the interactive shell alive on any failure.
        print(str(e))
        logger.error(str(e))
def __print_ll_response(self, response_text):
    """ This method prints ids and names as a two-column table.

    Handles three payload shapes: items with 'id', items with 'op_id'
    (tasks responses), and bare items (san-fabrics API).
    """
    response_json = json.loads(response_text)
    if response_json:
        table = [('ID', 'NAME')]
        for k, v in response_json.items():
            for item in v:
                if 'id' in item:
                    table.append((item['id'], item['name']))
                elif 'op_id' in item:
                    # this is only for tasks response
                    table.append((item['op_id'], item['name']))
                else:
                    # This happens for san-fabrics API
                    table.append((item, ''))
                    #print('unknown item found')
                    # NOTE(review): stops after the first bare item of
                    # each list -- confirm this is intended.
                    break
        CommonUtil.print_table(table)
        print(' ')
def run_im2smpl(img_dir):
    """Run the external im2smpl pipeline on one image via a generated shell script.

    img_dir: path to the image; its basename minus the 4-character
    extension seeds the '_color.png' input and all output filenames.
    Returns a log string of the commands that were executed.
    """
    log_str = ''
    # Work in a randomly-named scratch folder inside the im2smpl checkout.
    tmp_folder = ''.join(random.sample(string.ascii_letters + string.digits, 8))
    tmp_path = os.path.join(img2smpl_dir, tmp_folder)
    curr_path = os.getcwd()
    util.safe_mkdir(tmp_path)
    sh_file_str = ''
    sh_file_str += '#!/usr/local/bin/bash\n'
    sh_file_str += 'cp %s %s\n' % (img_dir[:-4] + '_color.png', os.path.join(tmp_path, 'test_img.png'))
    sh_file_str += 'cd ' + img2smpl_dir + '\n'
    # The pipeline offers a cropping and a non-cropping entry point.
    if crop_orig_img:
        sh_file_str += 'python2 main.py --img_file %s --out_dir %s \n' \
                       % (os.path.join(tmp_path, 'test_img.png'), tmp_path)
    else:
        sh_file_str += 'python2 main_wo_cropping.py --img_file %s --out_dir %s \n' \
                       % (os.path.join(tmp_path, 'test_img.png'), tmp_path)
    sh_file_str += 'cd ' + curr_path + '\n'
    # Move the pipeline outputs next to the original image.
    sh_file_str += 'mv %s %s\n' % (os.path.join(tmp_path, 'test_img.png.final.txt'), img_dir[:-4] + '_final.txt')
    sh_file_str += 'mv %s %s\n' % (os.path.join(tmp_path, 'test_img.png.smpl.obj'), img_dir[:-4] + '_smpl.obj')
    sh_file_str += 'mv %s %s\n' % (os.path.join(tmp_path, 'test_img.png.smpl_proj.png'), img_dir[:-4] + '_smpl_proj.png')
    sh_file_str += 'cp %s %s\n' % (os.path.join(tmp_path, 'test_img.png'), img_dir[:-4] + '_color.png')  # copies the cropped image back
    sh_file_str += 'rm -rf ' + tmp_path + '\n'
    sh_fname = ''.join(random.sample(string.ascii_letters + string.digits, 8)) + '.sh'
    with open(os.path.join('./', sh_fname), 'w') as fp:
        fp.write(sh_file_str)
    log_str += 'will run the following commands ------------\n'
    log_str += sh_file_str
    log_str += 'end of commend -----------------------------\n'
    print(log_str)
    call(['sh', os.path.join('./', sh_fname)])
    os.remove(os.path.join('./', sh_fname))
    return log_str
def download_db_timer(self):
    """Poll the btdownload_event table for unread events, notify the chat
    and mark each event as read.

    On DB errors the cursor/connection are torn down so the next tick
    reconnects; a missing table triggers CheckDownloadMonitorTable().
    """
    ret = True
    if self.curs == None:
        ret = self.db_connect()
    if ret == True:
        query = 'SELECT * FROM btdownload_event WHERE isread=0;'
        try:
            self.curs.execute(query)
            rowitems = self.curs.fetchall()
            if len(rowitems) > 0:
                for row in rowitems:
                    # task_id|username|filename|status|total_size|isread|create_time
                    task_id = row[0]
                    username = row[1]
                    tor_name = row[2]
                    status = self.dsdown_status_to_str(row[3])
                    total_size = CommonUtil.hbytes(row[4])
                    # bot.sendMessage(24501560, "<b>Bold Text</b>\n<pre color='blue'>Test Message</pre>\nHTML Mode", parse_mode='HTML')
                    # Escape Markdown control characters in the torrent name
                    # so Telegram renders it literally.
                    tor_name = tor_name.replace('_', '\_')
                    tor_name = tor_name.replace('*', '\*')
                    msg = '*상태* : %s\n*이름* : %s\n*크기* : %s\n*사용자* : %s' % (status, tor_name, total_size, username)
                    self.bot.sendMessage(self.chat_id, msg, parse_mode='Markdown')
                    query = "UPDATE btdownload_event SET isread = 1 WHERE task_id = %d" % (task_id)
                    self.curs.execute(query)
        except psycopg2.IntegrityError as err:
            # 23505 = unique_violation; anything else is worth logging loudly.
            if err.pgcode != '23505':
                log.error('download_db_timer|DB IntegrityError : %s', err)
            else:
                log.error('download_db_timer|DB Not Intergrity Error : %s', err)
                self.curs.close()
                self.conn.close()
                self.curs = None
        except Exception as err:
            log.error('download_db_timer|DB Exception : %s', err)
            log.error("download_db_timer Exception : %s", traceback.format_exc())
            # NOTE(review): err.message is Python-2 only -- confirm runtime.
            strErr = str(err.message)
            log.error('error ---- %s, %d', strErr, strErr.find('relation "btdownload_event" does not exist'))
            if strErr.find('relation "btdownload_event" does not exist') != -1:
                self.CheckDownloadMonitorTable()
            self.curs.close()
            self.conn.close()
            self.curs = None
        except:
            # FIX: the original handler referenced an undefined name `e`,
            # raising a NameError inside the except block.
            log.error("download_db_timer|psycopg except : " + traceback.format_exc())
            self.curs.close()
            self.conn.close()
            self.curs = None
def SendTaskList(self, task_id, task_size, task_status, task_title,
                 download_size, upload_size, download_speed, upload_speed):
    """Send a detailed per-task notification (sizes/speeds humanised).

    All size/speed arguments are raw byte counts rendered via
    CommonUtil.hbytes.  Returns silently when the bot is uninitialised.
    """
    log.info("Task Noti")
    bot = bothandler.BotHandler().instance().bot
    if bot == None:
        log.info("Bot instance is none")
        return
    log.info('Task Detail : %s, %s, %s, %s, %s, %s, %s, %s' % (
        task_id, task_title, CommonUtil.hbytes(task_size),
        self.StatusTranslate(task_status),
        CommonUtil.hbytes(download_size), CommonUtil.hbytes(upload_size),
        CommonUtil.hbytes(download_speed), CommonUtil.hbytes(upload_speed)))
    # Message template comes from the localized string table.
    msg = self.lang.GetBotHandlerLang('noti_task_list') % (
        task_id, task_title, CommonUtil.hbytes(task_size),
        self.StatusTranslate(task_status),
        CommonUtil.hbytes(download_size), CommonUtil.hbytes(upload_size),
        CommonUtil.hbytes(download_speed), CommonUtil.hbytes(upload_speed))
    self.SendNotifyMessage(msg)
def createNew(name, permissions):
    """Create, validate and persist a new Channel.

    The 'General' channel always receives the fixed all-zero id; any
    other channel gets a freshly generated one.  Permissions must be
    three 0/1 flags, otherwise '011' is used as the default.
    """
    if name == 'General':
        id = '0000000000000000'
    else:
        id = CommonUtil.createID()
    ch = Channel(name, permissions, id)
    # re.match only anchors at the start, so longer strings with a
    # valid 3-flag prefix also pass.
    regex = re.match(r'([01]{3})', permissions)
    if regex:
        ch.permisions = permissions
    else:
        ch.permisions = '011'
    # NOTE(review): the attribute is spelled 'permisions' while the
    # constructor already received `permissions`; if Channel stores
    # 'permissions', this validation never takes effect -- confirm
    # the attribute name in Channel.
    db.newChannel(ch)
    return ch
def __get_id_by_key(self, key, search_name, search_context):
    """Resolve a resource name to its id via the search endpoints for `key`.

    Returns the first matching id.  NOTE(review): when every path is
    searched without a match the method returns None rather than
    raising, and any underlying error is replaced by the generic
    "not found" message -- confirm callers expect both behaviours.
    """
    try:
        search_paths = CommonUtil.get_search_path_by_key(key, search_context)
        cookie = self.__get_cookie()
        # search for this name in all search paths
        for path in search_paths:
            response = ViPRConnection.submitHttpRequest('GET', path+"?name="+search_name, cookie)
            if response:
                search_json = json.loads(response.text)
                if search_json and search_json["resource"]:
                    return search_json["resource"][0]["id"]
    except Exception:
        raise Exception("Name: %s not found" % search_name)
def extract_orig_mesh(vol):
    """Extract an iso-surface mesh from a 3D volume at level 0.5.

    Returns (mesh, normals): mesh holds 'v'/'f'/'vn' arrays; normals
    are the raw per-vertex normals reported by marching cubes.
    """
    assert len(vol.shape) == 3
    vertices, simplices, normals, _ = measure.marching_cubes_lewiner(vol, level=0.5)
    # https://scikit-image.org/docs/dev/api/skimage.measure.html#marching-cubes-lewiner
    # NOTE(review): x2 scale presumably undoes a half-resolution volume -- confirm.
    vertices = vertices*2.0
    mesh = dict()
    mesh['v'] = vertices
    mesh['f'] = simplices
    mesh['f'] = mesh['f'][:, (1, 0, 2)]  # to ensure that normals computed by opendr are facing outwards wrt. the mesh
    mesh['vn'] = util.calc_normal(mesh)  # normals from marchingCube are only slightly diff. from opendr's
    print('mesh[v] =', type(mesh['v']), mesh['v'].shape)
    print('mesh[vn] =', type(mesh['vn']), mesh['vn'].shape)
    print('mesh[f] =', type(mesh['f']), mesh['f'].shape)
    return mesh, normals
def do_PUT(self, args):
    """Update the resource at the current context path.

    "help" prints the supported query parameters and attributes
    instead of issuing the request.
    """
    logger.info("CMD: PUT %s" % args)
    try:
        # PUT is only valid when the current context exposes actions.
        if ACTIONS_KEY not in curr_context:
            print('PUT is not available')
            return
        actions_dict = curr_context[ACTIONS_KEY]
        if 'help' == args:
            if actions_dict['PUT']:
                CommonUtil.print_query_params(actions_dict['PUT'].query_params)
                CommonUtil.print_attributes(actions_dict['PUT'].method_name)
            return
        put_payload, query_str, content_type = self.__process_args(args, actions_dict['PUT'])
        cookie = self.__get_cookie()
        response = ViPRConnection.submitHttpRequest('PUT', curr_path+query_str, cookie, payload=put_payload, contentType=content_type)
        if response:
            self.__print_response(response.text)
    except Exception as e:
        # Broad catch keeps the interactive shell alive on any failure.
        print(str(e))
        logger.error(str(e))
def extract_orig_mesh(vol):
    """Extract the iso-surface mesh of a 3D volume at level 0.5.

    Returns a dict with vertex positions 'v', faces 'f' (re-wound so
    normals face outwards) and per-vertex normals 'vn'.
    """
    assert len(vol.shape) == 3
    verts, faces, normals, _ = measure.marching_cubes_lewiner(vol, 0.5)
    mesh = dict()
    mesh['v'] = verts * 2.0
    # Re-order each face's first two indices to flip the winding.
    mesh['f'] = faces[:, (1, 0, 2)]
    mesh['vn'] = util.calc_normal(mesh)
    print('mesh[v] =', type(mesh['v']), mesh['v'].shape)
    print('mesh[vn] =', type(mesh['vn']), mesh['vn'].shape)
    print('mesh[f] =', type(mesh['f']), mesh['f'].shape)
    return mesh
def system_status(interval_value=0.1):
    """Build a UTF-8 status report: CPU, RAM, top process, per-volume disk usage.

    interval_value: sampling window in seconds for the CPU percentage.
    Returns the report encoded as UTF-8 bytes (Korean labels).
    """
    # NOTE(review): vmem_info is never used; RAM is re-sampled below.
    vmem_info = psutil.virtual_memory()
    system_info = '*' + main.botConfig.GetHostName() + u' 시스템 정보*\n\n'
    system_info += 'CPU : %.1f%%\n' % (psutil.cpu_percent(interval=interval_value, percpu=False))
    system_info += 'RAM : %.1f%%\n' % (psutil.virtual_memory().percent)
    system_info += GetTopProcess(1)
    # Disk Info: mounts whose path contains "volume" (NAS-style layout).
    disk_list = os.popen("df | grep volume | cut -d ' ' -f 7").read()
    for volume in disk_list.splitlines():
        if volume != '':
            system_info += '\n*' + volume[1:] + u' Disk 정보*\n'
            diskinfo = psutil.disk_usage(volume)
            system_info += u'전체 : %s\n' % (CommonUtil.hbytes(diskinfo.total))
            system_info += u'사용된 공간 : %s\n' % (CommonUtil.hbytes(diskinfo.used))
            system_info += u'사용 가능 공간 : %s\n' % (CommonUtil.hbytes(diskinfo.free))
            system_info += u'사용율 : %s%%\n' % (diskinfo.percent)
    return system_info.encode('utf-8')
def __get_id_by_key(self, key, search_name, search_context):
    """Resolve a resource name to its id via the search endpoints for `key`.

    Returns the first matching id.  NOTE(review): an exhausted search
    returns None rather than raising, and any underlying error is
    replaced by the generic "not found" message -- confirm callers
    expect both behaviours.
    """
    try:
        search_paths = CommonUtil.get_search_path_by_key(
            key, search_context)
        cookie = self.__get_cookie()
        # search for this name in all search paths
        for path in search_paths:
            response = ViPRConnection.submitHttpRequest(
                'GET', path + "?name=" + search_name, cookie)
            if response:
                search_json = json.loads(response.text)
                if search_json and search_json["resource"]:
                    return search_json["resource"][0]["id"]
    except Exception:
        raise Exception("Name: %s not found" % search_name)
def get_recommand_message(self, feature_dict):
    """Build a gift-recommendation reply from the extracted features.

    Queries the pre-trained word2vec model for words similar to the
    feature values and keeps only those CommonUtil recognises as
    presents.
    """
    feature_list = list(feature_dict.values())
    model = word2vec.Word2Vec.load('instagram_model_20181030')
    # FIX: use the caller's features; a hard-coded debug query
    # (['엄마', '생신', '선물']) had been left in, so feature_dict
    # was silently ignored.
    learn_result = model.most_similar(positive=feature_list)
    recommand_present_list = []
    for item, similar in learn_result:
        if CommonUtil.get_is_present(item):
            recommand_present_list.append(item)
    return '빅데이터 추천 결과입니다! ' + ','.join(recommand_present_list) + ' 어떠신가요?'
def create_inputs(pickle_file_path):
    """Parse the WADL/XSD descriptors and pickle the resulting CLI inputs.

    Several objects are dumped sequentially into one pickle file; any
    reader must load them back in the same order.  Failures are only
    logged, never raised.
    """
    try:
        parse_wadl(
            CommonUtil.get_file_location('descriptors', 'application.xml'))
        parse_wadl(
            CommonUtil.get_file_location('descriptors', 'syssvc-application.xml'))
        post_process_context()
        XSDParser.parse_xsd(
            CommonUtil.get_file_location('descriptors', 'xsd0.xsd'), cli_inputs)
        XSDParser.parse_xsd(
            CommonUtil.get_file_location('descriptors', 'syssvc-xsd0.xsd'), cli_inputs)
        with open(pickle_file_path, 'wb') as f:
            # Cannot dump class, so dumping all its objects
            pickle.dump(cli_inputs.wadl_context, f)
            pickle.dump(cli_inputs.xsd_elements_dict, f)
            pickle.dump(cli_inputs.unknown_xsd_elements_dict, f)
            pickle.dump(cli_inputs.name_type_dict, f)
    except Exception as e:
        logger.error(str(e))
def processLine(sogouObserverStr):
    """Parse one comma-separated record of 'key=value' fields into a
    SogouObserver and derive its phone type from the 'h' field."""
    observer = SogouObserver('')
    for field in sogouObserverStr.split(','):
        attr_val = field.split("=")
        # Skip malformed fields that carry no '=' separator.
        if len(attr_val) < 2:
            continue
        setattr(observer, attr_val[0], attr_val[1])
    observer.phonetype = CommonUtil.getPhonetype(observer.h)
    return observer
def genDao(self):
    """Generate the SqlMap DAO implementation and its interface for every class."""
    aDao = DaoMethodInfoDao(self.dataSource, self.CONS)
    klassList = aDao.getKlassListAction()
    aCommonUtil = CommonUtil()
    for aKlass in klassList:
        ## generate the DAO implementation source  (translated from Korean)
        outSource = aCommonUtil.generateCode(aKlass, str(self.CONS.DAO_TEMPLATE))
        # '/' binds tighter than '+': (DAO_OUT_DIR / 'SqlMap') + name + 'Dao.java'
        fileName = self.CONS.DAO_OUT_DIR / 'SqlMap'+aKlass.klassName + 'Dao.java'
        #print fileName
        aCommonUtil.writeFile(fileName, outSource)
    for aKlass in klassList:
        ## generate the DAO interface source  (translated from Korean)
        outSource = aCommonUtil.generateCode(aKlass, str(self.CONS.IDAO_TEMPLATE))
        fileName = self.CONS.IDAO_OUT_DIR / aKlass.klassName + 'Dao.java'
        #print fileName
        aCommonUtil.writeFile(fileName, outSource)
def load_config():
    """Load ViPR host/port and the cookie directory from cli_config.ini
    into the module-level globals.

    Falls back to this module's directory when COOKIE_DIR_ABS_PATH is
    empty.  Errors are reported to stdout, never raised.
    """
    logger = logging.getLogger(__name__)
    global VIPR_HOST, VIPR_PORT, COOKIE_DIR_ABS_PATH
    parser = configparser.ConfigParser()
    try:
        config_file = CommonUtil.get_file_location('config', 'cli_config.ini')
        parser.read(config_file)
        VIPR_HOST = parser['vipr']['HOST']
        VIPR_PORT = parser['vipr']['PORT']
        COOKIE_DIR_ABS_PATH = parser['general']['COOKIE_DIR_ABS_PATH']
        if not COOKIE_DIR_ABS_PATH:
            # Default to the directory containing this module.
            COOKIE_DIR_ABS_PATH = os.path.dirname(__file__)
        logger.info("ViPR host: %s" % VIPR_HOST)
        logger.info("ViPR port: %s" % VIPR_PORT)
        logger.info("Cookie path: %s" % COOKIE_DIR_ABS_PATH)
    except Exception:
        # FIX: narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt are no longer swallowed.
        print('Error occurred reading ViPR config file:%s,%s' %(sys.exc_info()[0],sys.exc_info()[1]))
def load_config():
    """Load ViPR host/port and the cookie directory from cli_config.ini
    into the module-level globals.

    Falls back to this module's directory when COOKIE_DIR_ABS_PATH is
    empty.  Errors are reported to stdout, never raised.
    """
    logger = logging.getLogger(__name__)
    global VIPR_HOST, VIPR_PORT, COOKIE_DIR_ABS_PATH
    parser = configparser.ConfigParser()
    try:
        config_file = CommonUtil.get_file_location('config', 'cli_config.ini')
        parser.read(config_file)
        VIPR_HOST = parser['vipr']['HOST']
        VIPR_PORT = parser['vipr']['PORT']
        COOKIE_DIR_ABS_PATH = parser['general']['COOKIE_DIR_ABS_PATH']
        if not COOKIE_DIR_ABS_PATH:
            # Default to the directory containing this module.
            COOKIE_DIR_ABS_PATH = os.path.dirname(__file__)
        logger.info("ViPR host: %s" % VIPR_HOST)
        logger.info("ViPR port: %s" % VIPR_PORT)
        logger.info("Cookie path: %s" % COOKIE_DIR_ABS_PATH)
    except Exception:
        # FIX: narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt are no longer swallowed.
        print('Error occurred reading ViPR config file:%s,%s' % (sys.exc_info()[0], sys.exc_info()[1]))
def __init__(self, master):
    """Build the chat GUI: message view, room list, user list, input bar.

    master: the Tk root window; closing it is routed to on_closing().
    """
    self.root = master
    self.root.protocol("WM_DELETE_WINDOW", self.on_closing)
    # Messages typed by the user are handed to the client via this queue.
    self.messageQueue = CommonUtil.Queue()
    # the main frame
    self.frame = Frame(master)
    self.frame.pack()
    # title
    self.title = Label(self.frame, text="TDO")
    # the field with messages deploying
    self.textField = Text(self.frame, width=48, height=13)
    # the field with rooms to join visible
    self.roomName = Text(self.frame, width=25, height=6)
    # send button
    self.send = Button(self.frame, text="Send", command=self.sendPressed)
    # the main chat bar
    self.mainText = Entry(self.frame)
    self.mainText.bind('<Return>', self.enter)
    # the room to join text bar
    # button for join room
    # field for users online
    self.online = Text(self.frame, width=25, height=6)
    self.sp = Label(self.frame, text="        ")
    # the placement of objects in the grid
    self.title.grid(row=0, columnspan=7)
    self.textField.grid(row=1, columnspan=2, rowspan=3)
    self.roomName.grid(row=1, column=5, columnspan=2)
    self.sp.grid(column=4, row=2)
    # self.room2Join.grid(row=2,column=3, sticky= E)
    # self.roomJoin.grid(row=2,column=4, sticky = W)
    self.mainText.grid(row=4, column=0, sticky=E)
    self.send.grid(row=4, column=1, sticky=W)
    self.online.grid(row=3, column=5, columnspan=2)
    # Read-only text widgets: content is updated programmatically.
    self.textField.config(state=DISABLED)
    self.roomName.config(state=DISABLED)
    self.online.config(state=DISABLED)
def statPhonetypeuser(userStatisticsMap):
    """Aggregate per-user statistics by phone type and print the top 20
    types by click count and by close count.

    Python 2 code (print statements, dict.has_key).
    """
    phonetypeuserTopNumber =20
    phonetypeuserStatisticsMap = {}
    for uid in userStatisticsMap.keys():
        phonetype = CommonUtil.getPhonetype(uid)
        # Skip users whose phone type cannot be resolved.
        if(len(phonetype)==0):
            continue
        if (phonetypeuserStatisticsMap.has_key(phonetype)==False):
            phonetypeuserStatisticsMap[phonetype] = PhonetypeuserStatistics()
            phonetypeuserStatisticsMap[phonetype].phonetype=phonetype
        updatePhonetypeuserStatistics(phonetypeuserStatisticsMap[phonetype],userStatisticsMap[uid])
    print 'phonetype user sort by click number'
    sortByClick = sorted(phonetypeuserStatisticsMap.values(), key=operator.attrgetter('click'),reverse=True)[0:phonetypeuserTopNumber]
    for pts in sortByClick:
        pts.toString()
    print 'phonetype user sort by close number'
    sortByClose = sorted(phonetypeuserStatisticsMap.values(), key=operator.attrgetter('close'),reverse=True)[0:phonetypeuserTopNumber]
    for pts in sortByClose:
        pts.toString()
# --- Template-matching demo (script fragment) ---
# First part: draw the single best match for the current `method`.
# NOTE(review): method/min_loc/max_loc/res/img2/meth/w/h come from an
# enclosing loop that is not visible in this excerpt.
if method in [cv2.TM_SQDIFF, cv2.TM_SQDIFF_NORMED]:
    # For squared-difference methods the best match is the MINIMUM score.
    top_left = min_loc
else:
    top_left = max_loc
bottom_right = (top_left[0] + w, top_left[1] + h)
# draw a rectangle around the best match  (translated comment)
cv2.rectangle(img2, top_left, bottom_right, 255, 2)
plt.subplot(121), plt.imshow(res, cmap='gray')
plt.xticks([]), plt.yticks([])  # hide the axes  (translated comment)
plt.subplot(122), plt.imshow(img2, cmap='gray')
plt.xticks([]), plt.yticks([])
plt.suptitle(meth)
plt.show()

# Second part: multi-match -- mark every location scoring >= threshold.
img_rgb = cv2.imread(imgPath, )
img_gray = cv2.cvtColor(img_rgb, cv2.COLOR_BGR2GRAY)
template = cv2.imread(templatePath, 0)
h, w = template.shape[:2]
res = cv2.matchTemplate(img_gray, template, cv2.TM_CCOEFF_NORMED)
threshold = 0.8
# keep coordinates whose match score is at least 80%  (translated comment)
loc = np.where(res >= threshold)
for pt in zip(*loc[::-1]):  # * unpacks the (x, y) coordinate arrays  (translated comment)
    bottom_right = (pt[0] + w, pt[1] + h)
    cv2.rectangle(img_rgb, pt, bottom_right, (0, 0, 255), 2)
CommonUtil.cv_show("img_rgb", img_rgb)
import cv2 #opencv读取的格式是BGR import numpy as np import CommonUtil import matplotlib.pyplot as plt imgPath = '../data/tliangtrans.jpg' img = cv2.imread(imgPath) print('img.shape:', img.shape) gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) sift = cv2.xfeatures2d.SIFT_create() kp = sift.detect(gray, None) img = cv2.drawKeypoints(gray, kp, img) CommonUtil.cv_show('sift', img) kp, des = sift.compute(gray, kp) print(np.array(kp).shape) print(des.shape)
# Quick manual check (Python 2): print the IP-address form of integer 20.
import CommonUtil as util
print util.toIpAddr(20)
statuses = search_results["statuses"] search_results_len = len(statuses) # print "search_results_len" , search_results_len # per tweet processing while row_num < count_per_search: # and row_num < search_results_len: t = Tweet() status = statuses[row_num] #print status resp = json.dumps(status, indent=4) #print resp text = status["text"] t.text = text # Message based features t.length_tweet = len(text) t.num_words = len(text.split()) t.num_unique_chars = CommonUtil.count_unique_chars(text) t.num_hashtags = text.count("#") t.retweet_cnt = status["retweet_count"] max_id = status["id"] t.num_swear_words = CommonUtil.count_swear_words(text) t.num_at_emotions = text.count("@") # Source based Features user_features = status["user"] t.registration_age = CommonUtil.count_num_days_from_today(user_features["created_at"]) t.num_followers = user_features["followers_count"] t.num_followee = user_features["friends_count"] if t.num_followee !=0: t.ratio_foll_followee = t.num_followers / t.num_followee is_verified = user_features["verified"] if is_verified:
while count_fetched < total_count: row_num = 0 search_results = twitter_api.search.tweets(q=topic, count=count_per_search, max_id=max_id) statuses = search_results["statuses"] search_results_len = len(statuses) # per tweet processing while row_num < count_per_search: status = statuses[row_num] print status resp = json.dumps(status, indent=4) print resp text = status["text"] # Message based features length_tweet = len(text) num_words = len(text.split()) num_unique_chars = CommonUtil.count_unique_chars(text) num_hashtags = text.count("#") retweet_cnt = status["retweet_count"] max_id = status["id"] num_swear_words = CommonUtil.count_swear_words(text) num_at_emotions = text.count("@") # Source based Features user_features = status["user"] num_followers = user_features["followers_count"] num_friends = user_features["friends_count"] is_verified = user_features["verified"] if is_verified: is_verified = 1 else: is_verified = 0
def fetch_tweets_for_topic(topic):
    """Fetch up to ``total_count`` tweets matching *topic*.

    Pages the Twitter search API, extracts message- and source-based
    features into ``Tweet`` objects, writes one pipe-delimited feature
    line per tweet to ``test_tweets.txt``, and returns the sanitized
    tweet texts joined by "$".
    """
    total_count = 100
    count_per_search = 100
    max_id = -1           # -1 asks the API for the newest tweets first
    tweet_list = []       # pipe-delimited feature strings
    tweet_text_list = []  # sanitized tweet texts only
    count_fetched = 0
    auth = twitter.oauth.OAuth(OAUTH_TOKEN, OAUTH_TOKEN_SECRET,
                               CONSUMER_KEY, CONSUMER_SECRET)
    twitter_api = twitter.Twitter(auth=auth)
    while count_fetched < total_count:
        search_results = twitter_api.search.tweets(q=topic, count=count_per_search, max_id=max_id)
        statuses = search_results["statuses"]
        search_results_len = len(statuses)
        # BUG FIX: an exhausted search returns 0 statuses, which made the
        # original loop spin forever (count_fetched never advanced).
        if search_results_len == 0:
            break
        row_num = 0
        # BUG FIX: the original iterated up to count_per_search even when the
        # API returned fewer statuses, raising IndexError on statuses[row_num].
        while row_num < search_results_len:
            t = Tweet()
            status = statuses[row_num]
            text = status["text"]
            # Sanitize: collapse newlines and strip the characters used as
            # field ("|") and record ("$") separators downstream.
            text = smart_str(text)
            text = text.replace("\n", " ")
            text = text.replace("|", "")
            text = text.replace("$", "")
            t.text = text
            # Message based features
            t.length_tweet = len(text)
            t.num_words = len(text.split())
            t.num_unique_chars = CommonUtil.count_unique_chars(text)
            t.num_hashtags = text.count("#")
            t.retweet_cnt = status["retweet_count"]
            max_id = status["id"]  # advance the paging cursor
            t.num_swear_words = CommonUtil.count_swear_words(text)
            t.num_at_emotions = text.count("@")
            # Source based features
            user_features = status["user"]
            t.registration_age = CommonUtil.count_num_days_from_today(user_features["created_at"])
            t.num_followers = user_features["followers_count"]
            t.num_followee = user_features["friends_count"]
            if t.num_followee != 0:  # avoid ZeroDivisionError
                t.ratio_foll_followee = t.num_followers / t.num_followee
            t.is_verified = 1 if user_features["verified"] else 0
            t.len_desc = len(user_features["description"])
            t.len_screen_name = len(user_features["screen_name"])
            if user_features["url"]:
                t.has_url = 1
            # One pipe-delimited record per tweet; field order must match
            # the downstream parser of test_tweets.txt.
            tweet_str = "|".join([text] + [str(v) for v in (
                t.length_tweet, t.num_words, t.num_unique_chars,
                t.num_hashtags, t.retweet_cnt, t.num_swear_words,
                t.num_at_emotions, t.registration_age, t.num_followers,
                t.num_followee, t.is_verified, t.len_desc,
                t.len_screen_name, t.has_url)])
            tweet_list.append(tweet_str)
            tweet_text_list.append(smart_str(text))
            row_num += 1
        count_fetched += search_results_len
    # write the tweets to a file (with-statement guarantees the handle closes)
    with open("test_tweets.txt", "w") as outfile:
        for tweet in tweet_list:
            outfile.write(tweet)
            outfile.write("\n")
    # "$"-separated string of all sanitized tweet texts (a leading "$" is
    # kept deliberately for compatibility with the original format)
    tweet_text_str = ""
    for tweet in tweet_text_list:
        tweet_text_str = tweet_text_str + "$" + tweet
    return tweet_text_str
import cv2  # OpenCV reads images in BGR channel order
import numpy as np
import CommonUtil
import matplotlib.pyplot as plt

# Morphology demo: erode then dilate with a small 3x3 kernel, then compare
# the effect of 1/2/3 dilation passes using a large 30x30 kernel.
imgPath = '../data/tliangtrans.jpg'
img = cv2.imread(imgPath)

kernel3 = np.ones((3, 3), np.uint8)
eroded = cv2.erode(img, kernel3, iterations=1)
CommonUtil.cv_show("erosion", eroded)

restored = cv2.dilate(eroded, kernel3, iterations=1)
CommonUtil.cv_show("dilate", restored)

kernel30 = np.ones((30, 30), np.uint8)
passes = [cv2.dilate(img, kernel30, iterations=i) for i in (1, 2, 3)]
CommonUtil.cv_show("dilate3", np.hstack(passes))
# WADL/XSD endpoints for the ViPR REST API schema documents.
XSD_URI = "/application.wadl/xsd0.xsd"
SYSSVC_WADL_URI = "/syssvc-application.wadl"
SYSSVC_XSD_URI = "/syssvc-application.wadl/xsd0.xsd"

def login(user, pswd):
    """ Log into ViPR """
    # Delegate authentication; ViPRConnection.login returns a cookie string
    # (falsy on failure — presumably None; verify against ViPRConnection).
    cookie = ViPRConnection.login(user, pswd)
    if cookie:
        #print("Copying cookie to: %s" % os.path.join(COOKIE_DIR, COOKIE_FILE_NAME))
        # Persist the session cookie so later commands can reuse it.
        with open(os.path.join(ConfigUtil.COOKIE_DIR_ABS_PATH, Constants.COOKIE_FILE_NAME), 'w+') as f:
            f.write(cookie)
    return cookie

# NOTE(review): this try block is truncated in this chunk — its except/finally
# clauses are not visible here.
try:
    # Initiating logging
    logs_dir = CommonUtil.get_file_dir_location('logs')
    log_config_path = CommonUtil.get_file_location('config', 'logging.conf')
    if not os.path.exists(logs_dir):
        os.makedirs(logs_dir)
    # Expose the log directory to logging.conf via an environment variable.
    os.environ['ViPR_COMMAND_LOG_DIR'] = logs_dir
    logging.config.fileConfig(log_config_path, disable_existing_loggers=False)
    logger = logging.getLogger(__name__)
    logger.info("## Starting ViPRCommand ##")
    ConfigUtil.load_config()
    # Get username, password from arguments or prompt user
    if sys.argv and len(sys.argv) > 1:
        if sys.argv[1] == "help" or len(sys.argv) != 5:
            # Expected usage: ViPRCommand -u <name> -p <password>
            print("python ViPRCommand -u name -p password")
            sys.exit()
# -*- coding: utf-8 -*-
from win32com.client import Dispatch
import pythoncom
from CommonUtil import *
#from Constants import *

# Helper used to decode COM error tuples into readable output.
ComUtil = CommonUtil()

# Launch Microsoft Access via COM automation.
try:
    a = Dispatch("Access.Application")
except pythoncom.com_error as e:
    # BUG FIX: the original used the Python 2-only form
    # "except pythoncom.com_error, (hr, msg, exc, arg):", which is a
    # SyntaxError on Python 3. com_error.args carries the same 4-tuple.
    hr, msg, exc, arg = e.args
    ComUtil.printPythonComError(hr, msg, exc, arg)
    # NOTE(review): on failure `a` is undefined and the calls below will
    # raise NameError — consider exiting here; kept as-is to preserve
    # the original control flow.

a.Visible = 1
# Original path hint: C:\_projectautomation\source\dbAutomation\checkDbDictionary\db
a.OpenCurrentDatabase("C://_projectautomation/source/test/Access/MyDB.mdb")
aDoCmd = a.DoCmd
aDoCmd.OpenForm('test', 3)
#a.DoCmd.OpenReport("ber",2)
def login(user, pswd):
    """ Log into ViPR """
    # Delegate authentication; ViPRConnection.login returns a cookie string
    # (falsy on failure — presumably None; verify against ViPRConnection).
    cookie = ViPRConnection.login(user, pswd)
    if cookie:
        #print("Copying cookie to: %s" % os.path.join(COOKIE_DIR, COOKIE_FILE_NAME))
        # Persist the session cookie so later commands can reuse it.
        with open( os.path.join(ConfigUtil.COOKIE_DIR_ABS_PATH, Constants.COOKIE_FILE_NAME), 'w+') as f:
            f.write(cookie)
    return cookie

# NOTE(review): this try block is truncated in this chunk — its except/finally
# clauses are not visible here.
try:
    # Initiating logging
    logs_dir = CommonUtil.get_file_dir_location('logs')
    log_config_path = CommonUtil.get_file_location('config', 'logging.conf')
    if not os.path.exists(logs_dir):
        os.makedirs(logs_dir)
    # Expose the log directory to logging.conf via an environment variable.
    os.environ['ViPR_COMMAND_LOG_DIR'] = logs_dir
    logging.config.fileConfig(log_config_path, disable_existing_loggers=False)
    logger = logging.getLogger(__name__)
    logger.info("## Starting ViPRCommand ##")
    ConfigUtil.load_config()
    # Get username, password from arguments or prompt user
    if sys.argv and len(sys.argv) > 1:
        if sys.argv[1] == "help" or len(sys.argv) != 5:
            # Expected usage: ViPRCommand -u <name> -p <password>
            print("python ViPRCommand -u name -p password")
            sys.exit()
import cv2  # OpenCV reads images in BGR channel order
import numpy as np
import CommonUtil
import matplotlib.pyplot as plt

# Gradient demo: Sobel, Scharr and Laplacian edge responses on one image.
imgPath = '../data/tliangtrans.jpg'
img = cv2.imread(imgPath)

# Horizontal / vertical Sobel derivatives, rescaled to displayable uint8.
sobelx = cv2.convertScaleAbs(cv2.Sobel(img, cv2.CV_64F, 1, 0, ksize=3))
CommonUtil.cv_show("sobelx", sobelx)
sobely = cv2.convertScaleAbs(cv2.Sobel(img, cv2.CV_64F, 0, 1, ksize=3))
CommonUtil.cv_show("sobely", sobely)
sobelxy = cv2.addWeighted(sobelx, 0.5, sobely, 0.5, 0)
CommonUtil.cv_show("sobelxy", sobelxy)

# Scharr gives a more accurate 3x3 derivative than Sobel.
scharrx = cv2.convertScaleAbs(cv2.Scharr(img, cv2.CV_64F, 1, 0))
scharry = cv2.convertScaleAbs(cv2.Scharr(img, cv2.CV_64F, 0, 1))
scharrxy = cv2.addWeighted(scharrx, 0.5, scharry, 0.5, 0)
CommonUtil.cv_show("scharrxy", scharrxy)

# Second-derivative (Laplacian) response.
laplacian = cv2.convertScaleAbs(cv2.Laplacian(img, cv2.CV_64F))
import cv2  # OpenCV reads images in BGR channel order
import numpy as np
import CommonUtil
import matplotlib.pyplot as plt

# Resize demo: scale by factors (dsize=(0, 0) lets fx/fy drive the size).
imgPath = '../data/tliangtrans.jpg'
img = cv2.imread(imgPath)

for label, fx, fy in (("resized_4_4", 4, 4), ("resized_1_3", 1, 3)):
    res = cv2.resize(img, (0, 0), fx=fx, fy=fy)
    CommonUtil.cv_show(label, res)
def fetch_new_tweets():
    """Fetch tweets for the current top trending topic(s).

    Looks up the top ``num_of_topics`` worldwide trends, pages the Twitter
    search API for each, and extracts message- and source-based features
    into ``Tweet`` objects.

    Returns:
        (tweet_list, tweet_text_list): pipe-delimited feature strings and
        the corresponding raw tweet texts.
    """
    num_of_topics = 1
    auth = twitter.oauth.OAuth(OAUTH_TOKEN, OAUTH_TOKEN_SECRET,
                               CONSUMER_KEY, CONSUMER_SECRET)
    twitter_api = twitter.Twitter(auth=auth)
    US_WOE_ID = 23424977  # Yahoo! Where-On-Earth id for the USA (unused here)
    WORLD_WOE_ID = 1      # worldwide trends
    tweet_list = []
    tweet_text_list = []
    us_trends = twitter_api.trends.place(_id=WORLD_WOE_ID)
    # Collect the top trending topic names.
    topics = []
    for i in range(num_of_topics):
        name = us_trends[0]["trends"][i]["name"]
        print(name)
        topics.append(name)
    total_count = 500
    count_per_search = 100
    topic_counter = 0
    for topic in topics:
        topic_counter += 1
        print("topic # %d" % topic_counter)
        count_fetched = 0
        max_id = -1  # -1 asks the API for the newest tweets first
        while count_fetched < total_count:
            search_results = twitter_api.search.tweets(q=topic, count=count_per_search, max_id=max_id)
            statuses = search_results["statuses"]
            search_results_len = len(statuses)
            # BUG FIX: an exhausted search returns 0 statuses, which made the
            # original loop spin forever (count_fetched never advanced).
            if search_results_len == 0:
                break
            row_num = 0
            # BUG FIX: the original iterated up to count_per_search even when
            # fewer statuses were returned, raising IndexError.
            while row_num < search_results_len:
                t = Tweet()
                status = statuses[row_num]
                text = status["text"]
                t.text = text
                # Message based features
                t.length_tweet = len(text)
                t.num_words = len(text.split())
                t.num_unique_chars = CommonUtil.count_unique_chars(text)
                t.num_hashtags = text.count("#")
                t.retweet_cnt = status["retweet_count"]
                max_id = status["id"]  # advance the paging cursor
                t.num_swear_words = CommonUtil.count_swear_words(text)
                t.num_at_emotions = text.count("@")
                # Source based features
                user_features = status["user"]
                t.registration_age = CommonUtil.count_num_days_from_today(user_features["created_at"])
                t.num_followers = user_features["followers_count"]
                t.num_followee = user_features["friends_count"]
                if t.num_followee != 0:  # avoid ZeroDivisionError
                    t.ratio_foll_followee = t.num_followers / t.num_followee
                t.is_verified = 1 if user_features["verified"] else 0
                t.len_desc = len(user_features["description"])
                t.len_screen_name = len(user_features["screen_name"])
                if user_features["url"]:
                    t.has_url = 1
                # One pipe-delimited record per tweet; field order must match
                # the downstream parser.
                tweet_str = "|".join(str(v) for v in (
                    t.length_tweet, t.num_words, t.num_unique_chars,
                    t.num_hashtags, t.retweet_cnt, t.num_swear_words,
                    t.num_at_emotions, t.registration_age, t.num_followers,
                    t.num_followee, t.is_verified, t.len_desc,
                    t.len_screen_name, t.has_url))
                tweet_list.append(tweet_str)
                tweet_text_list.append(smart_str(text))
                row_num += 1
            count_fetched += search_results_len
    return tweet_list, tweet_text_list
import cv2  # OpenCV reads images in BGR channel order
import numpy as np
import CommonUtil
import matplotlib.pyplot as plt

# Harris corner detection demo: mark detected corners in red.
imgPath = '../data/tliangtrans.jpg'
img = cv2.imread(imgPath)
print ('img.shape:', img.shape)

gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
# blockSize=2, ksize=3 (Sobel aperture), k=0.04 (Harris free parameter)
dst = cv2.cornerHarris(gray, 2, 3, 0.04)
print ('dst.shape:', dst.shape)

# Paint pixels whose corner response exceeds 1% of the maximum (BGR red).
img[dst > 0.01 * dst.max()] = [0, 0, 255]
CommonUtil.cv_show('dst', img)
def setAttributeName(self, attributeName):
    """Store *attributeName* and cache the index of its first uppercase
    letter (as computed by CommonUtil.getUpperNameIndex0)."""
    self.attributeName = attributeName
    self.upperNameIndex0 = CommonUtil().getUpperNameIndex0(attributeName)