def caseCodeValue_get(self, word_all, causeValue, line):
    """
    Resolve a cause-code value to its numeric string form.

    If *causeValue* is already an integer string it is returned as-is
    (stringified).  Otherwise the ExceptionCode field is located inside
    *word_all* and the pair (causeValue, exception code) is resolved
    through the database.

    :param word_all: list of "key<split>value" words from the current line
    :param causeValue: raw cause value ("UNDEF", "", an int string, or a name)
    :param line: original source line, used only for the error message
    :return: the resolved value as a string, or "" for UNDEF/empty input
    :raises Exception: when causeValue is non-numeric and no ExceptionCode
        key can be found in *word_all*
    """
    if causeValue == "UNDEF" or causeValue == "":
        return ""
    try:
        ret = int(causeValue)
    except (ValueError, TypeError):
        # Non-numeric cause value: find the ExceptionCode column and all
        # key aliases that map to the same column index.
        exCodeIndex = self.load_dict[self.getKeyStr(EXCEPTION_CODE)]
        exCodeList = [k for k in self.load_dict if self.load_dict[k] == exCodeIndex]
        # Locate the word that carries one of the ExceptionCode aliases.
        # any(...) replaces a truthiness test on filter(): identical in
        # Python 2 and also correct under Python 3.
        exStr = ""
        for wd in word_all:
            if keyStrSplit in wd and any(re.search(ex.lower(), wd.lower()) for ex in exCodeList):
                exStr = wd
                break
        if not exStr:
            raise Exception("CaseCode get Error:Cannot find key ExceptionCode when causevalue is not int:%s"%line)
        try:
            tmp = exStr.split(keyStrSplit)
            exValue = int(tmp[1].strip(" ").strip("\n").rstrip('"'))
        except (IndexError, ValueError):
            # Malformed exception-code field: fall back to the sentinel -1.
            exValue = -1
        ret = DBInfo.get_causecode_value(causeValue, exValue)
    return str(ret)
def init_dict(self):
    """
    Initialize the lookup dictionaries used during parsing.
    """
    # mspowerindividual information loaded from the database
    self.output_power_dict = DBInfo.loadOutputPower()
    # Pre-seed an empty bucket for every possible call id ("0".."65536");
    # these collect the mandatory call-information records.
    self.call_information.update((str(idx), []) for idx in range(0, 65537))
def get_cellid(self, filed):
    """
    CellID: resolve the cell id for *filed* via the cell dictionary
    loaded from the database (reloading the dictionary on a cache miss).

    :param filed: comma-separated field string whose third element is
        "<name>=<cell_info>"; "" or None yields ""
    :return: the cell id string, or "" for empty input
    """
    if filed == "" or filed is None:
        return ""
    tmp = filed.split(",")[2]
    cell_info = tmp.split("=")[1]
    # dict.has_key() is deprecated (removed in Python 3); use `in`.
    if cell_info in self.cell_dict:
        return self.cell_dict[cell_info]
    # Cache miss: reload the cell dictionary scoped to this cell name.
    self.cell_dict = DBInfo.loadCellInfo(CellName=cell_info)
    return self.cell_dict[cell_info]
def cellid_match(self, cid):
    """
    Map a CID to its database cell id, formatted as a UUID-style string
    "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX".

    :param cid: CID value (int or string); None/""/"UNDEF" yield ""
    :return: formatted cell id, or "" for empty/undefined input
    """
    # BUG FIX: the original tested `cid == None` AFTER `cid = str(cid)`,
    # which can never match (str(None) == "None"), so a None cid leaked
    # into the DB lookup as the literal key "None".  Check before converting.
    if cid is None:
        return ""
    cid = str(cid)
    if cid == "" or cid == "UNDEF":
        return ""
    if cid not in self.cid_dict:
        # Cache miss: refresh the CID dictionary from the database.
        from util.loadInfo import DBInfo
        self.cid_dict = DBInfo.load_cellid_dict(CID=cid)
    cellid = self.cid_dict[cid]
    cellid = self.dealWithCellid(cellid)
    # Re-shape the 32-char cell id into 8-4-4-4-12 groups.
    ret = cellid[0:8] + "-" + cellid[8:12] + "-" + cellid[12:16] + "-" + cellid[16:20] + "-" + cellid[20:32]
    return ret
def main(): result_information = [] load_key = {} #################### #command line parser parser = OptionParser() parser.add_option('-f','--file', action = 'store', type = 'string', dest = 'file_str' ) parser.add_option('-s','--savedir', action = 'store', type = 'string', dest = 'save_path' ) parser.add_option('-r','--return', action = 'store', type = 'string', dest = 'return_file') #ATUTEST2210465292330515664201206271450_120627_145004.tmp #ATUTEST3059711936502054577201206271451_120627_145103.tmp #ATUTEST5219307231012006613201206271720_120627_172001.tmp #eventData_120419_085606.txt #eventData_120419_113412.txt arge_test = ['-f',FILE_SOURCE, '-s','D:\\EricssonData\\ParsedData', '-r','D:\\EricssonData\\output.json', ] arge = sys.argv[1:] if len(sys.argv) > 1 else arge_test (options,args) = parser.parse_args(arge) print options.save_path print options.file_str print options.return_file ############ #start parsing file_str = options.file_str save_file = options.save_path if not os.path.exists(save_file): os.mkdir(save_file) return_file = options.return_file print 'Start Time:' + time.ctime() start = time.clock() files = [] files.append(file_str) for f in files: if not os.path.exists(f): print "cannot find %s"%f raise Exception("cannot find %s"%f) return p = processStr(filename = f,save_path = save_file) ret = p.parseProcess() result_information.append(ret) break json_object = p.merge_json() print "End Time", time.ctime() print "total time(seconds)", str(time.clock() - start) #print ret #if json_object == None: # return # ##################### # #Write into JSON file # json_object = { # MR_FILE:[], # EVENT_FILE:[], # CALL_FILE:[] # } # for f in FILE_SETS: # for i in range(0,MAX_TIME_SPAN+1): # if ret[f][i]['DataStartTime'] != None and ret[f][i]['DataStartTime'] != "": # json_object[f].append(ret[f][i]) # else: # break #print json_object target_file = open(return_file,"w") simplejson.dump(json_object,target_file) target_file.close() DBInfo.close()
#other key-value self.recent_segment[key] = self.key_value_get(word_all[1:],self.recent_segment[key],line) '''cellid <-> CID matching''' self.recent_segment[key][self.load_dict[self.getKeyStr(CID_STR)]] = self.cellid_match(self.recent_segment[key][self.load_dict[self.getKeyStr(CID_STR)]]) '''write line''' self.recent_segment[key][0] = str(uuid.uuid1()) write_line = writeStrSplit.join(self.recent_segment[key]) self.result_file.write(write_line + "\n") self.log.accumulator(OTHER_UEH) #del key del self.recent_segment[key] if __name__ == "__main__": print "start Time", time.ctime() start = time.clock() from util.loadInfo import loadInfo p = processStr("G:\UEH\source\ECR28_0427.log",DBInfo.loadkey_static(),'123qwe456rt') ret = p.parseProcess() #print ret print "End Time", time.ctime() print "total time(seconds)", str(time.clock() - start)
'''
Created on 2012-8-1

@author: liaopengyu
'''
from util.loadInfo import DBInfo

# Manual smoke test for DBInfo.get_causecode_value (wraps the
# MatchException stored procedure).  Requires a live database.
print 'exec MatchException @ExceptionCode=%d, @CauseCodeString=%s'%(310,'dl_radio_resources_not_available')
ret = DBInfo.get_causecode_value('dl_radio_resources_not_available', 310)
print ret
# Second identical call — presumably verifies the repeated/cached lookup
# path returns the same value; TODO confirm.
ret = DBInfo.get_causecode_value('dl_radio_resources_not_available', 310)
print ret
#DBInfo.current.execute('exec MatchException @ExceptionCode=%d, @CauseCodeString=%s',(310,'dl_radio_resources_not_available'))
#print DBInfo.current.fetchall()[0][0]
#
def main(): result_information = [] load_key = {} #################### #command line parser """ if len(sys.argv) <5: print 'cannot get any legal files, argv < 5 %d'%(len(sys.argv)) return """ #python uehprocess.py -o "G:\UEH\Origin.json" -r "G:\UEH\Test.json" -f "G:\UEH\source\ECR28_0427.log" parser = OptionParser() parser.add_option('-o','--output', action = 'store', type = 'string', dest = 'output_str') parser.add_option('-f','--file', action = 'store', type = 'string', dest = 'file_str') parser.add_option('-r','--return', action = 'store', type = 'string', dest = 'return_file') parser.add_option('-l','--logdir', action = 'store', type = 'string', dest = 'log_path') parser.add_option('-s','--savedir', action = 'store', type = 'string', dest = 'save_path') print sys.argv args_test = ['-f',"D:\\ECR09_0407.log", '-r',"D:\UEH\Test.json", '-o',"D:\\724d4958-7aa6-4b34-aa26-c1c66ae2a904_Out.json", '-l',"D:\UEH\\log\\" , '-s',"D:\UEH\\result"] (options,args) = parser.parse_args(args_test) print options.output_str print options.file_str print options.return_file print options.log_path print options.save_path #os.path.join(options.log_path) if not os.path.exists(options.log_path): os.mkdir(os.path.join(options.log_path)) if not os.path.exists(options.save_path): os.mkdir(os.path.join(options.save_path)) ############ #JSON 格式解析 json_val = simplejson.load(file(options.output_str)) table_id = json_val[0].get("TableID") field_colums = json_val[0].get("Columns") #close and delete file cnt = 0 for field in field_colums: if field == None: tmp = "___" + str(cnt) + "___" load_key[tmp] = cnt else: keys = [key.strip(" ") for key in field.split(field_colums_split)] # load_key[str(field)] = cnt for key in keys: load_key[key] = cnt cnt = cnt + 1 print load_key print table_id ############ #start parsing table_id = table_id file_str = options.file_str return_file = options.return_file log_file = options.log_path save_file = options.save_path print 'Start Time:' + time.ctime() start = 
time.clock() files = [] files.append(file_str) for f in files: if not os.path.exists(f): print "cannot find %s"%f return #p = processStr(f,DBInfo.loadkey(table_id)) p = processStr(f,load_key,table_id,log_file,save_file) ret = p.parseProcess() result_information.append(ret) break print "End Time", time.ctime() print "total time(seconds)", str(time.clock() - start) ##################### #Write into JSON file json_object = [] for i in range(0,MAX_TIME_SPAN+1): if ret[i]['DataStartTime'] != None and ret[i]['DataStartTime'] != "": json_object.append(ret[i]) else: break print json_object target_file = open(return_file,"w") simplejson.dump(json_object,target_file) target_file.close() DBInfo.close()
# -*- coding: UTF-8 -*- ''' Created on 2012-8-23 @author: tianwei Function: ''' from util.loadInfo import DBInfo ret = DBInfo.loadCellInfo("TSBS11","TSG089A") ret2 = DBInfo.loadCellInfo("TSBS11","TSG089A") print ret
def main(): result_information = [] load_key = {} #################### #command line parser parser = OptionParser() parser.add_option('-f','--file', action = 'store', type = 'string', dest = 'file_str' ) parser.add_option('-s','--savedir', action = 'store', type = 'string', dest = 'save_path' ) parser.add_option('-r','--return', action = 'store', type = 'string', dest = 'return_file') arge_test = ['-f','G:\\MR\\source\\eventData_120419_113412.txt', '-s','G:\\MR\\result', '-r','G:\\MR\\output.json', ] (options,args) = parser.parse_args(arge_test) print options.save_path print options.file_str print options.return_file ############ #start parsing file_str = options.file_str save_file = options.save_path return_file = options.return_file print 'Start Time:' + time.ctime() start = time.clock() files = [] files.append(file_str) for f in files: if not os.path.exists(f): print "cannot find %s"%f return p = processStr(filename = f,save_path = save_file) ret = p.parseProcess() print ret result_information.append(ret) break print "End Time", time.ctime() print "total time(seconds)", str(time.clock() - start) ##################### #Write into JSON file json_object = { MR_FILE:[], EVENT_FILE:[], CALL_FILE:[] } for f in FILE_SETS: for i in range(0,MAX_TIME_SPAN+1): if ret[f][i]['DataStartTime'] != None and ret[f][i]['DataStartTime'] != "": json_object[f].append(ret[f][i]) else: break print json_object target_file = open(return_file,"w") simplejson.dump(json_object,target_file) target_file.close() DBInfo.close()