def AddWebSocket(self, wsconn):
    """Register a websocket connection under its name, closing and
    replacing any existing connection registered under the same name.

    wsconn: connection object exposing ``wsname`` and ``close()``.
    """
    wsname = wsconn.wsname
    # `in` replaces the Python-2-only dict.has_key()
    if wsname in self.websockets:
        PLOG.debug("Already has ws connect %s,close old connect" % wsname)
        self.websockets[wsname].close()
    self.websockets[wsname] = wsconn
    PLOG.debug("ws manager add %s" % wsname)
def scanFile(rootpath, filetype):
    """Crawl `rootpath` for per-item ``.json`` descriptor files and merge
    them into a single output json document for the given `filetype`
    ("movie" or "app").
    """
    PLOG.debug('Type["%s"] file start crawling...dir = %s ' % (filetype, rootpath))
    outputjsfilename = ""
    rootDirname = ""
    # drop a single trailing path separator so os.path.split yields the dir name
    if rootpath[-1] == '\\' or rootpath[-1] == '/':
        rootpath = rootpath[:-1]
    rootDirname = os.path.split(rootpath)[-1]
    if filetype == "movie":
        outputjsfilename = conf.movieOutputFile
    elif filetype == "app":
        outputjsfilename = conf.appOutputFile
    # Python 2: work in unicode so non-ASCII paths survive os.path calls
    outputjsfilename = outputjsfilename.decode('utf8')
    rootpath = rootpath.decode('utf8')
    dirlist = enumDir(rootpath)
    allJsonInfo = {}
    allJsonInfo["update"] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    allJsonInfo["source"] = conf.httpServerSite + rootDirname + '/'
    allJsonInfo["list"] = []
    for subdir in dirlist:
        fileitems = enumFile(os.path.join(rootpath, subdir))
        for fileitem in fileitems:
            # endswith() is clearer and safer than the fileitem[-5:] slice
            if fileitem.endswith(".json"):
                addJsonInfo(fileitem, allJsonInfo)
    with open(outputjsfilename, "w") as f:
        json.dump(allJsonInfo, f, indent=4, ensure_ascii=False)
    PLOG.debug('Type["%s"] file crawl dir %s finished' % (filetype, rootpath))
def AddWebSocket(self, wsconn):
    """Register a websocket connection; an existing connection with the
    same name is closed first and then replaced.

    wsconn: connection object exposing ``wsname`` and ``close()``.
    """
    wsname = wsconn.wsname
    # membership test replaces the Python-2-only has_key()
    if wsname in self.websockets:
        PLOG.debug("Already has ws connect %s,close old connect" % wsname)
        self.websockets[wsname].close()
    self.websockets[wsname] = wsconn
    PLOG.debug("ws manager add %s" % wsname)
def get(self):
    """HTTP GET handler.

    Each "cmd" query argument carries a JSON message; only msgid
    "seturi" is supported.  On success the uri is stored in the global
    `lastpageuri` and handed to HandleSetURI().  The reply is a JSON
    object whose "errmsg" field describes the outcome.
    """
    jsresult = {}
    cmdlist = self.get_query_arguments("cmd")
    for cmd in cmdlist:
        PLOG.debug("Receive msg %s" % cmd)
        if len(cmd) > 0:
            js = json.loads(cmd.decode('utf8'))
            # `in` replaces the Python-2-only has_key()
            if 'msgid' in js:
                msg = js["msgid"]
                if msg == "seturi":
                    if 'body' in js:
                        body = js["body"]
                        if 'uri' in body:
                            uri = body["uri"]
                            if len(uri) > 0:
                                global lastpageuri
                                lastpageuri = uri
                                HandleSetURI(uri)
                                jsresult["errmsg"] = "OK"
                            else:
                                jsresult["errmsg"] = "uri is empty!"
                        else:
                            jsresult["errmsg"] = "msg seturi body has no uri,invalid msg"
                    else:
                        jsresult["errmsg"] = "msg seturi has no body,invalid msg"
                else:
                    jsresult["errmsg"] = "not support msgid " + msg
    self.write(json.dumps(jsresult))
def get(self):
    """HTTP GET handler for "cmd" JSON messages.

    Supports only msgid "seturi": stores the uri in the global
    `lastpageuri`, invokes HandleSetURI(), and answers with a JSON
    object carrying "errmsg".
    """
    jsresult = {}
    cmdlist = self.get_query_arguments("cmd")
    for cmd in cmdlist:
        PLOG.debug("Receive msg %s" % cmd)
        if len(cmd) > 0:
            js = json.loads(cmd.decode('utf8'))
            # dict membership replaces the Python-2-only has_key()
            if 'msgid' in js:
                msg = js["msgid"]
                if msg == "seturi":
                    if 'body' in js:
                        body = js["body"]
                        if 'uri' in body:
                            uri = body["uri"]
                            if len(uri) > 0:
                                global lastpageuri
                                lastpageuri = uri
                                HandleSetURI(uri)
                                jsresult["errmsg"] = "OK"
                            else:
                                jsresult["errmsg"] = "uri is empty!"
                        else:
                            jsresult["errmsg"] = "msg seturi body has no uri,invalid msg"
                    else:
                        jsresult["errmsg"] = "msg seturi has no body,invalid msg"
                else:
                    jsresult["errmsg"] = "not support msgid " + msg
    self.write(json.dumps(jsresult))
def scanFile(rootpath, filetype):
    """Scan a local directory tree for ``.json`` descriptors and write
    one aggregated json document for `filetype` ("movie" or "app").
    """
    PLOG.debug('Type["%s"] file start crawling...dir = %s ' % (filetype, rootpath))
    outputjsfilename = ""
    rootDirname = ""
    # strip one trailing separator so os.path.split gives the leaf dir name
    if rootpath[-1] == '\\' or rootpath[-1] == '/':
        rootpath = rootpath[:-1]
    rootDirname = os.path.split(rootpath)[-1]
    if filetype == "movie":
        outputjsfilename = conf.movieOutputFile
    elif filetype == "app":
        outputjsfilename = conf.appOutputFile
    # Python 2: decode to unicode so non-ASCII paths are handled correctly
    outputjsfilename = outputjsfilename.decode('utf8')
    rootpath = rootpath.decode('utf8')
    dirlist = enumDir(rootpath)
    allJsonInfo = {}
    allJsonInfo["update"] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    allJsonInfo["source"] = conf.httpServerSite + rootDirname + '/'
    allJsonInfo["list"] = []
    for subdir in dirlist:
        fileitems = enumFile(os.path.join(rootpath, subdir))
        for fileitem in fileitems:
            # endswith() instead of the fragile fileitem[-5:] slice
            if fileitem.endswith(".json"):
                addJsonInfo(fileitem, allJsonInfo)
    with open(outputjsfilename, "w") as f:
        json.dump(allJsonInfo, f, indent=4, ensure_ascii=False)
    PLOG.debug('Type["%s"] file crawl dir %s finished' % (filetype, rootpath))
def openmysqlconn():
    """Open a connection to the SA database.

    Returns the connected DBOperater, or None when the connection
    attempt raised a MySQL error.
    """
    dboperater = None
    try:
        dboperater = DBOperater()
        dboperater.createconnection(host=sadb.host, user=sadb.dbuser,
                                    passwd=sadb.dbpwd, dbname=sadb.dbname)
    except MySQLdb.Error as e:  # `as` form works on Python 2.6+ and 3
        PLOG.debug("Mysql Error %d: %s" % (e.args[0], e.args[1]))
    # BUG FIX: the connection object was created but never handed back
    return dboperater
def heartbeatCheck(self):
    """Drop the connection when the peer has stopped answering;
    otherwise keep it alive with a ping."""
    if not self.isTimeOut():
        PLOG.trace("send ping")
        self.ping("ping")
        return
    PLOG.debug("%s websocket timeout,disconnect it" % self.wsname)
    self.close()
    WSManager.RemoveWebSocket(self.wsname)
def heartbeatCheck(self):
    """Periodic liveness check: ping while healthy, disconnect and
    deregister once the heartbeat has timed out."""
    timed_out = self.isTimeOut()
    if timed_out:
        PLOG.debug("%s websocket timeout,disconnect it" % self.wsname)
        self.close()
        WSManager.RemoveWebSocket(self.wsname)
    else:
        PLOG.trace("send ping")
        self.ping("ping")
def InvokeStopRadius():
    """Stop the radiusd service via the system `service` command.

    Exits the process with status 1 when the command cannot be run.
    """
    strStopradiusCMD = "service radiusd stop"
    try:
        PLOG.info("call:%s\n" % (strStopradiusCMD))
        stopret = os.popen(strStopradiusCMD).read()
        PLOG.debug("output:%s\n" % (stopret))
    except Exception as e:  # Py2/Py3-compatible except syntax
        # log the exception itself: e.args[1] is not guaranteed to exist
        # and would raise IndexError for single-argument exceptions
        PLOG.info("执行命令失败,CMD=%s\nError=%s\n" % (strStopradiusCMD, e))
        exit(1)
def InvokeStopRadius():
    """Shut down radiusd via ``service radiusd stop``.

    Terminates the process (exit code 1) if the shell command fails.
    """
    strStopradiusCMD = "service radiusd stop"
    try:
        PLOG.info("call:%s\n" % (strStopradiusCMD))
        stopret = os.popen(strStopradiusCMD).read()
        PLOG.debug("output:%s\n" % (stopret))
    except Exception as e:  # `except X, e` is Python-2-only syntax
        # log the exception object; indexing e.args[1] can itself raise
        PLOG.info("执行命令失败,CMD=%s\nError=%s\n" % (strStopradiusCMD, e))
        exit(1)
def connection():
    """Keep the websocket client connected: whenever run_forever()
    returns (the link dropped), dial again."""
    global ws
    while True:
        ws = websocket.WebSocketApp(
            "ws://172.16.5.16:18030/ws",
            on_open=on_open,
            on_message=on_message,
            on_error=on_error,
            on_close=on_close)
        ws.run_forever()
        PLOG.debug("ws may break")
def on_message(ws, message):
    """Websocket message callback.

    A successful "login" reply flips the global `logined` flag; a
    failed one terminates the program.
    """
    global logined
    PLOG.debug("Recv: " + message)
    msgJson = json.loads(message)
    if msgJson["msgid"] != "login":
        return
    if msgJson["errcode"] == 0:
        logined = True
    else:
        PLOG.debug("Userid is wrong,exit")
        sys.exit(0)
def executeproc(procsqlname, dboperater=None, args=None):
    """Call stored procedure `procsqlname` with `args` and commit.

    A DBOperater (and its connection) is created lazily when the
    caller does not supply one.  MySQL errors are logged, not raised.
    """
    try:
        if dboperater == None:
            dboperater = DBOperater()
        if dboperater.conn == None:
            # open the database connection on demand
            dboperater.createconnection(host=sadb.host, user=sadb.dbuser,
                                        passwd=sadb.dbpwd, dbname=sadb.dbname)
        cur = dboperater.conn.cursor()
        try:
            cur.callproc(procsqlname, args)
            dboperater.conn.commit()  # commit the procedure's work
        finally:
            # close the cursor even when callproc/commit fail (was leaked)
            cur.close()
    except MySQLdb.Error as e:  # Py2/Py3-compatible except syntax
        PLOG.debug("Mysql Error %d: %s,sql=%s" % (e.args[0], e.args[1], procsqlname))
def querysql(sqltext, dboperater=None, how=0):
    """Run the query `sqltext` and return the fetched rows.

    Returns the row list on success and None when a MySQL error
    occurred (callers test the result against None).
    """
    resultls = []
    try:
        if dboperater == None:
            dboperater = DBOperater()
        if dboperater.conn == None:
            # open the database connection on demand
            dboperater.createconnection(host=sadb.host, user=sadb.dbuser,
                                        passwd=sadb.dbpwd, dbname=sadb.dbname)
        rowNum, result = dboperater.query(sqltext)
        PLOG.trace("%s query finish" % (sqltext))
        resultls = dboperater.fetch_queryresult(result, rowNum, how=how)
    except MySQLdb.Error as e:  # Py2/Py3-compatible except syntax
        PLOG.debug("Mysql Error %d: %s,sql=%s" % (e.args[0], e.args[1], sqltext))
        return None
    # BUG FIX: the fetched rows were never returned on the success path
    return resultls
def reloadStop(self):
    """Load every top-level region ("stop") from sa_region into
    self.stops, keyed by innerid.  Returns True on success, False
    when the query failed or returned nothing."""
    querystopsql = ('SELECT a.innerid,a.`name` FROM sa_region AS a '
                    'LEFT JOIN sa_region AS b ON b.parentid=a.innerid '
                    'WHERE b.parentid IS NULL')
    rows = SAPeakDataPublic.querysql(querystopsql)
    if rows != None and len(rows) > 0:
        for row in rows:
            innerid, name = row[0], row[1]
            stop = Stop(innerid, name)
            if stop != None:
                self.stops[innerid] = stop
                PLOG.debug("load stop %s,id=%s" % (name, innerid))
    else:
        PLOG.debug("load stop failed!")
        return False
    return True
def loadconfig():
    """Read SAPeakData.conf and populate the global settings
    (SAPeakDataPublic.st) and DB parameters (SAPeakDataPublic.sadb).

    Exits with status 2 when queryunit does not evenly divide 24.
    """
    config = ConfigParser.ConfigParser()
    configfile = assistant.SF("%s/SAPeakData.conf" % (os.path.dirname(__file__)))
    PLOG.info("Load configer file:%s" % configfile)
    # close the config file deterministically instead of leaking the handle
    with open(configfile, "rb") as fp:
        config.readfp(fp)
    SAPeakDataPublic.st.loglevel = config.get("system", "loglevel")
    SAPeakDataPublic.st.queryunit = config.getint("system", "queryunit")
    SAPeakDataPublic.st.queryrepeattimes = config.getint("system", "queryrepeattimes")
    # the day is split into 24/queryunit query windows, so it must divide 24
    if 24 % SAPeakDataPublic.st.queryunit != 0:
        PLOG.debug("queryunit is invalid,please check config!")
        sys.exit(2)
    SAPeakDataPublic.sadb.host = config.get("system", "datasource")
    SAPeakDataPublic.sadb.dbuser = config.get("system", "dbuser")
    SAPeakDataPublic.sadb.dbpwd = config.get("system", "dbpwd")
    SAPeakDataPublic.sadb.dbname = config.get("system", "dbname")
    SAPeakDataPublic.sadb.tablename = config.get("system", "tablename")
def loadconfig():
    """Load SAPeakData.conf into the global runtime settings and DB
    connection parameters; exit(2) on an invalid queryunit.
    """
    config = ConfigParser.ConfigParser()
    configfile = assistant.SF("%s/SAPeakData.conf" % (os.path.dirname(__file__)))
    PLOG.info("Load configer file:%s" % configfile)
    # `with` releases the file handle; the bare open() was never closed
    with open(configfile, "rb") as fp:
        config.readfp(fp)
    SAPeakDataPublic.st.loglevel = config.get("system", "loglevel")
    SAPeakDataPublic.st.queryunit = config.getint("system", "queryunit")
    SAPeakDataPublic.st.queryrepeattimes = config.getint("system", "queryrepeattimes")
    # query windows are queryunit hours wide; 24 must be divisible by it
    if 24 % SAPeakDataPublic.st.queryunit != 0:
        PLOG.debug("queryunit is invalid,please check config!")
        sys.exit(2)
    SAPeakDataPublic.sadb.host = config.get("system", "datasource")
    SAPeakDataPublic.sadb.dbuser = config.get("system", "dbuser")
    SAPeakDataPublic.sadb.dbpwd = config.get("system", "dbpwd")
    SAPeakDataPublic.sadb.dbname = config.get("system", "dbname")
    SAPeakDataPublic.sadb.tablename = config.get("system", "tablename")
def run():
    """Poll the serial port and feed each complete reading to
    processData() while the websocket session is logged in."""
    time.sleep(5)
    global logined
    if logined:
        global ser
        while ser.isOpen():
            text = ser.readline()  # one line, honouring the port timeout
            if text:  # non-empty => no timeout; drain whatever else arrived
                pending = ser.inWaiting()
                while pending > 0:
                    text += ser.readline()
                    pending = ser.inWaiting()
                PLOG.debug(text)
                if logined:
                    processData(text)
            # sample every 50 ms
            time.sleep(0.05)
        ser.close()
def InvokeProc():
    """Probe the local radius server with a self-check auth request.

    Returns 1 when radius replies "radius status is ok"; otherwise
    stops radius and returns 0.  Exits the process when the shell
    command itself fails.
    """
    # echo "User-Name = radiusSelfCheck, ..." | ./radclient -xxxx 127.0.0.1:1812 auth testing123
    strCMD = "echo \"User-Name = radiusSelfCheck,User-Password = radiusSelfCheck\" | %s -xxxx %s:1812 auth %s" % \
        (conf.clientPath, conf.radiusIP, conf.secret)
    try:
        PLOG.info("call:%s\n" % (strCMD))
        beforeInvokeAuth = int(time.time())
        retstr = os.popen(strCMD).read()
        afterInvokeAuth = int(time.time())
        PLOG.debug("output:%s\n" % (retstr))
        if (afterInvokeAuth - beforeInvokeAuth > conf.reponseTimeout):
            PLOG.info("radius auth reponse timeout,stop radius")
            InvokeStopRadius()
            return 0
        if (retstr.find("rad_recv:") != -1 and retstr.find("Reply-Message") != -1):
            # a reply arrived
            if (retstr.find("radius status is ok") != -1):
                # radius is healthy
                PLOG.info("radius run status is ok")
                return 1
            else:
                # radius reported a problem: extract the message, stop it
                repmsg = ""
                # raw string: \s must reach the regex engine, not Python
                repMsgpattern = re.compile(r'Reply-Message\s*=\s*(?P<repmsg>.*)\s*')
                m = repMsgpattern.search(retstr)
                if (m != None and m.group('repmsg') != None):
                    repmsg = m.group('repmsg')
                PLOG.info("radius run status error,errmsg = %s ,stop radius" % repmsg)
                InvokeStopRadius()
                return 0
        else:
            # no response at all: assume radius is wedged and stop it
            PLOG.info("radius run status error,no response,stop radius")
            InvokeStopRadius()
            return 0
    except Exception as e:  # Py2/Py3-compatible except syntax
        # log the exception itself; e.args[1] is not guaranteed to exist
        PLOG.info("执行命令失败,CMD=%s\nError=%s\n" % (strCMD, e))
        exit(1)
def InvokeProc():
    """Send a self-check auth request through radclient and act on the
    outcome: 1 = healthy, 0 = radius stopped (timeout / bad status /
    no reply), process exit on command failure.
    """
    # echo "User-Name = radiusSelfCheck, ..." | ./radclient -xxxx 127.0.0.1:1812 auth testing123
    strCMD = "echo \"User-Name = radiusSelfCheck,User-Password = radiusSelfCheck\" | %s -xxxx %s:1812 auth %s" % \
        (conf.clientPath, conf.radiusIP, conf.secret)
    try:
        PLOG.info("call:%s\n" % (strCMD))
        beforeInvokeAuth = int(time.time())
        retstr = os.popen(strCMD).read()
        afterInvokeAuth = int(time.time())
        PLOG.debug("output:%s\n" % (retstr))
        if (afterInvokeAuth - beforeInvokeAuth > conf.reponseTimeout):
            PLOG.info("radius auth reponse timeout,stop radius")
            InvokeStopRadius()
            return 0
        if (retstr.find("rad_recv:") != -1 and retstr.find("Reply-Message") != -1):
            # got a reply
            if (retstr.find("radius status is ok") != -1):
                # radius is running normally
                PLOG.info("radius run status is ok")
                return 1
            else:
                # bad status: pull the Reply-Message text and stop radius
                repmsg = ""
                # raw string so \s survives as a regex escape
                repMsgpattern = re.compile(r'Reply-Message\s*=\s*(?P<repmsg>.*)\s*')
                m = repMsgpattern.search(retstr)
                if (m != None and m.group('repmsg') != None):
                    repmsg = m.group('repmsg')
                PLOG.info("radius run status error,errmsg = %s ,stop radius" % repmsg)
                InvokeStopRadius()
                return 0
        else:
            # no response: stop radius
            PLOG.info("radius run status error,no response,stop radius")
            InvokeStopRadius()
            return 0
    except Exception as e:  # `except X, e` is Python-2-only syntax
        PLOG.info("执行命令失败,CMD=%s\nError=%s\n" % (strCMD, e))
        exit(1)
def executearrsql(sqltext, dboperater=None, arrsql=None, sqlnum=100, mode=DBHelper.CURSOR_MODE):
    """Execute `sqltext`.

    When `arrsql` holds parameter tuples the statement is run as
    executemany in batches of `sqlnum`, committing after each batch;
    otherwise the statement is executed once.  MySQL errors are logged.
    """
    try:
        if dboperater == None:
            dboperater = DBOperater()
        if dboperater.conn == None:
            # open the database connection on demand
            dboperater.createconnection(host=sadb.host, user=sadb.dbuser,
                                        passwd=sadb.dbpwd, dbname=sadb.dbname)
        if arrsql != None and len(arrsql) > 0:
            totalnum = len(arrsql)
            # batch count rounded up; // keeps the integer division
            # correct on both Python 2 and 3
            foocount = (totalnum + sqlnum - 1) // sqlnum
            i = 0
            while i < foocount:
                arr = arrsql[i * sqlnum:(i + 1) * sqlnum]
                dboperater.execute(sqltext, args=arr, mode=mode, many=True)
                dboperater.conn.commit()  # commit per batch
                i += 1
        else:
            # no parameter array: run the single statement
            dboperater.execute(sqltext, mode=mode)
            dboperater.conn.commit()
    except MySQLdb.Error as e:  # Py2/Py3-compatible except syntax
        PLOG.debug("Mysql Error %d: %s,sql=%s" % (e.args[0], e.args[1], sqltext))
def addJsonInfo(jsonSourcefile, destJson):
    """Read one per-item descriptor file, enrich it (id, file size/time,
    directory-relative paths) and append it to destJson["list"].

    Entries whose "file" key starts with "https:" are treated as remote
    (ios) items: size/time come from the descriptor file itself.
    """
    filedir = os.path.dirname(jsonSourcefile)
    parentDirName = os.path.split(filedir)[-1]
    primaryFilename = ""
    jsSourceFileInfo = None
    with open(jsonSourcefile, "r") as f:
        jsSourceFileInfo = json.load(f, 'utf8')
    if jsSourceFileInfo != None and isinstance(jsSourceFileInfo, dict):
        # `in` replaces the Python-2-only has_key() throughout
        if "file" in jsSourceFileInfo:
            primaryFilename = jsSourceFileInfo["file"]
            if primaryFilename != "":
                jsSourceFileInfo["id"] = str(uuid.uuid1())
                if primaryFilename.startswith("https:"):
                    # ios info file: no local primary file; use the
                    # descriptor's own mtime and a default size
                    filetimestamp = time.localtime(os.path.getmtime(jsonSourcefile))
                    primaryFileTime = time.strftime('%Y-%m-%d %H:%M:%S', filetimestamp)
                    jsSourceFileInfo["filetime"] = primaryFileTime
                    if "filesize" not in jsSourceFileInfo:
                        jsSourceFileInfo["filesize"] = "0"
                else:
                    try:
                        primaryFileSize = os.path.getsize(os.path.join(filedir, primaryFilename))
                        filetimestamp = time.localtime(os.path.getmtime(os.path.join(filedir, primaryFilename)))
                        primaryFileTime = time.strftime('%Y-%m-%d %H:%M:%S', filetimestamp)
                        jsSourceFileInfo["filesize"] = str(primaryFileSize)
                        jsSourceFileInfo["filetime"] = primaryFileTime
                        if "file" in jsSourceFileInfo:
                            jsSourceFileInfo["file"] = parentDirName + '/' + jsSourceFileInfo["file"]
                    except:
                        # primary file missing on disk: skip this entry
                        PLOG.info("generate file info of dir %s failed,primary File %s not find,skip it" % (filedir, primaryFilename))
                        return
                # make auxiliary resource paths directory-relative
                if "poster" in jsSourceFileInfo:
                    jsSourceFileInfo["poster"] = parentDirName + '/' + jsSourceFileInfo["poster"]
                if "thumbnail" in jsSourceFileInfo:
                    jsSourceFileInfo["thumbnail"] = parentDirName + '/' + jsSourceFileInfo["thumbnail"]
                if "extend" in jsSourceFileInfo:
                    jsextend = jsSourceFileInfo["extend"]
                    if "screenshot" in jsextend:
                        jsscreenshottmp = []
                        for picture in jsextend["screenshot"]:
                            picture = parentDirName + '/' + picture
                            jsscreenshottmp.append(picture)
                        jsextend["screenshot"] = jsscreenshottmp
                destJson["list"].append(jsSourceFileInfo)
                PLOG.debug('generate file info of dir "%s" success' % (filedir))
            else:
                PLOG.debug("generate file info of dir %s failed,primary File name is empty" % (filedir))
        else:
            PLOG.debug('not find "file" node in info file %s , skip it' % (jsonSourcefile))
    else:
        PLOG.warn('js file %s is null,maybe path error! skip it' % (jsonSourcefile))
def statisticsCurrentDayData(daydate):
    """Compute one day's per-stop peak statistics.

    The day is walked in queryunit-hour windows; each window's
    accounting rows are spread over their minute range to accumulate
    per-minute online counts and traffic (MB) per stop, then every
    stop's peak online number and peak bandwidth are printed.
    """
    nextday = daydate + datetime.timedelta(days=1)
    startquerytime = daydate
    endquerytime = daydate + datetime.timedelta(hours=SAPeakDataPublic.st.queryunit)
    while endquerytime <= nextday:
        acctquerysql = "select acctinputoctets,acctoutputoctets,acctstarttime,acctstoptime,regionid from %s where acctstarttime>='%s' and acctstarttime<'%s'" % \
            (SAPeakDataPublic.sadb.tablename, startquerytime.strftime('%Y-%m-%d %H:%M:%S'), endquerytime.strftime('%Y-%m-%d %H:%M:%S'))
        PLOG.debug("sql=%s" % acctquerysql)
        startquerytime = endquerytime
        endquerytime = endquerytime + datetime.timedelta(hours=SAPeakDataPublic.st.queryunit)
        # retry the window query up to queryrepeattimes before giving up
        i = 0
        res = None
        while i < SAPeakDataPublic.st.queryrepeattimes:
            res = SAPeakDataPublic.querysql(acctquerysql)
            if res != None:
                break
            else:
                i = i + 1
        # BUG FIX: compare against the configured retry count instead of
        # the hard-coded literal 3
        if i == SAPeakDataPublic.st.queryrepeattimes or res == None:
            print("%s statistics data failed! db query appear error %d consecutive times,please execute again later!" % (daydate.strftime('%Y-%m-%d'), SAPeakDataPublic.st.queryrepeattimes))
            PLOG.info("%s statistics data failed! db query appear error %d consecutive times,please execute again later!" % (daydate.strftime('%Y-%m-%d'), SAPeakDataPublic.st.queryrepeattimes))
            return
        # accumulate statistics for this window
        PLOG.trace("start statistics...")
        for row in res:
            if row[2] == None or row[3] == None or row[4] == None:
                PLOG.warn("lack essential data!skip this data")
                continue
            regionid = row[4]
            totalflow = 0
            if row[0] != None:
                totalflow += row[0]
            if row[1] != None:
                totalflow += row[1]
            if row[3].day > row[2].day:
                # session crosses midnight: clamp to the day's last minute
                endMinute = 23 * 60 + 59
            elif row[3].day < row[2].day:
                PLOG.info("stoptime day less than starttime day,invalid data,skip")
                # BUG FIX: skip the bogus row; previously execution fell
                # through with endMinute undefined or stale
                continue
            else:
                endMinute = row[3].hour * 60 + row[3].minute
            startMinute = row[2].hour * 60 + row[2].minute
            totalMinute = endMinute - startMinute + 1
            if totalMinute <= 0:
                PLOG.info("stoptime less than starttime,invalid data,skip")
                continue
            if regionid in SAStopDefine.stopDc.stops:
                stop = SAStopDefine.stopDc.stops[regionid]
                # spread the session's traffic evenly over its minutes (MB)
                flowOneMinute = float(totalflow) / totalMinute / 1024 / 1024
                index = startMinute
                while index <= endMinute:
                    stop.dayArray[index][0] += 1
                    stop.dayArray[index][1] += flowOneMinute
                    if stop.dayArray[index][0] > stop.peakonlinenum:
                        stop.peakonlinenum = stop.dayArray[index][0]
                        stop.peakonlinetime = datetime.datetime(daydate.year, daydate.month, daydate.day, index // 60, index % 60)
                    # BUG FIX: compare the bandwidth cell [1]; the old code
                    # compared the online-count cell [0] against peakbandwidth
                    if stop.dayArray[index][1] > stop.peakbandwidth:
                        stop.peakbandwidth = stop.dayArray[index][1]
                        stop.peakbandwidthtime = datetime.datetime(daydate.year, daydate.month, daydate.day, index // 60, index % 60)
                    index += 1
        PLOG.trace("statistics end")
    # all windows processed: output each stop's peak data
    # NOTE(review): this loop reads `stopsCentor` while the accumulation
    # above used SAStopDefine.stopDc.stops -- confirm both names refer to
    # the same container
    for stopid, stop in stopsCentor.stops.items():
        peakbandwidth = stop.peakbandwidth * 8 / 60
        print("%s %s %d %.2f" % (daydate.strftime('%Y-%m-%d'), stop.name, stop.peakonlinenum, peakbandwidth))
        PLOG.debug("%s %s %d %.2f %s %s" % (daydate.strftime('%Y-%m-%d'), stop.name, stop.peakonlinenum, peakbandwidth, stop.peakonlinetime.strftime('%H:%M'), stop.peakbandwidthtime.strftime('%H:%M')))
def statisticsCurrentDayData(daydate):
    """One day's peak statistics per stop.

    Walks the day in queryunit-hour query windows, spreads each
    accounting row's traffic over its minute range, tracks per-minute
    online counts and bandwidth, then prints every stop's peaks.
    """
    nextday = daydate + datetime.timedelta(days=1)
    startquerytime = daydate
    endquerytime = daydate + datetime.timedelta(hours=SAPeakDataPublic.st.queryunit)
    while endquerytime <= nextday:
        acctquerysql = "select acctinputoctets,acctoutputoctets,acctstarttime,acctstoptime,regionid from %s where acctstarttime>='%s' and acctstarttime<'%s'" % \
            (SAPeakDataPublic.sadb.tablename, startquerytime.strftime('%Y-%m-%d %H:%M:%S'), endquerytime.strftime('%Y-%m-%d %H:%M:%S'))
        PLOG.debug("sql=%s" % acctquerysql)
        startquerytime = endquerytime
        endquerytime = endquerytime + datetime.timedelta(hours=SAPeakDataPublic.st.queryunit)
        # retry the query up to the configured number of times
        i = 0
        res = None
        while i < SAPeakDataPublic.st.queryrepeattimes:
            res = SAPeakDataPublic.querysql(acctquerysql)
            if res != None:
                break
            else:
                i = i + 1
        # BUG FIX: the failure check used a hard-coded 3 instead of the
        # configured queryrepeattimes
        if i == SAPeakDataPublic.st.queryrepeattimes or res == None:
            print("%s statistics data failed! db query appear error %d consecutive times,please execute again later!" % (daydate.strftime('%Y-%m-%d'), SAPeakDataPublic.st.queryrepeattimes))
            PLOG.info("%s statistics data failed! db query appear error %d consecutive times,please execute again later!" % (daydate.strftime('%Y-%m-%d'), SAPeakDataPublic.st.queryrepeattimes))
            return
        # accumulate this window's rows
        PLOG.trace("start statistics...")
        for row in res:
            if row[2] == None or row[3] == None or row[4] == None:
                PLOG.warn("lack essential data!skip this data")
                continue
            regionid = row[4]
            totalflow = 0
            if row[0] != None:
                totalflow += row[0]
            if row[1] != None:
                totalflow += row[1]
            if row[3].day > row[2].day:
                # session crosses midnight: clamp to the last minute
                endMinute = 23 * 60 + 59
            elif row[3].day < row[2].day:
                PLOG.info("stoptime day less than starttime day,invalid data,skip")
                # BUG FIX: must skip the row; endMinute was left
                # undefined/stale before
                continue
            else:
                endMinute = row[3].hour * 60 + row[3].minute
            startMinute = row[2].hour * 60 + row[2].minute
            totalMinute = endMinute - startMinute + 1
            if totalMinute <= 0:
                PLOG.info("stoptime less than starttime,invalid data,skip")
                continue
            if regionid in SAStopDefine.stopDc.stops:
                stop = SAStopDefine.stopDc.stops[regionid]
                # traffic per minute in MB
                flowOneMinute = float(totalflow) / totalMinute / 1024 / 1024
                index = startMinute
                while index <= endMinute:
                    stop.dayArray[index][0] += 1
                    stop.dayArray[index][1] += flowOneMinute
                    if stop.dayArray[index][0] > stop.peakonlinenum:
                        stop.peakonlinenum = stop.dayArray[index][0]
                        stop.peakonlinetime = datetime.datetime(daydate.year, daydate.month, daydate.day, index // 60, index % 60)
                    # BUG FIX: peak bandwidth must be compared against the
                    # bandwidth cell [1], not the online-count cell [0]
                    if stop.dayArray[index][1] > stop.peakbandwidth:
                        stop.peakbandwidth = stop.dayArray[index][1]
                        stop.peakbandwidthtime = datetime.datetime(daydate.year, daydate.month, daydate.day, index // 60, index % 60)
                    index += 1
        PLOG.trace("statistics end")
    # processing done: print every stop's peak data
    # NOTE(review): `stopsCentor` here vs SAStopDefine.stopDc above --
    # confirm these are the same container
    for stopid, stop in stopsCentor.stops.items():
        peakbandwidth = stop.peakbandwidth * 8 / 60
        print("%s %s %d %.2f" % (daydate.strftime('%Y-%m-%d'), stop.name, stop.peakonlinenum, peakbandwidth))
        PLOG.debug("%s %s %d %.2f %s %s" % (daydate.strftime('%Y-%m-%d'), stop.name, stop.peakonlinenum, peakbandwidth, stop.peakonlinetime.strftime('%H:%M'), stop.peakbandwidthtime.strftime('%H:%M')))
def RemoveWebSocket(self, wsstr):
    """Forget the websocket registered under `wsstr`.

    Safe to call for a name that is no longer registered: removal can
    race with AddWebSocket replacing the entry, so a missing key must
    not raise KeyError.
    """
    self.websockets.pop(wsstr, None)
    PLOG.debug("ws manager remove %s" % wsstr)
def closemysqlconn(dboperater):
    """Close the given DBOperater's connection.

    Tolerates None / non-DBOperater arguments and logs MySQL errors
    instead of raising.
    """
    if dboperater != None and isinstance(dboperater, DBOperater):
        try:
            dboperater.closeconnection()
        except MySQLdb.Error as e:  # Py2/Py3-compatible except syntax
            PLOG.debug("Mysql Error %d: %s" % (e.args[0], e.args[1]))
def heartbeatCheck(self):
    """Log when this websocket's heartbeat has timed out."""
    if self.isTimeOut():
        # BUG FIX: `wsname` was an undefined global; the connection's
        # name lives on the instance
        PLOG.debug("%s websocket timeout,disconnect it" % self.wsname)
def scanFtpServerFiles(self, root, filetype):
    """Crawl every sub-directory of `root` on the FTP server, read each
    item's .json descriptor, enrich it (size/time/id, directory-relative
    paths), aggregate everything into one json document and upload it
    back to the server.  Returns 0 when `filetype` is unknown.
    """
    PLOG.debug('Type["%s"] file start crawling...ftpserver = %s ,dir = %s ' % (filetype, self.host, root))
    outputjsfilename = ""
    filesource = ""
    if filetype == "movie":
        outputjsfilename = conf.movieOutputFile
        filesource = conf.ftpServerMovieSource
    elif filetype == "app":
        outputjsfilename = conf.appOutputFile
        filesource = conf.ftpServerAppSource
    # enumerate all directories below the working directory
    fileDir = self.listdir(root)
    # aggregated movie / APP json document
    allJsonInfo = {}
    allJsonInfo["update"] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    allJsonInfo["source"] = filesource
    allJsonInfo["list"] = []
    for filedir in fileDir:
        PLOG.debug('start generate file info of dir "%s"...' % (root + filedir))
        dictFileItems = self.listFile(root + filedir)
        primaryFilename = ""
        primaryFileSize = ""
        primaryFileTime = ""
        jsFileInfo = None
        for (k, v) in dictFileItems.items():
            if v.ext == ".json":
                fileinfo = []
                try:
                    self.retrlines("RETR %s" % root + filedir + '/' + v.fname, fileinfo.append)
                except:
                    PLOG.warn('retr %s except! skip it !' % v.fname)
                filedetailinfo = ""
                for linestr in fileinfo:
                    filedetailinfo += linestr
                if filedetailinfo != "":
                    # server sends gbk text; decode before json parsing
                    filedetailinfo = filedetailinfo.decode("gbk")
                    jsFileInfo = json.loads(filedetailinfo, 'utf8')
                    if jsFileInfo != None:
                        # `in` replaces the Python-2-only has_key()
                        if "file" in jsFileInfo:
                            primaryFilename = jsFileInfo["file"]
                        else:
                            PLOG.debug('not find "file" node in info file %s , skip it' % (v.fname))
                    else:
                        PLOG.error('js file %s is null,maybe path error! skip it' % (v.fname))
                break
        # BUG FIX: was `jsFileInfo != ""`, which is always true for the
        # None "no descriptor found" case and made the else-branch dead
        if jsFileInfo != None:
            if primaryFilename != "":
                if primaryFilename in dictFileItems:
                    primaryFileItem = dictFileItems[primaryFilename]
                    primaryFileSize = primaryFileItem.size
                    # `timestr` avoids shadowing the `time` module
                    timestr = primaryFileItem.time[:primaryFileItem.time.find('.')]
                    primaryFileTime = datetime.datetime.strptime(timestr, '%Y%m%d%H%M%S').strftime("%Y-%m-%d %H:%M:%S")
                    jsFileInfo["filesize"] = primaryFileSize
                    jsFileInfo["filetime"] = primaryFileTime
                    jsFileInfo["id"] = str(uuid.uuid1())
                    # make all resource paths directory-relative
                    filerelativedir = filedir + '/'
                    if "file" in jsFileInfo:
                        jsFileInfo["file"] = filerelativedir + jsFileInfo["file"]
                    if "poster" in jsFileInfo:
                        jsFileInfo["poster"] = filerelativedir + jsFileInfo["poster"]
                    if "thumbnail" in jsFileInfo:
                        jsFileInfo["thumbnail"] = filerelativedir + jsFileInfo["thumbnail"]
                    if "extend" in jsFileInfo:
                        jsextend = jsFileInfo["extend"]
                        if "screenshot" in jsextend:
                            jsscreenshottmp = []
                            for picture in jsextend["screenshot"]:
                                picture = filerelativedir + picture
                                jsscreenshottmp.append(picture)
                            jsextend["screenshot"] = jsscreenshottmp
                    allJsonInfo["list"].append(jsFileInfo)
                    PLOG.debug('generate file info of dir "%s" success' % (root + filedir))
                else:
                    PLOG.debug("generate file info of dir %s failed,not find primary File %s" % (root + filedir, primaryFilename))
            else:
                PLOG.debug("generate file info of dir %s failed,primary File name is empty" % (root + filedir))
        else:
            PLOG.debug("generate file info of dir %s failed,not find js info file" % (root + filedir))
    if (outputjsfilename == ""):
        PLOG.debug("unkown file type!")
        return 0
    with open(outputjsfilename, "w") as f:
        json.dump(allJsonInfo, f, indent=4, ensure_ascii=False)
    # upload the aggregated json document back to the ftp server
    with open(outputjsfilename, "r") as f:
        try:
            outputdirtmp = conf.ftpJsonOutputPath.replace("ftp://", "")
            outputdir = outputdirtmp[outputdirtmp.find("/") + 1:]
            self.storlines("STOR %s" % outputdir + outputjsfilename, f)
            PLOG.debug('upload json file %s success !' % outputjsfilename)
        except:
            PLOG.warn('upload json file %s failed,exception !' % outputjsfilename)
    PLOG.debug('Type["%s"] file crawl dir %s finished' % (filetype, root))
def processData(text):
    """Parse the latest joystick sample out of `text`, map it to a robot
    motion command and send it over the websocket (rate-limited to one
    message per action change or ~1 second).
    """
    # extract the last "(x,y,z)" coordinate triple in the buffer
    index = text.rfind("(")
    indexEnd = text.rfind(")")
    PLOG.debug("last location is " + text[index:indexEnd + 1])
    text = text[index:indexEnd + 1]
    text = text.replace("(", "")
    text = text.replace(")", "")
    arr = text.split(',')
    x = int(arr[0])
    y = int(arr[1])
    z = int(arr[2])
    # classify the motion: the stick rests at (512, 512)
    vx = x - 512
    vy = y - 512
    active_operation = ""
    if abs(vx) < admissibleError and abs(vy) < admissibleError:
        active_operation = "stop"
        PLOG.debug("stop")
    elif abs(vx) < 512 * 1.414 / 2:
        # mostly vertical deflection: forward / backward
        if vy > 0 and vy - admissibleError > 0:
            active_operation = "up"
            PLOG.debug("up")
        else:
            active_operation = "down"
            PLOG.debug("down")
    elif abs(vy) < 512 * 1.414 / 2:
        # mostly horizontal deflection: spin left / right
        if vx > 0 and vx - admissibleError > 0:
            active_operation = "spin_right"
            PLOG.debug("spin_right")
        else:
            active_operation = "spin_left"
            PLOG.debug("spin_left")
    # message layout: {"invoke_id", "msgid":"active_robot", "robot_id",
    #                  "active_mode":{"operation"[, "distance", "angle"]}}
    # BUG FIX: `is not ""` compared identity, not equality; use !=
    if active_operation != "":
        now = int(time.time() * 1000)
        global lastSendTime
        global lastActive
        # resend only when the action changed or ~1s has elapsed
        if active_operation != lastActive or now - lastSendTime >= 950:
            global logined
            if not logined:
                login()
            msg = {}
            msg["invoke_id"] = "1"
            msg["msgid"] = "active_robot"
            msg["robot_id"] = "37bf0a26359e37d"
            msg["active_mode"] = {}
            msg["active_mode"]["operation"] = active_operation
            strmsg = json.dumps(msg)
            global ws
            ws.send(strmsg)
            PLOG.debug("Send: " + strmsg)
            lastSendTime = now
            lastActive = active_operation
def heartbeatCheck(self):
    """Log when this websocket's heartbeat has timed out."""
    if self.isTimeOut():
        # BUG FIX: bare `wsname` is undefined here; read it from self
        PLOG.debug("%s websocket timeout,disconnect it" % self.wsname)
def scanFtpServerFiles(self, root, filetype):
    """Crawl the FTP directory tree under `root`, enrich each item's
    .json descriptor with size/time/id and directory-relative paths,
    write the aggregate json locally and upload it to the server.
    Returns 0 when `filetype` is unknown.
    """
    PLOG.debug('Type["%s"] file start crawling...ftpserver = %s ,dir = %s ' % (filetype, self.host, root))
    outputjsfilename = ""
    filesource = ""
    if filetype == "movie":
        outputjsfilename = conf.movieOutputFile
        filesource = conf.ftpServerMovieSource
    elif filetype == "app":
        outputjsfilename = conf.appOutputFile
        filesource = conf.ftpServerAppSource
    # enumerate all directories below the working directory
    fileDir = self.listdir(root)
    # aggregated movie / APP json document
    allJsonInfo = {}
    allJsonInfo["update"] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    allJsonInfo["source"] = filesource
    allJsonInfo["list"] = []
    for filedir in fileDir:
        PLOG.debug('start generate file info of dir "%s"...' % (root + filedir))
        dictFileItems = self.listFile(root + filedir)
        primaryFilename = ""
        primaryFileSize = ""
        primaryFileTime = ""
        jsFileInfo = None
        for (k, v) in dictFileItems.items():
            if v.ext == ".json":
                fileinfo = []
                try:
                    self.retrlines("RETR %s" % root + filedir + "/" + v.fname, fileinfo.append)
                except:
                    PLOG.warn("retr %s except! skip it !" % v.fname)
                filedetailinfo = ""
                for linestr in fileinfo:
                    filedetailinfo += linestr
                if filedetailinfo != "":
                    # descriptor text arrives gbk-encoded
                    filedetailinfo = filedetailinfo.decode("gbk")
                    jsFileInfo = json.loads(filedetailinfo, "utf8")
                    if jsFileInfo != None:
                        # `in` replaces the Python-2-only has_key()
                        if "file" in jsFileInfo:
                            primaryFilename = jsFileInfo["file"]
                        else:
                            PLOG.debug('not find "file" node in info file %s , skip it' % (v.fname))
                    else:
                        PLOG.error("js file %s is null,maybe path error! skip it" % (v.fname))
                break
        # BUG FIX: was `!= ""` — always true for None, so the
        # "not find js info file" branch could never run
        if jsFileInfo != None:
            if primaryFilename != "":
                if primaryFilename in dictFileItems:
                    primaryFileItem = dictFileItems[primaryFilename]
                    primaryFileSize = primaryFileItem.size
                    # `timestr` avoids shadowing the `time` module
                    timestr = primaryFileItem.time[:primaryFileItem.time.find(".")]
                    primaryFileTime = datetime.datetime.strptime(timestr, "%Y%m%d%H%M%S").strftime("%Y-%m-%d %H:%M:%S")
                    jsFileInfo["filesize"] = primaryFileSize
                    jsFileInfo["filetime"] = primaryFileTime
                    jsFileInfo["id"] = str(uuid.uuid1())
                    # make resource paths directory-relative
                    filerelativedir = filedir + "/"
                    if "file" in jsFileInfo:
                        jsFileInfo["file"] = filerelativedir + jsFileInfo["file"]
                    if "poster" in jsFileInfo:
                        jsFileInfo["poster"] = filerelativedir + jsFileInfo["poster"]
                    if "thumbnail" in jsFileInfo:
                        jsFileInfo["thumbnail"] = filerelativedir + jsFileInfo["thumbnail"]
                    if "extend" in jsFileInfo:
                        jsextend = jsFileInfo["extend"]
                        if "screenshot" in jsextend:
                            jsscreenshottmp = []
                            for picture in jsextend["screenshot"]:
                                picture = filerelativedir + picture
                                jsscreenshottmp.append(picture)
                            jsextend["screenshot"] = jsscreenshottmp
                    allJsonInfo["list"].append(jsFileInfo)
                    PLOG.debug('generate file info of dir "%s" success' % (root + filedir))
                else:
                    PLOG.debug("generate file info of dir %s failed,not find primary File %s" % (root + filedir, primaryFilename))
            else:
                PLOG.debug("generate file info of dir %s failed,primary File name is empty" % (root + filedir))
        else:
            PLOG.debug("generate file info of dir %s failed,not find js info file" % (root + filedir))
    if outputjsfilename == "":
        PLOG.debug("unkown file type!")
        return 0
    with open(outputjsfilename, "w") as f:
        json.dump(allJsonInfo, f, indent=4, ensure_ascii=False)
    # upload the aggregated json document back to the ftp server
    with open(outputjsfilename, "r") as f:
        try:
            outputdirtmp = conf.ftpJsonOutputPath.replace("ftp://", "")
            outputdir = outputdirtmp[outputdirtmp.find("/") + 1:]
            self.storlines("STOR %s" % outputdir + outputjsfilename, f)
            PLOG.debug("upload json file %s success !" % outputjsfilename)
        except:
            PLOG.warn("upload json file %s failed,exception !" % outputjsfilename)
    PLOG.debug('Type["%s"] file crawl dir %s finished' % (filetype, root))
def on_close(ws):
    """Websocket closed callback: just record the event."""
    PLOG.debug("### closed ###")
def on_error(ws, error):
    """Websocket error callback: log the error and back off before the
    surrounding reconnect loop dials again."""
    PLOG.debug(error)
    # poor connectivity is likely; wait a while before reconnecting
    time.sleep(5)
def scanFtpServerFiles(self, root, filetype):
    """Crawl the FTP tree under `root` (list-based variant): read each
    item's .json descriptor, enrich it with size/time (via LIST/SIZE),
    id and directory-relative paths, aggregate and upload the result.
    Returns 0 when `filetype` is unknown.
    """
    PLOG.debug('Type["%s"] file start crawling...ftpserver = %s ,dir = %s ' % (filetype, self.host, root))
    outputjsfilename = ""
    filesource = ""
    if filetype == "movie":
        outputjsfilename = conf.movieOutputFile
        filesource = conf.ftpServerMovieSource
    elif filetype == "app":
        outputjsfilename = conf.appOutputFile
        filesource = conf.ftpServerAppSource
    # enumerate all directories below the working directory
    fileDir = self.listdir(root)
    # aggregated movie / APP json document
    allJsonInfo = {}
    allJsonInfo["update"] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    allJsonInfo["source"] = filesource
    allJsonInfo["list"] = []
    for filedir in fileDir:
        PLOG.debug('start generate file info of dir "%s"...' % (root + filedir))
        fileItems = self.listFile(root + filedir)
        primaryFilename = ""
        primaryFileSize = ""
        primaryFileTime = ""
        jsFileInfo = None
        for fileitem in fileItems:
            if fileitem[-5:] == ".json":
                fileinfo = []
                # make the path absolute when the listing was bare names
                if fileitem.find("/") == -1:
                    fileitem = root + filedir + '/' + fileitem
                try:
                    self.retrlines("RETR %s" % fileitem, fileinfo.append)
                except:
                    PLOG.warn('retr %s except! skip it !' % fileitem)
                filedetailinfo = ""
                for linestr in fileinfo:
                    filedetailinfo += linestr
                if filedetailinfo != "":
                    # BUG FIX: only fall back to gb2312 when the gbk decode
                    # fails; the old code unconditionally re-decoded the
                    # already-decoded text
                    try:
                        filedetailinfo = filedetailinfo.decode("gbk")
                    except:
                        try:
                            filedetailinfo = filedetailinfo.decode("gb2312")
                        except:
                            pass
                    jsFileInfo = json.loads(filedetailinfo, 'utf8')
                    if jsFileInfo != None:
                        # `in` replaces the Python-2-only has_key()
                        if "file" in jsFileInfo:
                            primaryFilename = jsFileInfo["file"]
                        else:
                            PLOG.debug('not find "file" node in info file %s , skip it' % (fileitem))
                    else:
                        PLOG.error('js file %s is null,maybe path error! skip it' % (fileitem))
                break
        if jsFileInfo != None and jsFileInfo != "":
            if primaryFilename != "":
                try:
                    # fetch the primary file's timestamp (LIST) and size
                    timestamp = []
                    self.retrlines("LIST %s" % root + filedir + '/' + primaryFilename,
                                   lambda x: timestamp.append(self.separateFileTime(x)))
                    primaryFileSize = self.size(root + filedir + '/' + primaryFilename)
                    primaryFileTime = timestamp.pop()
                    jsFileInfo["filesize"] = primaryFileSize
                    jsFileInfo["filetime"] = primaryFileTime
                    jsFileInfo["id"] = str(uuid.uuid1())
                    # make resource paths directory-relative
                    filerelativedir = filedir + '/'
                    if "file" in jsFileInfo:
                        jsFileInfo["file"] = filerelativedir + jsFileInfo["file"]
                    if "poster" in jsFileInfo:
                        jsFileInfo["poster"] = filerelativedir + jsFileInfo["poster"]
                    if "thumbnail" in jsFileInfo:
                        jsFileInfo["thumbnail"] = filerelativedir + jsFileInfo["thumbnail"]
                    if "extend" in jsFileInfo:
                        jsextend = jsFileInfo["extend"]
                        if "screenshot" in jsextend:
                            jsscreenshottmp = []
                            for picture in jsextend["screenshot"]:
                                picture = filerelativedir + picture
                                jsscreenshottmp.append(picture)
                            jsextend["screenshot"] = jsscreenshottmp
                    allJsonInfo["list"].append(jsFileInfo)
                    PLOG.debug('generate file info of dir "%s" success' % (root + filedir))
                except:
                    PLOG.warn('retr %s except! skip it !' % (root + filedir + '/' + primaryFilename))
                    PLOG.debug("generate file info of dir %s failed,not find primary File %s" % (root + filedir, primaryFilename))
            else:
                PLOG.debug("generate file info of dir %s failed,primary File name is empty" % (root + filedir))
        else:
            PLOG.debug("generate file info of dir %s failed,not find js info file" % (root + filedir))
    if (outputjsfilename == ""):
        PLOG.debug("unkown file type!")
        return 0
    with open(outputjsfilename, "w") as f:
        json.dump(allJsonInfo, f, indent=4, ensure_ascii=False)
    # upload the aggregated json document back to the ftp server
    # (removed the unused local `ttt = len(outputjsfilename)`)
    with open(outputjsfilename, "r") as f:
        try:
            outputdirtmp = conf.ftpJsonOutputPath.replace("ftp://", "")
            outputdir = outputdirtmp[outputdirtmp.find("/") + 1:]
            self.storlines("STOR %s" % outputdir + outputjsfilename, f)
            PLOG.debug('upload json file %s success !' % outputjsfilename)
        except:
            PLOG.warn('upload json file %s failed,exception !' % outputjsfilename)
    PLOG.debug('Type["%s"] file crawl dir %s finished' % (filetype, root))
def RemoveWebSocket(self, wsstr):
    """Deregister the websocket named `wsstr`.

    Uses pop with a default so a double removal (timeout handler and
    close handler can both fire) does not raise KeyError.
    """
    self.websockets.pop(wsstr, None)
    PLOG.debug("ws manager remove %s" % wsstr)