def load(self):
    config = ConfigParser.ConfigParser()
    conf1 = assistant.SF("%s/alwayson.conf" % (os.path.dirname(__file__)))
    conf2 = assistant.SF("%s/alwayson.conf" % (os.getcwd()))
    conf3 = "/etc/alwayson.conf"
    if os.path.isfile(conf1):
        conf = conf1
    elif os.path.isfile(conf2):
        conf = conf2
    else:
        conf = conf3
    PLOG.info("using config file:%s" % conf)
    config.readfp(open(conf, "rb"))
    checkinterval = config.getint("alwayson", "interval")
    for section in config.sections():
        try:
            if section == "alwayson":
                continue
            name = section
            newprog = program(name)
            newprog.command = config.get(section, "command")
            newprog.runpath = config.get(section, "runpath")
            # newprog.matchingregular = config.get(section, "matchingregular")
            newprog.matchingstring = config.get(section, "matchingstring")
            newprog.pidfile = config.get(section, "pidfile")
            newprog.bootwait = config.getint(section, "bootwait")
            newprog.rebootwait = config.getint(section, "rebootwait")
            newprog.enabled = config.getboolean(section, "enabled")
            newprog.init(self)
            PLOG.info("confirm:%s" % name)
        except:
            PLOG.error("read config file failed!program=%s,Pass!" % name)
            continue
        self.programlist.append(newprog)
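For reference, a minimal alwayson.conf sketch matching the options load() reads. Only the option names and the [alwayson] interval key come from the code above; the program section name, paths and values are invented for illustration.

[alwayson]
interval = 5

# hypothetical monitored program; one section per program
[mydaemon]
command = /usr/local/bin/mydaemon --foreground
runpath = /var/run/mydaemon
matchingstring = mydaemon --foreground
pidfile = /var/run/mydaemon/mydaemon.pid
bootwait = 3
rebootwait = 5
enabled = true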
def closeconnection(self):
    """ Close the current connection. """
    if self.conn != None:
        self.conn.close()
    else:
        PLOG.error("DBOperater.closeconnection error, conn is none")
    return 0
def createconnection(self, host, user, passwd, dbname):
    """ Create a new connection. """
    self.conn = MySQLdb.Connect(host, user, passwd, dbname, charset="utf8")
    if False == self.conn.open:
        PLOG.error("DBOperater.createconnection error")
        return -1
    return 0
def getPIDForString(self, s):
    pid = None
    try:
        processList = assistant.getProcessList()
        for p in processList:
            if p[1].find(s) != -1:
                pid = p[0]
                break
        del processList
    except Exception, e:
        PLOG.error("%s getPIDForString except:%s" % (self.name, e))
    return pid
def getCommandForPID(self, pid):
    cmd = None
    try:
        processList = assistant.getProcessList()
        for p in processList:
            if p[0] == pid:
                cmd = p[1]
                break
        del processList
    except Exception, e:
        PLOG.error("%s getCommandForPID except:%s" % (self.name, e))
    return cmd
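assistant.getProcessList() is not defined in this section; the two lookups above only require it to return a list of (pid, commandline) pairs with an integer pid. A minimal stand-in sketch under that assumption, parsing ps -eo pid,args (the real assistant module may work differently):

import subprocess

def getProcessList():
    # Hypothetical stand-in for assistant.getProcessList():
    # returns a list of (pid, commandline) tuples for all running processes.
    processList = []
    output = subprocess.check_output(["ps", "-eo", "pid,args"])
    for line in output.splitlines()[1:]:  # skip the "PID COMMAND" header line
        line = line.strip()
        if not line:
            continue
        pidstr, _, command = line.partition(" ")
        try:
            processList.append((int(pidstr), command.strip()))
        except ValueError:
            continue
    return processList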
def query(self, sqltext, mode=STORE_RESULT_MODE):
    """
    Purpose: run the query through the connection object's query method and
             return a tuple of (affected row count (int), result set (result)).
    Args:    sqltext: the SQL statement
             mode=STORE_RESULT_MODE(0) returns store_result,
             mode=USESTORE_RESULT_MODE(1) returns use_result
    Returns: tuple (affected row count (int), result set (result))
    """
    if None == self.conn or False == self.conn.open:
        return -1
    self.conn.query(sqltext)
    if 0 == mode:
        result = self.conn.store_result()
    elif 1 == mode:
        result = self.conn.use_result()
    else:
        PLOG.error("DBOperater.query error, mode value is wrong")
        return -1
    return (self.conn.affected_rows(), result)
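A short usage sketch for these connection helpers. It assumes the enclosing class is DBOperater (as its log messages suggest), that it can be constructed without arguments, and that STORE_RESULT_MODE is the module-level constant 0; the host, credentials and SQL are placeholders.

db = DBOperater()
if db.createconnection("127.0.0.1", "dbuser", "dbpass", "testdb") == 0:
    ret = db.query("SELECT id, name FROM programs", STORE_RESULT_MODE)
    if ret != -1:
        affected, result = ret
        for row in result.fetch_row(maxrows=0):  # fetch all rows of the stored result
            print row
    db.closeconnection()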
def update_pid(self):
    try:
        self._pid = 0
        if len(self.pidfile) > 0:
            pidfile = open(self.pidfile, 'r')
            pid = int(pidfile.readline())
            pidfile.close()
            if pid != 0:
                if self.getCommandForPID(pid) != None:
                    self._pid = pid
                    PLOG.info("%s PID confirmed for pid:%d" % (self.name, pid))
        elif len(self.matchingstring) > 0:
            pid = self.getPIDForString(self.matchingstring)
            if pid != None:
                self._pid = pid
                PLOG.info("%s PID confirmed by matching string:%d" % (self.name, pid))
        else:
            PLOG.error("%s Unknown check type!!!" % self.name)
    except Exception, e:
        PLOG.error("%s update_pid except!err=%s" % (self.name, e))
def run(self):
    self.status = "waiting start"
    if not self.enabled:
        self.status = "disabled"
        return
    if self.bootwait > 0:
        time.sleep(self.bootwait)
    while not self.thread_stop:
        self.status = "checking"
        # print '%s start checking at %s ...\n' % (self.name, time.ctime())
        if self.processHandle != None and self.processHandle.poll() != None:
            print "recycle %s" % (self.name)
            self.processHandle = None
        if not check_pid(self._pid):
            self.update_pid()
        if not check_pid(self._pid):
            self._pid = 0
        if self._pid == 0:
            self.processHandle = None
            PLOG.warn("%s check failed!restarting ..." % (self.name))
            if self.rebootwait > 0:
                self.status = "waiting restart"
                PLOG.info("%s restarting wait %d second..." % (self.name, self.rebootwait))
                time.sleep(self.rebootwait)
            try:
                self.status = "starting"
                # change the current working directory
                if len(self.runpath) > 0:
                    try:
                        if not os.path.isdir(self.runpath):
                            os.makedirs(self.runpath)
                        if not os.path.isdir(self.runpath):
                            self.enabled = False
                            PLOG.error("%s run path invalid!" % (self.name))
                            break
                        os.chdir(self.runpath)
                    except Exception, e:
                        PLOG.error("%s restart failed!change current path failed!err=%s" % (self.name, e))
                PLOG.info("%s execute command:'%s'" % (self.name, self.command))
                self.processHandle = subprocess.Popen(
                    self.command, bufsize=0, executable=None, stdin=None,
                    stdout=None, stderr=None, preexec_fn=None, close_fds=False,
                    shell=True, cwd=self.runpath, env=None,
                    universal_newlines=False, startupinfo=None, creationflags=0)
                self._pid = self.processHandle.pid
            except Exception, e:
                PLOG.error("%s restart failed!err=%s" % (self.name, e))
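check_pid() is used by the monitor loop above but not defined in this section; it only needs to answer whether a process with the given pid is currently alive. A minimal sketch under that assumption, using the conventional signal-0 probe:

import os

def check_pid(pid):
    # Hypothetical helper: True if a process with this pid currently exists.
    if pid is None or pid <= 0:
        return False
    try:
        os.kill(pid, 0)  # signal 0 performs an existence/permission check only
        return True
    except OSError:
        return False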
def execute(self, sqltext, args=None, mode=CURSOR_MODE, many=False):
    """
    Purpose: execute the query through a cursor's execute method.
    Args:    sqltext: the SQL statement
             args: parameters for sqltext
             mode: how the result set is returned
                   CURSOR_MODE = 0       : store_result , tuple
                   DICTCURSOR_MODE = 1   : store_result , dict
                   SSCURSOR_MODE = 2     : use_result , tuple
                   SSDICTCURSOR_MODE = 3 : use_result , dict
             many: whether to run a multi-row operation (executemany)
    Returns: tuple (affected row count (int), cursor (Cursor))
    """
    if CURSOR_MODE == mode:
        curclass = MySQLdb.cursors.Cursor
    elif DICTCURSOR_MODE == mode:
        curclass = MySQLdb.cursors.DictCursor
    elif SSCURSOR_MODE == mode:
        curclass = MySQLdb.cursors.SSCursor
    elif SSDICTCURSOR_MODE == mode:
        curclass = MySQLdb.cursors.SSDictCursor
    else:
        PLOG.error("DBOperater.execute error, mode value is wrong")
        return -1
    cur = self.conn.cursor(cursorclass=curclass)
    line = 0
    if False == many:
        if None == args:
            line = cur.execute(sqltext)
        else:
            line = cur.execute(sqltext, args)
    else:
        if None == args:
            line = cur.executemany(sqltext)
        else:
            line = cur.executemany(sqltext, args)
    return (line, cur)
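And a similar sketch for execute(), again assuming a no-argument DBOperater constructor and that DICTCURSOR_MODE is the module-level constant 1 listed in the docstring; the table name and parameters are placeholders.

db = DBOperater()
if db.createconnection("127.0.0.1", "dbuser", "dbpass", "testdb") == 0:
    ret = db.execute("SELECT id, name FROM programs WHERE enabled = %s", (1,), DICTCURSOR_MODE)
    if ret != -1:
        count, cur = ret
        for row in cur.fetchall():  # DictCursor returns each row as a dict
            print row["id"], row["name"]
        cur.close()
    db.closeconnection()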
def scanFtpServerFiles(self, root, filetype):
    PLOG.debug('Type["%s"] file start crawling...ftpserver = %s ,dir = %s ' % (filetype, self.host, root))
    outputjsfilename = ""
    filesource = ""
    if filetype == "movie":
        outputjsfilename = conf.movieOutputFile
        filesource = conf.ftpServerMovieSource
    elif filetype == "app":
        outputjsfilename = conf.appOutputFile
        filesource = conf.ftpServerAppSource
    # enumerate every directory under the working directory
    fileDir = self.listdir(root)
    # JSON data describing every movie or app
    allJsonInfo = {}
    allJsonInfo["update"] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    allJsonInfo["source"] = filesource
    allJsonInfo["list"] = []
    for filedir in fileDir:
        PLOG.debug('start generate file info of dir "%s"...' % (root + filedir))
        dictFileItems = self.listFile(root + filedir)
        primaryFilename = ""
        primaryFileSize = ""
        primaryFileTime = ""
        jsFileInfo = None
        for (k, v) in dictFileItems.items():
            if v.ext == ".json":
                fileinfo = []
                try:
                    self.retrlines("RETR %s" % (root + filedir + '/' + v.fname), fileinfo.append)
                except:
                    PLOG.warn('retr %s except! skip it !' % v.fname)
                filedetailinfo = ""
                for linestr in fileinfo:
                    filedetailinfo += linestr
                if filedetailinfo != "":
                    filedetailinfo = filedetailinfo.decode("gbk")
                    jsFileInfo = json.loads(filedetailinfo, 'utf8')
                    if jsFileInfo != None:
                        if jsFileInfo.has_key("file"):
                            primaryFilename = jsFileInfo["file"]
                        else:
                            PLOG.debug('not find "file" node in info file %s , skip it' % (v.fname))
                else:
                    PLOG.error('js file %s is null,maybe path error! skip it' % (v.fname))
                break
        if jsFileInfo != None:
            if primaryFilename != "":
                if dictFileItems.has_key(primaryFilename):
                    primaryFileItem = dictFileItems[primaryFilename]
                    primaryFileSize = primaryFileItem.size
                    timestr = primaryFileItem.time[:primaryFileItem.time.find('.')]
                    primaryFileTime = datetime.datetime.strptime(timestr, '%Y%m%d%H%M%S').strftime("%Y-%m-%d %H:%M:%S")
                    jsFileInfo["filesize"] = primaryFileSize
                    jsFileInfo["filetime"] = primaryFileTime
                    jsFileInfo["id"] = str(uuid.uuid1())
                    filerelativedir = filedir + '/'
                    if jsFileInfo.has_key("file"):
                        jsFileInfo["file"] = filerelativedir + jsFileInfo["file"]
                    if jsFileInfo.has_key("poster"):
                        jsFileInfo["poster"] = filerelativedir + jsFileInfo["poster"]
                    if jsFileInfo.has_key("thumbnail"):
                        jsFileInfo["thumbnail"] = filerelativedir + jsFileInfo["thumbnail"]
                    if jsFileInfo.has_key("extend"):
                        jsextend = jsFileInfo["extend"]
                        if jsextend.has_key("screenshot"):
                            jsscreenshottmp = []
                            for picture in jsextend["screenshot"]:
                                picture = filerelativedir + picture
                                jsscreenshottmp.append(picture)
                            jsextend["screenshot"] = jsscreenshottmp
                    allJsonInfo["list"].append(jsFileInfo)
                    PLOG.debug('generate file info of dir "%s" success' % (root + filedir))
                else:
                    PLOG.debug("generate file info of dir %s failed,not find primary File %s" % (root + filedir, primaryFilename))
            else:
                PLOG.debug("generate file info of dir %s failed,primary File name is empty" % (root + filedir))
        else:
            PLOG.debug("generate file info of dir %s failed,not find js info file" % (root + filedir))
    if outputjsfilename == "":
        PLOG.debug("unknown file type!")
        return 0
    with open(outputjsfilename, "w") as f:
        json.dump(allJsonInfo, f, indent=4, ensure_ascii=False)
    # upload the json file back to the ftp server
    with open(outputjsfilename, "r") as f:
        try:
            outputdirtmp = conf.ftpJsonOutputPath.replace("ftp://", "")
            outputdir = outputdirtmp[outputdirtmp.find("/") + 1:]
            self.storlines("STOR %s" % (outputdir + outputjsfilename), f)
            PLOG.debug('upload json file %s success !' % outputjsfilename)
        except:
            PLOG.warn('upload json file %s failed,exception !' % outputjsfilename)
    PLOG.debug('Type["%s"] file crawl dir %s finished' % (filetype, root))
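For orientation, a sketch of the two JSON documents involved. The key names ("file", "poster", "thumbnail", "extend"/"screenshot", and the generated "update"/"source"/"list"/"filesize"/"filetime"/"id") come from the code above; every value is invented.

A per-directory metadata file (the .json the scanner downloads, GBK-encoded on the server):

{
    "file": "example_movie.mp4",
    "poster": "poster.jpg",
    "thumbnail": "thumb.jpg",
    "extend": {
        "screenshot": ["shot1.jpg", "shot2.jpg"]
    }
}

The aggregated document written to outputjsfilename and uploaded back to the FTP server:

{
    "update": "2015-02-10 12:00:00",
    "source": "example-source",
    "list": [
        {
            "file": "somedir/example_movie.mp4",
            "poster": "somedir/poster.jpg",
            "thumbnail": "somedir/thumb.jpg",
            "extend": {"screenshot": ["somedir/shot1.jpg", "somedir/shot2.jpg"]},
            "filesize": "734003200",
            "filetime": "2015-02-09 08:30:00",
            "id": "f47ac10b-58cc-11e5-9d5c-0800200c9a66"
        }
    ]
}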
    print(
        "没有结束统计时间,默认统计至今天(包括今天),想要统计特定日期内的数据,请输入起始和结束时间(包含起始日期和结束日期),例如:20150210 20150224"
    )
    startdate = datetime.datetime.strptime(sys.argv[1], '%Y%m%d')
    enddate = enddate + datetime.timedelta(days=1)
else:
    try:
        startdate = datetime.datetime.strptime(sys.argv[1], '%Y%m%d')
        enddate = datetime.datetime.strptime(sys.argv[2], '%Y%m%d') + datetime.timedelta(days=1)
    except:
        print("输入时间参数解析失败,参数格式不对,例:20150210")
        sys.exit(1)

PLOG.enableControllog(False)
PLOG.enableFilelog("%s/log/SARPT_$(Date8)_$(filenumber2).log" % (os.path.dirname(__file__)))
loadconfig()
PLOG.setlevel(SAPeakDataPublic.st.loglevel)
# load the base data and build the area relationships
stopsCentor = SAStopDefine.stopDc
if not stopsCentor.reloadStop():
    PLOG.error("加载站点基础数据失败!")
    sys.exit(1)
while startdate < enddate:
    # reset each stop's per-day data arrays
    for stopid, stop in stopsCentor.stops.items():
        stop.resetdata()
    # aggregate the data for the day startdate
    statisticsCurrentDayData(startdate)
    startdate = startdate + datetime.timedelta(days=1)
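A usage sketch for this statistics entry point; the script name is a placeholder, dates use the YYYYMMDD format the parsing above expects, and both endpoints of the range are included:

# aggregate a specific date range
python sa_peak_report.py 20150210 20150224

# aggregate from a start date up to and including today
python sa_peak_report.py 20150210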
def scanFtpServerFiles(self, root, filetype):
    PLOG.debug('Type["%s"] file start crawling...ftpserver = %s ,dir = %s ' % (filetype, self.host, root))
    outputjsfilename = ""
    filesource = ""
    if filetype == "movie":
        outputjsfilename = conf.movieOutputFile
        filesource = conf.ftpServerMovieSource
    elif filetype == "app":
        outputjsfilename = conf.appOutputFile
        filesource = conf.ftpServerAppSource
    # enumerate every directory under the working directory
    fileDir = self.listdir(root)
    # JSON data describing every movie or app
    allJsonInfo = {}
    allJsonInfo["update"] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    allJsonInfo["source"] = filesource
    allJsonInfo["list"] = []
    for filedir in fileDir:
        PLOG.debug('start generate file info of dir "%s"...' % (root + filedir))
        fileItems = self.listFile(root + filedir)
        primaryFilename = ""
        primaryFileSize = ""
        primaryFileTime = ""
        jsFileInfo = None
        for fileitem in fileItems:
            if fileitem[-5:] == ".json":
                fileinfo = []
                if fileitem.find("/") == -1:
                    fileitem = root + filedir + '/' + fileitem
                try:
                    self.retrlines("RETR %s" % fileitem, fileinfo.append)
                except:
                    PLOG.warn('retr %s except! skip it !' % fileitem)
                filedetailinfo = ""
                for linestr in fileinfo:
                    filedetailinfo += linestr
                if filedetailinfo != "":
                    try:
                        filedetailinfo = filedetailinfo.decode("gbk")
                    except:
                        pass
                    try:
                        filedetailinfo = filedetailinfo.decode("gb2312")
                    except:
                        pass
                        # PLOG.debug("decode failed! %s is not encoded by gbk")
                    jsFileInfo = json.loads(filedetailinfo, 'utf8')
                    if jsFileInfo != None:
                        if jsFileInfo.has_key("file"):
                            primaryFilename = jsFileInfo["file"]
                        else:
                            PLOG.debug('not find "file" node in info file %s , skip it' % (fileitem))
                else:
                    PLOG.error('js file %s is null,maybe path error! skip it' % (fileitem))
                break
        if jsFileInfo != None and jsFileInfo != "":
            if primaryFilename != "":
                try:
                    timestamp = []
                    self.retrlines("LIST %s" % (root + filedir + '/' + primaryFilename),
                                   lambda x: timestamp.append(self.separateFileTime(x)))
                    primaryFileSize = self.size(root + filedir + '/' + primaryFilename)
                    primaryFileTime = timestamp.pop()
                    jsFileInfo["filesize"] = primaryFileSize
                    jsFileInfo["filetime"] = primaryFileTime
                    jsFileInfo["id"] = str(uuid.uuid1())
                    filerelativedir = filedir + '/'
                    if jsFileInfo.has_key("file"):
                        jsFileInfo["file"] = filerelativedir + jsFileInfo["file"]
                    if jsFileInfo.has_key("poster"):
                        jsFileInfo["poster"] = filerelativedir + jsFileInfo["poster"]
                    if jsFileInfo.has_key("thumbnail"):
                        jsFileInfo["thumbnail"] = filerelativedir + jsFileInfo["thumbnail"]
                    if jsFileInfo.has_key("extend"):
                        jsextend = jsFileInfo["extend"]
                        if jsextend.has_key("screenshot"):
                            jsscreenshottmp = []
                            for picture in jsextend["screenshot"]:
                                picture = filerelativedir + picture
                                jsscreenshottmp.append(picture)
                            jsextend["screenshot"] = jsscreenshottmp
                    allJsonInfo["list"].append(jsFileInfo)
                    PLOG.debug('generate file info of dir "%s" success' % (root + filedir))
                except:
                    PLOG.warn('retr %s except! skip it !' % (root + filedir + '/' + primaryFilename))
                    PLOG.debug("generate file info of dir %s failed,not find primary File %s" % (root + filedir, primaryFilename))
            else:
                PLOG.debug("generate file info of dir %s failed,primary File name is empty" % (root + filedir))
        else:
            PLOG.debug("generate file info of dir %s failed,not find js info file" % (root + filedir))
    if outputjsfilename == "":
        PLOG.debug("unknown file type!")
        return 0
    with open(outputjsfilename, "w") as f:
        json.dump(allJsonInfo, f, indent=4, ensure_ascii=False)
    # upload the json file back to the ftp server
    with open(outputjsfilename, "r") as f:
        try:
            outputdirtmp = conf.ftpJsonOutputPath.replace("ftp://", "")
            outputdir = outputdirtmp[outputdirtmp.find("/") + 1:]
            self.storlines("STOR %s" % (outputdir + outputjsfilename), f)
            PLOG.debug('upload json file %s success !' % outputjsfilename)
        except:
            PLOG.warn('upload json file %s failed,exception !' % outputjsfilename)
    PLOG.debug('Type["%s"] file crawl dir %s finished' % (filetype, root))