def get_pvlist(self):
    """Load the list of PVs to monitor from the configured XML file.

    Reads ``PVLIST_FILE`` from ``self.configuration``, validates it against
    ``XML_SCHEMA_FILE``, and calls ``self.add_pv(...)`` for every
    ``EPICS_PV`` element not marked ``_ignore_``.

    :raises CouldNotParseXml: when the file exists but is not valid XML
    """
    pvlist_file = self.configuration['PVLIST_FILE']
    if not os.path.exists(pvlist_file):
        utils.logMessage('could not find file: ' + pvlist_file)
        return
    try:
        tree = etree.parse(pvlist_file)
    except Exception:
        msg = 'could not parse file: ' + pvlist_file
        utils.logMessage(msg)
        raise CouldNotParseXml(msg)
    utils.validate(tree, XML_SCHEMA_FILE)
    msg = 'validated file: ' + pvlist_file
    utils.logMessage(msg)
    for key in tree.findall(".//EPICS_PV"):
        if key.get("_ignore_", "false").lower() == "false":
            mne = key.get("mne")
            pv = key.get("PV")
            desc = key.get("description")
            fmt = key.get("display_format", "%s")  # default format
            # XML attribute values are strings; the old code passed the raw
            # string through, so as_string="false" was truthy.  Convert to a
            # real bool (default False when the attribute is absent).
            # :see: http://cars9.uchicago.edu/software/python/pyepics3/pv.html?highlight=as_string#pv.get
            as_string = key.get("as_string", "false").lower() == "true"
            try:
                self.add_pv(mne, pv, desc, fmt, as_string)
            except Exception:
                # best-effort: log the failing element and continue with the rest
                msg = "%s: problem connecting: %s" % (pvlist_file, etree.tostring(key))
                utils.logException(msg)
    utils.logMessage('all PVs added')
def manage_afterAdd(self, object, container):
    """Zope hook run after this object is added to *container*.

    Delegates to BaseClass, then re-reads the .metadata file so that
    validation checks can use information in portal_form_controller.
    manage_afterAdd is not guaranteed to run, so __init__ also calls
    these readers.  Failures are logged and re-raised.
    """
    try:
        BaseClass.manage_afterAdd(self, object, container)
        self._read_action_metadata(self.getId(), self.filepath)
        self._read_validator_metadata(self.getId(), self.filepath)
    except Exception:
        # narrowed from a bare except so SystemExit/KeyboardInterrupt
        # pass through without being logged; real errors still re-raise
        log(summary="metadata error", text="file = %s" % self.filepath)
        logException()
        raise
def manage_afterAdd(self, object, container):
    """Zope hook run after this object is added to *container*.

    Delegates to BaseClass, then re-reads the .metadata file so that
    validation checks can use information in portal_form_controller.
    manage_afterAdd is not guaranteed to run, so __init__ also calls
    these readers.  Failures are logged and re-raised.
    """
    try:
        BaseClass.manage_afterAdd(self, object, container)
        self._read_action_metadata(self.getId(), self.filepath)
        self._read_validator_metadata(self.getId(), self.filepath)
    except Exception:
        # narrowed from a bare except so SystemExit/KeyboardInterrupt
        # pass through without being logged; real errors still re-raise
        log(summary='metadata error', text='file = %s' % self.filepath)
        logException()
        raise
def start(self):
    """Run the main polling loop: receive PV updates and post new web content.

    Loops forever: polls EPICS channel access, writes a report every
    REPORT_INTERVAL_S seconds, logs a checkpoint every LOG_INTERVAL_S
    seconds, and sleeps SLEEP_INTERVAL_S between iterations.
    """
    next_report = utils.getTime()
    next_log = next_report
    report_interval = datetime.timedelta(
        seconds=self.configuration['REPORT_INTERVAL_S'])
    log_interval = datetime.timedelta(
        seconds=self.configuration['LOG_INTERVAL_S'])
    loop_counter = 0
    while True:
        loop_counter += 1
        loop_counter %= self.configuration['MAINLOOP_COUNTER_TRIGGER']
        now = utils.getTime()
        epics.ca.poll()
        # periodic heartbeat: every MAINLOOP_COUNTER_TRIGGER iterations
        if loop_counter == 0:
            utils.logMessage(
                " %s times through main loop"
                % self.configuration['MAINLOOP_COUNTER_TRIGGER'])
        if now >= next_report:
            next_report = now + report_interval
            try:
                self.report()  # write contents of pvdb to a file
            except Exception:
                utils.logException("report()")
        if now >= next_log:
            next_log = now + log_interval
            utils.logMessage(
                "checkpoint, %d EPICS monitor events received"
                % self.monitor_counter)
            self.monitor_counter = 0  # reset
        time.sleep(self.configuration['SLEEP_INTERVAL_S'])
def manage_afterAdd(self, object, container):
    """Zope hook run after this object is added to *container*.

    Delegates to BaseClass; any failure is logged and re-raised.
    """
    try:
        BaseClass.manage_afterAdd(self, object, container)
    except Exception:
        # narrowed from a bare except so SystemExit/KeyboardInterrupt
        # pass through without being logged; real errors still re-raise
        logException()
        raise
def main(props, baseTime, srvList, oprList):
    """Generate the daily/global statistics SQL and apply it to gmsdb.

    :param props: configuration properties holding the db connection info
    :param baseTime: optional 'YYYY-MM-DD' date string; defaults to today
    :param srvList: list of server ids to process
    :param oprList: list of operator (channel) ids to process
    """
    logdb = DbConnection(__createDbInfo("logdb", props))
    gmsdb = DbConnection(__createDbInfo("gmsdb", props))
    logdb.connect()
    gmsdb.connect()
    baseDate = datetime.date.today()
    if baseTime:
        # .date() keeps baseDate a datetime.date in both branches;
        # strptime returns a datetime, which flowed downstream inconsistently
        baseDate = datetime.datetime.strptime(baseTime, "%Y-%m-%d").date()
    allSql = []
    # platform-wide stats
    allSql.extend(globalStatsForAll(baseDate, logdb, gmsdb))
    allSql.extend(dayStatsForAll(baseDate, logdb, gmsdb))
    # per-operator (channel) stats
    for opr in oprList:
        # global figures
        r = globalStatsForOpr(baseDate, opr, logdb, gmsdb)
        allSql.extend(r)
        # daily figures
        r = dayStatsForOpr(baseDate, opr, logdb, gmsdb)
        allSql.extend(r)
    # per-server stats
    for si in srvList:
        # global figures
        r = globalStatsForServer(baseDate, si, logdb, gmsdb)
        allSql.extend(r)
        # daily figures
        r = dayStatsForServer(baseDate, si, logdb, gmsdb)
        allSql.extend(r)
        # game figures
        r = gameStats(baseDate, si, logdb, gmsdb)
        allSql.extend(r)
    for sql in allSql:
        utils.logInfo("sql=%s" % sql)
    if not utils.ONLY_SQL:
        # apply everything in a single transaction; roll back on any failure
        try:
            gmsdb.setAutoCommit(False)
            for sql in allSql:
                utils.logInfo("execute sql=%s" % sql)
                gmsdb.update(sql)
            gmsdb.commit()
        except Exception:
            gmsdb.rollback()
            utils.logException('Exception')
        finally:
            gmsdb.setAutoCommit(True)
    utils.logInfo("delete expire data...")
    expireSql = deleteExpireLogs(baseDate)
    for sql in expireSql:
        utils.logInfo("sql=%s" % sql)
    if not utils.ONLY_SQL:
        logdb.setAutoCommit(True)
        for sql in expireSql:
            # delete in batches until the statement affects no more rows
            while True:
                utils.logInfo("execute sql=%s" % sql)
                if logdb.update(sql) == 0:
                    break
    utils.logInfo("all done!")
    # close connections
    logdb.disconnect()
    gmsdb.disconnect()
# NOTE(review): this chunk begins mid-statement — the parser.add_option(...)
# call these keyword arguments belong to starts before the visible source.
default=False, help="是否存储到目标数据库, 否则只打印sql, 默认不存储")
(options, args) = parser.parse_args()
baseTime = options.baseTime
srvs = options.srvs
oprs = options.oprs
utils.ONLY_SQL = not options.save
utils.OPR_GROUP = props["opr.group"]
# server id list (comma-separated on the command line)
srvList = []
if srvs:
    srvList = map(lambda x: int(x), srvs.split(","))
if oprs:
    oprs = oprs.split(",")
utils.logInfo("baseTime=%s, onlySql=%s, srvs=%s" % (baseTime, utils.ONLY_SQL, srvs))
# bail out silently when the required arguments are missing
if len(baseTime) < 1:
    sys.exit()
if not srvList:
    sys.exit()
try:
    main(props, baseTime, srvList, oprs)
except:
    # NOTE(review): bare except logs every failure, including SystemExit —
    # consider narrowing to `except Exception:`
    utils.logException('Exception')
# Smoke-test script: exercise the utils logging helpers and basic
# introspection of class/instance dicts and builtin container types.
t1 = test1()
print(test1.__dict__)  # class attributes
print(t1.__dict__)     # instance attributes
print(type([]))
print(type(()))
print(type({}))
utils.logDebug("abc")
utils.logInfo("abc")
utils.logWarning("abc")
utils.logError("abc")
utils.logCritical("abc")
utils.logException("abc")
try:
    # raise Exception('msg') is valid in both Python 2 and 3;
    # the original "raise Exception, 'msg'" form is Python-2-only syntax
    raise Exception('this is a exception')
except Exception:
    utils.logException('sss')
# print("---".join(dict(a=1,b=2).keys()))
# print(dict(a=1,b=2).values())
l1 = []
l2 = []
l1.append([1, 2, 3])
l2.append("a")
print(l1)
print(l2)