def UpdateExtensionInfo(mssqlDict):
    mssqlHandle = MssqlAPI(server = mssqlDict['SERVER'],
                           db = mssqlDict['DB'],
                           user = mssqlDict['USER'],
                           pwd = mssqlDict['PWD'])
    logFile = LogFile(name = 'Indirection')
    try:
        mssqlHandle.sqlExecuteProc('usp_UpdateFrom251', ())
        mssqlHandle.sqlCommit()
        logFile.logInfo('Execute SQL Proc "usp_UpdateFrom251" succeed.')
    except:
        logFile.logInfo('Execute SQL Proc "usp_UpdateFrom251" failed.')
        print traceback.format_exc()
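# Hedged usage sketch (not part of the original source): UpdateExtensionInfo()
# reads only the four keys shown below from mssqlDict, so a caller could wire
# it up like this. The server address, database name, and credentials are
# placeholder values, not real settings.
exampleMssqlDict = {
    'SERVER': 'mssql.example.com',  # placeholder host
    'DB':     'SecurityDB',         # placeholder database name
    'USER':   'reader',             # placeholder credentials
    'PWD':    'secret',
}
UpdateExtensionInfo(exampleMssqlDict)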
def getLogFilesFromData(self, dataDom, cubeName):
    logFilesDom = dataDom.getElementsByTagName('logFile')
    if len(logFilesDom) == 0:
        logger.warning('%s: logFile not configured, service logs will not be collected' % cubeName)
        return []
    logFiles = []
    for logFileDom in logFilesDom:
        file = self.getText(logFileDom, 'fileName', False, cubeName)
        # Read the wholeFile setting; defaults to False when absent
        wholeFile = self.getText(logFileDom, 'wholeFile', False, cubeName)
        if wholeFile.lower() == 'true':
            wholeFile = True
        else:
            wholeFile = False
        # Read the monitoring interval; defaults to DEFAULT_LOGFILE_TIME_INTERVAL when absent
        timeInterval = self.getText(logFileDom, 'timeInterval', False, cubeName)
        if timeInterval != '':
            self.assertDigit(timeInterval, cubeName, 'timeInterval')
            timeInterval = int(timeInterval)
        else:
            timeInterval = self.DEFAULT_LOGFILE_TIME_INTERVAL
        # Read the keys to collect
        numberKeys = self.getText(logFileDom, 'numberKeys', False, cubeName).split(',')
        stringKeys = self.getText(logFileDom, 'stringKeys', False, cubeName).split(',')
        # Strip leading/trailing whitespace from each key and drop empty entries
        numberKeys = [str(item).strip() for item in numberKeys if str(item).strip()]
        stringKeys = [str(item).strip() for item in stringKeys if str(item).strip()]
        # beforeDays setting, defaults to 0
        beforeDays = 0
        # noTimeStamp setting, defaults to False
        noTimeStamp = False
        # Read the alarm keys
        keyAlarmers = self.parseAlarmKeys(
            self.getText(logFileDom, 'alarmKeys', False, cubeName))
        statisticTool = KeyValueLogTool(numberKeys + stringKeys, timeInterval,
                                        beforeDays, noTimeStamp, logger)
        logFile = LogFile(self.ldap, self.product, self.type, file, timeInterval,
                          numberKeys, stringKeys, statisticTool, wholeFile,
                          keyAlarmers, self.debug, logger, cubeName)
        if logFile.validate():
            logFiles.append(logFile)
    return logFiles
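# For reference, a hand-written sample of the XML that getLogFilesFromData()
# expects under dataDom (assumed layout, inferred only from the tag names the
# parser reads above; the surrounding schema and the exact alarmKeys value
# format are defined elsewhere by getText()/parseAlarmKeys()):
#
#   <logFile>
#       <fileName>/path/to/service.log</fileName>
#       <wholeFile>false</wholeFile>
#       <timeInterval>60</timeInterval>
#       <numberKeys>latency, qps</numberKeys>
#       <stringKeys>errorCode</stringKeys>
#       <alarmKeys>...</alarmKeys>
#   </logFile>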
def main(argv):
    logsList = [LogFile(name) for name in fileNamesList]
    print(logsList)
    gen = SpamGenerator()
    while not isAllComplete(logsList):
        for log in logsList:
            log.addContent(gen.generateMessage(random.randrange(7)))
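# The loop above depends on an isAllComplete() helper and a fileNamesList that
# are not shown in this snippet. A minimal sketch of the helper, assuming
# LogFile exposes an isComplete() method (hypothetical name) that turns True
# once the log has received enough generated content:
def isAllComplete(logs):
    # Keep generating spam until every log file reports completion.
    return all(log.isComplete() for log in logs)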
def dropOriginalData(mongoDict):
    logFile = LogFile(name = 'Indirection')
    for mongo in mongoDict:
        try:
            mongoHandle = MongoAPI(server = mongo['SERVER'], port = mongo['PORT'])
            # drop tables
            splitTableList = ['KDayData', 'KWeeklyData', 'KMonthData']
            mergeTableList = ['CandleDay', 'CandleWeek', 'CandleMonth']
            for splitTable in splitTableList:
                mongoHandle.drop(splitTable)
            for mergeTable in mergeTableList:
                mongoHandle.drop(mergeTable)
            # create indexes
            for mergeTable in mergeTableList:
                mongoHandle.createIndex(mergeTable, 'data', [('_id.IC', pymongo.ASCENDING)])
                mongoHandle.createIndex(mergeTable, 'data', [('TO', pymongo.DESCENDING)])
            logFile.logInfo('Drop original data from server: ' + mongo['SERVER'] + ' succeed.')
            del mongoHandle
        except:
            logFile.logInfo('Drop original data from server: ' + mongo['SERVER'] + ' failed.')
            print traceback.format_exc()
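# Hedged usage sketch (assumed values): dropOriginalData() iterates mongoDict
# and reads only 'SERVER' and 'PORT' from each entry, so the expected shape is
# an iterable of per-server dicts. The hostnames below are placeholders.
exampleMongoDict = [
    {'SERVER': 'mongo-primary.example.com', 'PORT': 27017},
    {'SERVER': 'mongo-backup.example.com',  'PORT': 27017},
]
dropOriginalData(exampleMongoDict)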
def getGCLogFilesFromData(self, dataDom, cubeName):
    gcLogFilesDom = dataDom.getElementsByTagName('gcLogFile')
    if len(gcLogFilesDom) == 0:
        logger.warning('%s: gcLogFile not configured, the service GC logs will not be collected' % cubeName)
        return []
    gcLogFiles = []
    for gcLogFileDom in gcLogFilesDom:
        file = self.getText(gcLogFileDom, 'fileName', False, cubeName)
        # Read the wholeFile setting; defaults to False when absent
        wholeFile = self.getText(gcLogFileDom, 'wholeFile', False, cubeName)
        if wholeFile.lower() == 'true':
            wholeFile = True
        else:
            wholeFile = False
        # Read the monitoring interval; defaults to DEFAULT_GCLOGFILE_TIME_INTERVAL when absent
        timeInterval = self.getText(gcLogFileDom, 'timeInterval', False, cubeName)
        if timeInterval != '':
            self.assertDigit(timeInterval, cubeName, 'timeInterval')
            timeInterval = int(timeInterval)
        else:
            timeInterval = self.DEFAULT_GCLOGFILE_TIME_INTERVAL
        # Read the alarm keys
        if 'on' in self.getText(gcLogFileDom, 'alarm', False, cubeName):
            # There may be multiple gc files, so a deepcopy is needed
            keyAlarmers = self.getGCKeyAlarmers()
        else:
            keyAlarmers = {}
        statisticTool = GCLogTool(logger, self.gcKeys)
        gcLogFile = LogFile(self.ldap, self.product, self.type, file, timeInterval,
                            self.gcKeys, [], statisticTool, wholeFile, keyAlarmers,
                            self.debug, logger, cubeName)
        if gcLogFile.validate():
            gcLogFiles.append(gcLogFile)
    return gcLogFiles
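# Similarly, a hand-written sample of the <gcLogFile> element this parser reads
# (assumed layout, inferred from the tag names above; whether alarming is
# enabled hinges on the <alarm> text containing 'on'):
#
#   <gcLogFile>
#       <fileName>/path/to/service.gc.log</fileName>
#       <wholeFile>false</wholeFile>
#       <timeInterval>60</timeInterval>
#       <alarm>on</alarm>
#   </gcLogFile>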
def __getAnalysisInfo(self, db, condition, queue):
    '''
    private method
    # Fetch the analysis info for the specified securities and push it out
    # through the queue, one security code at a time.
    # Outgoing messages use the following dictionary format:
    {'DB' : 'DAY/WEEK/MONTH',
     'MSG': [{'INNER_CODE':<INNER_CODE>, 'LCLOSE':<LCLOSE1>, 'TOPEN':<TOPEN1>,
              'TCLOSE':<TCLOSE1>, 'THIGH':<THIGH1>, 'TLOW':<TLOW1>,
              'TVOLUME':<TVOLUME1>, 'TVALUE':<TVALUE1>, ... ...},
             {'INNER_CODE':<INNER_CODE>, 'LCLOSE':<LCLOSE2>, 'TOPEN':<TOPEN2>,
              'TCLOSE':<TCLOSE2>, 'THIGH':<THIGH2>, 'TLOW':<TLOW2>,
              'TVOLUME':<TVOLUME2>, 'TVALUE':<TVALUE2>, ... ...},
             ... ...]
    }
    '''
    logFile = LogFile(name = 'Indirection')
    try:
        if ((db == 'WEEK') or (db == 'MONTH')):
            field = '''
                [SECURITY_ANALYSIS].[FIRST_TRADE_DATE] AS FDATE,
                [SECURITY_ANALYSIS].[LAST_TRADE_DATE] AS LDATE, '''
        else:
            field = '''
                [SECURITY_ANALYSIS].[TRADE_DATE] AS FDATE,
                [SECURITY_ANALYSIS].[TRADE_DATE] AS LDATE, '''
        # Construct the SQL command
        sqlCmd = '''
            SELECT
                [SECURITY_ANALYSIS].[INNER_CODE] AS INNER_CODE,
                [SECURITY_ANALYSIS].[LCLOSE] AS LCLOSE,
                [SECURITY_ANALYSIS].[TOPEN] AS TOPEN,
                [SECURITY_ANALYSIS].[TCLOSE] AS TCLOSE,
                [SECURITY_ANALYSIS].[THIGH] AS THIGH,
                [SECURITY_ANALYSIS].[TLOW] AS TLOW,
                [SECURITY_ANALYSIS].[TVOLUME] AS TVOLUME,
                [SECURITY_ANALYSIS].[TVALUE] AS TVALUE,
                [SECURITY_ANALYSIS].[CHNG] AS CHNG,
                [SECURITY_ANALYSIS].[EXCHR] AS EXCHR, ''' + field + '''
                ''' + dbDict[db]['TRADEDATE'] + '''
            FROM ''' + dbDict[db]['TABLE']['ANA'] + ''' AS [SECURITY_ANALYSIS]
            WHERE ''' + condition + '''
            ORDER BY INNER_CODE, ''' + dbDict[db]['ORDER']
        records = self.mssqlHandle.sqlQuery(sqlCmd)
        #records = self.mssqlHandle.sqlQueryProc('p_list_all', (db,))
        logFile.logInfo('Get analysis info succeed.')
        innerCode = ''
        #securityInfoDict = {}  # alternative when memory is plentiful
        # List of multi-day analysis records for a single security
        securityInfoList = list()
        # Iterate over the query records
        for rec in records:
            infoDict = {}
            # Iterate over the base info columns
            for column in dbDict[db]['COLUMN']['ANA']:
                # Store each query result value keyed by column name
                infoDict[column] = rec[column]
            #if ((db == 'WEEK') or (db == 'MONTH')):
            infoDict['FDATE'] = rec['FDATE']
            infoDict['LDATE'] = rec['LDATE']
            # Flush the previous security's records when the security code changes
            if ((rec['INNER_CODE'] != innerCode) and (innerCode != '')):
                queue.put({'DB':db, 'MSG':securityInfoList})
                #securityInfoDict[innerCode] = securityInfoList  # alternative when memory is plentiful
                securityInfoList = list()
                innerCode = rec['INNER_CODE']
            elif (innerCode == ''):
                innerCode = rec['INNER_CODE']
            # Append the record dict to the result list
            securityInfoList.append(infoDict)
        else:
            # for-else: flush the last security's records once the loop completes
            #securityInfoDict[innerCode] = securityInfoList  # alternative when memory is plentiful
            queue.put({'DB':db, 'MSG':securityInfoList})
            #securityInfoList = list()
        #for innerCode in securityInfoDict:  # alternative when memory is plentiful
        #    queue.put({'DB':db, 'MSG':securityInfoDict[innerCode]})
        logFile.logInfo('Standardize analysis info succeed.')
    except IndexError:
        logFile.logInfo('Invalid table name.')
        print 'Invalid table name.'
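# Hedged consumer sketch (not part of the original code): a downstream worker
# would receive messages in the {'DB': ..., 'MSG': [...]} shape documented in
# the docstring above. The function name and loop structure here are
# assumptions for illustration only.
def consumeAnalysisQueue(queue):
    while True:
        item = queue.get()
        db = item['DB']            # 'DAY', 'WEEK' or 'MONTH'
        for rec in item['MSG']:    # one dict per trade period of a single security
            innerCode = rec['INNER_CODE']
            # ... compute derived data for innerCode here ...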
def CorrectAllSecurityInfo(mssqlDict, mongoDict, processDict, queueDict):
    startTime = datetime.now()
    print 'Start at:', startTime
    print 'DisposeSplitSecurity at', datetime.now()
    # Fetch the split table names
    splitSecurityDict = DisposeSplitSecurity(mssqlDict, mongoDict)
    print 'DisposeAnalysisInfoProc at', datetime.now()
    # Query security analysis data
    analysisQueue = Queue(queueDict['ANA'])
    monitorProcessDict = dict()
    analysisInfoProcessList = list()
    for db in dbDict:
        process = Process(target=DisposeAnalysisInfoProc, args=(mssqlDict, db, analysisQueue))
        process.start()
        analysisInfoProcessList.append(process)
    monitorProcessDict['Analysis'] = analysisInfoProcessList
    #print 'DisposeCalculateExtensionProc at', datetime.now()
    # Calculate security extension (derived) data
    #extensionQueue = Queue(queueDict['EXT'])
    mongoQueue = Queue(queueDict['MONGO'])
    excludeRightDividend = ExcludeRightDividend(mssqlDict['SERVER'], mssqlDict['DB'],
                                                mssqlDict['USER'], mssqlDict['PWD'])
    excludeRightDividendDict = excludeRightDividend.GetExcludeRightDividendInfo()
    calculateExtensionProcList = DisposeCalculateExtensionProc(processDict, excludeRightDividendDict,
                                                               analysisQueue, mongoQueue)
    monitorProcessDict['Calculate'] = calculateExtensionProcList
    #print 'DisposeExtensionInfoProc at', datetime.now()
    # Start the processes that write extension data to MSSQL
    #extentionInfoProcList = DisposeExtensionInfoProc(processDict, mssqlDict, extensionQueue, mongoQueue)
    #monitorProcessDict['Extension'] = extensionInfoProcList
    #print 'DisposeInsertInfoIntoMongoDbProc at', datetime.now()
    # Start the processes that write data to MongoDB
    insertInfoIntoMongoDbProcList = DisposeInsertInfoIntoMongoDbProc(processDict, mongoDict,
                                                                     mongoQueue, splitSecurityDict)
    monitorProcessDict['Mongo'] = insertInfoIntoMongoDbProcList
    terminateNum = 0
    while True:
        # Remove processes that have already terminated (iterate over a copy so
        # removing entries does not skip elements)
        for processName in monitorProcessDict:
            for process in monitorProcessDict[processName][:]:
                if (process.is_alive() == False):
                    monitorProcessDict[processName].remove(process)
        # Check the calculate, SQL-write and MongoDB-write processes; restart any
        # process that terminated unexpectedly
        while (len(monitorProcessDict['Calculate']) < processDict['CALCULATE']):
            process = Process(target=CalculateExtensionProc,
                              args=(excludeRightDividendDict, analysisQueue, mongoQueue))
            process.start()
            monitorProcessDict['Calculate'].append(process)
        while (len(monitorProcessDict['Mongo']) < processDict['MONGO']):
            process = Process(target=InsertInfoIntoMongoDbProc,
                              args=(mongoDict, mongoQueue, splitSecurityDict))
            process.start()
            monitorProcessDict['Mongo'].append(process)
        print datetime.now()
        for processName in monitorProcessDict:
            print processName, len(monitorProcessDict[processName])
        print 'analysisQueue:', analysisQueue.qsize()
        #print 'extensionQueue:', extensionQueue.qsize()
        print 'mongoQueue:', mongoQueue.qsize()
        if ((len(monitorProcessDict['Analysis']) == 0) and
                (analysisQueue.qsize() == 0) and (mongoQueue.qsize() == 0)):
            terminateNum += 1
            if (terminateNum >= 10):
                # Shut down all remaining processes
                for processName in monitorProcessDict:
                    for process in monitorProcessDict[processName]:
                        if (process.is_alive() == True):
                            process.terminate()
                break
        sleep(10)
    finalTime = datetime.now()
    deltaTime = finalTime - startTime
    totalTime = deltaTime.total_seconds()
    totalHour = totalTime // 3600
    totalMin = (totalTime % 3600) // 60
    totalSec = totalTime % 60
    print("Total time: %d(h)%d(m)%d(s)" % (totalHour, totalMin, totalSec))
    logFile = LogFile(name = 'Indirection')
    logFile.logInfo('Correct all security info succeed, total time: ' + str(totalHour) + '(h)' +
                    str(totalMin) + '(m)' + str(totalSec) + '(s)')