def parseZipLog(fileName, resonFile: TextIOWrapper, packageName: str = DEFAULT_PACKAGE, removeDir=True, callbackMsg=None):
    logUtils.info("parLogZip : fileName={}, packageName={}".format(
        fileName.replace('\\', '/'), packageName))
    if callbackMsg:
        callbackMsg('Parsing {}'.format(basename(fileName)))
    # Skip files that are not zip archives.
    if not zipfile.is_zipfile(fileName):
        exit(-1)
    # Split into directory and full file name.
    (filepath, tempfilename) = os.path.split(fileName)
    # Split into base name and extension.
    (name, extension) = os.path.splitext(tempfilename)
    # Directory the archive is extracted into.
    tempDir = sep.join([dirname(fileName), name])
    # Remove a stale extraction directory if it already exists.
    if isdir(tempDir):
        try:
            rmtree(tempDir)
        except Exception:
            logUtils.logException('Task failed')
    # Create the extraction directory.
    makedirs(tempDir)
    # Extract the zip into it.
    toolUtils.unzip_single(fileName, tempDir)
    # Parse the extracted files.
    globalValues: GlobalValues = parseLogDir(tempDir, resonFile, packageName)
    # Remove the temporary extraction directory.
    if removeDir:
        rmtree(tempDir)
    return globalValues
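# Usage sketch (illustrative, not part of the original module): parse one
# downloaded zip and write the analysis into a report file next to it. The
# helper name and both paths are hypothetical; the parseZipLog call itself
# mirrors the standalone parsing snippet near the end of this section.
def _example_parse_single_zip():
    zipPath = sep.join(['D:', 'workspace', 'LOG-494778', 'demo.zip'])  # hypothetical input
    reportPath = sep.join([dirname(zipPath), 'reason.txt'])  # hypothetical report file
    with open(reportPath, mode='w', encoding='utf-8') as resonFile:
        return parseZipLog(zipPath, resonFile, removeDir=True,
                           callbackMsg=lambda msg: logUtils.info(msg))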
def __read_android_config__(file: str, config: dict):
    logUtils.info('Reading config file {}'.format(file))
    if file and file.endswith('.xml') and isfile(file):
        dom = minidom.parse(file)
        root = dom.documentElement
        for node in [child for child in root.getElementsByTagName("file")
                     if child.nodeType == Node.ELEMENT_NODE]:
            fileName = node.getAttribute("fileName")
            path = node.getAttribute("path")
            progress = node.getAttribute("progress")
            action = node.getAttribute("action")
            start = node.getAttribute("start")
            clean = node.getAttribute("clean") == 'yes'
            enable = node.getAttribute("enable") == 'yes'
            delayTime = node.getAttribute("delayTime").strip()
            if delayTime and re.match(r'\d+', delayTime):
                delayTime = int(delayTime)
            else:
                delayTime = 0
            if fileName and path:
                config[fileName] = AndroidFile(fileName, path, progress, action,
                                               start, clean, delayTime, enable)
    else:
        logUtils.warning('Config file {} is invalid'.format(file))
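# Illustrative sketch (an assumption, not taken from the repo) of the XML layout
# __read_android_config__ expects: <file> elements carrying the attributes the
# parser reads above. The root element name and all attribute values here are
# hypothetical; only the attribute names come from the code.
_EXAMPLE_ANDROID_XML = '''<?xml version="1.0" encoding="utf-8"?>
<config>
    <file fileName="system.txt" path="log/system.txt" progress="system_server"
          action="" start="" clean="yes" delayTime="5" enable="yes"/>
</config>
'''


def _example_read_android_config():
    # Write the sample XML to a temporary .xml file and parse it.
    import tempfile
    cfg = {}
    with tempfile.NamedTemporaryFile('w', suffix='.xml', delete=False,
                                     encoding='utf-8') as f:
        f.write(_EXAMPLE_ANDROID_XML)
        tmp = f.name
    __read_android_config__(tmp, cfg)
    return cfg  # {fileName: AndroidFile}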
def download(self, path):
    __createDir__(path)
    fileName = sep.join([path, self.logId + '.zip'])
    logUtils.info('Downloading: {}'.format(fileName.replace('\\', '/')))
    # Already downloaded.
    if zipfile.is_zipfile(fileName):
        return False
    req = urllib.request.Request(self.getUrl(), headers=headers)
    resp: HTTPResponse = getOpener().open(req)
    if 'zip' in resp.headers['Content-Type']:
        data = resp.read()
        temp = fileName + '__temp'
        with open(temp, "wb") as code:
            code.write(data)
            code.flush()
        if zipfile.is_zipfile(temp):
            ##############start lock#############
            LockUtil.acquire()
            z = zipfile.ZipFile(temp, 'a')
            # Add a readme with the log's metadata to the archive.
            readme = self.logId + '.txt'
            with open(readme, "w") as code:
                for key, value in self.row.items():
                    space = ' ' * max(0, 20 - len(key))
                    code.write('{}{}: {}\n'.format(key, space, value))
                code.flush()
            try:
                if isfile(readme):
                    z.write(readme)
                    remove(readme)
            except Exception as e:
                print('file {}, err: {}'.format(readme, e))
            finally:
                z.close()
                LockUtil.release()
            ##############end lock#############
            move(temp, fileName)
            return True
        else:
            remove(temp)
    elif 'text' in resp.headers['Content-Type']:
        err = '--url={}, resp={}, jira={}, version={}'.format(
            self.getUrl(), resp.read().decode('utf-8'), self.jiraId,
            self.productVersion)
        downLoadErrs.append(err)
        print(err)
    return False
def getAndroidFileConfig(configFile: str = sep.join([dirname(sys.argv[0]), 'config', 'android.xml'])):
    # Watch the config file for changes.
    if configFile and configFile not in GLOBAL_VALUES.androidConfigFiles:
        GLOBAL_VALUES.androidConfigFiles.append(configFile)
        addFileObserver(FileObserver(configFile, __onAndroidConfigChange__))
    # Read values from the system config.
    if isfile(ANDROID_CONFIG_XML):
        __read_android_config__(ANDROID_CONFIG_XML, ANDROID_FILE_CONFIG)
    # Read values from the given config file.
    if isfile(configFile):
        __read_android_config__(configFile, ANDROID_FILE_CONFIG)
    else:
        logUtils.info('File does not exist ANDROID_FILE_CONFIG={}'.format(configFile))
    return ANDROID_FILE_CONFIG
def getUserFileConfig(configFile: str = sep.join([dirname(sys.argv[0]), 'config', 'config.ini'])):
    # Watch the config file for changes.
    if configFile and configFile not in GLOBAL_VALUES.userConfigFiles:
        GLOBAL_VALUES.userConfigFiles.append(configFile)
        addFileObserver(FileObserver(configFile, __onUserConfigChange__))
    # Read values from the system config.
    if isfile(USER_CONFIG_INI):
        __read_user_config__(USER_CONFIG_INI, USER_FILE_CONFIG)
    # Read values from the given config file.
    if isfile(configFile):
        __read_user_config__(configFile, USER_FILE_CONFIG)
    else:
        logUtils.info('File does not exist USER_FILE_CONFIG={}'.format(configFile))
    return USER_FILE_CONFIG
def downloadJira(self):
    if self.check():
        def callbackMsg(msg: str):
            if self.gressBar:
                self.gressBar.updateMsg(msg)

        def downCallback():
            time.sleep(1)
            if self.gressBar:
                self.gressBar.quit()
            if len(downloadLog.downLoadErrs) > 0:
                file = sep.join([self.savePath, 'downloadError.txt'])
                with open(file, mode='w') as errFile:
                    errFile.write('\n'.join(downloadLog.downLoadErrs))
            startfile(self.savePath)

        addWorkDoneCallback(downCallback)
        self.gressBar = widget.GressBar()
        if len(self.jiras) == 0:
            self.jiras = ['']
        logUtils.info('jira={}, model={}, version={}'.format(
            self.jiras, self.models, self.versions))

        def getAction(outPath, callback, jiraId, models, versions, anrParse):
            def downloadAction():
                downloadLog.download(outPath=outPath,
                                     callbackMsg=callback,
                                     jiraId=jiraId,
                                     productModels=models,
                                     productVersions=versions,
                                     parse=anrParse,
                                     async_=(len(self.jiras) <= 1))
            return downloadAction

        for jiraId in self.jiras:
            postAction(
                getAction(self.savePath, callbackMsg, jiraId, self.models,
                          self.versions, self.anrParse))
        self.gressBar.start()
def work(thread: LooperThread):
    msg = 'working start in thread name : {}'.format(thread.getName())
    logUtils.info(msg)
    try:
        action()
    except Exception:
        logUtils.logException('Task failed')
    msg = 'working end in thread name : {}'.format(thread.getName())
    logUtils.info(msg)
    # Hand the next queued action to an idle worker thread.
    if not __allWork__.empty():
        for work in __WORK_THREADS__:
            if work.queue.qsize() == 0 and not work.working:
                msg = 'post working name : {}'.format(work)
                logUtils.info(msg)
                work.post(__allWork__.get())
                return
    # When the queue is drained and this is the last busy worker,
    # run the registered "work done" callbacks.
    if __allWork__.empty():
        LockUtil.acquireLock(__WORK_THREAD_LOCK__)
        workCount = 0
        for t in __WORK_THREADS__:
            if t.working:
                workCount = workCount + 1
        if workCount == 1:
            while not __Work_Done__.empty():
                callback = __Work_Done__.get()
                callback()
        LockUtil.releaseLock(__WORK_THREAD_LOCK__)
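# Usage sketch (illustrative): driving the worker pool above. postAction()
# queues a callable for an idle LooperThread and, based on work() above, the
# callbacks registered via addWorkDoneCallback() appear to run once the work
# queue drains. The helper name and the action bodies are hypothetical.
def _example_post_work():
    def done():
        logUtils.info('all queued actions finished')

    addWorkDoneCallback(done)
    for i in range(3):
        postAction(lambda i=i: logUtils.info('action {} running'.format(i)))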
def ssh():
    logUtils.info('APP_CONFIG_PATH={}'.format(toolConfig.APP_CONFIG_PATH))
    username = USER_FILE_CONFIG[toolConfig.LABORATORY_FTP][toolConfig.USER_NAME]
    password = USER_FILE_CONFIG[toolConfig.LABORATORY_FTP][toolConfig.PASS_WORD]
    host = USER_FILE_CONFIG[toolConfig.LABORATORY_FTP][toolConfig.HOST_NAME]
    port = USER_FILE_CONFIG[toolConfig.LABORATORY_FTP][toolConfig.PORT]
    print(username)
    print(password)
    print(host)
    print(port)
    transport = paramiko.Transport((host, int(port)))
    transport.banner_timeout = 30
    transport.connect(username=username, password=password)
    sftp = paramiko.SFTPClient.from_transport(transport)
    print(sftp)
    while True:
        print(sftp.listdir())
        # sftp.put('text1', '/home/pi/python_code/python_ssh/socketsever.py')
        # sftp.get('remove_path', 'local_path')
        # print('\n,'.join([str(item) for item in ANDROID_FILE_CONFIG.values()]))
        time.sleep(20)
    transport.close()
def parJson(cls, resp: dict, url=None):
    code: int = resp['code']
    message: str = resp['message']
    if code == 0 and 'data' in resp:
        data: dict = resp['data']
        total: int = data['total']
        # offset: int = data['offset']
        # limit: int = data['limit']
        # sort: str = data['sort']
        rows = data['rows']
        logs = []
        logUtils.info('getAllJiraLog rows={}'.format(rows))
        for row in rows:
            log = __JiraLog__(row)
            logs.append(log)
        return total, logs
    else:
        errMsg = 'url={}, resp={}'.format(url, resp)
        downLoadErrs.append(errMsg)
        print(errMsg)
        return 0, []
def __onUserConfigChange__(event: FileEvent):
    if event and event.file != USER_CONFIG_INI:
        logUtils.info(event)
        if event.action == FileEvent.MODIFIED or event.action == FileEvent.CREATED:
            __read_user_config__(event.file, USER_FILE_CONFIG)
            __write_user_config__(USER_CONFIG_INI, USER_FILE_CONFIG)
def __read_user_config__(configFile: str, config: dict):
    # Keys read per section, with the default value used when a key is missing.
    SECTIONS = {
        LABORATORY: {HOST_NAME: '10.204.80.68',
                     USER_PATH: '软件一部\肖良5131',
                     USER_NAME: 'swlab\ztemt-sw1',
                     PASS_WORD: 'C10*98765#'},
        LABORATORY_FTP: {HOST_NAME: '10.204.80.68',
                         PORT: '9018',
                         ENCODING: 'gbk',
                         USER_PATH: 'to实验室',
                         USER_NAME: 'ztemt-sw1',
                         PASS_WORD: 'C10*98765#'},
        COMPILER_SSH: {HOST_NAME: '192.168.1.130',
                       PORT: '22',
                       PRIVATE_KEY: '',
                       USER_NAME: 'xiaoliang',
                       PASS_WORD: '123456'},
        COMPILER_SAMBA: {HOST_NAME: '192.168.1.130',
                         USER_NAME: 'xiaoliang',
                         PASS_WORD: '1234',
                         USER_PATH: 'share'},
    }

    # Fill in default values for any missing section/key.
    def defConfig():
        for section, defaults in SECTIONS.items():
            if section not in config:
                config[section] = dict()
            for key, default in defaults.items():
                if key not in config[section]:
                    config[section][key] = default

    # Read values from the .ini file, overriding whatever is already in config.
    if isfile(configFile) and configFile.endswith('.ini'):
        logUtils.info('Reading config file {}'.format(configFile))
        customerConf = configparser.ConfigParser()
        customerConf.read_file(codecs.open(configFile, mode='r', encoding='utf-8-sig'))
        # customerConf.read(configFile, encoding='utf-8-sig')
        # Copy the lab share, lab FTP, compiler SSH and compiler samba settings.
        for section, keys in SECTIONS.items():
            if section not in config:
                config[section] = dict()
            if section in customerConf:
                for key in keys:
                    if key in customerConf[section]:
                        config[section][key] = customerConf[section][key]
    else:
        logUtils.warning('Config file {} is invalid'.format(configFile))
def __onAndroidConfigChange__(event: FileEvent):
    if event and event.file != ANDROID_CONFIG_XML:
        logUtils.info(event)
        if event.action == FileEvent.MODIFIED or event.action == FileEvent.CREATED:
            __read_android_config__(event.file, ANDROID_FILE_CONFIG)
            __write_android_config__(ANDROID_CONFIG_XML, ANDROID_FILE_CONFIG)
if __name__ == '__main__1':
    ANDROID_FILE_CONFIG
    host = USER_FILE_CONFIG[toolConfig.LABORATORY][toolConfig.HOST_NAME]
    path = USER_FILE_CONFIG[toolConfig.LABORATORY][toolConfig.USER_PATH]
    print(os.listdir('//{}/{}'.format(host, path)))
    logUtils.info('APP_CONFIG_PATH={}'.format(toolConfig.APP_CONFIG_PATH))
    username = USER_FILE_CONFIG[toolConfig.LABORATORY_FTP][toolConfig.USER_NAME]
    password = USER_FILE_CONFIG[toolConfig.LABORATORY_FTP][toolConfig.PASS_WORD]
    host = USER_FILE_CONFIG[toolConfig.LABORATORY_FTP][toolConfig.HOST_NAME]
    port = int(USER_FILE_CONFIG[toolConfig.LABORATORY_FTP][toolConfig.PORT])
    print(username)
    print(password)
    print(host)
    print(port)
    ftp = FTP()
    # Configure the FTP client.
    ftp.encoding = 'gbk'
    ftp.set_debuglevel(0)  # Debug level 2 would print verbose protocol info.
    ftp.connect(host, port)  # FTP server and port.
    ftp.login(username, password)  # Login user name and password.


class FtpFile:
def getAllJiraLog(jiraId: str = None,
                  productModel: str = None,
                  callbackMsg=None,
                  order: str = 'asc',
                  limit: int = 30,
                  productVersion=None,
                  tfsId=None,
                  hasFile='Y',
                  keyInfo=None):
    '''
    :param jiraId:
    :param productModel: device model
    :param order: reportDate desc/asc; separate multiple fields with spaces
    :param limit: page size per request
    :param productVersion: build version
    :param tfsId: log id
    :param hasFile: whether the server has the file stored
    :return: all downloadable log entries
    '''
    # Example query string:
    # order=asc&limit=30&offset=0&productModel=NX629J&jiraId=LOG-67680&productVersion=NX629J_Z0_CN_VLF0P_V234&hasFile=Y&rooted=y
    if callbackMsg:
        callbackMsg('Fetching jira info...')
    filters = list()
    # {productVersion: [{hbaseRowid: json}, {hbaseRowid: json}]}
    allLog = list()
    logD = dict()  # {productVersion: [log]}
    filters.append('order={}'.format(order))
    filters.append('limit={}'.format(limit))
    filters.append('offset={}')
    if productModel and len(productModel) > 0:
        filters.append('productModel={}'.format(productModel))
    if tfsId and len(tfsId) > 0:
        filters.append('tfsId={}'.format(tfsId))
    if jiraId and len(jiraId) > 0:
        filters.append('jiraId={}'.format(jiraId))
    if productVersion and len(productVersion) > 0:
        filters.append('productVersion={}'.format(productVersion))
    if keyInfo and len(keyInfo) > 0:
        filters.append('keyInfo={}'.format(keyInfo))
    filters.append('hasFile={}'.format(hasFile))
    # Example full URL:
    # https://log-list.server.nubia.cn/log/list.do?order=asc&limit=30&offset=0&productModel=NX629J&tfsId=jEUd8c.RhJxQN&jiraId=LOG-495986&productVersion=NX629J_Z0_CN_VLF0P_V235&hasFile=Y
    size = 0
    while True:
        url = __LIST__URL__ + '&'.join(filters).format(size * limit)
        size = size + 1
        logUtils.info('getAllJiraLog url={}'.format(url))
        req = urllib.request.Request(url, headers=headers)
        resp: HTTPResponse = getOpener().open(req)
        text = json.loads(resp.read().decode('utf-8'))
        total, logs = __JiraLog__.parJson(text, url)
        print('all log total={} size={}'.format(total, len(logs)))
        if not logs:
            break
        for log in logs:
            if not inList(log, allLog):
                allLog.append(log)
        if len(logs) == 0 or len(allLog) >= total:
            return allLog
    return allLog
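# Usage sketch (illustrative): list the downloadable logs for one jira id and
# device model, mirroring how download() below calls getAllJiraLog(). The
# helper name is hypothetical; the jira id and model are the sample values
# from the example query above.
def _example_list_logs():
    logs = getAllJiraLog(jiraId='LOG-67680', productModel='NX629J',
                         callbackMsg=lambda msg: logUtils.info(msg))
    for log in logs:
        logUtils.info('logId={}, version={}'.format(log.logId, log.productVersion))
    return logs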
def parseLogDir(destDir: str, resonFile: TextIOWrapper, packageName: str = DEFAULT_PACKAGE):
    # Shared state for this parse run.
    globalValues = GlobalValues()

    # Helper: summary text goes both to the in-memory message list and to the
    # reason file (replaces the repeated append/writelines pairs).
    def writeMsg(text: str):
        globalValues.showMessage.append(text)
        resonFile.writelines(text)

    # All files under the extracted directory, grouped by log type.
    allFiles = toolUtils.getAllFileName(destDir)
    systemFiles = [file for file in allFiles if 'system.txt' in file]
    systemFiles.sort(reverse=True)
    eventFiles = [file for file in allFiles if 'events.txt' in file]
    eventFiles.sort(reverse=True)
    mainFiles = [file for file in allFiles if 'main.txt' in file]
    mainFiles.sort(reverse=True)
    radioFiles = [file for file in allFiles if 'radio.txt' in file]
    radioFiles.sort(reverse=True)
    kernelFiles = [file for file in allFiles if 'kernel.txt' in file]
    kernelFiles.sort(reverse=True)
    crashFiles = [file for file in allFiles if 'crash.txt' in file]
    # ANR trace files for the target package.
    anrFiles = [
        file for file in allFiles
        if sep.join(['anr', 'anr_' + str(packageName)]) in file
    ]
    anrFiles.sort(reverse=False)
    propFiles = [file for file in allFiles if 'system.prop' in file]
    # Parse the prop files to get the device information.
    propMsg = toolUtils.parseProp(propFiles)
    # All log files that need to be parsed.
    parseFiles = []
    parseFiles.extend(systemFiles)
    parseFiles.extend(eventFiles)
    parseFiles.extend(mainFiles)
    parseFiles.extend(radioFiles)
    parseFiles.extend(kernelFiles)
    # Important LogLine objects collected while parsing.
    allLine = []
    # All Anr objects found.
    allAnr = []
    # Find the ANRs from the system log, falling back to the event log.
    systemLog = SystemLog(systemFiles, allAnr, globalValues, packageName)
    systemLog.findAllAnr()
    if len(allAnr) == 0:
        eventLog = EventLog(eventFiles, allAnr, globalValues, packageName)
        eventLog.findAllAnr()
    # Parse every ANR trace file whose pid matches one of the found ANRs.
    mainStacks = list()
    blockStacks = dict()
    pattern = r'anr_[\w|\.]+_([\d]+)_([\d|-]+)'
    parseTracesPids = list()
    tracesLogs = []
    for file in anrFiles:
        match = re.match(pattern, basename(file))
        if match:
            pid = match.group(1)
            if pid not in parseTracesPids:
                parseTracesPids.append(pid)
                willParser = False
                for anr in allAnr:
                    if str(pid) == str(anr.pid):
                        willParser = True
                if willParser:
                    log(file)
                    trace = TracesLog(file, globalValues, packageName)
                    trace.parser()
                    tracesLogs.append(trace)
                    # Collect the main-thread stack when one was found.
                    stack: ThreadStack = trace.getMainStack()
                    if stack != None:
                        mainStacks.append(stack)
    # Last main log line; used to verify the main log covers the ANR time.
    mainLine = None
    # Latest ANR time; if mainLine is older than this, the main log is incomplete.
    anrTimeFloat = 0
    for file in parseFiles:
        log('--' + file + '--')
        with open(file, encoding=toolUtils.checkFileCode(file)) as mFile:
            # Global state: the file currently being parsed.
            globalValues.currentFile = file
            linenum = 0
            # Whether the current file is a main log.
            isMainLine = 'main.txt' in file
            while True:
                line = mFile.readline()
                linenum = linenum + 1
                if not line:
                    break
                line = line.strip()
                temp = LogLine(line, linenum, globalValues)
                if temp.isLogLine:
                    # Remember the newest main log line.
                    if isMainLine:
                        if mainLine == None or temp.timeFloat > mainLine.timeFloat:
                            mainLine = temp
                    if temp.pid == temp.tid:
                        for anr in allAnr:
                            temp.addAnrMainLog(anr)
                    # Parse this line.
                    parseLine(allAnr, allLine, temp, packageName)
    log('####################start write######################')
    if GLOBAL_VALUES.only_filter:
        for line in allLine:
            if line.filter:
                start = len(dirname(dirname(dirname(destDir)))) + 1
                resonFile.writelines("filter: in file {} -> line={}\n".format(
                    line.file[start:], line.linenum))
                resonFile.writelines("\t{}\n".format(line.line.strip()))
        return globalValues
    # Write the device properties to the file.
    for (key, value) in propMsg.items():
        writeMsg("{}:{}\n".format(key, value))
    writeMsg('\n')
    # Attach the corresponding am_anr lines to the key information.
    for anr in allAnr:
        if not anr.anrCoreLine and anr.anrCoreReserveLine:
            anr.setCoreLine(anr.anrCoreReserveLine)
        anr.computerAnrTime()
        anr.findAllCoreLine(allLine)
        if len(anr.systemAnr.lines) >= 8:
            for line in anr.systemAnr.lines[0:8]:
                allLine.append(line)
    # pids of the processes that reported an ANR; matched against trace stacks.
    pids = []
    # Write every ANR summary to the file.
    for anr in allAnr:
        pids.append(anr.pid)
        writeMsg("pid:" + str(anr.pid) + '\n' + "time:" + str(anr.anrTimeStr) +
                 '\n' + "reason:" + anr.anrReason + '\n\n')
        mainMsg: list = anr.addMainLogBlock(allLine)
        if mainMsg:
            font = mainMsg[0]
            back = mainMsg[1]
            writeMsg('Main thread blocked: {} ==> {}\n\t{}\n\t{}'.format(
                font.timeStr, back.timeStr, font.line, back.line) + '\n\n')
        startDelayLine = anr.anrCoreLine
        key = lambda line: line.delayStartTimeFloat
        if anr.anrCoreLines:
            writeMsg('Core log:\n')
            delayLines = [delayLine for delayLine in anr.anrCoreLines
                          if delayLine.isDelayLine]
            for line in delayLines:
                writeMsg('\t' + line.line + '\n')
                writeMsg("\t\tstartTime:{}\n".format(line.delayStartTimeStr))
            delayLines = sorted(delayLines, key=key, reverse=True)
            for line in delayLines:
                if startDelayLine == None or (
                        line.delayStartTimeFloat < startDelayLine.delayStartTimeFloat
                        and line.timeFloat > startDelayLine.delayStartTimeFloat):
                    startDelayLine = line
            writeMsg('\n')
        # Write the blocked main-thread stacks for this ANR's pid.
        for stack in [item for item in mainStacks
                      if str(item.pid) == str(anr.pid)]:
            if stack:
                writeMsg('\t\nmain pid=' + str(stack.pid) + ' time=' +
                         str(stack.pidStack.time) + ' java stack:' + '\t\n\t' +
                         str(stack.top) + '\n')
                writeMsg('\t\t' + '\n\t\t'.join(
                    stack.javaStacks if len(stack.javaStacks) < 10
                    else stack.javaStacks[0:10]))
                writeMsg('\n\n')
        if startDelayLine:
            writeMsg('Initial blocking log:\n' + '\t' + startDelayLine.line +
                     "\n\t\tstartTime:{}\n".format(startDelayLine.delayStartTimeStr) + '\n')
        log(anr.anrTimeStr)
        log(anr.anrTimeFloat)
        # Track the latest ANR time to judge whether the main log is complete.
        if anr.anrTimeFloat > anrTimeFloat:
            anrTimeFloat = anr.anrTimeFloat
        log(anr.anrReason)
    # Sort the key lines by time.
    allLine.sort(key=lambda line: line.timeFloat)
    # Check whether the main log covers the ANR time.
    if mainLine != None and (mainLine.timeFloat < anrTimeFloat):
        log("main log incomplete")
        writeMsg("main log incomplete time:" +
                 str(toolUtils.getTimeStamp(mainLine.timeFloat)) + '\n\n')
    # Write the pid -> thread name mapping to the file.
    if len(globalValues.pidMap) > 0:
        writeMsg("Thread names:\n\t")
        count = 0
        temp = ''
        for key in sorted(globalValues.pidMap.keys()):
            temp = temp + 'pid={} : name={},\t\t'.format(key, globalValues.pidMap[key])
            count = count + 1
            if len(temp) > 80:
                temp = temp + '\n\t'
                writeMsg(temp)
                temp = ''
        if len(temp) > 0:
            writeMsg(temp)
    # Find the most suspicious binder transactions.
    hungerBinder = dict()
    maxBinderNum = 0
    maxBinder = ''
    for key, value in globalValues.hungerBinders.items():
        newKey = '{}:{}'.format(key.split(':')[0], value.split(':')[0])
        if newKey in hungerBinder.keys():
            hungerBinder[newKey] = hungerBinder[newKey] + 1
        else:
            hungerBinder[newKey] = 1
        if maxBinderNum < hungerBinder[newKey]:
            maxBinder = newKey
            maxBinderNum = hungerBinder[newKey]
    if hungerBinder:
        temp = '\n\nAbnormal binders at dump time, {} binder calls waiting:'.format(
            len(globalValues.hungerBinders))
        for key, value in hungerBinder.items():
            if maxBinderNum == value or value > 3 or len(hungerBinder) == 1:
                pids = key.split(':')
                fromPid = int(pids[0])
                if fromPid in globalValues.pidMap:
                    fromPid = '{}({})'.format(fromPid, globalValues.pidMap[fromPid])
                toPid = int(pids[1])
                if toPid in globalValues.pidMap:
                    toPid = '{}({})'.format(toPid, globalValues.pidMap[toPid])
                temp = temp + '\n\tbinder from pid:{} to pid:{}, count = {}.'.format(
                    fromPid, toPid, value)
        writeMsg(temp)
    # Write the blocked/suspicious thread stacks from the traces.
    if len(tracesLogs) > 0 and len(tracesLogs[0].suspiciousStack) > 0:
        writeMsg('\n\nBlocked threads\n')
        for tracesLog in tracesLogs:
            temp = '\n'
            for title, stack in tracesLog.suspiciousStack.items():
                pidStack: PidStack = stack
                if len(pidStack.javaStacks) < 10:
                    temp = '{}\t{}\n\t\t{}\n'.format(
                        temp, title, '\n\t\t'.join(pidStack.javaStacks))
                else:
                    temp = '{}\t{}\n\t\t{}\n\t\t{}\n\t\t{}\n'.format(
                        temp, title, '\n\t\t'.join(pidStack.javaStacks[:5]),
                        '......', '\n\t\t'.join(pidStack.javaStacks[-4:]))
            writeMsg(temp)
        writeMsg('\n')
    log("len == " + str(len(allLine)))
    # No key log lines were found.
    if len(allLine) == 0 and mainLine != None:
        log(mainLine.timeFloat)
        log(anrTimeFloat)
    else:
        # Write every key line to the file.
        resonFile.writelines("\nKey log:\n")
        for line in allLine:
            if line.filter:
                start = len(dirname(dirname(dirname(destDir)))) + 1
                resonFile.writelines("\n filter: in file {} -> line={}\n".format(
                    line.file[start:], line.linenum))
                resonFile.writelines("\t{}\n".format(line.line.strip()))
            else:
                if line.isAnrCore:
                    start = len(dirname(dirname(dirname(destDir)))) + 1
                    resonFile.writelines("\n My Anr core: in file {} -> line={}\n\n".format(
                        line.file[start:], line.linenum))
                resonFile.writelines("\t{}\n".format(line.line.strip()))
                if line.isDelayLine:
                    resonFile.writelines("\t\tstartTime:{}\n".format(line.delayStartTimeStr))
        resonFile.writelines("\n")
    # Report when no ANR was found at all.
    if len(allAnr) == 0:
        temp = "No ANR information found in {}\n".format(basename(destDir))
        logUtils.info(temp)
        writeMsg(temp)
    log('####################end write######################')
    return globalValues
current = sep.join(['anr_papser', 'papser', 'LOG-494778'])
if len(current) > 0:
    papserPath = sep.join(['D:', 'workspace', current])
    if isfile(papserPath):
        foldPath = dirname(abspath(papserPath))
        resonFile = open(file=sep.join([foldPath, basename(foldPath)]),
                         mode='w', encoding='utf-8')
        resonFile.writelines('{}.{}\n\n'.format(
            str(1), abspath(papserPath)[len(dirname(foldPath)) + 1:]))
        parseZipLog(papserPath, resonFile, removeDir=True,
                    callbackMsg=lambda msg: logUtils.info(msg))
        resonFile.writelines('\n\n')
    else:
        parserZipLogDir(papserPath, removeDir=True,
                        callbackMsg=lambda msg: logUtils.info(msg))
    end = time.clock()
    time.strftime("%b %d %Y %H:%M:%S")
    logUtils.info('---used {}----'.format(
        toolUtils.getUsedTimeStr(start, end)))
else:
    papserPath = sep.join(
        ['C:', 'Users', 'Administrator', 'Downloads', 'parse'])
    for foldPath in [
            sep.join([papserPath, child]) for child in listdir(papserPath)
def download(outPath: str,
             callbackMsg,
             jiraId: str,
             productModels: str,
             parse=False,
             async_=False,
             order: str = 'asc',
             limit: int = 30,
             productVersions=[],
             tfsId=None,
             hasFile='Y',
             keyInfo=None):
    logUtils.info(
        'download outPath={}, jiraId={}, productModels={}, parse={}, async_={}, order={}, limit={}, productVersions={}, tfsId={}, hasFile={}, keyInfo={}'
        .format(outPath, jiraId, productModels, parse, async_, order, limit,
                productVersions, tfsId, hasFile, keyInfo))
    downLoadErrs.clear()
    # Final download path: outPath/jiraId/productModel/productVersion/logId.zip
    # e.g. outPath/LOG-67680/NX629J_Z0_CN_VLF0P_V234/YroBCa.Rah5LxM.zip
    opener = getOpener()
    time.sleep(2)
    if not isdir(outPath):
        __createDir__(outPath)
    logs = []
    GLOBAL_VALUES.packageNameDown = (jiraId is None or len(jiraId) == 0)
    if 'systemui' in outPath:
        keyInfo = 'com.android.systemui'
        GLOBAL_VALUES.packageNameDown = False
    # Collect the log list for every requested model/version combination.
    if not productModels and productVersions:
        for productVersion in productVersions:
            for log in getAllJiraLog(jiraId, None, callbackMsg, order, limit,
                                     productVersion, tfsId=tfsId,
                                     hasFile=hasFile, keyInfo=keyInfo):
                logs.append(log)
    elif productModels and not productVersions:
        for productModel in productModels:
            for log in getAllJiraLog(jiraId, productModel, callbackMsg, order,
                                     limit, None, tfsId=tfsId, hasFile=hasFile,
                                     keyInfo=keyInfo):
                logs.append(log)
    else:
        for productModel in productModels:
            for productVersion in productVersions:
                for log in getAllJiraLog(jiraId, productModel, callbackMsg,
                                         order, limit, productVersion,
                                         tfsId=tfsId, hasFile=hasFile,
                                         keyInfo=keyInfo):
                    logs.append(log)
    if callbackMsg:
        callbackMsg('Starting download...')
    logDict = dict()  # {productModel: {productVersion: [logId]}}
    parserPaths = []
    parserLog = dict()
    packageName = None
    for log in logs:
        if GLOBAL_VALUES.packageNameDown:
            parserPath = sep.join([outPath, 'ANR', log.packageName, log.jiraId])
        else:
            parserPath = sep.join([outPath, 'ANR', log.jiraId])
        if parserPath not in parserPaths:
            parserPaths.append(parserPath)
            parserLog[parserPath] = log
        if not packageName or len(packageName) == 0:
            packageName = log.packageName
        model = log.productModel
        version = log.productVersion
        if model not in logDict.keys():
            logDict[model] = dict()
        modelDict = logDict[model]
        if version not in modelDict.keys():
            modelDict[version] = list()
        logList = modelDict[version]
        if not inList(log, logList):
            logList.append(log)
    GLOBAL_VALUES.downOkCount = 0
    GLOBAL_VALUES.downNumber = 0
    if async_:
        queue = Queue(1)
    for model, versions in logDict.items():
        for version in sorted(versions.keys(), reverse=True):
            def getDownAction(__model__, __version__):
                workThread.LockUtil.acquire()
                GLOBAL_VALUES.downNumber = GLOBAL_VALUES.downNumber + 1
                workThread.LockUtil.release()

                def downloadAction():
                    logs = logDict[__model__][__version__]
                    path = None
                    for log in logs:
                        willDown = False
                        if not productVersions or len(productVersions) == 0:
                            willDown = True
                        elif log.productVersion in productVersions:
                            willDown = True
                        print('willDown={}, productVersions={}'.format(
                            willDown, productVersions))
                        if willDown:
                            if GLOBAL_VALUES.packageNameDown:
                                path = sep.join([outPath, log.logType,
                                                 log.packageName, log.jiraId,
                                                 __version__])
                            else:
                                path = sep.join([outPath, log.logType,
                                                 log.jiraId, __version__])
                            if callbackMsg:
                                callbackMsg('Downloading {}'.format(log.logId))
                            log.download(path)
                    # Remove the directory if nothing was downloaded into it.
                    if path and isdir(path) and len(listdir(path)) == 0:
                        rmtree(path)
                    workThread.LockUtil.acquire()
                    GLOBAL_VALUES.downOkCount = GLOBAL_VALUES.downOkCount + 1
                    workThread.LockUtil.release()
                    print('downOkCount={},downNumber={}'.format(
                        GLOBAL_VALUES.downOkCount, GLOBAL_VALUES.downNumber))
                    if async_ and GLOBAL_VALUES.downOkCount >= GLOBAL_VALUES.downNumber:
                        queue.put('{} download complete'.format(
                            outPath.replace('\\', '/')))
                return downloadAction

            action = getDownAction(model, version)
            if async_:
                postAction(action)
            else:
                action()
    if async_:
        print(queue.get())
        time.sleep(1)
    if parse:
        GLOBAL_VALUES.parserOkCount = 0
        GLOBAL_VALUES.parserNumber = 0

        def getParserAction(path, packageName):
            workThread.LockUtil.acquire()
            GLOBAL_VALUES.parserNumber = GLOBAL_VALUES.parserNumber + 1
            workThread.LockUtil.release()

            def action():
                if path and isdir(path):
                    parserZipLogDir(path, packageName=packageName,
                                    removeDir=True, callbackMsg=callbackMsg)
                workThread.LockUtil.acquire()
                GLOBAL_VALUES.parserOkCount = GLOBAL_VALUES.parserOkCount + 1
                workThread.LockUtil.release()
                print('parserOkCount={},workNumber={}'.format(
                    GLOBAL_VALUES.parserOkCount, GLOBAL_VALUES.parserNumber))
                count = GLOBAL_VALUES.parserOkCount - GLOBAL_VALUES.parserNumber
                if async_ and count == 0:
                    queue.put('{} parse complete'.format(
                        outPath.replace('\\', '/')))
            return action

        for path in parserPaths:
            log: __JiraLog__ = parserLog[path]
            if log and log.isAnr():
                action = getParserAction(path, log.packageName)
                if async_:
                    postAction(action)
                else:
                    action()
        if async_:
            logUtils.info(queue.get())
            time.sleep(1)
    return True
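# Usage sketch (illustrative): a synchronous download-and-parse run, following
# the way downloadJira() drives download(). The helper name and the output
# directory are hypothetical; the jira id and model reuse the sample values
# from the example query above.
def _example_download_and_parse():
    download(outPath=sep.join(['D:', 'workspace', 'jira_logs']),
             callbackMsg=lambda msg: logUtils.info(msg),
             jiraId='LOG-67680',
             productModels=['NX629J'],
             productVersions=[],
             parse=True,
             async_=False)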