def main():
    """CLI entry point.

    Usage:
        --anr root_dir
        --android_reboot root_dir
        --ouc_excel excel_filename [dest_path]

    Dispatches to the matching analyser module and logs the elapsed time.
    """
    start_time = datetime.datetime.now()
    if (len(sys.argv) != 3) and (len(sys.argv) != 4):
        flymeprint.error(
            'invalid arguments! two or three parameter needed!\n--anr '
            'root_dir or '
            '--android_reboot root_dir or --ouc_excel excel_filename ['
            'dest_path]')
        return
    # Make the script's own directory the CWD so relative resources resolve.
    if os.path.isabs(sys.argv[0]):
        cdir = os.path.dirname(sys.argv[0])
    else:
        cdir = os.path.dirname(os.path.join(os.getcwd(), sys.argv[0]))
    os.chdir(cdir)
    flymeprint.debug('current dir:' + cdir)
    root_path = sys.argv[2]
    flymeprint.debug('root dir:' + root_path)
    if sys.argv[1] == '--anr':
        anrmanager.start(root_path)
    elif sys.argv[1] == '--android_reboot':
        # Try each reboot analyser in priority order; stop at the first hit.
        if swtmanager.start(root_path)['is_swt']:
            pass
        elif jemanager.start(root_path)['is_je']:
            pass
        elif nemanager.start(root_path)['is_ne']:
            pass
        else:
            # BUG FIX: message previously read 'unkonwn reboot type...'
            flymeprint.warning('unknown reboot type...')
    elif sys.argv[1] == '--ouc_excel':
        # Optional fourth argument is the destination directory.
        dest_dir = sys.argv[3] if len(sys.argv) == 4 else None
        oucmanager.start(sys.argv[2], dest_dir)
    else:
        flymeprint.error('use --anr or --android_reboot or --ouc_excel')
        return
    end_time = datetime.datetime.now()
    flymeprint.debug('Time took: ' + str((end_time - start_time).seconds) +
                     ' seconds')
def parse_swt(root_path):
    """Detect and analyse a system watchdog timeout (SWT) under root_path.

    Returns a dict with at least 'is_swt' (bool); when an SWT is found it
    also carries 'brief_trace' (result of generate_report).
    """
    flymeprint.debug('try swt...')
    cachemanager.root_path = root_path
    res = dict()
    # Primary source: event log; fall back to the debug db watchdog record.
    watchdog_raw_dict = parse_event_log_for_wd(root_path)
    if not watchdog_raw_dict:
        watchdog_raw_dict = parse_db_watchdog(root_path)
        if not watchdog_raw_dict:
            res['is_swt'] = False
            flymeprint.debug('no watchdog keyword found, not swt')
            return res
    res['is_swt'] = True
    flymeprint.debug('swt detected...')
    watchdog_formated_dict = parse_watchdog_raw_dict(watchdog_raw_dict)
    # SurfaceFlinger-hang only if EVERY watchdog entry is flagged as sf hang.
    is_sf_hang = True
    for time_str in watchdog_formated_dict:
        if '__is_sf_hang__' not in watchdog_formated_dict[time_str] or not \
                watchdog_formated_dict[time_str]['__is_sf_hang__']:
            is_sf_hang = False
    if not is_sf_hang:
        if not watchdog_formated_dict:
            flymeprint.error('parse_wachdog_raw_dict error')
            return res
        # system_server traces: data/anr first, debug db as fallback.
        system_server_trace_time_dict = parse_data_anr_trace(root_path)
        if not system_server_trace_time_dict:
            system_server_trace_time_dict = parse_db_trace(root_path)
            if not system_server_trace_time_dict:
                flymeprint.error('no system_server trace time')
                return res
        elif len(system_server_trace_time_dict) <= 1:
            # Matching needs a before/after pair, so one trace is not enough.
            flymeprint.error(
                'only one system_server trace found, not enough to analysis')
            return res
        matched_trace_time = get_matched_trace_time(
            watchdog_formated_dict, system_server_trace_time_dict, False)
        if not matched_trace_time:
            flymeprint.error('no matched time')
            return res
        # Separate, wider-window match used for PackageManager checkers.
        pm_matched_trace_time = get_pm_matched_trace_time(
            system_server_trace_time_dict, watchdog_formated_dict)
        swtobj_dict = get_swtobj_dict(watchdog_formated_dict,
                                      matched_trace_time,
                                      pm_matched_trace_time)
    else:
        flymeprint.debug('sf hang...')
        # sf hang needs no trace matching; SwtObjs carry empty checker lists.
        swtobj_dict = get_swtobj_dict(watchdog_formated_dict, None, None)
    res['brief_trace'] = generate_report(swtobj_dict, root_path)
    return res
def parse_data_anr_entries(data_anr_entries):
    """Collect system_server trace headers from data/anr trace files.

    Returns a dict keyed by trace time string; each value holds the parsed
    time struct, source file name, full file content, pid and header line.
    """
    flymeprint.debug('parsing data anr trace...')
    if not data_anr_entries:
        # Logged but not fatal: the loop below simply runs zero times.
        flymeprint.error('no data anr files found')
    trace_time_dict = dict()
    for file_name in data_anr_entries:
        content = cachemanager.get_file_content(file_name)
        if not content:
            continue
        # Extracts "----- pid N at YYYY-mm-dd HH:MM:SS ----- / Cmd line:
        # system_server" style headers as (head, pid, time_str) tuples.
        matched_list = flymeparser.get_trace_time_pid_for_wd(content)
        if not matched_list:
            continue
        for entry in matched_list:
            head = entry[0]
            pid = entry[1]
            time_str = entry[2]
            # NOTE(review): `datetime.strptime` requires
            # `from datetime import datetime`; other parts of this project
            # use `import datetime` — confirm this module's import style.
            trace_time = datetime.strptime(time_str, '%Y-%m-%d %H:%M:%S')
            # Later headers with an identical time string overwrite earlier
            # ones.
            trace_time_dict[time_str] = {
                'time_struct': trace_time,
                'file_name': file_name,
                'content': content,
                'pid': pid,
                'head': head
            }
    if len(trace_time_dict) == 0 and len(data_anr_entries) != 0:
        flymeprint.error('no system_server trace matches')
    return trace_time_dict
def extract_log_to_db(cursor, table_name, row, zip_file, zip_md5sum):
    """Unpack an uploaded log zip, analyse every embedded debug db and
    insert one database row per brief trace found.

    :param cursor: DB cursor used by flymeparser.insert_to_database.
    :param table_name: destination table name.
    :param row: base row data the brief-trace fields are merged into.
    :param zip_file: path of the zip archive to process.
    :param zip_md5sum: md5 of the zip, stored alongside each row.
    """
    fname = zip_file
    dname = fname + '_'
    os.makedirs(dname, exist_ok=True)
    # FIX: ZipFile was previously opened and never closed.
    with zipfile.ZipFile(fname, 'r') as zf:
        zf.extractall(dname)
    for root, dirs, files in os.walk(dname):
        # Remove stale previously-decoded db directories first.
        for entry in dirs:
            if flymeparser.is_fname_match(entry, 'db\..*?dbg\.DEC'):
                flymeparser.clean_files(os.path.join(root, entry))
        for entry in files:
            if not flymeparser.is_fname_match(entry, 'db\..*dbg'):
                continue
            to_extract = os.path.join(root, entry)
            flymeparser.extract_db(to_extract)
            # FIX: this handle previously leaked (rebound without close).
            with open(to_extract, mode='rb') as fd:
                db_md5_sum = hashlib.md5(fd.read()).hexdigest()
            db_dir = os.path.join(root, entry + '.DEC')
            brief_trace_list = list()
            with open(os.path.join(db_dir, '__exp_main.txt'),
                      encoding='utf-8') as fd:
                exp_main_content = fd.read()
            exception_class = flymeparser.get_exclass(exp_main_content)
            subject = flymeparser.get_sj(exp_main_content)
            exception_log_time = flymeparser.get_exlt(exp_main_content)
            if exception_class == 'SWT':
                flymeprint.debug('exception class:SWT')
                res_dict = swtmanager.start(root)
                if res_dict and res_dict['is_swt'] and res_dict[
                        'brief_trace']:
                    # Keep only traces belonging to this decoded db dir.
                    for i in res_dict['brief_trace']:
                        if os.path.dirname(i) == db_dir:
                            brief_trace_list.append(
                                res_dict['brief_trace'][i])
            elif exception_class == 'Java (JE)':
                flymeprint.debug('exception class:Java (JE)')
                res_dict = jemanager.start(root)
                if res_dict and res_dict['is_je'] and res_dict[
                        'brief_trace']:
                    brief_trace_list.append(
                        res_dict['brief_trace'].popitem()[1])
            elif exception_class == 'Native (NE)':
                flymeprint.debug('exception class:Native (NE)')
                res_dict = nemanager.start(root)
                if res_dict and res_dict['is_ne'] and res_dict[
                        'brief_trace']:
                    brief_trace_list.append(
                        res_dict['brief_trace'].popitem()[1])
            else:
                flymeprint.debug('exception class:' + exception_class +
                                 ',ignored...')
                brief_trace_list.append('ignored exception class')
            if not brief_trace_list:
                brief_trace_list.append('null brief trace')
            if os.path.exists(db_dir):
                for brief_trace in brief_trace_list:
                    if not brief_trace:
                        brief_trace = 'null brief trace'
                    additional_dict = {
                        'db_dir': db_dir,
                        'zip_md5': zip_md5sum,
                        'db_md5': db_md5_sum,
                        # Escape single quotes for SQL string literals.
                        'brief_trace': brief_trace.replace('\'', '\'\''),
                        'exception_class': exception_class,
                        'subject': subject,
                        'exception_log_time': exception_log_time
                    }
                    try:
                        flymeparser.insert_to_database(
                            cursor, table_name, row, additional_dict)
                    except pymysql.err.IntegrityError as ex:
                        # Duplicate rows are expected; just warn.
                        flymeprint.warning(ex)
                    except Exception:
                        traceback.print_exc(file=sys.stdout)
            else:
                flymeprint.error('db dir not match with db file')
def generate_report(self, dir):
    """Write this SWT object's report file into *dir* and return the
    brief trace string chosen for it.

    Aggregates the checkers' findings with priority:
    dead lock > binder full > hang > unknown.
    """
    file = os.path.join(dir, self.time.replace(':', '_') + '_swt.txt')
    flymeparser.clean_files(file)
    # Build the body in memory first; the head line depends on the outcome.
    si = io.StringIO()
    si.write(self.file_name + '\n')
    si.write(self.event_log + '\n\n')
    ex_reason = ''
    ex_final_trace = ''
    ex_brief_trace = ''
    if self.is_sf_hang:
        ex_brief_trace = flymeparser.get_sf_hang_brief_trace(
            self.event_log)
        if ex_brief_trace is None:
            ex_brief_trace = 'null'
        ex_reason = 'sf hang'
        ex_final_trace = 'refer to trace file'
    else:
        # Monitors are appended so one loop covers both checker kinds.
        self.handler_list.extend(self.monitor_list)
        has_dead_lock = False
        dead_lock_brief_trace = None
        dead_lock_message = None
        dead_lock_error_trace = None
        has_binder_full = False
        binder_full_brief_trace = None
        binder_full_message = None
        binder_full_error_trace = None
        has_hang = False
        hang_brief_trace = list()
        hang_message = list()
        hang_error_trace = list()
        has_unknown = False
        unknown_brief_trace = list()
        unknown_message = None
        unknown_error_trace = list()
        for checker in self.handler_list:
            checker.generate_report(si)
            si.write('\n')
            if checker.is_dead_lock:
                # Dead lock dominates: record it and stop scanning.
                has_dead_lock = True
                dead_lock_message = checker.error_message
                dead_lock_error_trace = checker.error_trace
                dead_lock_brief_trace = checker.brief_trace
                break
            elif checker.is_binder_full:
                # Only the first binder-full checker is recorded.
                if has_binder_full:
                    continue
                has_binder_full = True
                binder_full_message = checker.error_message
                binder_full_error_trace = checker.error_trace
                binder_full_brief_trace = checker.brief_trace
            elif checker.is_hang:
                has_hang = True
                if checker.brief_trace is None:
                    continue
                # Skip exact duplicates (same trace AND same message at
                # the same position in their respective lists).
                if checker.brief_trace in hang_brief_trace and \
                        checker.error_message in hang_message and \
                        hang_brief_trace.index(
                            checker.brief_trace) == \
                        hang_message.index(
                            checker.error_message):
                    flymeprint.debug('ignore duplication')
                    continue
                hang_brief_trace.append(checker.brief_trace)
                if checker.error_trace is None:
                    continue
                hang_error_trace.append(checker.error_trace)
                if checker.error_message is None:
                    continue
                hang_message.append(checker.error_message)
            elif checker.is_unknown:
                has_unknown = True
                if checker.brief_trace is None:
                    unknown_message = checker.error_message
                    continue
                if checker.brief_trace in unknown_brief_trace:
                    continue
                unknown_brief_trace.append(checker.brief_trace)
                unknown_error_trace.append(checker.error_trace)
                unknown_message = checker.error_message
            else:
                flymeprint.error(
                    'should not be in this case, check your code!!!')
        # Pick the highest-priority category that was observed.
        if has_dead_lock:
            ex_reason = dead_lock_message
            ex_final_trace = dead_lock_error_trace
            ex_brief_trace = dead_lock_brief_trace
        elif has_binder_full:
            ex_reason = binder_full_message
            ex_final_trace = binder_full_error_trace
            ex_brief_trace = binder_full_brief_trace
        elif has_hang:
            ex_reason = ', '.join(hang_message)
            ex_final_trace = '\n'.join(hang_error_trace)
            ex_brief_trace = '_'.join(hang_brief_trace)
        elif has_unknown:
            ex_reason = unknown_message
            ex_final_trace = '\n'.join(unknown_error_trace)
            ex_brief_trace = '_'.join(unknown_brief_trace)
    # Normalise empty/None results to the literal 'null'.
    if ex_brief_trace:
        ex_brief_trace = ex_brief_trace.rstrip('_')
    else:
        ex_brief_trace = 'null'
    if ex_final_trace:
        ex_final_trace = ex_final_trace.rstrip('\n')
    else:
        ex_final_trace = 'null'
    if ex_reason:
        ex_reason = ex_reason.rstrip(',')
    else:
        ex_reason = 'null'
    self.ex_reason = ex_reason
    self.ex_brief_trace = ex_brief_trace
    self.ex_final_trace = ex_final_trace
    flymeprint.debug('exception reason:' + self.ex_reason)
    flymeprint.debug('exception brief trace:\n' + self.ex_brief_trace)
    # Head first, then the buffered checker reports.
    fd = open(file, 'a', encoding='utf-8')
    flymeparser.write_exception_head(fd, self.ex_type, self.ex_reason,
                                     self.ex_final_trace, None)
    fd.write(si.getvalue().rstrip('\n'))
    fd.close()
    si.close()
    return self.ex_brief_trace
def parse_dropbox(root_path):
    """Walk the dropbox entries under root_path, classify each ANR by its
    main-thread state and emit merged reports into __anranalyser__/.

    Side effects only: creates/cleans the work directories and writes the
    report files; returns None.
    """
    cachemanager.root_path = root_path
    try:
        is_dir = os.path.isdir(root_path)
        if not is_dir:
            flymeprint.warning('invalid root dir:' + root_path)
    except Exception as ex:
        traceback.print_exc(file=sys.stdout)
        return
    # Work directories for the different analysis stages.
    anranalyser = os.path.join(root_path, '__anranalyser__')
    # raw extracted dropbox files
    extractall = os.path.join(anranalyser, 'extractall')
    # entries merged by main-stack similarity
    merge = os.path.join(anranalyser, 'merge')
    # entries reported as bugs according to policy
    bug = os.path.join(anranalyser, 'bug')
    # undetermined entries to be analysed manually
    undetermined = os.path.join(anranalyser, 'undetermined')
    # entries judged not to be bugs
    notbug = os.path.join(anranalyser, 'notbug')
    if not flymeparser.clean_and_build_dir(extractall, merge, bug,
                                           undetermined, notbug):
        flymeprint.warning('can not cleanAndBuildDir')
        return
    state_obj_dict = dict()
    drop_box_entries = cachemanager.get_dropbox_files()
    if len(drop_box_entries) == 0:
        flymeprint.warning('no dropbox files found')
    data_anr_entries = cachemanager.get_data_anr_trace_files()
    if len(data_anr_entries) == 0:
        flymeprint.warning('no data anr files found')
    event_log_entries = cachemanager.get_event_log_files()
    if len(event_log_entries) == 0:
        flymeprint.warning('no event log files found')
    # Thread-state name -> wrapper class; anything else becomes Otherstate.
    state_classes = {
        'Blocked': Blocked,
        'Native': Native,
        'Runnable': Runnable,
        'Suspended': Suspended,
        'TimedWaiting': TimedWaiting,
        'Waiting': Waiting,
        'WaitingForGcToComplete': WaitingForGcToComplete,
        'WaitingPerformGc': WaitingPerformingGc,
    }
    for dfile in drop_box_entries:
        entry = os.path.basename(dfile)
        flymeprint.debug('start process ---> ' + entry)
        # BUG FIX: entry.rstrip('.gz') strips any trailing '.', 'g' or 'z'
        # characters (e.g. 'log.gz' -> 'lo'); remove the suffix instead.
        if entry.endswith('.gz'):
            base_name = entry[:-len('.gz')]
        else:
            base_name = entry
        dest_file = os.path.join(extractall, base_name)
        file_content = cachemanager.get_file_content(dfile, dest_file)
        if file_content is None:
            flymeprint.error(dest_file + ' content empty')
            continue
        anrobj = parse_dfile(dfile, root_path)
        if not anrobj:
            continue
        anrobj.time_and_filepath['dropbox_file_name'] = dest_file
        if not is_a_valid_anr_obj(anrobj):
            flymeprint.error(entry + ' ---> incomplete information!!!')
            continue
        repair_pid_if_needed(anrobj)
        state = anrobj.mainTrace["thread_state"]
        obj = state_classes.get(state, Otherstate)(anrobj)
        append_to_merge_or_match(obj, state_obj_dict)
        flymeprint.debug('end process ---> ' + entry)
    generate_report(state_obj_dict.values(), merge, bug, undetermined,
                    notbug)
def start(root_path):
    """Run native-exception (NE) analysis for *root_path*.

    Logs an error when parsing produced nothing and returns the parse
    result (possibly empty/falsy) unchanged.
    """
    result = parse_ne(root_path, os.path.join(root_path, target_dir))
    if not result:
        flymeprint.error('error parsing ne')
    return result
def start(root_path):
    """Run Java-exception (JE) analysis for *root_path*.

    Logs an error when parsing produced nothing and returns the parse
    result (possibly empty/falsy) unchanged.
    """
    result = parse_je(root_path, os.path.join(root_path, target_dir))
    if not result:
        flymeprint.error('error parsing je')
    return result
def get_matched_trace_time(watchdog_formated_dict,
                           system_server_trace_time_dict, is_pm):
    """For each watchdog timestamp, select the best system_server trace
    taken before it and the best one taken after it.

    :param watchdog_formated_dict: watchdog entries keyed by time string;
        each holds 'pid' and 'time_struct' ('time_struct' is mutated here
        to adopt the trace year).
    :param system_server_trace_time_dict: traces keyed by time string.
    :param is_pm: widens the windows for PackageManager matching
        (middle_t 300/pre_trunc 600 instead of 30/120).
    :returns: dict keyed by watchdog time string with best previous/later
        trace info; keys are prefixed 'best_alternative_*' when no proper
        before/after pair exists.
    """
    flymeprint.debug('getting best-matched time...')
    if is_pm:
        middle_t = 300
        pre_trunc = 600
    else:
        middle_t = 30
        pre_trunc = 120
    later_trunc = 120
    matched_time = dict()
    if len(system_server_trace_time_dict) < 2:
        flymeprint.error('system_server trace less than 2')
        return matched_time
    for watchdog_time_str in watchdog_formated_dict.keys():
        pid = watchdog_formated_dict[watchdog_time_str]['pid']
        ss_pid_matched_trace_dict = get_pid_matched_ss_trace_dict(
            pid, system_server_trace_time_dict)
        if not ss_pid_matched_trace_dict:
            # NOTE(review): assumes pid is a str here.
            flymeprint.warning('pid:' + pid + ' no trace')
            continue
        # Live view: the pops below shrink both the dict and this view.
        system_server_trace_time_list = ss_pid_matched_trace_dict.keys()
        # Seed candidates: earliest trace as "previous", latest as "later".
        best_previous_time_str = min(system_server_trace_time_list)
        best_previous_item = ss_pid_matched_trace_dict.pop(
            best_previous_time_str)
        best_previous_time_struct = best_previous_item['time_struct']
        best_previous_time_count = best_previous_time_struct.timestamp()
        best_previous_file_name = best_previous_item['file_name']
        best_previous_content = best_previous_item['content']
        best_previou_time_head = best_previous_item['head']
        if not system_server_trace_time_list:
            flymeprint.error('only one system trace found')
            continue
        best_later_time_str = max(system_server_trace_time_list)
        best_later_item = ss_pid_matched_trace_dict.pop(best_later_time_str)
        max_time_struct = best_later_time_struct = best_later_item[
            'time_struct']
        best_later_time_count = best_later_time_struct.timestamp()
        best_later_file_name = best_later_item['file_name']
        best_later_content = best_later_item['content']
        best_later_time_head = best_later_item['head']
        matched_time[watchdog_time_str] = dict()
        no_best_previous_time = True
        no_best_later_time = True
        no_best_time = True
        no_alternative_time = True
        # Watchdog timestamps carry no year; borrow it from the latest
        # trace (mutates the caller's dict entry).
        new_time_struct = watchdog_formated_dict[watchdog_time_str][
            'time_struct'].replace(year=max_time_struct.year)
        watchdog_formated_dict[watchdog_time_str][
            'time_struct'] = new_time_struct
        new_time_count = new_time_struct.timestamp()
        # A "best previous" trace must precede the watchdog by >= middle_t;
        # a "best later" trace must not precede it at all.
        if new_time_count > best_previous_time_count:
            no_alternative_time = False
            if new_time_count - best_previous_time_count >= middle_t:
                no_best_previous_time = False
        if new_time_count <= best_later_time_count:
            no_best_later_time = False
        if no_alternative_time:
            flymeprint.error('watchdog time and trace time not match ---> ' +
                             watchdog_time_str)
            continue
        if no_best_previous_time:
            flymeprint.warning('no best previous time ---> ' +
                               watchdog_time_str)
        if no_best_later_time:
            flymeprint.warning('no best later time ---> ' + watchdog_time_str)
        if (not no_best_previous_time) and (not no_best_later_time):
            no_best_time = False
        watchdog_time_struct = watchdog_formated_dict[watchdog_time_str][
            'time_struct']
        watchdog_time_count = watchdog_time_struct.timestamp()
        # Scan the remaining traces, tightening the previous/later picks.
        # Intervals are watchdog_time - trace_time (positive => trace is
        # earlier than the watchdog event).
        for system_server_time_str in ss_pid_matched_trace_dict.keys():
            system_server_trace_time_struct = ss_pid_matched_trace_dict[
                system_server_time_str]['time_struct']
            system_server_time_count = \
                system_server_trace_time_struct.timestamp()
            current_time_interval = watchdog_time_count - \
                system_server_time_count
            previous_time_interval = watchdog_time_count - \
                best_previous_time_count
            later_time_interval = watchdog_time_count - best_later_time_count
            current_file_name = \
                ss_pid_matched_trace_dict[system_server_time_str][
                    'file_name']
            current_content = \
                ss_pid_matched_trace_dict[system_server_time_str]['content']
            current_head = ss_pid_matched_trace_dict[system_server_time_str][
                'head']
            if no_best_time:
                # Degraded mode: improve whichever side lacks a proper pick.
                change_best_previous = False
                change_best_later = False
                if no_best_previous_time:
                    # No qualifying previous yet: prefer the earlier trace
                    # that still precedes the watchdog.
                    if (current_time_interval > 0) and (
                            previous_time_interval < current_time_interval):
                        change_best_previous = True
                else:
                    # Qualifying previous exists: prefer the one closest to
                    # (but still >= middle_t before) the watchdog.
                    if (current_time_interval - middle_t >= 0) and (
                            previous_time_interval > current_time_interval):
                        change_best_previous = True
                if change_best_previous:
                    best_previous_time_str = system_server_time_str
                    best_previous_time_count = system_server_time_count
                    best_previous_file_name = current_file_name
                    best_previous_content = current_content
                    best_previous_time_struct = system_server_trace_time_struct
                    best_previou_time_head = current_head
                if no_best_later_time:
                    if current_time_interval < later_time_interval:
                        change_best_later = True
                else:
                    # Prefer the trace closest after the watchdog event.
                    if (current_time_interval <= 0) and (
                            current_time_interval > later_time_interval):
                        change_best_later = True
                if change_best_later:
                    best_later_time_str = system_server_time_str
                    best_later_time_count = system_server_time_count
                    best_later_file_name = current_file_name
                    best_later_content = current_content
                    best_later_time_struct = system_server_trace_time_struct
                    best_later_time_head = current_head
            else:
                # Both picks qualify: tighten each side independently.
                if (current_time_interval - middle_t >= 0) and (
                        current_time_interval < previous_time_interval):
                    best_previous_time_str = system_server_time_str
                    best_previous_time_count = system_server_time_count
                    best_previous_file_name = current_file_name
                    best_previous_content = current_content
                    best_previous_time_struct = system_server_trace_time_struct
                    best_previou_time_head = current_head
                if (current_time_interval <= 0) and (current_time_interval >
                                                     later_time_interval):
                    best_later_time_str = system_server_time_str
                    best_later_time_count = system_server_time_count
                    best_later_file_name = current_file_name
                    best_later_content = current_content
                    best_later_time_struct = system_server_trace_time_struct
                    best_later_time_head = current_head
        # Publish under 'best_alternative_*' keys when no proper pair was
        # found, plain 'best_*' keys otherwise.
        if no_best_time:
            matched_time[watchdog_time_str][
                'best_alternative_previous_time_str'] = best_previous_time_str
            matched_time[watchdog_time_str][
                'best_alternative_later_time_str'] = best_later_time_str
            matched_time[watchdog_time_str][
                'best_alternative_previous_time_struct'] = \
                best_previous_time_struct
            matched_time[watchdog_time_str][
                'best_alternative_later_time_struct'] = \
                best_later_time_struct
            matched_time[watchdog_time_str][
                'best_alternative_previous_head'] = best_previou_time_head
            matched_time[watchdog_time_str][
                'best_alternative_later_head'] = best_later_time_head
        else:
            matched_time[watchdog_time_str][
                'best_previous_time_str'] = best_previous_time_str
            matched_time[watchdog_time_str][
                'best_later_time_str'] = best_later_time_str
            matched_time[watchdog_time_str][
                'best_previous_time_struct'] = best_previous_time_struct
            matched_time[watchdog_time_str][
                'best_later_time_struct'] = best_later_time_struct
            matched_time[watchdog_time_str][
                'best_previous_head'] = best_previou_time_head
            matched_time[watchdog_time_str][
                'best_later_head'] = best_later_time_head
        # Mark picks that are too far from the watchdog to be worth printing.
        current_prev_trunc = watchdog_time_struct.timestamp() - \
            best_previous_time_struct.timestamp()
        current_later_trunc = best_later_time_struct.timestamp() - \
            watchdog_time_struct.timestamp()
        is_previous_valid = True
        is_later_valid = True
        p_ivalid_reason = None
        l_ivalid_reason = None
        if current_prev_trunc > pre_trunc:
            p_ivalid_reason = 'trace is ' + str(
                int(current_prev_trunc)) + 's long to ' 'watchdog ' 'time,' \
                'no need to ' 'print'
            is_previous_valid = False
        if current_later_trunc > later_trunc:
            is_later_valid = False
            l_ivalid_reason = 'trace is ' + str(
                int(current_later_trunc)) + 's long ' 'after ' '' '' \
                'watchdog ' 'time,' 'no need ' 'to print'
        matched_time[watchdog_time_str][
            'previous_file_name'] = best_previous_file_name
        matched_time[watchdog_time_str][
            'previous_content'] = best_previous_content
        matched_time[watchdog_time_str][
            'is_previous_valid'] = is_previous_valid
        matched_time[watchdog_time_str][
            'later_file_name'] = best_later_file_name
        matched_time[watchdog_time_str]['later_content'] = best_later_content
        matched_time[watchdog_time_str]['is_later_valid'] = is_later_valid
        matched_time[watchdog_time_str]['p_i_r'] = p_ivalid_reason
        matched_time[watchdog_time_str]['l_i_r'] = l_ivalid_reason
    return matched_time
def get_swtobj_dict(watchdog_formated_dict, matched_trace_time,
                    pm_matched_trace_time):
    """Build one SwtObj per watchdog timestamp.

    :param watchdog_formated_dict: watchdog entries keyed by time string.
    :param matched_trace_time: per-timestamp previous/later trace match
        (may be None for the sf-hang path).
    :param pm_matched_trace_time: wider-window match used for
        PackageManager handler checkers; may be falsy.
    :returns: dict mapping time string -> SwtObj.
    """
    swtobj_dict = dict()
    for time_str in watchdog_formated_dict:
        pid = watchdog_formated_dict[time_str]['pid']
        is_sf_hang = False
        if watchdog_formated_dict[time_str]['__is_sf_hang__']:
            # sf hang: no trace analysis, SwtObj gets empty checker lists.
            handler_list = list()
            monitor_list = list()
            is_sf_hang = True
        else:
            whole_trace_dict = get_whole_trace_dict(time_str,
                                                    matched_trace_time)
            if pm_matched_trace_time:
                # NOTE(review): pm_whole_* are only bound inside this
                # branch; the Handler construction below relies on
                # pm_matched_trace_time being truthy whenever they are
                # used.
                pm_whole_trace_dict = get_whole_trace_dict(
                    time_str, pm_matched_trace_time)
                pm_whole_previous_trace = pm_whole_trace_dict['previous_trace']
                pm_whole_later_trace = pm_whole_trace_dict['later_trace']
            if not whole_trace_dict:
                flymeprint.error('empty whole_trace_dict')
                continue
            whole_previous_trace = whole_trace_dict['previous_trace']
            whole_later_trace = whole_trace_dict['later_trace']
            checker_list = watchdog_formated_dict[time_str]['checker_list']
            monitor_list = list()
            handler_list = list()
            for checker in checker_list:
                checker_name = checker['checker_name']
                thread_name = checker['thread_name']
                event_log = checker['event_log']
                if checker['checker_type'] == 'handler':
                    # PackageManager handlers use the wider PM trace match
                    # when one is available.
                    if (
                            thread_name == 'PackageManager') and \
                            pm_matched_trace_time:
                        handler = Handler(
                            watchdog_formated_dict[time_str]['time_struct'],
                            pid, checker_name, thread_name, event_log,
                            pm_whole_previous_trace, pm_whole_later_trace)
                    else:
                        handler = Handler(
                            watchdog_formated_dict[time_str]['time_struct'],
                            pid, checker_name, thread_name, event_log,
                            whole_previous_trace, whole_later_trace)
                    handler_list.append(handler)
                elif checker['checker_type'] == 'monitor':
                    class_name = checker['checker_class_name']
                    monitor = Monitor(
                        watchdog_formated_dict[time_str]['time_struct'],
                        pid, class_name, checker_name, thread_name,
                        event_log, whole_previous_trace, whole_later_trace)
                    monitor_list.append(monitor)
                else:
                    # Unknown checker type: skip silently.
                    continue
        swtobj = SwtObj(pid, time_str,
                        watchdog_formated_dict[time_str]['event_log'],
                        watchdog_formated_dict[time_str]['file_name'],
                        handler_list, monitor_list, is_sf_hang)
        swtobj_dict[time_str] = swtobj
    return swtobj_dict