# Exemplo n.º 1
def parse_db_watchdog(root_path):
    """Collect the db event log files under *root_path* and parse their
    watchdog entries, returning whatever the entry parser produces."""
    flymeprint.debug('parsing db watchdog...')
    cachemanager.root_path = root_path
    log_files = cachemanager.get_db_event_log_files()
    if not log_files:
        flymeprint.warning('no db event log files found')
    return parse_event_log_for_wd_by_entries(log_files)
# Exemplo n.º 2
def append_to_merge_or_match(stateObj, state_obj_dict):
    """Insert *stateObj* into *state_obj_dict* keyed by its own key.

    A first occurrence is stored directly; a duplicate key is merged by
    appending the new object to the stored object's matched_state_list.
    """
    key = stateObj.get_key()
    if key not in state_obj_dict:
        state_obj_dict[key] = stateObj
    else:
        flymeprint.debug('duplication for:' + key)
        state_obj_dict[key].matched_state_list.append(stateObj)
# Exemplo n.º 3
def get_whole_trace_for_anr(content, packageName, matchedTime, anrTime,
                            anr_in_time, root_path):
    """Return the whole trace for an ANR.

    Without an ANR time there is nothing to look up (empty dict).  With a
    matched time the trace is cut from *content*; otherwise the data/anr
    files are parsed as a fallback.
    """
    if len(anrTime) == 0:
        return {}
    if len(matchedTime) != 0:
        return flymeparser.get_whole_trace_final(matchedTime, content)
    flymeprint.debug('parse data anr trace')
    return parse_data_anr(packageName, anrTime, anr_in_time, root_path)
# Exemplo n.º 4
def repair_pid_if_needed(anrobj):
    """Recover a missing or zero pid on *anrobj* from its log sources.

    The pid is looked up first in the am_anr event log entry, then in the
    data/anr trace file (or the in-memory dropbox trace when no trace file
    path is recorded).  On success ``anrobj.pid`` is updated in place; on
    failure only a warning is logged and the object is left untouched.
    """
    pid = anrobj.pid
    repaired = False
    if pid is None or pid == '0':
        flymeprint.warning('pid:' + str(pid) + ', not valid, try to repair')
        anr_time = anrobj.time_and_filepath['anr_time']
        if 'event_log_path' in anrobj.time_and_filepath:
            # First attempt: pull the pid out of the am_anr event log line.
            content = cachemanager.get_am_anr_cache(
                anrobj.time_and_filepath['event_log_path'])
            pid = flymeparser.get_anr_pid_event_log(content, anr_time,
                                                    anrobj.packageName)
            if pid and pid != '0':
                repaired = True
                # Bug fix: the recovered pid was previously never written
                # back, so the final success log printed the stale value
                # (and raised TypeError when anrobj.pid was None).
                anrobj.pid = pid
            else:
                repaired = False
        if not repaired:
            trace_time = anrobj.time_and_filepath['trace_time']
            if 'anr_trace_file_name' in anrobj.time_and_filepath:
                # Second attempt: the standalone data/anr trace file.
                content = cachemanager.get_file_content(
                    anrobj.time_and_filepath['anr_trace_file_name'])
            else:
                # Fall back to the dropbox trace already held in memory.
                content = anrobj.content
            pid = flymeparser.get_trace_pid(content, trace_time,
                                            anrobj.packageName)
            if pid:
                repaired = True
                anrobj.pid = pid
            else:
                repaired = False
        if not repaired:
            flymeprint.warning('repair pid failed')
        else:
            flymeprint.debug('repair pid successfully ---> pid:' + anrobj.pid)
# Exemplo n.º 5
def parse_swt(root_path):
    """Detect a system watchdog timeout (SWT) under *root_path*.

    Returns a dict with key 'is_swt'; when an SWT is found and analysed,
    'brief_trace' holds the generated report summary.
    """
    flymeprint.debug('try swt...')
    cachemanager.root_path = root_path
    res = dict()
    # Watchdog entries: prefer the regular event log, fall back to db logs.
    watchdog_raw_dict = parse_event_log_for_wd(root_path)
    if not watchdog_raw_dict:
        watchdog_raw_dict = parse_db_watchdog(root_path)
    if not watchdog_raw_dict:
        res['is_swt'] = False
        flymeprint.debug('no watchdog keyword found, not swt')
        return res
    res['is_swt'] = True
    flymeprint.debug('swt detected...')
    watchdog_formated_dict = parse_watchdog_raw_dict(watchdog_raw_dict)
    # Treat as a SurfaceFlinger hang only if EVERY entry is flagged so.
    is_sf_hang = True
    for time_str in watchdog_formated_dict:
        if '__is_sf_hang__' not in watchdog_formated_dict[time_str] or not \
                watchdog_formated_dict[time_str]['__is_sf_hang__']:
            is_sf_hang = False
    if not is_sf_hang:
        if not watchdog_formated_dict:
            flymeprint.error('parse_wachdog_raw_dict error')
            return res
        # system_server traces: data/anr first, then the db traces.
        system_server_trace_time_dict = parse_data_anr_trace(root_path)
        if not system_server_trace_time_dict:
            system_server_trace_time_dict = parse_db_trace(root_path)
        if not system_server_trace_time_dict:
            flymeprint.error('no system_server trace time')
            return res
        elif len(system_server_trace_time_dict) <= 1:
            # Analysis needs a before- and an after-trace around the SWT.
            flymeprint.error(
                'only one system_server trace found, not enough to analysis')
            return res
        # Pair each watchdog time with its best previous/later traces.
        matched_trace_time = get_matched_trace_time(
            watchdog_formated_dict, system_server_trace_time_dict, False)
        if not matched_trace_time:
            flymeprint.error('no matched time')
            return res
        pm_matched_trace_time = get_pm_matched_trace_time(
            system_server_trace_time_dict, watchdog_formated_dict)
        swtobj_dict = get_swtobj_dict(watchdog_formated_dict,
                                      matched_trace_time,
                                      pm_matched_trace_time)
    else:
        # SF hang path needs no trace matching at all.
        flymeprint.debug('sf hang...')
        swtobj_dict = get_swtobj_dict(watchdog_formated_dict, None, None)
    res['brief_trace'] = generate_report(swtobj_dict, root_path)
    return res
# Exemplo n.º 6
def parse_data_anr_entries(data_anr_entries):
    """Scan data/anr trace files for system_server traces.

    Returns a dict mapping each trace timestamp string to a record with
    its parsed time_struct, source file name, file content, pid and head
    line.  Logs an error (but still returns) when nothing matches.
    """
    flymeprint.debug('parsing data anr trace...')
    if not data_anr_entries:
        flymeprint.error('no data anr files found')
    trace_time_dict = {}
    for fname in data_anr_entries:
        text = cachemanager.get_file_content(fname)
        if not text:
            continue
        hits = flymeparser.get_trace_time_pid_for_wd(text)
        if not hits:
            continue
        for hit in hits:
            time_str = hit[2]
            trace_time_dict[time_str] = {
                'time_struct': datetime.strptime(time_str,
                                                 '%Y-%m-%d %H:%M:%S'),
                'file_name': fname,
                'content': text,
                'pid': hit[1],
                'head': hit[0],
            }

    if not trace_time_dict and data_anr_entries:
        flymeprint.error('no system_server trace matches')
    return trace_time_dict
# Exemplo n.º 7
def parse_je(root_path, report_dir):
    """Detect a Java exception (JE) crash and write its report files.

    For MTK db-only logs every __exp_main file yields a report plus a
    brief trace entry; otherwise the main log files are scanned and the
    first previously-unseen system_server JE produces a report.

    Returns a dict with 'is_je' and, in the db-only case, a
    'brief_trace' mapping keyed by source file name.
    """
    flymeprint.debug('try je...')
    cachemanager.root_path = root_path
    res_dict = dict()
    index = 0
    if cachemanager.mtk_db_only:
        res_dict['is_je'] = True
        flymeparser.clean_and_build_dir(report_dir)
        res_dict['brief_trace'] = dict()
        exp_main_files = cachemanager.get_db_exp_main_files()
        for file_name in exp_main_files:
            content = cachemanager.get_file_content(file_name)
            je_trace = flymeparser.get_je_db_trace(content)
            je = JE(je_trace, file_name)
            index += 1
            je.generate_report(report_dir, 'je_' + str(index) + '.txt')
            res_dict['brief_trace'][file_name] = je.get_brief_trace()
    else:
        main_files = cachemanager.get_main_log_files()
        is_je = False
        time_list = list()
        for main_file in main_files:
            flymeprint.debug('parsing: ' + main_file)
            content = cachemanager.get_file_content(main_file)
            je_dict = flymeparser.parse_ss_je(content)
            if not je_dict:
                continue
            # Skip a JE already reported for the same timestamp.
            if je_dict['time'] in time_list:
                continue
            time_list.append(je_dict['time'])
            is_je = True
            flymeprint.debug('je detected...')
            flymeparser.clean_and_build_dir(report_dir)
            je = JE(je_dict['trace'], main_file)
            index += 1
            je.generate_report(report_dir, 'je_' + str(index) + '.txt')
            # res_dict['brief_trace'][main_file] = je.get_brief_trace()
            # Only the first new JE is reported in this mode.
            break
        if not is_je:
            flymeprint.debug('not je...')
        res_dict['is_je'] = is_je
    return res_dict
# Exemplo n.º 8
def parse_excel(excel_fn, ouc_dest_dir):
    """Import the OUC crash logs listed in *excel_fn* into MySQL.

    Each row's log archive is downloaded into *ouc_dest_dir*, its db
    files are extracted and the analysis results are inserted into the
    'pro7_urge_2' table (created on demand).  Per-row failures are
    printed and skipped; the connection is always closed on exit.
    """
    db = None  # bound up front so the finally clause is always safe
    try:
        db = pymysql.connect(host='localhost',
                             user='******',
                             password='******',
                             db='flyme_stability_1')
        table_name = 'pro7_urge_2'
        cursor = db.cursor()
        create_table_sql = 'create table if not exists ' + table_name + '(download_uri varchar(256) not null,zip_md5 varchar(40) not null,db_md5 varchar(40) not null,primary key(db_md5),db_dir varchar(1024),fdevice varchar(30),fpackage_name varchar(50),fflyme_ver varchar(80),index index_fpackage_name(fpackage_name),fimei varchar(50),index index_fimei(fimei),fos_version varchar(10),froot varchar(10),fcountry varchar(10),fnetwork varchar(10),fcrashtime varchar(40),fupload_time varchar(40),inside_id varchar(40),stat_date varchar(20), exception_class varchar(20), index index_exception_class(exception_class),subject varchar(512),index index_subject(subject),brief_trace varchar(512),index index_brief_trace(brief_trace),exception_log_time varchar(100))'
        flymeprint.debug('excuting create table sql:\n' + create_table_sql)
        cursor.execute(create_table_sql)
        db.commit()
        df = pandas.read_excel(excel_fn, sheetname=0)
        try:
            it = df.iterrows()
            while True:
                entry_tuple = it.__next__()
                row = entry_tuple[1]
                flymeprint.debug('downloading ' + row[0])
                zip_file_tuple = flymeparser.download_log(row[0], ouc_dest_dir)
                zip_file = zip_file_tuple[0]
                md5sum = zip_file_tuple[1]
                try:
                    extract_log_to_db(cursor, table_name, row, zip_file,
                                      md5sum)
                    db.commit()
                except Exception:
                    # One bad archive must not abort the whole import.
                    traceback.print_exc(file=sys.stdout)
                finally:
                    cachemanager.free_cache()
        except StopIteration:
            flymeprint.debug('parse excel done')
    except Exception:
        traceback.print_exc(file=sys.stdout)
    finally:
        # Bug fix: the condition was inverted ('if not db'), so an open
        # connection was never closed, and a failed connect() raised
        # NameError on the unbound name.
        if db:
            db.close()
# Exemplo n.º 9
def main():
    """Command-line entry point: dispatch to ANR, reboot or OUC analysis."""
    start_time = datetime.datetime.now()
    argc = len(sys.argv)
    if argc not in (3, 4):
        flymeprint.error(
            'invalid arguments! two or three parameter needed!\n--anr '
            'root_dir or '
            '--android_reboot root_dir or --ouc_excel excel_filename ['
            'dest_path]')
        return
    # Run relative to the script's own directory.
    script = sys.argv[0]
    if os.path.isabs(script):
        cdir = os.path.dirname(script)
    else:
        cdir = os.path.dirname(os.path.join(os.getcwd(), script))
    os.chdir(cdir)
    flymeprint.debug('current dir:' + cdir)
    root_path = sys.argv[2]
    flymeprint.debug('root dir:' + root_path)
    mode = sys.argv[1]
    if mode == '--anr':
        anrmanager.start(root_path)
    elif mode == '--android_reboot':
        # Try each reboot analyser in turn until one claims the log.
        if swtmanager.start(root_path)['is_swt']:
            pass
        elif jemanager.start(root_path)['is_je']:
            pass
        elif nemanager.start(root_path)['is_ne']:
            pass
        else:
            flymeprint.warning('unkonwn reboot type...')
    elif mode == '--ouc_excel':
        dest_dir = sys.argv[3] if argc == 4 else None
        oucmanager.start(sys.argv[2], dest_dir)
    else:
        flymeprint.error('use --anr or --android_reboot or --ouc_excel')
        return
    end_time = datetime.datetime.now()
    flymeprint.debug('Time took: ' + str((end_time - start_time).seconds) +
                     ' seconds')
# Exemplo n.º 10
def extract_log_to_db(cursor, table_name, row, zip_file, zip_md5sum):
    """Extract a downloaded log archive, analyse its db files and insert
    one table row per brief trace found.

    Args:
        cursor: open MySQL cursor used for the inserts.
        table_name: destination table name.
        row: the spreadsheet row describing this log (download uri etc.).
        zip_file: path of the downloaded zip archive.
        zip_md5sum: md5 of the zip archive, stored alongside each row.
    """
    fname = zip_file
    dname = fname + '_'
    os.makedirs(dname, exist_ok=True)
    # Bug fix: the ZipFile handle was never closed; the context manager
    # guarantees it now.
    with zipfile.ZipFile(fname, 'r') as zf:
        zf.extractall(dname)
    for root, dirs, files in os.walk(dname):
        for entry in dirs:
            # Remove stale .DEC extraction dirs before re-extracting.
            if flymeparser.is_fname_match(entry, r'db\..*?dbg\.DEC'):
                flymeparser.clean_files(os.path.join(root, entry))
        for entry in files:
            if not flymeparser.is_fname_match(entry, r'db\..*dbg'):
                continue
            to_extract = os.path.join(root, entry)
            flymeparser.extract_db(to_extract)
            # md5 of the raw db file is the table's primary key.
            # Bug fix: this descriptor was previously rebound without
            # being closed, leaking one fd per db file.
            with open(to_extract, mode='rb') as db_fd:
                db_md5_sum = hashlib.md5(db_fd.read()).hexdigest()
            db_dir = os.path.join(root, entry + '.DEC')
            brief_trace_list = list()
            with open(os.path.join(db_dir, '__exp_main.txt'),
                      encoding='utf-8') as exp_fd:
                exp_main_content = exp_fd.read()
            exception_class = flymeparser.get_exclass(exp_main_content)
            subject = flymeparser.get_sj(exp_main_content)
            exception_log_time = flymeparser.get_exlt(exp_main_content)
            if exception_class == 'SWT':
                flymeprint.debug('exception class:SWT')
                res_dict = swtmanager.start(root)
                if res_dict and res_dict['is_swt'] and res_dict[
                        'brief_trace']:
                    # Keep only traces belonging to this db's .DEC dir.
                    for i in res_dict['brief_trace']:
                        if os.path.dirname(i) == db_dir:
                            brief_trace_list.append(
                                res_dict['brief_trace'][i])
            elif exception_class == 'Java (JE)':
                flymeprint.debug('exception class:Java (JE)')
                res_dict = jemanager.start(root)
                if res_dict and res_dict['is_je'] and res_dict[
                        'brief_trace']:
                    brief_trace_list.append(
                        res_dict['brief_trace'].popitem()[1])
            elif exception_class == 'Native (NE)':
                flymeprint.debug('exception class:Native (NE)')
                res_dict = nemanager.start(root)
                if res_dict and res_dict['is_ne'] and res_dict[
                        'brief_trace']:
                    brief_trace_list.append(
                        res_dict['brief_trace'].popitem()[1])
            else:
                flymeprint.debug('exception class:' + exception_class +
                                 ',ignored...')
                reason = 'ignored exception class'
                brief_trace_list.append(reason)
            if not brief_trace_list:
                brief_trace_list.append('null brief trace')
            if os.path.exists(db_dir):
                for brief_trace in brief_trace_list:
                    if not brief_trace:
                        brief_trace = 'null brief trace'
                    additional_dict = {
                        'db_dir': db_dir,
                        'zip_md5': zip_md5sum,
                        'db_md5': db_md5_sum,
                        # Escape single quotes for the SQL string.
                        'brief_trace': brief_trace.replace('\'', '\'\''),
                        'exception_class': exception_class,
                        'subject': subject,
                        'exception_log_time': exception_log_time
                    }
                    try:
                        flymeparser.insert_to_database(
                            cursor, table_name, row, additional_dict)
                    except pymysql.err.IntegrityError as ex:
                        # Duplicate db_md5 primary key: already imported.
                        flymeprint.warning(ex)
                    except Exception:
                        traceback.print_exc(file=sys.stdout)
            else:
                flymeprint.error('db dir not match with db file')
# Exemplo n.º 11
 def generate_report(self, dir):
     """Write this SWT's report file and return its brief trace.

     Builds the report body in memory, derives the exception reason,
     final trace and brief trace either from the SurfaceFlinger-hang
     event log or from the checker results (priority: dead lock >
     binder full > hang > unknown), then writes everything to
     '<time>_swt.txt' under *dir*.

     Returns:
         The brief trace string also stored on ``self.ex_brief_trace``.
     """
     # NOTE(review): 'dir' and 'file' shadow builtins; kept for
     # interface/behavior compatibility.
     file = os.path.join(dir, self.time.replace(':', '_') + '_swt.txt')
     flymeparser.clean_files(file)
     # fd = open(file, 'a', encoding='utf-8')
     # Accumulate the report body in memory before touching the file.
     si = io.StringIO()
     si.write(self.file_name + '\n')
     si.write(self.event_log + '\n\n')
     ex_reason = ''
     ex_final_trace = ''
     ex_brief_trace = ''
     if self.is_sf_hang:
         # SurfaceFlinger hang: brief trace comes straight from the
         # event log; the detailed trace lives in the trace file.
         ex_brief_trace = flymeparser.get_sf_hang_brief_trace(
             self.event_log)
         if ex_brief_trace is None:
             ex_brief_trace = 'null'
         ex_reason = 'sf hang'
         ex_final_trace = 'refer to trace file'
     else:
         # Run every handler/monitor checker and classify the outcome.
         self.handler_list.extend(self.monitor_list)
         has_dead_lock = False
         dead_lock_brief_trace = None
         dead_lock_message = None
         dead_lock_error_trace = None
         has_binder_full = False
         binder_full_brief_trace = None
         binder_full_message = None
         binder_full_error_trace = None
         has_hang = False
         hang_brief_trace = list()
         hang_message = list()
         hang_error_trace = list()
         has_unknown = False
         unknown_brief_trace = list()
         unknown_message = None
         unknown_error_trace = list()
         for checker in self.handler_list:
             checker.generate_report(si)
             si.write('\n')
             if checker.is_dead_lock:
                 # A dead lock is conclusive: stop scanning checkers.
                 has_dead_lock = True
                 dead_lock_message = checker.error_message
                 dead_lock_error_trace = checker.error_trace
                 dead_lock_brief_trace = checker.brief_trace
                 break
             elif checker.is_binder_full:
                 # Only the first binder-full checker is recorded.
                 if has_binder_full:
                     continue
                 has_binder_full = True
                 binder_full_message = checker.error_message
                 binder_full_error_trace = checker.error_trace
                 binder_full_brief_trace = checker.brief_trace
             elif checker.is_hang:
                 has_hang = True
                 if checker.brief_trace is None:
                     continue
                 # Skip exact (trace, message) pairs already collected.
                 if checker.brief_trace in hang_brief_trace and \
                                 checker.error_message in hang_message and \
                                 hang_brief_trace.index(
                                     checker.brief_trace) == \
                                 hang_message.index(
                                     checker.error_message):
                     flymeprint.debug('ignore duplication')
                     continue
                 hang_brief_trace.append(checker.brief_trace)
                 if checker.error_trace is None:
                     continue
                 hang_error_trace.append(checker.error_trace)
                 if checker.error_message is None:
                     continue
                 hang_message.append(checker.error_message)
             elif checker.is_unknown:
                 has_unknown = True
                 if checker.brief_trace is None:
                     unknown_message = checker.error_message
                     continue
                 if checker.brief_trace in unknown_brief_trace:
                     continue
                 unknown_brief_trace.append(checker.brief_trace)
                 unknown_error_trace.append(checker.error_trace)
                 unknown_message = checker.error_message
             else:
                 flymeprint.error(
                     'should not be in this case, check your code!!!')
         # Pick the highest-priority classification that was seen.
         if has_dead_lock:
             ex_reason = dead_lock_message
             ex_final_trace = dead_lock_error_trace
             ex_brief_trace = dead_lock_brief_trace
         elif has_binder_full:
             ex_reason = binder_full_message
             ex_final_trace = binder_full_error_trace
             ex_brief_trace = binder_full_brief_trace
         elif has_hang:
             ex_reason = ', '.join(hang_message)
             ex_final_trace = '\n'.join(hang_error_trace)
             ex_brief_trace = '_'.join(hang_brief_trace)
         elif has_unknown:
             ex_reason = unknown_message
             ex_final_trace = '\n'.join(unknown_error_trace)
             ex_brief_trace = '_'.join(unknown_brief_trace)
         # Normalise empty results to the literal string 'null'.
         if ex_brief_trace:
             ex_brief_trace = ex_brief_trace.rstrip('_')
         else:
             ex_brief_trace = 'null'
         if ex_final_trace:
             ex_final_trace = ex_final_trace.rstrip('\n')
         else:
             ex_final_trace = 'null'
         if ex_reason:
             ex_reason = ex_reason.rstrip(',')
         else:
             ex_reason = 'null'
     self.ex_reason = ex_reason
     self.ex_brief_trace = ex_brief_trace
     self.ex_final_trace = ex_final_trace
     # for monitor in self.monitor_list:
     # monitor.generate_report(si)
     #    si.write('\n')
     flymeprint.debug('exception reason:' + self.ex_reason)
     flymeprint.debug('exception brief trace:\n' + self.ex_brief_trace)
     # Write the header first, then the accumulated body.
     fd = open(file, 'a', encoding='utf-8')
     flymeparser.write_exception_head(fd, self.ex_type, self.ex_reason,
                                      self.ex_final_trace, None)
     fd.write(si.getvalue().rstrip('\n'))
     fd.close()
     si.close()
     return self.ex_brief_trace
# Exemplo n.º 12
def parse_dropbox(root_path):
    """Analyse every dropbox ANR entry under *root_path* and write reports.

    Builds the working directories under ``__anranalyser__``, extracts
    each dropbox file, classifies the ANR by its main-thread state,
    merges duplicates, then generates the final report.
    """
    cachemanager.root_path = root_path
    try:
        is_dir = os.path.isdir(root_path)
        if not is_dir:
            flymeprint.warning('invalid root dir:' + root_path)
    except Exception:
        traceback.print_exc(file=sys.stdout)
        return
    # anranalyser
    anranalyser = os.path.join(root_path, '__anranalyser__')
    # extract
    extractall = os.path.join(anranalyser, 'extractall')
    # use main stack to merge content
    merge = os.path.join(anranalyser, 'merge')
    # report bug according to policy
    bug = os.path.join(anranalyser, 'bug')
    # undetermined entry which should by analysed manually
    undetermined = os.path.join(anranalyser, 'undetermined')
    # notbug directory
    notbug = os.path.join(anranalyser, 'notbug')

    if not flymeparser.clean_and_build_dir(extractall, merge, bug,
                                           undetermined, notbug):
        flymeprint.warning('can not cleanAndBuildDir')
        return

    # Thread state -> handler class; anything else becomes Otherstate.
    state_classes = {
        'Blocked': Blocked,
        'Native': Native,
        'Runnable': Runnable,
        'Suspended': Suspended,
        'TimedWaiting': TimedWaiting,
        'Waiting': Waiting,
        'WaitingForGcToComplete': WaitingForGcToComplete,
        'WaitingPerformGc': WaitingPerformingGc,
    }

    state_obj_dict = dict()
    drop_box_entries = cachemanager.get_dropbox_files()
    if len(drop_box_entries) == 0:
        flymeprint.warning('no dropbox files found')
    data_anr_entries = cachemanager.get_data_anr_trace_files()
    if len(data_anr_entries) == 0:
        flymeprint.warning('no data anr files found')
    event_log_entries = cachemanager.get_event_log_files()
    if len(event_log_entries) == 0:
        flymeprint.warning('no event log files found')
    for dfile in drop_box_entries:
        entry = os.path.basename(dfile)
        flymeprint.debug('start process ---> ' + entry)
        # Bug fix: str.rstrip('.gz') strips any trailing '.', 'g' or 'z'
        # characters (e.g. 'foo.log' -> 'foo.lo'); remove the '.gz'
        # suffix only when it is actually present.
        if entry.endswith('.gz'):
            dest_name = entry[:-len('.gz')]
        else:
            dest_name = entry
        dest_file = os.path.join(extractall, dest_name)
        file_content = cachemanager.get_file_content(dfile, dest_file)
        if file_content is None:
            flymeprint.error(dest_file + ' content empty')
            continue

        anrobj = parse_dfile(dfile, root_path)
        if not anrobj:
            continue
        anrobj.time_and_filepath['dropbox_file_name'] = dest_file
        if not is_a_valid_anr_obj(anrobj):
            flymeprint.error(entry + ' ---> incomplete information!!!')
            continue
        repair_pid_if_needed(anrobj)
        state_cls = state_classes.get(anrobj.mainTrace["thread_state"],
                                      Otherstate)
        obj = state_cls(anrobj)
        append_to_merge_or_match(obj, state_obj_dict)
        flymeprint.debug('end process ---> ' + entry)

    generate_report(state_obj_dict.values(), merge, bug, undetermined, notbug)
# Exemplo n.º 13
def get_matched_trace_time(watchdog_formated_dict,
                           system_server_trace_time_dict, is_pm):
    flymeprint.debug('getting best-matched time...')
    if is_pm:
        middle_t = 300
        pre_trunc = 600
    else:
        middle_t = 30
        pre_trunc = 120
    later_trunc = 120
    matched_time = dict()
    if len(system_server_trace_time_dict) < 2:
        flymeprint.error('system_server trace less than 2')
        return matched_time
    for watchdog_time_str in watchdog_formated_dict.keys():
        pid = watchdog_formated_dict[watchdog_time_str]['pid']
        ss_pid_matched_trace_dict = get_pid_matched_ss_trace_dict(
            pid, system_server_trace_time_dict)
        if not ss_pid_matched_trace_dict:
            flymeprint.warning('pid:' + pid + ' no trace')
            continue
        system_server_trace_time_list = ss_pid_matched_trace_dict.keys()

        best_previous_time_str = min(system_server_trace_time_list)
        best_previous_item = ss_pid_matched_trace_dict.pop(
            best_previous_time_str)
        best_previous_time_struct = best_previous_item['time_struct']
        best_previous_time_count = best_previous_time_struct.timestamp()
        best_previous_file_name = best_previous_item['file_name']
        best_previous_content = best_previous_item['content']
        best_previou_time_head = best_previous_item['head']
        if not system_server_trace_time_list:
            flymeprint.error('only one system trace found')
            continue
        best_later_time_str = max(system_server_trace_time_list)
        best_later_item = ss_pid_matched_trace_dict.pop(best_later_time_str)
        max_time_struct = best_later_time_struct = best_later_item[
            'time_struct']
        best_later_time_count = best_later_time_struct.timestamp()
        best_later_file_name = best_later_item['file_name']
        best_later_content = best_later_item['content']
        best_later_time_head = best_later_item['head']

        matched_time[watchdog_time_str] = dict()
        no_best_previous_time = True
        no_best_later_time = True
        no_best_time = True
        no_alternative_time = True
        new_time_struct = watchdog_formated_dict[watchdog_time_str][
            'time_struct'].replace(year=max_time_struct.year)
        watchdog_formated_dict[watchdog_time_str][
            'time_struct'] = new_time_struct
        new_time_count = new_time_struct.timestamp()
        if new_time_count > best_previous_time_count:
            no_alternative_time = False
        if new_time_count - best_previous_time_count >= middle_t:
            no_best_previous_time = False
        if new_time_count <= best_later_time_count:
            no_best_later_time = False
        if no_alternative_time:
            flymeprint.error('watchdog time and trace time not match ---> ' +
                             watchdog_time_str)
            continue
        if no_best_previous_time:
            flymeprint.warning('no best previous time ---> ' +
                               watchdog_time_str)
        if no_best_later_time:
            flymeprint.warning('no best later time ---> ' + watchdog_time_str)
        if (not no_best_previous_time) and (not no_best_later_time):
            no_best_time = False

        watchdog_time_struct = watchdog_formated_dict[watchdog_time_str][
            'time_struct']
        watchdog_time_count = watchdog_time_struct.timestamp()
        for system_server_time_str in ss_pid_matched_trace_dict.keys():
            system_server_trace_time_struct = ss_pid_matched_trace_dict[
                system_server_time_str]['time_struct']
            system_server_time_count = \
                system_server_trace_time_struct.timestamp()
            current_time_interval = watchdog_time_count - \
                                    system_server_time_count
            previous_time_interval = watchdog_time_count - \
                                     best_previous_time_count
            later_time_interval = watchdog_time_count - best_later_time_count
            current_file_name = \
                ss_pid_matched_trace_dict[system_server_time_str][
                    'file_name']
            current_content = \
                ss_pid_matched_trace_dict[system_server_time_str]['content']
            current_head = ss_pid_matched_trace_dict[system_server_time_str][
                'head']
            if no_best_time:
                change_best_previous = False
                change_best_later = False
                if no_best_previous_time:
                    if (current_time_interval > 0) and (
                            previous_time_interval < current_time_interval):
                        change_best_previous = True
                else:
                    if (current_time_interval - middle_t >= 0) and (
                            previous_time_interval > current_time_interval):
                        change_best_previous = True
                if change_best_previous:
                    best_previous_time_str = system_server_time_str
                    best_previous_time_count = system_server_time_count
                    best_previous_file_name = current_file_name
                    best_previous_content = current_content
                    best_previous_time_struct = system_server_trace_time_struct
                    best_previou_time_head = current_head
                if no_best_later_time:
                    if current_time_interval < later_time_interval:
                        change_best_later = True
                else:
                    if (current_time_interval <= 0) and (
                            current_time_interval > later_time_interval):
                        change_best_later = True
                if change_best_later:
                    best_later_time_str = system_server_time_str
                    best_later_time_count = system_server_time_count
                    best_later_file_name = current_file_name
                    best_later_content = current_content
                    best_later_time_struct = system_server_trace_time_struct
                    best_later_time_head = current_head
            else:
                if (current_time_interval - middle_t >= 0) and (
                        current_time_interval < previous_time_interval):
                    best_previous_time_str = system_server_time_str
                    best_previous_time_count = system_server_time_count
                    best_previous_file_name = current_file_name
                    best_previous_content = current_content
                    best_previous_time_struct = system_server_trace_time_struct
                    best_previou_time_head = current_head
                if (current_time_interval <= 0) and (current_time_interval >
                                                     later_time_interval):
                    best_later_time_str = system_server_time_str
                    best_later_time_count = system_server_time_count
                    best_later_file_name = current_file_name
                    best_later_content = current_content
                    best_later_time_struct = system_server_trace_time_struct
                    best_later_time_head = current_head
        if no_best_time:
            matched_time[watchdog_time_str][
                'best_alternative_previous_time_str'] = best_previous_time_str
            matched_time[watchdog_time_str][
                'best_alternative_later_time_str'] = best_later_time_str
            matched_time[watchdog_time_str][
                'best_alternative_previous_time_struct'] = \
                best_previous_time_struct
            matched_time[watchdog_time_str][
                'best_alternative_later_time_struct'] = \
                best_later_time_struct
            matched_time[watchdog_time_str][
                'best_alternative_previous_head'] = best_previou_time_head
            matched_time[watchdog_time_str][
                'best_alternative_later_head'] = best_later_time_head
        else:
            matched_time[watchdog_time_str][
                'best_previous_time_str'] = best_previous_time_str
            matched_time[watchdog_time_str][
                'best_later_time_str'] = best_later_time_str
            matched_time[watchdog_time_str][
                'best_previous_time_struct'] = best_previous_time_struct
            matched_time[watchdog_time_str][
                'best_later_time_struct'] = best_later_time_struct
            matched_time[watchdog_time_str][
                'best_previous_head'] = best_previou_time_head
            matched_time[watchdog_time_str][
                'best_later_head'] = best_later_time_head
        current_prev_trunc = watchdog_time_struct.timestamp() - \
                             best_previous_time_struct.timestamp()
        current_later_trunc = best_later_time_struct.timestamp() - \
                              watchdog_time_struct.timestamp()
        is_previous_valid = True
        is_later_valid = True
        p_ivalid_reason = None
        l_ivalid_reason = None
        if current_prev_trunc > pre_trunc:
            p_ivalid_reason = 'trace is ' + str(
                int(current_prev_trunc)) + 's long to ' \
                                           'watchdog ' \
                                           'time,' \
                                           'no need to ' \
                                           'print'
            is_previous_valid = False
        if current_later_trunc > later_trunc:
            is_later_valid = False
            l_ivalid_reason = 'trace is ' + str(
                int(current_later_trunc)) + 's long ' \
                                            'after ' \
                                            '' \
                                            '' \
                                            'watchdog ' \
                                            'time,' \
                                            'no need ' \
                                            'to print'
        matched_time[watchdog_time_str][
            'previous_file_name'] = best_previous_file_name
        matched_time[watchdog_time_str][
            'previous_content'] = best_previous_content
        matched_time[watchdog_time_str][
            'is_previous_valid'] = is_previous_valid
        matched_time[watchdog_time_str][
            'later_file_name'] = best_later_file_name
        matched_time[watchdog_time_str]['later_content'] = best_later_content
        matched_time[watchdog_time_str]['is_later_valid'] = is_later_valid
        matched_time[watchdog_time_str]['p_i_r'] = p_ivalid_reason
        matched_time[watchdog_time_str]['l_i_r'] = l_ivalid_reason
    return matched_time
Exemplo n.º 14
0
def parse_db_trace(root_path):
    """Parse dropbox trace files into ANR-style entries.

    :param root_path: report root directory (currently unused here;
        kept for signature consistency with the sibling parse_* entry
        points — cachemanager is assumed to be configured already).
    :return: result of parse_data_anr_entries over the dropbox trace files.
    """
    flymeprint.debug('parsing db trace...')
    trace_files = cachemanager.get_db_trace_files()
    # Warn on an empty file list, consistent with parse_db_watchdog /
    # parse_event_log_for_wd; parse_data_anr_entries still runs so the
    # caller gets its usual (empty) result shape.
    if not trace_files:
        flymeprint.warning('no db trace files found')
    return parse_data_anr_entries(trace_files)
Exemplo n.º 15
0
def parse_watchdog_raw_dict(watchdog_lists):
    """Build a formatted watchdog dict from raw per-file event-log contents.

    :param watchdog_lists: dict mapping file_name -> list of raw event-log
        content strings, each describing one watchdog occurrence.
        NOTE: the lists are mutated in place (superseded entries removed).
    :return: dict keyed by watchdog time string ('%m-%d %H:%M:%S'); each
        value holds the source file name, raw event log, parsed time
        struct, system_server pid, an SF-hang flag and a checker_list of
        blocked handler/monitor descriptors.
    """
    watchdog_formated_dict = dict()
    for file_name in watchdog_lists.keys():
        # First pass: whenever a newer watchdog time shows up, drop the
        # previously-newest entry from the list.
        # BUGFIX: iterate over a snapshot — removing from the list being
        # iterated makes the iterator skip the element after each removal.
        max_time_str = None
        prev_content = None
        for content in list(watchdog_lists[file_name]):
            time_str = flymeparser.get_watchdog_time_event_log(content)
            # BUGFIX: guard unparsable times; comparing None > str raises
            # TypeError on Python 3 (second pass already skips these).
            if not time_str:
                continue
            if max_time_str is None:
                max_time_str = time_str
                prev_content = content
            elif time_str > max_time_str:
                max_time_str = time_str
                watchdog_lists[file_name].remove(prev_content)
                prev_content = content
        # Second pass: build one formatted record per distinct time string.
        for content in watchdog_lists[file_name]:
            time_str = flymeparser.get_watchdog_time_event_log(content)
            watchdog_ss_pid = flymeparser.get_wd_ss_pid(content)
            if not time_str:
                continue
            if time_str in watchdog_formated_dict:
                # same watchdog seen in another file/entry; keep the first
                continue
            watchdog_formated_dict[time_str] = dict()
            watchdog_formated_dict[time_str]['checker_list'] = list()
            watchdog_formated_dict[time_str]['file_name'] = file_name
            watchdog_formated_dict[time_str]['event_log'] = content
            time_struct = datetime.strptime(time_str, '%m-%d %H:%M:%S')
            watchdog_formated_dict[time_str]['time_struct'] = time_struct
            watchdog_formated_dict[time_str]['pid'] = watchdog_ss_pid
            # SurfaceFlinger hangs carry no handler/monitor checker info.
            if flymeparser.is_sf_hang(content):
                watchdog_formated_dict[time_str]['__is_sf_hang__'] = True
                continue
            watchdog_formated_dict[time_str]['__is_sf_hang__'] = False
            # "Blocked in handler on <handler> (<thread>)" entries.
            matched_list = flymeparser.get_watchdog_hlist_event_log(content)
            if matched_list:
                for i in matched_list:
                    watchdog_formated_dict[time_str]['checker_list'].append({
                        'checker_type': 'handler',
                        'event_log': i[0],
                        'checker_name': i[1],
                        'thread_name': i[2]
                    })
            # "Blocked in monitor <class> on <checker> (<thread>)" entries.
            matched_list = flymeparser.get_watchdog_mlist_event_log(content)
            if matched_list:
                for i in matched_list:
                    watchdog_formated_dict[time_str]['checker_list'].append({
                        'checker_type': 'monitor',
                        'event_log': i[0],
                        'checker_class_name': i[1],
                        'checker_name': i[2],
                        'thread_name': i[3]
                    })
    if len(watchdog_formated_dict) == 0:
        flymeprint.debug('no watchdog found in event log')
    return watchdog_formated_dict
Exemplo n.º 16
0
def parse_event_log_for_wd(root_path):
    """Collect event-log files and hand them to the watchdog entry parser.

    :param root_path: report root directory (not consumed directly here;
        kept for signature parity with the other parse_* entry points).
    :return: result of parse_event_log_for_wd_by_entries on the files.
    """
    flymeprint.debug('parsing event log...')
    log_entries = cachemanager.get_event_log_files()
    if not log_entries:
        flymeprint.warning('no event log files found')
    return parse_event_log_for_wd_by_entries(log_entries)