def regenerate_st_stat(pairs: list, cache: str) -> None:
    """Re-extract single-thread IPC stats for *pairs* and cache them as CSV.

    For every pair, reads ``stats.txt`` under ``st_stat_dir`` (module-level
    path), extracts the IPC counter, writes the resulting DataFrame to
    *cache*, then reads the cache back and prints it.

    Args:
        pairs: pair names; each is a sub-directory of ``st_stat_dir``.
        cache: path of the CSV file to (re)generate.
    """
    # FIX: raw string — '\.' inside a normal string literal is an invalid
    # escape sequence (SyntaxWarning since Python 3.12).  The byte content
    # of the pattern is unchanged.
    st_targets = [
        r'system\.cpu\.(ipc::0)',
    ]
    matrix = {}
    for p in pairs:
        matrix[p] = c.get_stats(pjoin(st_stat_dir, p, 'stats.txt'),
                                st_targets, re_targets=True)
    df = pd.DataFrame(matrix)
    df.to_csv(cache, index=True)
    # Read the cache back so what is printed is exactly what was persisted.
    df = pd.read_csv(cache, index_col=0)
    print(df)
def main():
    """Gather gem5 stats for every benchmark pair under ``--stat-dir`` and
    print them as a DataFrame, optionally saving to ``--output`` CSV."""
    parser = argparse.ArgumentParser(usage='specify stat directory')
    parser.add_argument('-s', '--stat-dir', action='store', required=True,
                        help='gem5 output directory')
    parser.add_argument('-o', '--output', action='store',
                        help='csv to save results')
    parser.add_argument('--branch', action='store_true')
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='whether output intermediate result')
    parser.add_argument('-b', '--error-bound', action='store', type=float,
                        default=0.0,
                        help='Threshold to output an entry')
    # FIX: a store_true flag defaults to False; the previous ``default=0.0``
    # gave it a float default instead of a bool (both falsy, but wrong type).
    parser.add_argument('-i', '--ipc-only', action='store_true',
                        help='Only extract ipc')
    parser.add_argument('--st', action='store_true',
                        help='processing ST stats')
    parser.add_argument('--pair-filter', action='store', default='',
                        help='file that filt pairs')
    parser.add_argument('-f', '--stat-file', action='store',
                        help='name of stats file', default='stats.txt')
    parser.add_argument('-l', '--fanout', action='store_true',
                        help='print fanout')
    parser.add_argument('-k', '--breakdown', action='store_true',
                        help='print breakdown')
    parser.add_argument('--op', action='store_true',
                        help='print operand busy state')
    parser.add_argument('--flow', action='store_true',
                        help='print bandwidth usages')
    parser.add_argument('-p', '--packet', action='store_true',
                        help='print type and number of different packets')
    opt = parser.parse_args()

    pairs = c.pairs(opt.stat_dir, return_path=False)
    if opt.pair_filter:
        pairs = c.pair_filt(pairs, opt.pair_filter)
    paths = c.pair_to_full_path(opt.stat_dir, pairs)
    pairs, paths = c.stat_filt(pairs, paths, opt.stat_file)
    # paths = c.time_filt(paths)
    paths = [pjoin(x, opt.stat_file) for x in paths]

    # BUG FIX: the old code did ``targets = brief_targets`` followed by
    # ``targets += ...`` INSIDE the per-pair loop.  ``+=`` on a list mutates
    # it in place, so the module-level ``brief_targets`` grew by every
    # enabled optional group again on each iteration.  Copy first, and build
    # the (loop-invariant) target list once, before the loop.
    if not opt.ipc_only:
        targets = list(brief_targets)
        if opt.branch:
            targets += branch_targets
        if opt.fanout:
            targets += fanout_targets
        if opt.breakdown:
            targets += breakdown_targets
        if opt.op:
            targets += operand_targets
        if opt.packet:
            targets += packet_targets
        if opt.flow:
            targets += flow_target

    matrix = {}
    for pair, path in zip(pairs, paths):
        if opt.ipc_only:
            d = c.get_stats(path, ipc_target, re_targets=True)
        else:
            d = c.get_stats(path, targets, re_targets=True)
        if len(d):
            if not opt.st:
                matrix[pair] = further_proc(pair, d, opt.verbose)
            else:
                matrix[pair] = d
            # These helpers mutate ``d`` in place (same order as before),
            # so derived columns appear in ``matrix`` in the ``--st`` case.
            if opt.branch:
                c.add_branch_mispred(d)
            if opt.fanout:
                c.add_fanout(d)
            if opt.packet:
                c.add_packet(d)

    df = pd.DataFrame.from_dict(matrix, orient='index')
    df = df.sort_index()
    # FIX: positional ``axis`` for sort_index was deprecated and removed in
    # pandas 2.0; use the keyword form.
    df = df.sort_index(axis=1)
    df = df.sort_values(['ipc'])
    if len(df):
        df.loc['mean'] = df.mean()
    df = df.fillna(0)
    print(df)
    if opt.output:
        df.to_csv(opt.output, index=True)
# Bar-chart layout parameters: bar width, gap between bars, collected bar
# artists, horizontal offset of the current config's bars, and counters.
width = 0.9
interval = 0.1
rects = []
shift = 0.0
i = 0
num_points = 0
num_configs = len(stat_dirs)
# Baseline run: one stats.txt per simulation point.
# NOTE(review): ``osp``, ``pd``, ``c``, ``t``, ``stat_dirs``,
# ``baseline_stat_dir``, ``points`` and ``configs_ordered`` are defined
# outside this chunk — presumably imports/globals earlier in the file.
stat_files = [
    osp.join(baseline_stat_dir, point, 'stats.txt') for point in points
]
matrix = {}
for point, stat_file in zip(points, stat_files):
    d = c.get_stats(stat_file, t.breakdown_targets, re_targets=True)
    matrix[point] = d
baseline_df = pd.DataFrame.from_dict(matrix, orient='index')
# Queueing-delay column of the baseline, used for comparison below.
baseline = baseline_df['queueingD'].values
for config in configs_ordered:
    # Same extraction as the baseline, repeated for each configuration.
    stat_dir = stat_dirs[config]
    stat_dir = osp.expanduser(stat_dir)
    stat_files = [osp.join(stat_dir, point, 'stats.txt') for point in points]
    matrix = {}
    for point, stat_file in zip(points, stat_files):
        d = c.get_stats(stat_file, t.breakdown_targets, re_targets=True)
        matrix[point] = d
    df = pd.DataFrame.from_dict(matrix, orient='index')
    if num_points == 0:
        # NOTE(review): chunk is truncated here — the body of this ``if``
        # (and the rest of the loop) continues outside the visible source.
import common as c
import target_stats as t

# Root of the gem5 result tree to analyse.
# NOTE(review): ``osp`` and ``pd`` are not imported in this chunk —
# presumably ``os.path as osp`` / ``pandas as pd`` earlier in the file.
stat_dir = osp.expanduser('~/gem5-results-2017/xbar-rand-hint')
benchmarks = [*c.get_spec2017_int(), *c.get_spec2017_fp()]
# Three simulation points per benchmark: <bench>_0 .. <bench>_2.
points = []
for b in benchmarks:
    for i in range(0, 3):
        points.append(f'{b}_{i}')
stat_files = [osp.join(stat_dir, point, 'stats.txt') for point in points]
# One row of packet counters per simulation point.
matrix = {}
for point, stat_file in zip(points, stat_files):
    d = c.get_stats(stat_file, t.packet_targets, re_targets=True)
    matrix[point] = d
df = pd.DataFrame.from_dict(matrix, orient='index')
# Convert every packet counter into a fraction of the total packet count,
# then drop the (now redundant) total column.
for i in df.columns.values:
    if i != 'TotalP':
        df[i] = df[i] / df['TotalP']
df.drop('TotalP', axis=1, inplace=True)
# Move 'DestOpP' and 'KeySrcP' to the end of the column order
# (remove + append keeps all other columns in their original order).
names = df.columns.values.tolist()
names.remove('DestOpP')
names.append('DestOpP')
names.remove('KeySrcP')
names.append('KeySrcP')
print(type(names))
print(names)
df = df.reindex(columns=names)
def main():
    """Collect branch-prediction stats for every pair under ``--stat-dir``,
    print misprediction-rate / MPKI summaries, optionally save to CSV."""
    parser = argparse.ArgumentParser(usage='specify stat directory')
    parser.add_argument('-s', '--stat-dir', action='store', required=True,
                        help='gem5 output directory')
    parser.add_argument('-o', '--output', action='store',
                        help='csv to save results')
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='whether output intermediate result')
    parser.add_argument('-b', '--error-bound', action='store', type=float,
                        default=0.0, help='Threshold to output an entry')
    # FIX: a store_true flag defaults to False; drop the bogus
    # ``default=0.0`` float default.
    parser.add_argument('-i', '--ipc-only', action='store_true',
                        help='Only extract ipc')
    parser.add_argument('--st', action='store_true',
                        help='processing ST stats')
    # FIX: typo in user-facing help text ("file than filt" -> "file that filt").
    parser.add_argument('--pair-filter', action='store', default='',
                        help='file that filt pairs')
    opt = parser.parse_args()

    pairs = c.pairs(opt.stat_dir, return_path=False)
    if opt.pair_filter:
        pairs = c.pair_filt(pairs, opt.pair_filter)
    paths = c.pair_to_full_path(opt.stat_dir, pairs)
    pairs, paths = c.stat_filt(pairs, paths)
    # paths = c.time_filt(paths)
    paths = [pjoin(x, 'stats.txt') for x in paths]

    matrix = {}
    for pair, path in zip(pairs, paths):
        if opt.ipc_only:
            d = c.get_stats(path, ipc_target, re_targets=True)
        else:
            d = c.get_stats(path, branch_targets, re_targets=True)
        if len(d):
            if not opt.st:
                matrix[pair] = further_proc(pair, d, opt.verbose)
            else:
                matrix[pair] = d

    df = pd.DataFrame.from_dict(matrix, orient='index')
    pd.set_option('display.max_columns', None)
    # Best-effort summary: the required columns ('Misp', 'cond', ...) are
    # absent until the runs have produced data, so any failure here is
    # treated as "no data yet".  Kept deliberately broad, as before.
    try:
        df['Misp%'] = df['Misp'] / df['cond'] * 100
        df['MPKI'] = df['Misp'] / df['Insts'] * 1000
        df['Status'] = 'running'
        # A run is considered finished once it has committed 30M instructions.
        df.loc[df['Insts'] > 30000000, ['Status']] = 'done'
        print(df[['Status', 'ipc', 'indirectMis', 'Misp', 'Misp%', 'MPKI']])
        # Harmonic and geometric means of MPKI and misprediction rate.
        hmean_mpki = len(df['MPKI']) / np.sum(1.0 / df['MPKI'])
        hmean_misp = len(df['Misp%']) / np.sum(1.0 / df['Misp%'])
        gmean_mpki = df['MPKI'].prod()**(1.0 / len(df['MPKI']))
        gmean_misp = df['Misp%'].prod()**(1.0 / len(df['Misp%']))
        print('Totally %d tests' % len(df['Status']))
        print(
            'Mean MPKI is: %.2f, Hmean MPKI is: %.4f, Gmean MPKI is: %.4f' %
            (df['MPKI'].mean().round(2), hmean_mpki, gmean_mpki))
        print(
            'Mean Misp%% is: %.2f, Hmean Misp%% is: %.4f, Gmean Misp%% is: %.4f' %
            (df['Misp%'].mean().round(2), hmean_misp, gmean_misp))
        print('Mean IPC is: %.4f' % df['ipc'].mean())
        df.drop(['Status'], axis=1, inplace=True)
    except Exception:
        print("No data yet")
        # FIX: use ``return`` instead of ``exit()`` — ``exit`` is injected
        # by the site module and is not guaranteed to exist (e.g. under
        # ``python -S``); returning from main() has the same effect here.
        return

    if not opt.st:
        errors = df['IPC prediction error'].values
        print('Mean: {}'.format(np.mean(np.abs(errors))))
        # Only show entries whose prediction error exceeds the bound.
        print(df['overall QoS'][
            abs(df['IPC prediction error']) > opt.error_bound])
    if opt.output:
        df.to_csv(opt.output, index=True)
# Bar-chart state for comparing three bounding models on the same run.
# NOTE(review): ``width``, ``interval``, ``colors``, ``stat_dirs``,
# ``points``, ``osp``, ``pd``, ``np``, ``plt``, ``c`` and ``t`` come from
# earlier in the file, outside this chunk.
shift = 0.0
i = 0
num_points = 0
bounds = ['by_bw', 'by_chasing', 'by_crit_ptr']
num_configs = len(bounds)
for bound in bounds:
    # All three bounds are extracted from the same 'Xbar4' result tree.
    stat_dir = stat_dirs['Xbar4']
    stat_dir = osp.expanduser(stat_dir)
    stat_files = [osp.join(stat_dir, point, 'stats.txt') for point in points]
    matrix = {}
    for point, stat_file in zip(points, stat_files):
        d = c.get_stats(stat_file, t.standard_targets + t.packet_targets,
                        re_targets=True)
        # Derive per-packet columns in place.
        c.add_packet(d)
        matrix[point] = d
    df = pd.DataFrame.from_dict(matrix, orient='index')
    if num_points == 0:
        num_points = len(df)
    # print(len(df))
    # Left edge of each group of bars; groups are num_configs bars wide.
    tick_starts = np.arange(0, num_points * num_configs,
                            (width + interval) * num_configs) + shift
    # print(tick_starts)
    # NOTE(review): chunk is truncated here — the ``plt.bar`` call below is
    # cut off mid-argument-list and continues outside the visible source.
    rect = plt.bar(tick_starts, df[bound].values, edgecolor=colors[i],
# Compare per-config IPC against the 'Xbar4' baseline.
# NOTE(review): ``stat_dirs``, ``configs_ordered``, ``points``, ``osp``,
# ``pd``, ``c`` and ``t`` are defined earlier in the file, outside this chunk.
interval = 0.4
rects = []
num_points = 0
num_configs = len(stat_dirs)
# One IPC DataFrame per configuration, keyed by config name.
dfs = dict()
for config in configs_ordered:
    print(config)
    stat_dir = stat_dirs[config]
    stat_dir = osp.expanduser(stat_dir)
    stat_files = [osp.join(stat_dir, point, 'stats.txt') for point in points]
    matrix = {}
    for point, stat_file in zip(points, stat_files):
        d = c.get_stats(stat_file, t.ipc_target, re_targets=True)
        matrix[point] = d
    df = pd.DataFrame.from_dict(matrix, orient='index')
    dfs[config] = df
    if num_points == 0:
        num_points = len(df)
baseline = 'Xbar4'
# The baseline's geometric mean relative to itself is 1.0 by definition.
dfs['Xbar4'].loc['rel_geo_mean'] = [1.0]
print('Xbar4')
print(dfs['Xbar4'])
for config in configs_ordered:
    if config != 'Xbar4':
        # NOTE(review): chunk is truncated here — the body of this ``if``
        # (presumably computing each config's relative mean) continues
        # outside the visible source.
# Per-point subplot of wake-up flow usage across configurations.
# NOTE(review): ``points``, ``axs``, ``n_cols``, ``configs_ordered``,
# ``stat_dirs``, ``num_x``, ``osp``, ``pd``, ``plt``, ``c`` and ``t`` come
# from earlier in the file; the nesting below is reconstructed from a
# flattened chunk — confirm against the original file.
df = None
count = 0
for point in points:
    shift = 0.0
    # Select this point's subplot in the n_cols-wide grid.
    sub_ax = axs[count // n_cols][count % n_cols]
    plt.axes(sub_ax)
    for j, config in enumerate(configs_ordered):
        print(config)
        stat_dir = stat_dirs[config]
        stat_dir = osp.expanduser(stat_dir)
        stat_file = osp.join(stat_dir, point, 'stats.txt')
        print(stat_dir)
        matrix = {}
        d = c.get_stats(stat_file, t.flow_target, re_targets=True)
        print(d)
        # Drop the aggregate and zero-usage buckets before converting.
        d.pop('WKFlowUsage::total')
        d.pop('WKFlowUsage::0')
        # cycle to packets
        # Each key is 'WKFlowUsage::<n>' where <n> packets were in flight;
        # weight the cycle count by <n> to get a packet count.
        for k in d:
            n = int(k.split('::')[1]) * d[k]
            d[k] = n
        matrix[point] = d
        df = pd.DataFrame.from_dict(matrix, orient='index')
        if num_x == 0:
            num_x = len(df.columns)
        # print(len(df))