def main(): conf = Conf() usf_file = open_sample_file(conf.ifile_name, conf.line_size) burst_hists = utils.usf_read_events(usf_file, line_size=conf.line_size, filter=conf.filter) usf_file.close() cache_size_range = default_range_func() line_size = 64 cache_size_range = map(lambda x: x / line_size, cache_size_range) pref_pcs = prefetchable_pcs(burst_hists) rdist_hist = rdist_hist_original(burst_hists) mr = lrumodel.miss_ratio_range([rdist_hist], cache_size_range, filtered_rdist_hist_list = [rdist_hist]) rdist_hist_w_pf = rdist_hist_after_prefetching(burst_hists, pref_pcs) mr_w_pf = lrumodel.miss_ratio_range([rdist_hist], cache_size_range, filtered_rdist_hist_list = [rdist_hist_w_pf]) print mr print mr_w_pf
def main(): conf = Conf() usf_file = open_sample_file(conf.ifile_name, conf.line_size) burst_hists = utils.usf_read_events(usf_file, line_size=conf.line_size, filter=conf.filter) usf_file.close() hist = generate_sdist_hist(burst_hists) data = hist.items() data.sort(key=lambda x: x[0]) l1_sd_samples = 0 l2_sd_samples = 0 l3_sd_samples = 0 mem_sd_samples = 0 non_l1_sd_samples = 0 total_sd_samples = 0 for (sd, count) in data: # print "%i %i" % (sd * conf.line_size, count) total_sd_samples += count if sd < 1024: # 1024 number of cache lines in 64kB L1 cache l1_sd_samples += count continue elif sd < 8192: l2_sd_samples += count elif sd < 98304: l3_sd_samples += count else: mem_sd_samples += count non_l1_sd_samples += count l1_bound_samples = (float(l1_sd_samples) / float(total_sd_samples) * 100) l2_bound_samples = (float(l2_sd_samples) / float(non_l1_sd_samples) * 100) l3_bound_samples = (float(l3_sd_samples) / float(non_l1_sd_samples) * 100) mem_bound_samples = (float(mem_sd_samples) / float(non_l1_sd_samples) * 100) non_l1_bound_samples = (float(non_l1_sd_samples) / float(total_sd_samples) * 100) print "L1 %.2f %% of total" % (l1_bound_samples) print "L2 %.2f %% of %.2f %%" % (l2_bound_samples, non_l1_bound_samples) print "L3 %.2f %% of %.2f %%" % (l3_bound_samples, non_l1_bound_samples) print "Memory %.2f %% of %.2f %%" % (mem_bound_samples, non_l1_bound_samples)
def main(): conf = Conf() usf_file = open_sample_file(conf.ifile_name, conf.line_size) burst_hists = utils.usf_read_events(usf_file, line_size=conf.line_size, filter=conf.filter) usf_file.close() hist = generate_sdist_hist(burst_hists) data = hist.items() data.sort(key=lambda x: x[0]) x_values = map(lambda (x, y): x * conf.line_size, data) y_values = map(lambda (x, y): y, data) if not x_values: print >> sys.stderr, "WARNING: Filter result is empty. Nothing to plot." sys.exit(0) if x_values[0] == 0: print >> sys.stderr, "WARNING: Not showing stack distance 0 in hist." print >> sys.stderr, "sdist: 0, count: %d" % (y_values[0]) x_values = x_values[1:] y_values = y_values[1:] if not x_values: print >> sys.stderr, "WARNING: Nothing to plot. Exiting." sys.exit(0) # plot the histogram pyplot.hold(True) pyplot.title("Stack distance histogram") pyplot.ylabel("Samples") pyplot.xlabel("Stack distance (bytes)") pyplot.yscale('log', basey=10) pyplot.xscale('log', basex=2) pyplot.bar(x_values, y_values) for marker in conf.markers: print "Marker: %i" % marker pyplot.axvline(marker) pyplot.axis([1, float(max(x_values)), 1, float(max(y_values))]) pyplot.hold(False) pyplot.show()
def main(): conf = Conf() usf_file = open_sample_file(conf.ifile_name, conf.line_size) burst_hists = utils.usf_read_events(usf_file, line_size=conf.line_size, filter=conf.filter) usf_file.close() hist = generate_sdist_hist(burst_hists) data = hist.items() data.sort(key=lambda x: x[0]) x_values = map(lambda (x, y): x * conf.line_size, data) y_values = map(lambda (x, y): y, data) if not x_values: print >> sys.stderr, "WARNING: Filter result is empty. Nothing to plot." sys.exit(0) if x_values[0] == 0: print >> sys.stderr, "WARNING: Not showing stack distance 0 in hist." print >> sys.stderr, "sdist: 0, count: %d" % (y_values[0]) x_values = x_values[1:] y_values = y_values[1:] if not x_values: print >> sys.stderr, "WARNING: Nothing to plot. Exiting." sys.exit(0) # plot the histogram pyplot.hold(True) pyplot.title("Stack distance histogram") pyplot.ylabel("Samples") pyplot.xlabel("Stack distance (bytes)") pyplot.yscale('log', basey = 10) pyplot.xscale('log', basex = 2) pyplot.bar(x_values, y_values) for marker in conf.markers: print "Marker: %i" % marker pyplot.axvline(marker); pyplot.axis([ 1, float(max(x_values)), 1, float(max(y_values)) ]) pyplot.hold(False) pyplot.show()
def main(): conf = Conf() usf_file = open_sample_file(conf.ifile_name, conf.line_size) burst_hists = utils.usf_read_events(usf_file, line_size=conf.line_size, filter=conf.filter) usf_file.close() hist = generate_sdist_hist(burst_hists) data = hist.items() data.sort(key=lambda x: x[0]) l1_sd_samples = 0 l2_sd_samples = 0 l3_sd_samples = 0 mem_sd_samples = 0 non_l1_sd_samples = 0 total_sd_samples = 0 for (sd, count) in data: # print "%i %i" % (sd * conf.line_size, count) total_sd_samples += count if sd < 1024: # 1024 number of cache lines in 64kB L1 cache l1_sd_samples += count continue elif sd < 8192: l2_sd_samples += count elif sd < 98304: l3_sd_samples += count else: mem_sd_samples += count non_l1_sd_samples += count l1_bound_samples = (float(l1_sd_samples)/float(total_sd_samples)*100) l2_bound_samples = (float(l2_sd_samples)/float(non_l1_sd_samples)*100) l3_bound_samples = (float(l3_sd_samples)/float(non_l1_sd_samples)*100) mem_bound_samples = (float(mem_sd_samples)/float(non_l1_sd_samples)*100) non_l1_bound_samples = (float(non_l1_sd_samples)/float(total_sd_samples) * 100) print "L1 %.2f %% of total"%(l1_bound_samples) print "L2 %.2f %% of %.2f %%"%(l2_bound_samples, non_l1_bound_samples) print "L3 %.2f %% of %.2f %%"%(l3_bound_samples, non_l1_bound_samples) print "Memory %.2f %% of %.2f %%"%(mem_bound_samples, non_l1_bound_samples)
def main():
    """Plot the reuse distance histogram of a sample file and dump it to CSV.

    Writes the raw (rdist, count) pairs to '.temp.csv', then shows a log-log
    bar chart of the distribution.
    """
    conf = Conf()

    usf_file = open_sample_file(conf.ifile_name, conf.line_size)
    burst_hists = utils.usf_read_events(usf_file,
                                        line_size=conf.line_size,
                                        filter=conf.filter)
    usf_file.close()

    hist = generate_rdist_hist(burst_hists)
    data = hist.items()
    data.sort(key=lambda x: x[0])

    # Print reuse distance histogram distribution to a scratch file.  The
    # original code never closed this handle; try/finally guarantees it.
    f = open('.temp.csv', 'w')
    try:
        for (rdist, count) in data:
            f.write(str((rdist, count)))
            f.write('\n')
    finally:
        f.close()

    x_values = [rdist for (rdist, count) in data]
    y_values = [count for (rdist, count) in data]

    # (The original repeated this emptiness check twice back to back; once
    # is sufficient since nothing changes the lists in between.)
    if not x_values:
        print >> sys.stderr, "WARNING: Filter result is empty. Nothing to plot."
        sys.exit(0)

    # plot the histogram
    pyplot.hold(True)
    pyplot.title("Reuse distance distance histogram")
    pyplot.ylabel("Samples")
    pyplot.xlabel("Reuse distance (accesses)")
    pyplot.yscale('log', basey=10)
    pyplot.xscale('log', basex=2)
    pyplot.bar(x_values, y_values)
    pyplot.axis([1, float(max(x_values)), 1, float(max(y_values))])
    pyplot.hold(False)
    pyplot.show()
def main():
    """Dump the reuse distance histogram to '.temp.csv' and plot it.

    Fixes two issues in the original: the CSV file handle was leaked (never
    closed), and the empty-input check was duplicated verbatim.
    """
    conf = Conf()

    usf_file = open_sample_file(conf.ifile_name, conf.line_size)
    burst_hists = utils.usf_read_events(usf_file,
                                        line_size=conf.line_size,
                                        filter=conf.filter)
    usf_file.close()

    hist = generate_rdist_hist(burst_hists)
    data = hist.items()
    data.sort(key=lambda x: x[0])

    # Print reuse distance histogram distribution.
    f = open('.temp.csv', 'w')
    try:
        for (rdist, count) in data:
            f.write(str((rdist, count)))
            f.write('\n')
    finally:
        f.close()

    x_values = [rdist for (rdist, count) in data]
    y_values = [count for (rdist, count) in data]

    if not x_values:
        print >> sys.stderr, "WARNING: Filter result is empty. Nothing to plot."
        sys.exit(0)

    # plot the histogram
    pyplot.hold(True)
    pyplot.title("Reuse distance distance histogram")
    pyplot.ylabel("Samples")
    pyplot.xlabel("Reuse distance (accesses)")
    pyplot.yscale('log', basey=10)
    pyplot.xscale('log', basex=2)
    pyplot.bar(x_values, y_values)
    pyplot.axis([1, float(max(x_values)), 1, float(max(y_values))])
    pyplot.hold(False)
    pyplot.show()
def main():
    # Aggregate per-PC histograms across a directory of sample files and
    # collect the set of prefetchable PCs.
    conf = Conf()
    listing = os.listdir(conf.path)
    cache_size_range = default_range_func()

    # Accumulators built up across all processed sample files.
    win_count = 0
    mr = {}
    mr_w_pf = {}
    pref_pcs_win = {}
    full_pc_stride_hist = {}
    global_pc_fwd_sdist_hist = {}
    global_pc_sdist_hist = {}
    global_pc_corr_hist = {}
    global_pc_recur_hist = {}
    global_prefetchable_pcs = []
    global_pc_smptrace_hist = {}

    # When a sample budget is given, subsample the directory: pick evenly
    # spaced "sample.<n>" files instead of processing every file.
    if not conf.num_samples is None:
        num_sample_files = len(listing)
        files_required = math.ceil(float(conf.num_samples) / float(1200)) + 1  # 1200 is the number of samples per file
        files_sapcing = int(math.ceil(float(num_sample_files) / float(files_required)))
        print >> sys.stderr, "files spacing: %d"%(files_sapcing)
        if files_sapcing == 0:
            files_sapcing = 1
        file_no = 0
        listing = []
        while file_no < num_sample_files:
            # NOTE(review): assumes sample files are named "sample.<n>" with
            # consecutive n -- confirm against the sampler that produced them.
            file_name = "sample."+str(file_no)
            listing.append(file_name)
            file_no += files_sapcing

    for infile in listing:
        infile = conf.path + infile
        # Skip files that cannot be opened or read; best-effort aggregation.
        usf_file = open_sample_file(infile, conf.line_size)
        if usf_file == None:
            continue
        try:
            burst_hists = utils.usf_read_events(usf_file,
                                                line_size=conf.line_size,
                                                filter=conf.filter)
        except IOError, e:
            continue
        usf_file.close()

        # prefetchable_pcs returns [pref_pcs, pc_sdist_hist, pc_recur_hist,
        # pc_fwd_sdist_hist]; unpack by index.
        pref_pcs_sdist_recur_list = prefetchable_pcs(burst_hists, conf)
        pref_pcs = pref_pcs_sdist_recur_list[0]
        pc_sdist_hist = pref_pcs_sdist_recur_list[1]
        pc_recur_hist = pref_pcs_sdist_recur_list[2]
        pc_fwd_sdist_hist = pref_pcs_sdist_recur_list[3]

        # Merge this file's results into the global accumulators.
        build_global_prefetchable_pcs(global_prefetchable_pcs, pref_pcs)
        # burst_hists[0] is a tuple of per-PC histograms; index 4 is the
        # correlation histogram, index 6 the sampled-trace histogram
        # (matches the unpacking order used by the sibling main at L9).
        pc_corr_hist = burst_hists[0][4]
        build_global_pc_corr_hist(global_pc_corr_hist, pc_corr_hist)
        build_global_pc_fwd_sdist_recur_hist(global_pc_fwd_sdist_hist,
                                             global_pc_recur_hist,
                                             pc_fwd_sdist_hist,
                                             pc_recur_hist,
                                             global_pc_sdist_hist,
                                             pc_sdist_hist)
        build_full_pc_stride_hist(burst_hists, full_pc_stride_hist)
        pc_smptrace_hist = burst_hists[0][6]
        ins_trace_analysis.add_trace_to_global_pc_smptrace_hist(global_pc_smptrace_hist, pc_smptrace_hist)
def main():
    # Analyze instruction traces around one (or more) delinquent load
    # addresses across a directory of sample files.

    # Per-instruction information maps (populated elsewhere; declared here).
    ins_src_regs_dict = {}
    ins_dst_regs_dict = {}
    ins_tags_dict = {}
    branch_dict = {}
    routine_BB_dict = {}
    # information maps for Memory operations
    ins_base_reg_dict = {}
    ins_mem_dis_dict = {}
    ins_idx_reg_dict = {}
    ins_mem_scale_dict = {}

    global_prefetchable_pcs = []
    global_pc_smptrace_hist = {}
    global_pc_stride_hist = {}

    conf = Conf()

    # The delinquent load address may be given in hex or decimal.
    if not conf.hex_address == None:
        delinq_load_addr = int(conf.hex_address, 16)
    else:
        delinq_load_addr = int(conf.dec_address, 10)

    # Fall back to the single configured address when no list is available.
    delinq_load_address_list = get_delinq_load_address_list(conf)
    if delinq_load_address_list == None:
        delinq_load_address_list = [delinq_load_addr]

    listing = os.listdir(conf.path)

    # When a sample budget is given, subsample the directory by picking
    # evenly spaced "sample.<n>" files (same scheme as the sibling main).
    if not conf.num_samples is None:
        num_sample_files = len(listing)
        files_required = math.ceil(float(conf.num_samples) / float(1200)) + 1  # 1200 is the number of samples per file
        files_sapcing = int(math.ceil(float(num_sample_files) / float(files_required)))
        print >> sys.stderr, "files spacing: %d"%(files_sapcing)
        if files_sapcing == 0:
            files_sapcing = 1
        file_no = 0
        listing = []
        while file_no < num_sample_files:
            file_name = "sample."+str(file_no)
            listing.append(file_name)
            file_no += files_sapcing

    for infile in listing:
        infile = conf.path + infile
        # Best-effort: skip unopenable/unreadable sample files.
        usf_file = open_sample_file(infile, conf.line_size)
        if usf_file == None:
            continue
        try:
            burst_hists = utils.usf_read_events(usf_file,
                                                line_size=conf.line_size,
                                                filter=conf.filter)
        except IOError, e:
            continue
        usf_file.close()

        # Loop-with-continue is used only to unpack the histogram tuple:
        # after the loop the names hold the LAST element of burst_hists.
        # NOTE(review): relies on burst_hists being non-empty and on this
        # exact 7-tuple layout -- confirm against utils.usf_read_events.
        for (pc_rdist_hist, pc_stride_hist, pc_freq_hist, pc_time_hist,
             pc_corr_hist, pc_fwd_rdist_hist, pc_smptrace_hist) in burst_hists:
            continue

        ins_trace_ptr_nobj_analysis.add_trace_to_global_pc_smptrace_hist(global_pc_smptrace_hist, pc_smptrace_hist)
        ins_trace_ptr_nobj_analysis.add_to_pc_stride_hist(pc_stride_hist, global_pc_stride_hist)