def stacked_bar_plot(filenames, vecs, xticklabels=None):
    """Stacked bar of per-function cycle counts, one bar per run."""
    cycles = []
    for i, filename in enumerate(filenames):
        pltutils.set_corpus_stats(dirname(filename))
        _, _, _, cls, _, _ = pltutils.read_one_output(filename, vec=vecs[i])
        cycles.append(cls)
    # Transpose so that cycles[i] holds the counts of function i across all runs.
    cycles = np.array(cycles)
    cycles = np.transpose(cycles)
    width = 0.75
    ind = np.arange(len(filenames))
    bottom = np.zeros(len(ind))
    fig, ax = plt.subplots()
    p = [None] * len(fns)
    for i in range(len(fns)):
        p[i] = ax.bar(ind, cycles[i], width, bottom=bottom, color=colors[fns[i]])
        bottom = np.sum([bottom, cycles[i]], axis=0)
    plt.legend(p, fns)
    if xticklabels is not None:
        ax.set_xticklabels(xticklabels)
    ax.set_ylabel('cycle component')
    ax.set_title('#cycle counts for different runs per group')
    ax.set_xticks(ind)
    plt.show()


def perf_plot(filenames, vecs, legends=None):
    """Grouped bar chart of performance [Flops/Cycle] per timer, one group of bars per run."""
    width = 1 / (len(filenames) + 1.)  # width of bars
    ind = np.arange(len(fns[:-3]))
    fig, ax = plt.subplots()
    p = [None] * len(filenames)
    for i, filename in enumerate(filenames):
        pltutils.set_corpus_stats(dirname(filename))
        _, _, flp, cls, _, perf = pltutils.read_one_output(filename, vec=vecs[i])
        p[i] = ax.bar(ind + i * width, perf[:-3], width, color=colors2[i])
    ax.set_xticks(ind)
    # Only the first len(fns) - 3 timers are plotted, so label only those ticks.
    ax.set_xticklabels(fns[:-3])
    plt.setp(ax.get_xticklabels(), rotation=30, horizontalalignment='center')
    ax.set_ylabel('Performance [Flops/Cycle]', rotation="0", size=28)
    ax.yaxis.set_label_coords(0.22, 1.03)
    fig.tight_layout()
    # ax.set_title('performance for different runs per group')
    ax.tick_params(axis='x', labelsize=20)
    ax.tick_params(axis='y', labelsize=24)
    ax.set_facecolor((211.0 / 255, 211.0 / 255, 211.0 / 255))
    if legends is not None:
        plt.legend(p, legends, prop={'size': 22})
    ax.grid(linestyle='--', linewidth=2, axis='y')
    plt.show()


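# Hedged usage sketch (not part of the original pipeline): the file paths, vec flags,
# and legend labels below are hypothetical. perf_plot only needs one timing file per
# run plus a parallel list of booleans marking whether that run was vectorized.
def example_perf_comparison():
    timing_files = ["results/fast_timings.txt", "results/slow_timings.txt"]  # hypothetical paths
    perf_plot(timing_files, vecs=[True, False], legends=["optimized", "baseline"])

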
def conv_plot(filenames, vecs, legends=None):
    """Grouped bar chart of the number of convergence iterations, one group of bars per run."""
    width = 1 / (len(filenames) + 1.)  # width of bars
    its = list(pltutils.iters.keys())
    ind = np.arange(len(its))
    fig, ax = plt.subplots()
    p = [None] * len(filenames)
    for i, filename in enumerate(filenames):
        pltutils.set_corpus_stats(dirname(filename))
        pltutils.read_one_output(filename, vec=vecs[i])
        convs = [pltutils.iters[it] for it in its]
        p[i] = ax.bar(ind + i * width, convs, width, color=colors2[i])
    ax.set_ylabel('#convergence iters')
    ax.set_title('#convergence iters for different runs per group')
    ax.set_xticks(ind)
    ax.set_xticklabels(its)
    if legends is not None:
        plt.legend(p, legends)
    ax.grid(linestyle='--', linewidth=2, axis='y')
    plt.show()


def read_one_timer_all_Ns(path, timer_name, label="", xaxis='N'):
    """Collect the performance of a single timer across all timing files in `path`."""
    data = {'x': [], 'y': []}
    pltutils.set_corpus_stats(path)
    for filename in listdir(path):
        if "timings" in filename:
            fname = join(path, filename)
            K, N, _, _, _, perf = pltutils.read_one_output(fname)
            data['x'].append(K if xaxis == 'K' else N)
            data['y'].append(perf[fns.index(timer_name)])
    # Amend the label afterwards so we can obtain the automated cycle count for the timer.
    return {label + " " + timer_name: data}


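# Hedged usage sketch (the directory names and labels are hypothetical): each call
# returns a single-entry dict keyed by "<label> <timer_name>", so results from several
# runs of the same timer can simply be merged into one dict for plotting.
def example_collect_run_em_series():
    series = {}
    for path, label in [("results/baseline", "baseline"), ("results/avx", "avx")]:
        series.update(read_one_timer_all_Ns(path, "RUN_EM", label=label, xaxis='N'))
    return series

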
def pie_chart(filenames, vecs, xticklabels=None):
    """Pie chart of how the total runtime is split over a hand-picked subset of functions."""
    if len(filenames) > 1:
        print("Pie chart can only be done for one file")
        sys.exit()
    pltutils.set_corpus_stats(dirname(filenames[0]))
    _, _, _, cls, _, _ = pltutils.read_one_output(filenames[0], vec=vecs[0])
    # Manually pick the functions we want to show in the chart.
    select_idx = np.array([1, 2, 3, 5, 6])
    select_cycles = np.array(cls)[select_idx]
    select_fns = np.array(fns)[select_idx]
    plt.style.use('seaborn-colorblind')
    fig, ax = plt.subplots()
    ax.pie(select_cycles, explode=None, labels=select_fns, autopct='%1.1f%%',
           pctdistance=0.7, startangle=45, labeldistance=1.1,
           textprops={'fontsize': 13})
    ax.axis('equal')  # Equal aspect ratio ensures that the pie is drawn as a circle.
    # ax.set_title('Distribution of the total runtime over functions', fontdict={'fontsize': 18})
    plt.show()


def avgc_plot(filenames, vecs, legends=None):
    """Grouped bar chart of average cycle counts per timer (log scale), one group of bars per run."""
    width = 1 / (len(filenames) + 1.)  # width of bars
    ind = np.arange(len(fns))
    fig, ax = plt.subplots()
    p = [None] * len(filenames)
    for i, filename in enumerate(filenames):
        pltutils.set_corpus_stats(dirname(filename))
        _, _, _, _, avg_cycles, _ = pltutils.read_one_output(filename, vec=vecs[i])
        p[i] = ax.bar(ind + i * width, avg_cycles, width, color=colors2[i])
    ax.set_ylabel('avg cycle count')
    ax.set_title('Avg Cycles counts for different runs per group')
    ax.set_xticks(ind)
    ax.set_xticklabels(fns)
    ax.set_yscale("log")
    if legends is not None:
        plt.legend(p, legends)
    ax.grid(linestyle='--', linewidth=2, axis='y')
    plt.show()


def benchmark(dirpath, vec=False, xaxis='N'):
    """Plot per-timer performance lines over K or N for the 'fast' and 'slow' runs in a directory."""
    pltutils.set_corpus_stats(dirpath)
    data_1 = {}  # e.g. "RUN_EM": {'x': [10, 15, 20], 'y': [3.2, 4.5, 6.7]}
    data_2 = {}  # same layout, for the "slow" runs
    _, axes = plt.subplots()
    for filename in listdir(dirpath):
        if filename.startswith("fast") or filename.startswith("slow"):
            K, N, _, _, _, perf = pltutils.read_one_output(join(dirpath, filename), vec=vec)
            dic = data_1 if filename[0] == 'f' else data_2
            for i, fn in enumerate(pltutils.fns):
                if fn not in dic:
                    dic[fn] = {'x': [], 'y': []}
                dic[fn]['x'].append(K if xaxis == 'K' else N)
                dic[fn]['y'].append(perf[i])
    set_up_perf_plot(axes, xaxis)
    plot_line(plt, data_1)
    plot_line(plt, data_2)
    plt.show()


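# Hedged usage sketch (the directory path is hypothetical): benchmark() scans one
# directory for timing files whose names start with "fast" or "slow" and plots a
# performance line per timer against K or N.
def example_benchmark_scaling():
    benchmark("results/scaling_N", vec=True, xaxis='N')  # hypothetical results directory

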
def bar_plot(filenames, vecs, legends=None):
    """Grouped bar chart of raw cycle counts (log scale) for the first seven timers."""
    width = 1 / (len(filenames) + 1.)  # width of bars
    ind = np.arange(len(fns[:7]))
    fig, ax = plt.subplots()
    p = [None] * len(filenames)
    for i, filename in enumerate(filenames):
        pltutils.set_corpus_stats(dirname(filename))
        _, _, _, cycles, _, _ = pltutils.read_one_output(filename, vec=vecs[i])
        p[i] = ax.bar(ind + i * width, cycles[:7], width, color=colors2[i])
    ax.set_ylabel('Runtime [Cycles]', rotation="0", size=28)
    ax.yaxis.set_label_coords(0, 1.03)
    # ax.set_title('performance for different runs per group')
    ax.tick_params(labelsize=24)
    ax.set_facecolor((211.0 / 255, 211.0 / 255, 211.0 / 255))
    # Set the tick positions before the labels so the labels line up with the bars.
    ax.set_xticks(ind)
    ax.set_xticklabels(fns[:7])
    ax.set_yscale("log")
    if legends is not None:
        plt.legend(p, legends, prop={'size': 24})
    ax.grid(linestyle='--', linewidth=2, axis='y')
    plt.show()


def run_comp(filenames, vecs, legends=None):
    """Line plot comparing the overall performance (first timer) across several runs."""
    ind = np.arange(len(filenames))
    fig, ax = plt.subplots()
    perfs = []
    for i, filename in enumerate(filenames):
        pltutils.set_corpus_stats(dirname(filename))
        _, _, flp, cls, _, perf = pltutils.read_one_output(filename, vec=vecs[i])
        perfs.append(perf[0])
    p = ax.plot(perfs, linestyle='solid', marker='o', linewidth=2, color='deeppink')
    ax.set_ylabel('Performance [Flops/Cycle]', rotation="0", size=28)
    ax.yaxis.set_label_coords(0.1, 1.03)
    # ax.set_title('performance for different runs per group')
    ax.tick_params(labelsize=24)
    ax.set_facecolor((211.0 / 255, 211.0 / 255, 211.0 / 255))
    # Set the tick positions first, then label them with the run legends if given.
    ax.set_xticks(ind)
    if legends is not None:
        ax.set_xticklabels(legends)
    ax.grid(linestyle='--', linewidth=2, axis='y')
    plt.show()


def parse_perf_files(dir_path):
    """Parse perf and timing files in a directory into operational intensity / performance pairs."""
    pltutils.set_corpus_stats(dir_path)
    memory_reads = []
    memory_writes = []
    num_docs = []
    comment = ""
    with open(join(dir_path, 'info.txt')) as f:
        for ln in f:
            if ln.startswith('Comment: '):
                # Remove the opening quote and the trailing quote/newline.
                comment = ln[len('Comment: "'):-2]
                break
    # Get the memory transfers from the perf files.
    regex = re.compile(r'\d+')
    for filename in os.listdir(dir_path):
        if "perf" in filename:
            K, N = map(int, re.findall(regex, filename))
            num_docs.append((K, N))
            for line in open(join(dir_path, filename)):
                if "LLC-load-misses" in line:
                    tokens = line.split()
                    number_parts = tokens[0].split(",")
                    memory_reads.append(float(''.join(number_parts)))
                if "LLC-store-misses" in line:
                    tokens = line.split()
                    number_parts = tokens[0].split(",")
                    memory_writes.append(float(''.join(number_parts)))
    memory_transfers = [x + y for x, y in zip(memory_reads, memory_writes)]
    # Consider the number of bytes transferred as the number of cache misses * the cache line size.
    cache_line_size = 64.0
    bytes_transfers = [x * cache_line_size for x in memory_transfers]
    flop_count = [0] * len(bytes_transfers)
    tuples_sorted = sorted(zip(num_docs, bytes_transfers))
    num_docs = [x[0] for x in tuples_sorted]
    bytes_transfers = [x[1] for x in tuples_sorted]
    # Get the flop count and the performance from the timing files.
    data = {'x': [], 'y': []}
    for filename in os.listdir(dir_path):
        if "timings" in filename:
            fullname = join(dir_path, filename)
            # Extract K and N from the filename.
            K, N = map(int, re.findall(regex, filename))
            if (K, N) in num_docs:
                k1, n1, flops, _, _, perf = pltutils.read_one_output(fullname)
                assert k1 == K and n1 == N, "Wrong file"
                flop_count[num_docs.index((K, N))] = flops[fns.index("RUN_EM")]
                data['x'].append((K, N))
                data['y'].append(perf[fns.index("RUN_EM")])
    operational_intensity = [x / y for x, y in zip(flop_count, bytes_transfers)]
    plt_op = []
    plt_perf = []
    for i in range(len(num_docs)):
        kn = num_docs[i]
        if kn in data['x']:
            idx = data['x'].index(kn)
            assert flop_count[i] != 0
            plt_op.append(operational_intensity[i])
            plt_perf.append(data['y'][idx])
    return Run(plt_op, plt_perf, num_docs, comment)
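

# Hedged usage sketch: this assumes Run behaves like a 4-tuple in the order it is
# constructed above (operational intensity, performance, (K, N) pairs, comment);
# the directory path is hypothetical.
def example_roofline_scatter(dir_path="results/perf_run"):
    op_intensity, perf, _, comment = parse_perf_files(dir_path)
    fig, ax = plt.subplots()
    ax.scatter(op_intensity, perf, label=comment)
    ax.set_xlabel('Operational intensity [Flops/Byte]')
    ax.set_ylabel('Performance [Flops/Cycle]')
    ax.legend()
    plt.show()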