def main(): drop = ['0.01', '0.05', '0.1', '0.15', '0.3'] fs = [1, 2, 4, 8, 16, 32] Yavg = {} Y99 = {} Y999 = {} for s in fs: Yavg[s] = [] Y99[s] = [] Y999[s] = [] for s in fs: for d in drop: f1 = open('a_' + str(s) + '_' + d + '.txt').readlines() f = [x for x in f1 if 'finished' not in x] norm = sorted([float(x.split()[3]) for x in f]) print d, s, numpy.mean(norm), norm[99*len(norm)/100],\ norm[999*len(norm)/1000] Yavg[s].append(numpy.mean(norm)) Y99[s].append(norm[99 * len(norm) / 100]) Y999[s].append(norm[999 * len(norm) / 1000]) Y1 = [] Y2 = [] Y3 = [] X = [] for s in fs: Y1.append(Yavg[s]) Y2.append(Y99[s]) Y3.append(Y999[s]) X.append([float(x) for x in drop]) lb = [str(s) for s in fs] #print Y1 #print Y2 #print Y3 plotter.PlotN(X, Y1, \ YTitle='Norm FCT (Avg)', XTitle='Drop Probability', labels=lb, legendLoc='upper left', legendOff=False,\ figSize=[7.8, 2.6], onlyLine=False,\ lWidth=2, mSize=8, legendSize=14,\ xAxis=[0, 0.3001], yAxis=[1, 4], outputFile="FCT_avg_fb_1Gbps") plotter.PlotN(X, Y2, \ YTitle='Norm FCT (99%)', XTitle='Drop Probability', labels=lb, legendLoc='upper left', legendOff=False,\ figSize=[7.8, 2.6], onlyLine=False,\ lWidth=2, mSize=8, legendSize=14,\ xAxis=[0, 0.3001], yAxis=[1, 4], outputFile="FCT_99_fb_1Gbps") plotter.PlotN(X, Y3, \ YTitle='Norm FCT (99.9%)', XTitle='Drop Probability', labels=lb, legendLoc='upper left', legendOff=False,\ figSize=[7.8, 2.6], onlyLine=False,\ lWidth=2, mSize=8, legendSize=14,\ xAxis=[0, 0.3001], yAxis=[1, 6], outputFile="FCT_99_9_fb_1Gbps")
def main(): loads = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8] norm_fct_ideal_all = [] norm_fct_pfabric_all = [] norm_fct_inversion_all = [] norm_fct_ideal_large = [] norm_fct_pfabric_large = [] norm_fct_inversion_large = [] for l in loads: print "plotting on", l, "load" f = open("./Dataset/flow_"+str(l)+"Load_inversion.tr").readlines() p = open("./Dataset/flow_"+str(l)+"Load_large_pfabric.tr").readlines() #Flow info is array: [size fct oracle_fct norm_fct] flow_info = parse.get_flow_info_new(f, 2.5) #Dictionary with indices as keys p_flow_info = parse.get_flow_info_new(p, 2.5) f = open("./Dataset/ideal_"+str(l)+"Result_large.txt").readlines() ideal_fct = [float(x) for x in f] f = open("./Dataset/ideal_"+str(l)+"Result_large.txt").readlines() norm_fct_ideal = [x / y[2] for x, y in zip(ideal_fct, flow_info)] norm_fct_ideal_all.append(numpy.mean(norm_fct_ideal)) norm_fct_inversion_all.append(numpy.mean([x[3] for x in flow_info])) norm_fct_pfabric_all.append(numpy.mean([x[3] for x in p_flow_info])) large_indices = [i for i in range(len(flow_info)) if flow_info[i][0] >= 10000000] large_ideal_fct = [ideal_fct[i] for i in large_indices] large_flow_info = [flow_info[i] for i in large_indices] p_large_indices = [i for i in range(len(p_flow_info)) if p_flow_info[i][0] >= 10000000] p_large_ideal_fct = [ideal_fct[i] for i in large_indices] p_large_flow_info = [p_flow_info[i] for i in large_indices] norm_large_fct_ideal = [x / y[2] for x, y in zip(large_ideal_fct, large_flow_info)] norm_fct_ideal_large.append(numpy.mean(norm_large_fct_ideal)) norm_fct_inversion_large.append(numpy.mean([x[3] for x in large_flow_info])) norm_fct_pfabric_large.append(numpy.mean([x[3] for x in p_large_flow_info])) x = loads plotter.PlotN([x, x, x], [norm_fct_ideal_all, norm_fct_pfabric_all, norm_fct_inversion_all], YTitle='Normalized FCT', XTitle='Load', labels=['Ideal', 'pFabric', 'Prio Inversion'], xAxis=[0.099, max(x)], yAxis=[0, max(max(norm_fct_ideal_all), max(norm_fct_pfabric_all), 
max(norm_fct_inversion_all))], outputFile="Figure7AllFlowsLarge") plotter.PlotN([x, x, x], [norm_fct_ideal_large, norm_fct_pfabric_large, norm_fct_inversion_large], YTitle='Normalized FCT', XTitle='Load', labels=['Ideal', 'pFabric', 'Prio Inversion'], xAxis=[0.099, max(x)], yAxis=[0, max(max(norm_fct_ideal_large), max(norm_fct_pfabric_large), max(norm_fct_inversion_large))], outputFile="Figure8LargeFlowsLarge")
def plot_norm_binned(f_pf, f_id, f_pf_no_cwnd_reset, output):
    """Plot mean slowdown per flow-size bin for the pFabric, ideal and
    no-cwnd-reset runs.

    f_pf / f_pf_no_cwnd_reset: flow-info rows [size, fct, oracle_fct,
    norm_fct]; f_id: ideal FCT per flow, aligned index-wise with f_pf.
    """
    # Bin flows by size in 100KB chunks.
    # BUG FIX: the original comment and X-axis label said "10KB" while
    # bin_size is 100000 bytes; the label now matches the real bin width
    # (consistent with the sibling plot_norm_binned's 'Bin (100KB)').
    bin_size = 100000
    # BUG FIX: num_bins was sized from f_pf only, so a no-cwnd-reset flow
    # larger than every pFabric flow indexed past the end of its bin list.
    max_size = max(max(x[0] for x in f_pf),
                   max(x[0] for x in f_pf_no_cwnd_reset))
    num_bins = 1 + int(max_size / bin_size)
    pf_binned = [[] for _ in range(num_bins)]
    ideal_binned = [[] for _ in range(num_bins)]
    pf_no_cwnd_reset_binned = [[] for _ in range(num_bins)]
    for i in range(len(f_pf)):
        pf = f_pf[i]
        ideal = f_id[i]  # renamed from 'id' (shadowed the builtin)
        pf_ncr = f_pf_no_cwnd_reset[i]
        pf_binned[int(pf[0] / bin_size)].append(pf[3])
        # Ideal slowdown = ideal FCT / oracle FCT of the same flow.
        ideal_binned[int(pf[0] / bin_size)].append(ideal / pf[2])
        pf_no_cwnd_reset_binned[int(pf_ncr[0] / bin_size)].append(pf_ncr[3])
    # NOTE: numpy.mean([]) yields nan for empty bins (unchanged behavior).
    avg_pf_binned = [numpy.mean(x) for x in pf_binned]
    avg_id_binned = [numpy.mean(x) for x in ideal_binned]
    avg_pf_no_cwnd_reset_binned = [
        numpy.mean(x) for x in pf_no_cwnd_reset_binned
    ]
    x = range(num_bins)
    plotter.PlotN([x, x, x],
                  [avg_pf_binned, avg_id_binned, avg_pf_no_cwnd_reset_binned],
                  YTitle='Mean Slowdown', XTitle='Bin (100KB)',
                  labels=['pFabric', 'Ideal', 'No Cwnd Reset'],
                  legendLoc='upper left', legendOff=False,
                  figSize=[7.8, 2.6], onlyLine=True,
                  lWidth=2, mSize=8, legendSize=18,
                  xAxis=[0, num_bins],
                  yAxis=[0, max(avg_pf_no_cwnd_reset_binned)],
                  outputFile=output)
def plot(inputFile, title, outputFile):
    """Plot one curve per RTO multiplier against the module-level
    queueSizes, using the data series loaded from inputFile."""
    series = getDataPoints(inputFile)
    xs = [queueSizes, queueSizes, queueSizes, queueSizes]
    y_top = max(max(s) for s in series)
    plotter.PlotN(xs, series,
                  YTitle=title, XTitle='Queue Size',
                  labels=['RTO 2x', 'RTO 5x', 'RTO 10x', 'RTO 20x'],
                  xAxis=[min(queueSizes), max(queueSizes)],
                  yAxis=[0, y_top],
                  legendLoc='upper right',
                  outputFile=outputFile)
def plot_cdf_large_flows(flow_info, large_flow_info, output):
    """CDF of normalized FCT (row index 3) for all flows vs. large
    (>10MB) flows."""
    def cdf(rows):
        # Empirical CDF: sorted values on X, cumulative fraction on Y.
        n = len(rows)
        xs = sorted(r[3] for r in rows)
        ys = [1.0 * i / n for i in range(n)]
        return xs, ys
    X1, Y1 = cdf(flow_info)
    X2, Y2 = cdf(large_flow_info)
    plotter.PlotN([X1, X2], [Y1, Y2],
                  YTitle='CDF', XTitle='Normalized FCT',
                  labels=['All', '>10MB'],
                  legendLoc='lower right', legendOff=False,
                  figSize=[7.8, 2.6], onlyLine=True,
                  lWidth=2, mSize=8, legendSize=18,
                  xAxis=[0, max(max(X1), max(X2))], yAxis=[0, 1],
                  outputFile=output)
def main(): loads = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8] norm_fct_ideal_all = [] norm_fct_ideal_sf_all = [] norm_fct_pfabric_all = [] norm_fct_ideal_large = [] norm_fct_ideal_sf_large = [] norm_fct_pfabric_large = [] for l in loads: print "plotting on", l, "load" f = open("./Dataset/flow_"+str(l)+"Load_large.tr").readlines() #Flow info is array: [size fct oracle_fct norm_fct] flow_info = parse.get_flow_info_new(f, 2.5) #Dictionary with indices as keys f = open("./Dataset/ideal_"+str(l)+"Result_large.txt").readlines() ideal_fct = [float(x) for x in f] f = open("./Dataset/ideal_"+str(l)+"Result_large.txt").readlines() ideal_sf_fct = [float(x) for x in f] norm_fct_ideal_all.append(numpy.mean(ideal_fct)/numpy.mean([x[2] for x in flow_info])) norm_fct_ideal_sf_all.append(numpy.mean(ideal_sf_fct)/numpy.mean([x[2] for x in flow_info])) norm_fct_pfabric_all.append(numpy.mean([x[1] for x in flow_info])/numpy.mean([x[2] for x in flow_info])) large_indices = [i for i in range(len(flow_info)) if flow_info[i][0] >= 10000000] large_ideal_fct = [ideal_fct[i] for i in large_indices] large_ideal_sf_fct = [ideal_sf_fct[i] for i in large_indices] large_flow_info = [flow_info[i] for i in large_indices] norm_fct_ideal_large.append(numpy.mean(large_ideal_fct)/numpy.mean([x[2] for x in large_flow_info])) norm_fct_ideal_sf_large.append(numpy.mean(large_ideal_sf_fct)/numpy.mean([x[2] for x in large_flow_info])) norm_fct_pfabric_large.append(numpy.mean([x[1] for x in large_flow_info])/numpy.mean([x[2] for x in large_flow_info])) x = loads plotter.PlotN([x, x, x], [norm_fct_ideal_all, norm_fct_ideal_sf_all, norm_fct_pfabric_all], YTitle='Normalized Ave FCT', XTitle='Load', labels=['Ideal High', 'Ideal Low', 'pFabric'], xAxis=[0.099, max(x)], yAxis=[0, max(max(norm_fct_ideal_all), max(norm_fct_ideal_sf_all), max(norm_fct_pfabric_all))], outputFile="Figure7AllFlowsIonLargeTrace") plotter.PlotN([x, x, x], [norm_fct_ideal_large, norm_fct_ideal_sf_large, norm_fct_pfabric_large], 
YTitle='Normalized Ave FCT', XTitle='Load', labels=['Ideal High', 'Ideal Low', 'pFabric'], xAxis=[0.099, max(x)], yAxis=[0, max(max(norm_fct_ideal_large), max(norm_fct_ideal_sf_large), max(norm_fct_pfabric_large))], outputFile="Figure8LargeFlowsIonLargeTrace")
def plot_cdf(f1, f2, f3, output): Y = [1.0 * i / len(f1) for i in range(len(f1))] X1 = sorted(f1) X2 = sorted(f2) X3 = sorted(f3) print numpy.mean(X1), numpy.mean(X2), numpy.mean(X3) plotter.PlotN([X1, X2, X3], \ [Y, Y, Y], \ YTitle='CDF', XTitle='Slowdown in FCT', \ labels=['pFabric', 'Ideal', 'No Cwnd Reset'], \ legendLoc='lower right', legendOff=False,\ figSize=[7.8, 2.6], onlyLine=True,\ lWidth=2, mSize=8, legendSize=18,\ xAxis=[0, max(max(X3), max(X2))], yAxis=[0, 1], outputFile=output)
def plot_cdf(fct, fct_large, output):
    """CDF of normalized FCT: all flows vs. large (>10MB) flows."""
    def cdf(values):
        # Sorted samples on X, cumulative fraction on Y.
        n = len(values)
        return sorted(values), [1.0 * i / n for i in range(n)]
    X1, Y1 = cdf(fct)
    X2, Y2 = cdf(fct_large)
    plotter.PlotN([X1, X2], [Y1, Y2],
                  YTitle='CDF', XTitle='Normalized FCT',
                  labels=['All', '>10MB'],
                  legendLoc='lower right', legendOff=False,
                  figSize=[7.8, 2.6], onlyLine=True,
                  lWidth=2, mSize=8, legendSize=18,
                  xAxis=[0, max(max(X1), max(X2))], yAxis=[0, 1],
                  outputFile=output)
def plot_norm_timed(flow_info, ideal_fct, output):
    """Plot average normalized FCT over arrival order, split into 100
    equal-count bins.

    flow_info rows are [size, fct, oracle_fct, norm_fct]; ideal_fct is
    unused here (kept for signature parity with the sibling plotters).
    """
    num_bins = 100
    # BUG FIX: guard against fewer flows than bins — the original
    # computed bin_size = 0 and divided by zero.
    bin_size = max(1, len(flow_info) // num_bins)
    norm_fct_binned = [[] for _ in range(num_bins)]
    for i in range(len(flow_info)):
        # BUG FIX: when len(flow_info) is not an exact multiple of
        # num_bins, the original computed bin indices >= num_bins for the
        # trailing flows (IndexError); clamp the tail into the last bin.
        b = min(i // bin_size, num_bins - 1)
        norm_fct_binned[b].append(flow_info[i][3])
    # numpy.mean([]) -> nan for bins that received no flows (unchanged).
    avg_norm_fct_binned = [numpy.mean(x) for x in norm_fct_binned]
    x = range(num_bins)
    plotter.PlotN([x], [avg_norm_fct_binned],
                  YTitle='Avg. Norm FCT',
                  XTitle='Bin (' + str(bin_size) + ' flows)',
                  labels=['All', '>10MB'],
                  legendLoc='lower right', legendOff=True,
                  figSize=[7.8, 2.6], onlyLine=True,
                  lWidth=2, mSize=8, legendSize=18,
                  xAxis=[0, num_bins], yAxis=[0, max(avg_norm_fct_binned)],
                  outputFile=output)
def plot_size_timed(flow_info, ideal_fct, output):
    """Plot average and maximum flow size (MB) over arrival order, split
    into 100 equal-count bins.

    ideal_fct is unused here (kept for signature parity with the
    sibling plotters).
    """
    num_bins = 100
    # BUG FIX: avoid a zero bin_size (ZeroDivisionError) when there are
    # fewer flows than bins.
    bin_size = max(1, len(flow_info) // num_bins)
    size_timed = [[] for _ in range(num_bins)]
    for i in range(len(flow_info)):
        # BUG FIX: clamp trailing flows into the last bin instead of
        # indexing out of range when len(flow_info) % num_bins != 0.
        b = min(i // bin_size, num_bins - 1)
        size_timed[b].append(flow_info[i][0] / 1000000.0)
    # NOTE: an empty bin still raises in max([]) below, as in the
    # original; numpy.mean([]) yields nan.
    avg_size_timed = [numpy.mean(x) for x in size_timed]
    max_size_timed = [max(x) for x in size_timed]
    x = range(num_bins)
    plotter.PlotN([x, x], [avg_size_timed, max_size_timed],
                  YTitle='Size (MB)',
                  XTitle='Bin (' + str(bin_size) + ' flows)',
                  labels=['Avg.', 'Max'],
                  legendLoc='center right', legendOff=False,
                  figSize=[7.8, 2.6], onlyLine=True,
                  lWidth=2, mSize=8, legendSize=18,
                  xAxis=[0, num_bins], yAxis=[0, 35],
                  outputFile=output)
def plot_norm_binned(flow_info, ideal_fct, output):
    """Plot average normalized FCT per 100KB size bin for pFabric vs.
    the ideal schedule (ideal_fct aligned index-wise with flow_info)."""
    # split in 100KB chunks
    bin_size = 100000
    largest = max(row[0] for row in flow_info)
    num_bins = 1 + int(largest / bin_size)
    pf_bins = [[] for _ in range(num_bins)]
    ideal_bins = [[] for _ in range(num_bins)]
    for i in range(len(flow_info)):
        row = flow_info[i]
        b = int(row[0] / bin_size)
        pf_bins[b].append(row[3])
        # Ideal slowdown = ideal FCT / oracle FCT of the same flow.
        ideal_bins[b].append(ideal_fct[i] / row[2])
    avg_pf = [numpy.mean(b) for b in pf_bins]
    avg_ideal = [numpy.mean(b) for b in ideal_bins]
    x = range(num_bins)
    plotter.PlotN([x, x], [avg_pf, avg_ideal],
                  YTitle='Avg. Norm FCT', XTitle='Bin (100KB)',
                  labels=['pFabric', 'Ideal'],
                  legendLoc='upper left', legendOff=False,
                  figSize=[7.8, 2.6], onlyLine=True,
                  lWidth=2, mSize=8, legendSize=18,
                  xAxis=[0, num_bins], yAxis=[0, max(avg_pf)],
                  outputFile=output)
# Compute byte overhead relative to a baseline run for each host delay,
# then plot total vs. in-network overhead.
overheads = []
overheadDeps = []
delays = [0.001, 0.1, 0.2, 0.4, 0.8, 1.6, 3.2]
for delay in delays:
    # qston.tr columns: arrivals, departures, drops (bytes per queue);
    # skip the header line and the last (partial) line.
    f1 = open("Dataset/Re_"+str(delay)+"/qston.tr").readlines()[1:-1]
    flow = open("Dataset/Re_"+str(delay)+"/flow.tr").readlines()
    bArrivals = [float(x.split()[0]) for x in f1]
    bDepartures = [float(x.split()[1]) for x in f1]
    bDrops = [float(x.split()[2]) for x in f1]
    # Delta against the baseline counters; bArrivalsBase/bDeparturesBase
    # are defined earlier in this script (outside this chunk) — they must
    # be at least as long as f1.
    bytes = [bArrivals[i] - bArrivalsBase[i] for i in range(len(f1))]
    bytesDep = [bDepartures[i] - bDeparturesBase[i] for i in range(len(f1))]
    drops = sum(bDrops)
    # Expected payload: 1460 bytes per packet, packet count in column 0.
    fSize = [1460 * float(x.split()[0]) for x in flow]
    sent = sum(bytes)
    sentDep = sum(bytesDep)
    shouldSend = sum(fSize)
    # Percentage of extra bytes beyond the ideal payload.
    overhead = 100.0 * (1.0 * sent - shouldSend) / shouldSend;
    overheadDep = 100.0 * (1.0 * sentDep - shouldSend) / shouldSend;
    print delay, sent, sentDep, drops, shouldSend, overhead, overheadDep
    overheads.append(overhead)
    overheadDeps.append(overheadDep)
plotter.PlotN([delays, delays], [overheads, overheadDeps],
              YTitle='%Overhead (bytes)', XTitle='Host Delay (us)',
              labels=['Total', 'In Network'],
              legendLoc='lower right', legendOff=False,
              figSize=[7.8, 2.6], onlyLine=False,
              lWidth=2, mSize=8, legendSize=18,
              xAxis=[0, 3.5], yAxis=[0, 20],
              outputFile="OverheadPFabric")
    # (tail of get_statistics — the start of this definition is above
    # this chunk)
    ]
    return flow_size, norm_fct


# Script entry: argv[1] = flow trace file, argv[2] = switch delay.
lines = open(sys.argv[1]).readlines()
switch_delay = float(sys.argv[2])
# Keep only well-formed trace lines (>= 4 whitespace-separated fields).
lines = [x for x in lines if len(x.split()) > 3]
flow_size, norm_fct = get_statistics(lines, switch_delay)
# Empirical CDF of normalized FCT: sorted values vs. cumulative fraction.
X = [1.0 * i / len(flow_size) for i in range(len(flow_size))]
Y = sorted(norm_fct)
plotter.PlotN([Y], [X],
              YTitle='CDF', XTitle='Normalized FCT',
              labels=['PFabric (Exact Experiment)', 'PFabric (No Host Delay)'],
              legendLoc='upper right', legendOff=True,
              figSize=[7.8, 2.6], onlyLine=True,
              lWidth=2, mSize=8, legendSize=18,
              xAxis=[0, 100], yAxis=[0, 1],
              outputFile="NormalizedFctCdf")
# Map normalized FCT -> flow size (NOTE: duplicate FCT values collide,
# keeping only the last flow's size).
fct = {}
for i in range(len(norm_fct)):
    fct[norm_fct[i]] = flow_size[i]
#for x in sorted(fct.keys()):
#    print fct[x], x
# Byte-weighted mean normalized FCT vs. the plain mean.
per_byte_fct = [norm_fct[i] * flow_size[i] for i in range(len(norm_fct))]
total_bytes = sum(flow_size)
print sum(per_byte_fct) / total_bytes * 1.0, numpy.mean(norm_fct)
def main(): loads = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8] norm_fct_ideal_all = [] norm_fct_pfabric_all = [] norm_avg_fct_ideal_all = [] norm_avg_fct_pfabric_all = [] norm_fct_ideal_large = [] norm_fct_pfabric_large = [] norm_avg_fct_ideal_large = [] norm_avg_fct_pfabric_large = [] for l in loads: print "plotting on", l, "load" f = open("./Dataset/aditya_pfabric_"+str(l)+".tr").readlines() #Flow info is array: [size fct oracle_fct norm_fct] flow_info = parse.get_flow_info_new(f, 2.5) #Dictionary with indices as keys f = open("./Dataset/aditya_ideal_"+str(l)+".txt").readlines() ideal_fct = [float(x) for x in f] norm_fct_ideal = [x / y[2] for x, y in zip(ideal_fct, flow_info)] norm_fct_ideal_all.append(numpy.mean(norm_fct_ideal)) norm_fct_pfabric_all.append(numpy.mean([x[3] for x in flow_info])) norm_avg_fct_ideal_all.append(numpy.mean([x for x in ideal_fct]) / numpy.mean([x[2] for x in flow_info])) norm_avg_fct_pfabric_all.append(numpy.mean([x[1] for x in flow_info]) / numpy.mean([x[2] for x in flow_info])) large_indices = [i for i in range(len(flow_info)) if flow_info[i][0] >= 100000] #1MB large_ideal_fct = [ideal_fct[i] for i in large_indices] large_flow_info = [flow_info[i] for i in large_indices] norm_large_fct_ideal = [x / y[2] for x, y in zip(large_ideal_fct, large_flow_info)] norm_fct_ideal_large.append(numpy.mean(norm_large_fct_ideal)) norm_fct_pfabric_large.append(numpy.mean([x[3] for x in large_flow_info])) norm_avg_fct_ideal_large.append(numpy.mean([x for x in large_ideal_fct]) / numpy.mean([x[2] for x in large_flow_info])) norm_avg_fct_pfabric_large.append(numpy.mean([x[1] for x in large_flow_info]) / numpy.mean([x[2] for x in large_flow_info])) x = loads plotter.PlotN([x, x], [norm_fct_ideal_all, norm_fct_pfabric_all], YTitle='Normalized FCT', XTitle='Load', labels=['Ideal', 'pFabric'], xAxis=[0.099, max(x)], yAxis=[0, max(max(norm_fct_ideal_all), max(norm_fct_pfabric_all))], outputFile="AllFlowsSlowdown") plotter.PlotN([x, x], [norm_fct_ideal_large, 
norm_fct_pfabric_large], YTitle='Normalized FCT', XTitle='Load', labels=['Ideal', 'pFabric'], xAxis=[0.099, max(x)], yAxis=[0, max(max(norm_fct_ideal_large), max(norm_fct_pfabric_large))], outputFile="LargeFlowsSlowdown") plotter.PlotN([x, x], [norm_avg_fct_ideal_all, norm_avg_fct_pfabric_all], YTitle='Normalized Avg FCT', XTitle='Load', labels=['Ideal', 'pFabric'], xAxis=[0.099, max(x)], yAxis=[0, max(max(norm_avg_fct_ideal_all), max(norm_avg_fct_pfabric_all))], outputFile="AllFlowsAvg") plotter.PlotN([x, x], [norm_avg_fct_ideal_large, norm_avg_fct_pfabric_large], YTitle='Normalized Avg FCT', XTitle='Load', labels=['Ideal', 'pFabric'], xAxis=[0.099, max(x)], yAxis=[0, max(max(norm_avg_fct_ideal_large), max(norm_avg_fct_pfabric_large))], outputFile="LargeFlowsAvg")
#print numpy.mean(norm_fct), numpy.mean(norm_fct_big) return numpy.mean(norm_fct) X1 = [3, 4.5, 7.5, 15, 30, 45] Y1 = [] for x in [2, 3, 5, 10, 20, 30]: f = open("VaryingQueueSize_0.8Load_NoHostDelay/flowQ" + str(x) + ".tr").readlines() f = [x for x in f if len(x.split()) > 3] Y1.append(print_statistics(f, 0)) print Y1 #Y1 = [17.97, 6.96, 4.133, 2.28, 1.73, 1.73] #Exact PFabric X2 = [3, 4.5, 9, 18, 36] Y2 = [] for x in [0.17, 0.25, 0.5, 1, 2]: f = open("VaryingQueueSize_0.8Load/flow" + str(x) + ".tr").readlines() f = [x for x in f if len(x.split()) > 3] Y2.append(print_statistics(f, 2.5)) print Y2 plotter.PlotN([X2, X1], [Y2, Y1], \ XTitle='Buffer Size (KB)', YTitle='Normalized FCT', \ labels=['PFabric (Exact Experiment)', 'PFabric (No Host Delay)'], legendLoc='upper right',\ figSize=[7.8, 2.6], lWidth=2, mSize=8, legendSize=18,\ yAxis=[0, 25], xAxis=[0, 45], outputFile="NoHostDelay")
td = (flow_size[i] + 1460) * 8.0 / (10000.0) #us (fs + first) if hops[i] == 4: td += 2 * 1460.0 * 8.0 / (40000.0) oracle_fct.append(pd + td) #for x in outliers: # print x norm_fct = [x / y for x, y in zip(fct, oracle_fct)] norm_fct_big = [ norm_fct[i] for i in range(len(norm_fct)) if flow_size[i] >= 10000000 ] #print numpy.mean(oracle_fct), numpy.mean(fct), numpy.mean(fct) / numpy.mean(oracle_fct), #print numpy.mean(norm_fct), numpy.mean(norm_fct_big) return numpy.mean(norm_fct) load = [0.1 * (x + 1) for x in range(9)] Y1 = [2.71, 2.52, 2.35, 2.17, 1.97, 1.79, 1.61, 1.44, 1.31] Y2 = [10.26, 9.56, 8.87, 8.21, 7.61, 7.06, 6.58, 6.17, 5.96] #3, 4.5, 9, 18, 36] Y3 = [31.36, 28.04, 24.87, 21.90, 19.11, 16.73, 14.62, 12.82, 11.43] plotter.PlotN([load, load, load], [Y1, Y2, Y3], \ XTitle='%of short flows', YTitle='Normalized FCT', \ labels=['PFabric (Exact Experiment)', 'PFabric (No Host Delay) + 3pkt', 'PFabric (No Host Delay) + 2pkt'], legendLoc='upper right',\ figSize=[7.8, 2.6], lWidth=2, mSize=8, legendSize=15,\ yAxis=[0, 35], xAxis=[0, 1], outputFile="Compare")
def main(): loads = [0.5, 0.6, 0.7, 0.8] #Slow Down metric norm_fct_ideal_all = [] norm_fct_pfabric_all = [] norm_fct_low_bdp_all = [] norm_fct_low_bdp_dumb_core_all = [] norm_fct_ideal_large = [] norm_fct_pfabric_large = [] norm_fct_low_bdp_large = [] norm_fct_low_bdp_dumb_core_large = [] #Avg FCT metric avg_fct_ideal_all = [] avg_fct_pfabric_all = [] avg_fct_low_bdp_all = [] avg_fct_low_bdp_dumb_core_all = [] avg_fct_ideal_large = [] avg_fct_pfabric_large = [] avg_fct_low_bdp_large = [] avg_fct_low_bdp_dumb_core_large = [] for l in loads: print "Load: ", l #Flow info is array: [size fct oracle_fct norm_fct] finfo_pf = parse.get_flow_info_new(open("./Dataset/pF_"+str(l)+"Load.tr").readlines(), 0) finfo_low_bdp = parse.get_flow_info_new(open("./Dataset/low_bdp_"+str(l)+"Load.tr").readlines(), 0) finfo_low_bdp_dumb_core = parse.get_flow_info_new(open("./Dataset/low_bdp_dumbcore_"+str(l)+"Load.tr").readlines(), 0) ideal_fct = [float(x) for x in open("./Dataset/ideal_pf_"+str(l)+"Load.tr").readlines()] norm_fct_ideal_all.append(numpy.mean([x / y[2] for x, y in zip(ideal_fct, finfo_pf)])) avg_fct_ideal_all.append(sum(ideal_fct) / sum([y[2] for y in finfo_pf])) norm_fct_pfabric_all.append(numpy.mean([x[3] for x in finfo_pf])) avg_fct_pfabric_all.append(sum([y[1] for y in finfo_pf]) / sum([y[2] for y in finfo_pf])) norm_fct_low_bdp_all.append(numpy.mean([x[3] for x in finfo_low_bdp])) avg_fct_low_bdp_all.append(sum([y[1] for y in finfo_low_bdp]) / sum([y[2] for y in finfo_low_bdp])) norm_fct_low_bdp_dumb_core_all.append(numpy.mean([x[3] for x in finfo_low_bdp_dumb_core])) avg_fct_low_bdp_dumb_core_all.append(sum([y[1] for y in finfo_low_bdp_dumb_core]) / sum([y[2] for y in finfo_low_bdp_dumb_core])) #Large flows for pf and ideal large_indices = [i for i in range(len(finfo_pf)) if finfo_pf[i][0] >= 10000000] large_ideal_fct = [ideal_fct[i] for i in large_indices] large_flow_info = [finfo_pf[i] for i in large_indices] norm_fct_ideal_large.append(numpy.mean([x / y[2] for x, 
y in zip(large_ideal_fct, large_flow_info)])) avg_fct_ideal_large.append(sum(large_ideal_fct) / sum([y[2] for y in large_flow_info])) large_flow_info = [finfo_pf[i] for i in large_indices] norm_fct_pfabric_large.append(numpy.mean([x[3] for x in large_flow_info])) avg_fct_pfabric_large.append(sum([y[1] for y in large_flow_info]) / sum([y[2] for y in large_flow_info])) #For Low BDP large_indices = [i for i in range(len(finfo_low_bdp)) if finfo_low_bdp[i][0] >= 10000000] large_flow_info = [finfo_low_bdp[i] for i in large_indices] norm_fct_low_bdp_large.append(numpy.mean([x[3] for x in large_flow_info])) avg_fct_low_bdp_large.append(sum([y[1] for y in large_flow_info]) / sum([y[2] for y in large_flow_info])) large_indices = [i for i in range(len(finfo_low_bdp_dumb_core)) if finfo_low_bdp_dumb_core[i][0] >= 10000000] large_flow_info = [finfo_low_bdp_dumb_core[i] for i in large_indices] norm_fct_low_bdp_dumb_core_large.append(numpy.mean([x[3] for x in large_flow_info])) avg_fct_low_bdp_dumb_core_large.append(sum([y[1] for y in large_flow_info]) / sum([y[2] for y in large_flow_info])) x = loads plotter.PlotN([x, x, x, x], [norm_fct_ideal_all, norm_fct_pfabric_all, norm_fct_low_bdp_all, norm_fct_low_bdp_dumb_core_all], YTitle='Normalized FCT', XTitle='Load', labels=['Ideal', 'pFabric (Orig)', 'pFabric (LowBDP)', 'pFabric (LowBDP/DumbCore)'], xAxis=[0.099, max(x)], yAxis=[0, max(norm_fct_low_bdp_dumb_core_all)], outputFile="Figure7AllFlows") plotter.PlotN([x, x, x, x], [norm_fct_ideal_large, norm_fct_pfabric_large, norm_fct_low_bdp_large, norm_fct_low_bdp_dumb_core_large], YTitle='Normalized FCT', XTitle='Load', labels=['Ideal', 'pFabric (Orig)', 'pFabric (LowBDP)', 'pFabric (LowBDP/DumbCore)'], xAxis=[0.099, max(x)], yAxis=[0, max(norm_fct_low_bdp_dumb_core_large)], outputFile="Figure8LargeFlows") plotter.PlotN([x, x, x, x], [avg_fct_ideal_all, avg_fct_pfabric_all, avg_fct_low_bdp_all, avg_fct_low_bdp_dumb_core_all], YTitle='Ratio of Avg FCT', XTitle='Load', 
labels=['Ideal', 'pFabric (Orig)', 'pFabric (LowBDP)', 'pFabric (LowBDP/DumbCore)'], xAxis=[0.099, max(x)], yAxis=[0, max(avg_fct_low_bdp_dumb_core_all)], outputFile="Figure7AllFlows_Avg") plotter.PlotN([x, x, x, x], [avg_fct_ideal_large, avg_fct_pfabric_large, avg_fct_low_bdp_large, avg_fct_low_bdp_dumb_core_large], YTitle='Ratio of Avg FCT', XTitle='Load', labels=['Ideal', 'pFabric (Orig)', 'pFabric (LowBDP)', 'pFabric (LowBDP/DumbCore)'], xAxis=[0.099, max(x)], yAxis=[0, max(avg_fct_low_bdp_dumb_core_large)], outputFile="Figure8LargeFlows_Avg")
    # (tail of main — its def and the reads producing f2, flow_info and
    # load are above this chunk)
    ideal_fct = [float(x) for x in f2]
    # Large flows: >= 10MB; ideal FCTs selected with the same indices to
    # stay aligned with flow_info.
    large_flow_indices = [
        i for i in range(len(flow_info)) if flow_info[i][0] >= 10000000
    ]
    large_flow_info = [flow_info[i] for i in large_flow_indices]
    # NOTE: ideal_large_flow is computed but not used below.
    ideal_large_flow = [ideal_fct[i] for i in large_flow_indices]
    plot_cdf_large_flows(flow_info, large_flow_info, 'NormalizedFctCdf_' + load)
    plot_norm_binned(flow_info, ideal_fct, 'NormalizedFctBinned_' + load)
    plot_norm_timed(flow_info, ideal_fct, 'NormalizedFctTimed_' + load)
    plot_size_timed(flow_info, ideal_fct, 'SizeTimed_' + load)


main()

#return large_flow_info
# Dead code below: appears to be disabled by wrapping it in a
# module-level string literal (the closing quotes are past this chunk).
"""
X = [1.0 * i / len(flow_size) for i in range(len(flow_size))]
Y = sorted(norm_fct)
plotter.PlotN([Y], [X], \
    YTitle='CDF', XTitle='Normalized FCT', \
    labels=['PFabric (Exact Experiment)', 'PFabric (No Host Delay)'], legendLoc='upper right', legendOff=True,\
    figSize=[7.8, 2.6], onlyLine=True,\
    lWidth=2, mSize=8, legendSize=18,\
    xAxis=[0, 100], yAxis=[0, 1], outputFile="NormalizedFctCdf")
fct = {}
for i in range(len(norm_fct)):
    fct[norm_fct[i]] = flow_size[i]