def computeEvictionStats(dataFile): colMap, csvData = datautil.getCSVData(dataFile) rpos = dataFile.rfind("/") pos = dataFile.find("/") print dataFile dataFile = dataFile[0:pos] + dataFile[rpos + 3:] if len(csvData) == 0: return tp = [] txns = [] time = [] if not dict.has_key(dataFile): dict[dataFile] = [] for row in csvData: txn = float(row[colMap["TRANSACTIONS"]]) t = float(row[colMap["ELAPSED"]]) txns.append(txn) time.append(t) tp.append(txn / 5) dict[dataFile].append(np.mean(tp)) print " Average Throughput: %.2f txn/s" % np.mean(tp) print
def computeEvictionStats(dataFile): colMap, csvData = datautil.getCSVData(dataFile) rpos = dataFile.rfind("/"); pos = dataFile.find("/"); print dataFile dataFile = dataFile[0:pos] + dataFile[rpos + 3:] if len(csvData) == 0: return tp = [] txns = [] time = [] if not dict.has_key(dataFile): dict[dataFile] = [] for row in csvData: txn = float(row[colMap["TRANSACTIONS"]]) t = float(row[colMap["ELAPSED"]]) txns.append(txn) time.append(t) tp.append(txn/5) dict[dataFile].append(np.mean(tp)) print " Average Throughput: %.2f txn/s" % np.mean(tp) print
def computeEvictionStats(dataFile): colMap, csvData = datautil.getCSVData(dataFile) pos = dataFile.rfind("/") dataFile = dataFile[pos + 3:] if len(csvData) == 0: return tp = [] if not dict.has_key(dataFile): dict[dataFile] = [] for row in csvData: tp.append(float(row[colMap["THROUGHPUT"]])) dict[dataFile].append(np.mean(tp)) print dataFile print " Average Throughput: %.2f ms" % np.mean(tp) print
def computeEvictionStats(dataFile): colMap, csvData = datautil.getCSVData(dataFile) pos = dataFile.rfind("/"); dataFile = dataFile[pos + 3:] if len(csvData) == 0: return tp = [] if not dict.has_key(dataFile): dict[dataFile] = [] for row in csvData: tp.append(float(row[colMap["THROUGHPUT"]])) dict[dataFile].append(np.mean(tp)) print dataFile print " Average Throughput: %.2f ms" % np.mean(tp) print
def computeEvictionStats(dataFile): colMap, csvData = datautil.getCSVData(dataFile) if len(csvData) == 0: return allTimes = [] allTuples = [] allBlocks = [] allBytes = [] for row in csvData: allTimes.append(row[colMap["STOP"]] - row[colMap["START"]]) allTuples.append(int(row[colMap["TUPLES_EVICTED"]])) allBlocks.append(int(row[colMap["TUPLES_EVICTED"]])) allBytes.append(int(row[colMap["BYTES_EVICTED"]])) print dataFile print " Average Time: %.2f ms" % np.mean(allTimes) print " Average Tuples: %.2f" % np.mean(allTuples) print " Average Blocks: %.2f" % np.mean(allBlocks) print " Average Bytes: %.2f MB" % (np.mean(allBytes) / float(1024 * 1024)) print
def computeEvictionStats(dataFile): colMap, csvData = datautil.getCSVData(dataFile) if len(csvData) == 0: return allTimes = [ ] allTuples = [ ] allBlocks = [ ] allBytes = [ ] for row in csvData: allTimes.append(row[colMap["STOP"]] - row[colMap["START"]]) allTuples.append(int(row[colMap["TUPLES_EVICTED"]])) allBlocks.append(int(row[colMap["TUPLES_EVICTED"]])) allBytes.append(int(row[colMap["BYTES_EVICTED"]])) print dataFile print " Average Time: %.2f ms" % np.mean(allTimes) print " Average Tuples: %.2f" % np.mean(allTuples) print " Average Blocks: %.2f" % np.mean(allBlocks) print " Average Bytes: %.2f MB" % (np.mean(allBytes)/float(1024*1024)) print
def computeEvictionStats(dataFile): colMap, csvData = datautil.getCSVData(dataFile) if len(csvData) == 0: return pos = dataFile.rfind("/"); dataFile = dataFile[pos + 3:] if len(csvData) == 0: return if not dictR.has_key(dataFile): dictR[dataFile] = [] if not dictW.has_key(dataFile): dictW[dataFile] = [] for row in csvData: read = int(row[colMap["ANTICACHE_BYTES_READ"]]) / 1024 write = int(row[colMap["ANTICACHE_BYTES_WRITTEN"]]) / 1024 dictR[dataFile].append(read) dictW[dataFile].append(write) print dataFile print "read: %d" % read print "write: %d" % write print
def computeEvictionStats(dataFile): colMap, csvData = datautil.getCSVData(dataFile) if len(csvData) == 0: return pos = dataFile.rfind("/") dataFile = dataFile[pos + 3:] if len(csvData) == 0: return if not dictR.has_key(dataFile): dictR[dataFile] = [] if not dictW.has_key(dataFile): dictW[dataFile] = [] for row in csvData: read = int(row[colMap["ANTICACHE_BYTES_READ"]]) / 1024 write = int(row[colMap["ANTICACHE_BYTES_WRITTEN"]]) / 1024 dictR[dataFile].append(read) dictW[dataFile].append(write) print dataFile print "read: %d" % read print "write: %d" % write print
# Generate one workload-skew graph per (memory size, read percentage)
# combination. NOTE(review): `benchmark` and the *Data dicts come from an
# enclosing scope not visible in this chunk (the trailing ## FOR markers
# suggest an outer per-benchmark loop).
for mem in memorySizes:
    for read_pct in readPcts:
        # Older call kept for reference (fewer data series):
        # fig = createWorkloadSkewGraphs(benchmark, mem, read_pct,
        #                                hstoreData[benchmark],
        #                                mysqlData[benchmark],
        #                                memcachedData[benchmark])
        fig = createWorkloadSkewGraphs(benchmark, mem, read_pct,
                                       noAntiCache[benchmark],
                                       hstoreData[benchmark],
                                       hstoreDataApprox[benchmark],
                                       mysqlData[benchmark],
                                       memcachedData[benchmark])
        # e.g. "skew-<benchmark>-<mem>x-<readLabel>.pdf"
        fileName = "skew-%s-%dx-%s.pdf" % (benchmark, mem, readLabels[read_pct])
        graphutil.saveGraph(fig, fileName, height=OPT_GRAPH_HEIGHT)
        #break
    ## FOR
    #break
## FOR
## FOR

# Index comparison graph: tree-index vs hash-index data sets.
colMap, indexTreeData = datautil.getCSVData(OPT_DATA_INDEX_TREE)
colMap, indexHashData = datautil.getCSVData(OPT_DATA_INDEX_HASH)
fig = createIndexGraph(colMap, indexHashData, indexTreeData)
graphutil.saveGraph(fig, "index.pdf")

## LRU graph
# Compare no-anti-cache vs single- vs double-linked LRU list variants.
colMap, hstoreNoAnticacheData = datautil.getCSVData(OPT_DATA_LRU_NONE)
colMap, hstoreSingleListData = datautil.getCSVData(OPT_DATA_LRU_SINGLE)
colMap, hstoreDoubleListData = datautil.getCSVData(OPT_DATA_LRU_DOUBLE)
fig = createLinkedListGraph(colMap, hstoreNoAnticacheData, hstoreSingleListData, hstoreDoubleListData)
graphutil.saveGraph(fig, "lru.pdf")

# Load eviction-phase data sets (construct / write / fetch) -- presumably
# consumed by a graph call past the end of this chunk.
colMap, hstoreConstructData = datautil.getCSVData(OPT_DATA_EVICT_CONSTRUCT)
colMap, hstoreWriteData = datautil.getCSVData(OPT_DATA_EVICT_WRITE)
colMap, hstoreFetchData = datautil.getCSVData(OPT_DATA_EVICT_FETCH)
# NOTE(review): this chunk begins mid-loop-body -- `benchmark`, `mem`,
# `read_pct`, and the *Data dicts are bound by enclosing loops not visible
# here (the ## FOR markers below mark their ends).
fig = createWorkloadSkewGraphs(benchmark, mem, read_pct,
                               noAntiCache[benchmark],
                               hstoreData[benchmark],
                               hstoreDataApprox[benchmark],
                               mysqlData[benchmark],
                               memcachedData[benchmark])
# e.g. "skew-<benchmark>-<mem>x-<readLabel>.pdf"
fileName = "skew-%s-%dx-%s.pdf" % (benchmark, mem, readLabels[read_pct])
graphutil.saveGraph(fig, fileName, height=OPT_GRAPH_HEIGHT)
#break
## FOR
#break
## FOR
## FOR

# Index comparison graph: tree-index vs hash-index data sets.
colMap, indexTreeData = datautil.getCSVData(OPT_DATA_INDEX_TREE)
colMap, indexHashData = datautil.getCSVData(OPT_DATA_INDEX_HASH)
fig = createIndexGraph(colMap, indexHashData, indexTreeData)
graphutil.saveGraph(fig, "index.pdf")

## LRU graph
# Compare no-anti-cache vs single- vs double-linked LRU list variants.
colMap, hstoreNoAnticacheData = datautil.getCSVData(OPT_DATA_LRU_NONE)
colMap, hstoreSingleListData = datautil.getCSVData(OPT_DATA_LRU_SINGLE)
colMap, hstoreDoubleListData = datautil.getCSVData(OPT_DATA_LRU_DOUBLE)
fig = createLinkedListGraph(colMap, hstoreNoAnticacheData, hstoreSingleListData, hstoreDoubleListData)
graphutil.saveGraph(fig, "lru.pdf")

# Load eviction-phase data sets -- presumably consumed past the end of
# this chunk (the fetch data set is loaded after this fragment cuts off).
colMap, hstoreConstructData = datautil.getCSVData(OPT_DATA_EVICT_CONSTRUCT)
colMap, hstoreWriteData = datautil.getCSVData(OPT_DATA_EVICT_WRITE)