import glob
import json
import os
import pickle
import sys
import traceback

import numpy

# Project-local dependencies (assumed to be provided elsewhere in this repo):
#   TH        -- throughput analysis helpers (TH.adjustedXput, TH.doTests, TH.rttTshark_TCP)
#   plotCDFs  -- CDF plotting helper
#   ResultObj -- container class for per-test results
#   DEBUG     -- module-level debug flag (0 or 1)


def justPlot(pcaps, replayName, outfile, xputInterval):
    forPlot = {'NOVPN': [], 'VPN': [], 'RANDOM': []}

    for pcap in pcaps['NOVPN'] + pcaps['VPN'] + pcaps['RANDOM']:
        xputPath = pcap.path.replace('tcpdumpsResults', 'xputs') + '.pickle'

        # Load cached throughputs from disk; compute and cache them on a miss.
        try:
            (xput, dur) = pickle.load(open(xputPath, 'r'))
            if DEBUG == 1:
                print 'read xputs from disk:', xputPath
        except IOError:
            if pcap.vpn == 'VPN':
                # VPN traces need the tunnel overhead added back in.
                (xput, dur) = TH.adjustedXput(pcap.path, xputInterval, addOH=True)
            else:
                (xput, dur) = TH.adjustedXput(pcap.path, xputInterval, addOH=False)
            try:
                pickle.dump((xput, dur), open(xputPath, 'w'), 2)
            except Exception as e:
                print e
            if DEBUG == 1:
                print 'wrote xputs to disk:', xputPath

        # Group throughputs by VPN label and test number for plotting.
        try:
            forPlot[pcap.vpn][pcap.testCount] = xput
        except (KeyError, IndexError):
            forPlot[pcap.vpn] = {}
            forPlot[pcap.vpn][pcap.testCount] = xput

    plotCDFs(forPlot, replayName, outfile)

def testIt(pcapT, pcapO, resultFile, xputBuckets, alpha, doRTT=True):
    forPlot = {}
    xputPathT = pcapT.path.replace('tcpdumpsResults', 'xputs') + '.pickle'
    xputPathO = pcapO.path.replace('tcpdumpsResults', 'xputs') + '.pickle'

    # Load cached throughputs for the test (hidden) replay; compute on a miss.
    try:
        (xputT, durT) = pickle.load(open(xputPathT, 'r'))
        if DEBUG == 1:
            print 'read xputs from disk:', xputPathT
    except IOError:
        (xputT, durT) = TH.adjustedXput(pcapT.path, xputBuckets, addOH=False)
        try:
            pickle.dump((xputT, durT), open(xputPathT, 'w'), 2)
        except Exception as e:
            print e

    # Same for the original (exposed) replay.
    try:
        (xputO, durO) = pickle.load(open(xputPathO, 'r'))
        if DEBUG == 1:
            print 'read xputs from disk:', xputPathO
    except IOError:
        (xputO, durO) = TH.adjustedXput(pcapO.path, xputBuckets, addOH=False)
        try:
            pickle.dump((xputO, durO), open(xputPathO, 'w'), 2)
        except Exception as e:
            print e

    # Reuse cached statistical results if this pair was already tested.
    if os.path.isfile(resultFile):
        results = pickle.load(open(resultFile, 'r'))
    else:
        results = TH.doTests(xputO, xputT, alpha)
        pickle.dump(results, open(resultFile, 'w'))

    forPlot['Exposed'] = xputO
    forPlot['Hidden'] = xputT

    # Unpack the TH.doTests output (layout assumed from the indices used here).
    areaTest = results[0]
    ks2ratio = results[1]
    xputAvg1 = results[4][2]
    xputAvg2 = results[5][2]
    ks2dVal = results[9]
    ks2pVal = results[10]

    return forPlot, {'areaTest': areaTest,
                     'ks2ratio': ks2ratio,
                     'xputAvg1': xputAvg1,
                     'xputAvg2': xputAvg2,
                     'ks2dVal': ks2dVal,
                     'ks2pVal': ks2pVal}

def finalAnalyzer(userID, historyCount, testID, path, xputBuckets, alpha, side='Client'):
    replayInfodir = path + '/' + userID + '/replayInfo/'
    regexOriginal = '*_' + str(historyCount) + '_' + str(0) + '.json'
    replayOriginal = glob.glob(replayInfodir + regexOriginal)
    replayInfo = json.load(open(replayOriginal[0], 'r'))

    realID = replayInfo[2]
    replayName = replayInfo[4]
    extraString = replayInfo[5]
    incomingTime = replayInfo[0]

    if side == 'Client':
        # Client-side analysis: throughputs were computed on the client and
        # uploaded as JSON.
        folder = path + '/' + userID + '/clientXputs/'
        regexOriginal = '*_' + str(historyCount) + '_' + str(0) + '.json'
        regexControl = '*_' + str(historyCount) + '_' + str(testID) + '.json'
        fileOriginal = glob.glob(folder + regexOriginal)
        fileControl = glob.glob(folder + regexControl)

        try:
            (xputO, durO) = json.load(open(fileOriginal[0], 'r'))
            (xputC, durC) = json.load(open(fileControl[0], 'r'))
        except Exception as e:
            # elogger.error('FAIL at loading the client xputs', e)
            print 'FAIL at loading the client xputs', e
            return None
    else:
        # Server-side analysis: recompute throughputs from the captured pcaps.
        try:
            dumpDir = path + '/' + userID + '/tcpdumpsResults/'
            regexControl = '*_' + str(historyCount) + '_' + str(testID) + '_out.pcap'
            regexOriginal = '*_' + str(historyCount) + '_' + str(0) + '_out.pcap'
            fileControl = glob.glob(dumpDir + regexControl)
            fileOriginal = glob.glob(dumpDir + regexOriginal)
            (xputO, durO) = TH.adjustedXput(fileOriginal[0], xputBuckets)
            (xputC, durC) = TH.adjustedXput(fileControl[0], xputBuckets)
        except Exception as e:
            traceback.print_exc(file=sys.stdout)
            # elogger.error('FAIL at cleaning up the pcaps for {} {} {}'.format(userID, historyCount, testID))
            print 'FAIL at getting server side throughputs', e
            return None

    try:
        resultFile = (path + '/' + userID + '/decisions/' +
                      'results_{}_{}_{}_{}.json').format(userID, side, historyCount, testID)
        forPlot, results = testIt(xputO, xputC, resultFile, xputBuckets, alpha)
    except Exception as e:
        # elogger.error('FAIL at testing the result for '.format(userID, historyCount, testID))
        print 'FAIL at running the statistical tests', e
        return None

    resultObj = ResultObj(realID, historyCount, testID, replayName, extraString, incomingTime)
    resultObj.area_test = results['areaTest']
    resultObj.ks2_ratio_test = results['ks2ratio']
    resultObj.xput_avg_original = results['xputAvg1']
    resultObj.xput_avg_test = results['xputAvg2']
    resultObj.ks2dVal = results['ks2dVal']
    resultObj.ks2pVal = results['ks2pVal']

    plotFile = path + '/' + userID + '/plots/xput_{}_{}_{}_{}_{}_{}_{}_{}.png'.format(
        userID, side, replayName, historyCount, testID,
        results['areaTest'], results['ks2dVal'], results['ks2pVal'])
    try:
        plotCDFs(forPlot, plotFile)
    except Exception as e:
        # elogger.error('Error when plotting CDF', userID, historyCount, testID)
        print 'FAIL at plotting', e

    return resultObj

def testIt(pcaps, what1, what2, resultFile, xputInterval, alpha, doRTT=True):
    forPlot = {}
    merged = {what1: [], what2: []}
    replaysXputInfo = {what1: {}, what2: {}}
    replaysRTTinfo = {what1: {}, what2: {}}

    for pcap in pcaps[what1] + pcaps[what2]:
        xputPath = pcap.path.replace('tcpdumpsResults', 'xputs') + '.pickle'
        rttPath = xputPath + '_rtt.pickle'

        # Load cached throughputs from disk; compute and cache them on a miss.
        try:
            (xput, dur) = pickle.load(open(xputPath, 'r'))
            if DEBUG == 1:
                print 'read xputs from disk:', xputPath
        except IOError:
            if pcap.vpn == 'VPN':
                # VPN traces need the tunnel overhead added back in.
                (xput, dur) = TH.adjustedXput(pcap.path, xputInterval, addOH=True)
            else:
                (xput, dur) = TH.adjustedXput(pcap.path, xputInterval, addOH=False)
            try:
                pickle.dump((xput, dur), open(xputPath, 'w'), 2)
            except Exception as e:
                print e
            if DEBUG == 1:
                print 'wrote xputs to disk:', xputPath

        # Pool all samples per label for the statistical tests.
        try:
            merged[pcap.vpn] += xput
        except KeyError:
            merged[pcap.vpn] = xput

        # Group per-test samples for plotting.
        try:
            forPlot[pcap.vpn][pcap.testCount] = xput
        except KeyError:
            forPlot[pcap.vpn] = {}
            forPlot[pcap.vpn][pcap.testCount] = xput

        if doRTT:
            # Load cached RTTs; extract them from the pcap on a miss.
            try:
                rtt = pickle.load(open(rttPath, 'r'))
                if DEBUG == 1:
                    print 'read rtts from disk:', rttPath
            except IOError:
                rtt = TH.rttTshark_TCP(pcap.path, clientIP=pcap.clientIP)
                try:
                    pickle.dump(rtt, open(rttPath, 'w'), 2)
                except Exception as e:
                    print e
                if DEBUG == 1:
                    print 'wrote rtts to disk:', rttPath

            # Summary statistics per replay, for reporting alongside the tests.
            replaysXputInfo[pcap.vpn][pcap.testCount] = {
                'min': min(xput), 'max': max(xput), 'avg': numpy.average(xput)}
            replaysRTTinfo[pcap.vpn][pcap.testCount] = {
                'min': min(rtt), 'max': max(rtt), 'avg': numpy.average(rtt)}

    # Reuse cached statistical results if this pair was already tested.
    if os.path.isfile(resultFile):
        results = pickle.load(open(resultFile, 'r'))
        if DEBUG == 1:
            print '\t{} vs {} was already done'.format(what1, what2)
    else:
        results = TH.doTests(merged[what1], merged[what2], alpha)
        pickle.dump(results, open(resultFile, 'w'))

    # Unpack the TH.doTests output (layout assumed from the indices used here).
    areaTest = results[0]
    ks2ratio = results[1]
    xputAvg1 = results[4][2]
    xputAvg2 = results[5][2]
    ks2res = results[10]

    return forPlot, {'areaTest': areaTest,
                     'ks2ratio': ks2ratio,
                     'xputAvg1': xputAvg1,
                     'xputAvg2': xputAvg2,
                     'ks2res': ks2res,
                     'replaysXputInfo': replaysXputInfo,
                     'replaysRTTInfo': replaysRTTinfo}

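# ---------------------------------------------------------------------------
# Hedged sketch of the input shape testIt above expects, inferred only from
# the attribute accesses in the code (the original container class is not
# shown in this file): pcaps maps a label ('NOVPN', 'VPN', 'RANDOM') to a
# list of objects carrying the pcap path, the label, a test number, and the
# client IP. PcapMeta is a hypothetical name; any object with these
# attributes would work.
# ---------------------------------------------------------------------------
class PcapMeta(object):
    def __init__(self, path, vpn, testCount, clientIP):
        self.path = path            # pcap file under .../tcpdumpsResults/
        self.vpn = vpn              # label: 'NOVPN', 'VPN', or 'RANDOM'
        self.testCount = testCount  # per-label test number
        self.clientIP = clientIP    # used by TH.rttTshark_TCP for RTT extraction
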
def finalAnalyzer(userID, historyCount, testID, path, xputBuckets, alpha, side='Client'):
    replayInfodir = path + '/' + userID + '/replayInfo/'
    regexOriginal = '*_' + str(historyCount) + '_' + str(0) + '.json'
    replayOriginal = glob.glob(replayInfodir + regexOriginal)
    replayInfo = json.load(open(replayOriginal[0], 'r'))

    realID = replayInfo[2]
    replayName = replayInfo[4]
    extraString = replayInfo[5]
    incomingTime = replayInfo[0]

    if side == 'Client':
        # Client-side analysis: throughputs were computed on the client and
        # uploaded as JSON.
        folder = path + '/' + userID + '/clientXputs/'
        regexOriginal = '*_' + str(historyCount) + '_' + str(0) + '.json'
        regexRandom = '*_' + str(historyCount) + '_' + str(testID) + '.json'
        fileOriginal = glob.glob(folder + regexOriginal)
        fileRandom = glob.glob(folder + regexRandom)

        try:
            (xputO, durO) = json.load(open(fileOriginal[0], 'r'))
            (xputR, durR) = json.load(open(fileRandom[0], 'r'))
        except Exception as e:
            # elogger.error('FAIL at loading the client xputs', e)
            print 'FAIL at loading client side throughputs', e
            return None
    else:
        # Server-side analysis: recompute throughputs from the captured pcaps.
        # After the analysis is done, scp the pcap file back to achtung immediately.
        # KNOWN ISSUE: sometimes the pcap file does not get scp'd/removed.
        # Temporary solution: run dataCleaning.py periodically on the server to
        # back up data as well as pcaps left on the replay servers.
        try:
            dumpDir = path + '/' + userID + '/tcpdumpsResults/'
            regexRandom = '*_' + str(historyCount) + '_' + str(testID) + '*.pcap'
            regexOriginal = '*_' + str(historyCount) + '_' + str(0) + '*.pcap'
            fileRandom = glob.glob(dumpDir + regexRandom)
            fileOriginal = glob.glob(dumpDir + regexOriginal)
            (xputO, durO) = TH.adjustedXput(fileOriginal[0], xputBuckets)
            (xputR, durR) = TH.adjustedXput(fileRandom[0], xputBuckets)
        except Exception as e:
            traceback.print_exc(file=sys.stdout)
            print 'FAIL at loading server side throughputs', e
            return None

    try:
        resultFile = (path + '/' + userID + '/decisions/' +
                      'results_{}_{}_{}_{}.json').format(userID, side, historyCount, testID)
        # Only use non-zero throughputs in the statistical tests.
        xputO = [x for x in xputO if x > 0]
        xputR = [x for x in xputR if x > 0]
        forPlot, results = testIt(xputO, xputR, resultFile, xputBuckets, alpha)
    except Exception as e:
        # elogger.error('FAIL at testing the result for '.format(userID, historyCount, testID))
        print 'FAIL at running the statistical tests', e
        return None

    resultObj = ResultObj(realID, historyCount, testID, replayName, extraString, incomingTime)
    resultObj.area_test = results['areaTest']
    resultObj.ks2_ratio_test = results['ks2ratio']
    resultObj.xput_avg_original = results['xputAvg1']
    resultObj.xput_avg_test = results['xputAvg2']
    resultObj.ks2dVal = results['ks2dVal']
    resultObj.ks2pVal = results['ks2pVal']

    return resultObj

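# ---------------------------------------------------------------------------
# Minimal usage sketch (a hedged example, not part of the original module):
# every path, ID, and parameter value below is hypothetical, and the
# project-local helpers noted at the top of this file must be importable.
# It shows how a caller might drive finalAnalyzer for one client-side test
# and read the headline statistics off the returned ResultObj.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    result = finalAnalyzer(
        userID='exampleUser',        # hypothetical per-user results folder
        historyCount=1,              # original replay run to analyze
        testID=1,                    # control/random run to compare against
        path='/data/replayResults',  # hypothetical results root
        xputBuckets=0.25,            # throughput interval passed to TH.adjustedXput (assumed)
        alpha=0.95,                  # confidence level passed to TH.doTests (assumed)
        side='Client')
    if result is not None:
        print 'area test:', result.area_test, 'KS2 p-value:', result.ks2pVal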