def _record_mean(statMap, numNodes, reqesPs, values):
    # Helper: store the mean of *values* at statMap[numNodes][reqesPs],
    # creating the inner dictionary on first use.
    statDict = get_stats_with_names(values)
    requestDict = statMap.get(numNodes, {})
    requestDict[reqesPs] = statDict['mean']
    statMap[numNodes] = requestDict


def processFile(numNodes, reqesPs, filePath):
    """Parse one experiment log and record mean search/update/trigger latencies.

    Results are folded into the module-level searchMap, updateMap and
    triggerMap dictionaries, keyed first by numNodes and then by reqesPs.

    numNodes -- node-count key for the outer result dictionaries
    reqesPs  -- requests-per-second key for the inner dictionaries
    filePath -- path of the log file to parse
    """
    searchResult = []
    updateResult = []
    triggerResult = []
    # 'with' guarantees the log file is closed (original leaked the handle).
    with open(filePath) as logFile:
        for line in logFile:
            line = line.strip()
            if line.find('TimeOut') != -1:
                continue  # timed-out requests are excluded from all stats
            parsed = line.split(' ')
            if line.find('Search reply recvd') != -1:
                searchResult.append(int(parsed[7]))
            if line.find('LocUpd reply recvd') != -1:
                updateResult.append(int(parsed[6]))
            if line.find('Trigger recvd time') != -1:
                triggerResult.append(int(parsed[4]))
    # One helper call per map replaces three copies of identical code;
    # 'in' replaces the Py2-only dict.has_key().
    _record_mean(searchMap, numNodes, reqesPs, searchResult)
    _record_mean(updateMap, numNodes, reqesPs, updateResult)
    _record_mean(triggerMap, numNodes, reqesPs, triggerResult)
    return
def processFile(numNodes, reqesPs, filePath):
    """Parse an experiment log and record the mean search, location-update
    and trigger latencies into the module-level searchMap, updateMap and
    triggerMap dictionaries under [numNodes][reqesPs]."""
    lines = [ln.strip() for ln in open(filePath)]
    searchResult = []
    updateResult = []
    triggerResult = []
    for ln in lines:
        timedOut = ln.find('TimeOut') != -1
        tokens = ln.split(' ')
        if ln.find('Search reply recvd') != -1 and not timedOut:
            searchResult.append(int(tokens[7]))
        if ln.find('LocUpd reply recvd') != -1 and not timedOut:
            updateResult.append(int(tokens[6]))
        if ln.find('Trigger recvd time') != -1 and not timedOut:
            triggerResult.append(int(tokens[4]))
    # Fold each latency list's mean into its corresponding global map.
    for resultList, statMap in ((searchResult, searchMap),
                                (updateResult, updateMap),
                                (triggerResult, triggerMap)):
        statDict = get_stats_with_names(resultList)
        if numNodes in statMap:
            statMap[numNodes][reqesPs] = statDict['mean']
        else:
            statMap[numNodes] = {reqesPs: statDict['mean']}
    return
def queryProcess(filePath):
    """Return get_stats_with_names() over the query times found in *filePath*.

    A sample is taken from every line containing
    'MSOCKETWRITERINTERNAL from CS' that does not also contain 'TimeOut';
    the query time is whitespace token 4 of such a line.
    """
    result = []
    # 'with' closes the file; the original left the handle open.
    with open(filePath) as logFile:
        for line in logFile:
            line = line.strip()
            if ((line.find('MSOCKETWRITERINTERNAL from CS') != -1)
                    and (line.find('TimeOut') == -1)):
                parsed = line.split(' ')
                # token 6 (attribute count) was parsed but never used; dropped.
                result.append(int(parsed[4]))
    return get_stats_with_names(result)
def processFile(filePath, searchStr1, searchStr2):
    """Match request start/end log lines and return stats over their durations.

    searchStr1 marks a request-start line and searchStr2 the matching end
    line; both carry the request id at token 5 and a timestamp at token 9.
    Requests that never completed (no end line) are skipped.
    """
    requestStartDict = {}
    requestEndDict = {}
    # 'with' closes the file; the original leaked the handle.
    with open(filePath) as logFile:
        for line in logFile:
            line = line.strip()
            if line.find(searchStr1) != -1:
                parsed = line.split(' ')
                requestStartDict[int(parsed[5])] = long(parsed[9])
            if line.find(searchStr2) != -1:
                parsed = line.split(' ')
                requestEndDict[int(parsed[5])] = long(parsed[9])
    avgList = []
    for reqID in requestStartDict:
        # 'in' replaces Py2-only has_key(); unmatched starts are dropped.
        if reqID in requestEndDict:
            avgList.append(requestEndDict[reqID] - requestStartDict[reqID])
    return get_stats_with_names(avgList)
def updateProcess(filePath):
    """Return get_stats_with_names() over update-reply latencies in *filePath*.

    A sample is whitespace token 7 of every 'Update reply recvd' line that
    does not also contain 'TimeOut'.
    """
    result = []
    # 'with' closes the file; the original leaked the handle.
    with open(filePath) as logFile:
        for line in logFile:
            line = line.strip()
            if ((line.find('Update reply recvd') != -1)
                    and (line.find('TimeOut') == -1)):
                result.append(int(line.split(' ')[7]))
    return get_stats_with_names(result)
def queryProcess(filePath):
    """Gather per-query latencies from *filePath* and return their stats
    (as produced by get_stats_with_names)."""
    samples = []
    for raw in open(filePath):
        entry = raw.strip()
        # Guard clauses: skip non-matching and timed-out lines.
        if entry.find('MSOCKETWRITERINTERNAL from CS') == -1:
            continue
        if entry.find('TimeOut') != -1:
            continue
        fields = entry.split(' ')
        latency = int(fields[4])
        # Parsed as in the original (may raise on malformed lines); unused.
        numAttr = long(fields[6])
        samples.append(latency)
    return get_stats_with_names(samples)
def processFile(filePath): lines = [line.strip() for line in open(filePath)] updateTimeList = [] readTimeList = [] updateStarted = 0 for index in range(len(lines)): line = lines[index] if (line.find('attr Update Start') != -1): updateStarted = 1 parsed = line.split(' ') updateStartTime = int(parsed[3]) if ((line.find('Refresh trigger recvd') != -1) and (updateStarted == 1)): parsed = line.split(' ') updateEndTime = int(parsed[len(parsed) - 1]) updateTimeList.append(updateEndTime - updateStartTime) updateStarted = 0 if (line.find('getGroupMembersGUIDs time') != -1): parsed = line.split(' ') readTime = int(parsed[len(parsed) - 1]) readTimeList.append(readTime) print "len " + str(len(updateTimeList)) updateTimeList.sort() updateTimeList = updateTimeList[(len(updateTimeList) * 10 // 100) + 1:len(updateTimeList) * 90 // 100] statDict = get_stats_with_names(updateTimeList) print "Mean update " + str(statDict['mean']) + " Min " + str( statDict['perc5']) + " max " + str(statDict['perc95']) print "len " + str(len(readTimeList)) readTimeList.sort() readTimeList = readTimeList[(len(readTimeList) * 10 // 100) + 1:len(readTimeList) * 90 // 100] statDictR = get_stats_with_names(readTimeList) print "Mean Read " + str(statDictR['mean']) + " Min " + str( statDictR['perc5']) + " max " + str(statDictR['perc95']) return statDict
def processFile(filePath): lines = [line.strip() for line in open(filePath)] updateTimeList = [] readTimeList = [] updateStarted = 0 for index in range(len(lines)): line = lines[index] if ( line.find('attr Update Start') != -1 ): updateStarted = 1 parsed = line.split(' ') updateStartTime = int(parsed[3]) if ( (line.find('Refresh trigger recvd') != -1) and ( updateStarted == 1 ) ): parsed = line.split(' ') updateEndTime = int(parsed[len(parsed)-1]) updateTimeList.append(updateEndTime-updateStartTime) updateStarted = 0 if ( line.find('getGroupMembersGUIDs time') != -1 ): parsed = line.split(' ') readTime = int(parsed[len(parsed)-1]) readTimeList.append(readTime) print "len "+str(len(updateTimeList)) updateTimeList.sort() updateTimeList = updateTimeList[(len(updateTimeList)*10//100)+1:len(updateTimeList)*90//100] statDict = get_stats_with_names(updateTimeList) print "Mean update "+str(statDict['mean']) +" Min "+str(statDict['perc5']) +" max "+str(statDict['perc95']) print "len "+str(len(readTimeList)) readTimeList.sort() readTimeList = readTimeList[(len(readTimeList)*10//100)+1:len(readTimeList)*90//100] statDictR = get_stats_with_names(readTimeList) print "Mean Read "+str(statDictR['mean']) +" Min "+str(statDictR['perc5']) +" max "+str(statDictR['perc95']) return statDict
def processCSDirectory(dirName):
    """Scan *dirName* for 'contextOutputcompute' result files and return a
    dict mapping '<schemeType>-<attrNum>' -> mean message count.

    The attribute count and scheme type are carved out of each file name by
    hyphen position: attrNum lies between the 5th and 6th hyphen, schemeType
    is everything after the 7th hyphen.  NOTE(review): this assumes every
    matching file name contains at least 7 hyphens -- verify against the
    producer of these files.
    """
    fileList = os.listdir(dirName)
    print fileList
    result_dict = {}
    for x in fileList:
        if ( x == '.svn'):
            continue
        if ( x.find('contextOutputcompute') == -1 ):
            continue
        # Locate the first seven hyphens in the file name, left to right.
        fI = x.find('-',0)
        sI = x.find('-',fI+1)
        tI = x.find('-',sI+1)
        fTI = x.find('-',tI+1)
        ffI = x.find('-',fTI+1)
        sxI = x.find('-',ffI+1)
        svI = x.find('-',sxI+1)
        attrNumString = x[ffI+1:sxI]      # between 6th and 7th hyphen
        schemeTypeString = x[svI+1: ]     # everything after the 7th hyphen
        numMesgList = []
        fileObj = open(dirName+'/'+x)
        for line in fileObj:
            # Message counts come from 'NUM MESS' lines, token 6.
            if(line.find("NUM MESS") != -1):
                #print line
                parsed = line.split(' ')
                try:
                    lineNumMesg = int(parsed[6])
                    numMesgList.append(lineNumMesg)
                except ValueError:
                    print("Oops! wrong line")
                    continue
        fileObj.close()
        # removing min value
        # (trims to roughly the 10th-90th percentile range)
        numMesgList = numMesgList[(len(numMesgList)*10//100)+1:len(numMesgList)*90//100]
        statDict = get_stats_with_names(numMesgList)
        key = schemeTypeString+'-'+attrNumString
        # 'mean' is absent when the sample list was empty/degenerate.
        if (statDict.has_key('mean')):
            result_dict[key]=statDict['mean']
        else:
            print "bad exp: no mean key"
    return result_dict
def processFile(filePath, searchStr1, searchStr2):
    """Pair request start lines (searchStr1, timestamp at token 7) with end
    lines (searchStr2, timestamp at token 9) by request id (token 5) and
    return stats over the per-request durations; starts with no matching
    end line are ignored."""
    startTimes = {}
    endTimes = {}
    for raw in open(filePath):
        entry = raw.strip()
        if entry.find(searchStr1) != -1:
            fields = entry.split(' ')
            startTimes[int(fields[5])] = long(fields[7])
        if entry.find(searchStr2) != -1:
            fields = entry.split(' ')
            endTimes[int(fields[5])] = long(fields[9])
    durations = [endTimes[reqID] - startTimes[reqID]
                 for reqID in startTimes if reqID in endTimes]
    return get_stats_with_names(durations)
if (not exptTypeDict.has_key(schemeTypeString) ): meanList = [] meanList.append(currList['mean']) exptTypeDict[schemeTypeString] = meanList else: exptTypeDict[schemeTypeString].append(currList['mean']) #if ( not statDict.has_key('ZEROVALUES') ): Keys=sorted(value_dict.keys()) for ui in Keys: exptTypeDict = value_dict[ui] writeStr=str(ui) for st in exptTypeDict: res=get_stats_with_names(exptTypeDict[st]) writeStr+=','+str(st)+','+str(res['mean'])+','+str(res['perc5'])+','+str(res['perc95']) writeStr+="\n" writef.write(writeStr) writef.close() #fs = '1KB' #writeStr = fs+','+value_dict[fs]['WiFi'] +','+value_dict[fs]['Cellular'] +','+value_dict[fs]['Multipath'] \ #+','+value_dict[fs]['MPTCP']+','+value_dict[fs]['IdealSocket'] +','+value_dict[fs]['IdealMSocket'] +"\n" #writef.write(writeStr) # #fs = '64KB' #writeStr = fs+','+value_dict[fs]['WiFi'] +','+value_dict[fs]['Cellular'] +','+value_dict[fs]['Multipath'] \ #+','+value_dict[fs]['MPTCP']+','+value_dict[fs]['IdealSocket'] +','+value_dict[fs]['IdealMSocket'] +"\n" #writef.write(writeStr)
#!/bin/python import os from mystat import get_stats_with_names lines = [line.strip() for line in open("/home/adipc/Documents/ContextServiceExperiments/WeatherCaseStudyExps/SingleNodeResults/GNSServiceOutput")] contextTime = [] gnsTime = [] for index in range(len(lines)): line = lines[index] if ( line.find('ContextService search results results') != -1 ): parsed = line.split(' ') timeTaken = int(parsed[6]) contextTime.append(timeTaken) if ( line.find('GNS search results results') != -1 ): parsed = line.split(' ') timeTaken = int(parsed[6]) gnsTime.append(timeTaken) if(len(contextTime) > 0): statDict = get_stats_with_names(contextTime) print "context time mean "+str(statDict['mean'])+','+str(statDict['perc5'])+','+str(statDict['perc95']) if(len(gnsTime) > 0): statDict = get_stats_with_names(gnsTime) print "gns time mean "+str(statDict['mean'])+','+str(statDict['perc5'])+','+str(statDict['perc95'])
# Write one refresh-delay CDF file per experiment key, plus a summary file
# of mean/5th/95th-percentile delays.  Assumes refreshMega/valueUpdateMega
# map key -> {id -> timestamp} and share ids per key -- TODO confirm against
# the code that fills them (not visible in this chunk).
Keys=sorted(refreshMega.keys())
meanWriteF = open("meanRefreshTime.txt", "w")
for ki in Keys:
    currRefreshMap = refreshMega[ki]
    currValueUpdateMap = valueUpdateMega[ki]
    indKs=sorted(currRefreshMap.keys())
    timeList = []
    for inki in indKs:
        # delay = refresh timestamp minus matching value-update timestamp
        timeList.append(currRefreshMap[inki] - currValueUpdateMap[inki])
    timeList.sort()
    writef = open("RefreshTimeCDF-"+ki+".txt", "w")
    for index in range(len(timeList)):
        # cumulative fraction of requests at or below this delay
        totalReqPerc = (1.0 * (index+1))/len(timeList)
        writeStr = str(timeList[index])+','+str(totalReqPerc)+"\n"
        writef.write(writeStr)
    writef.close()
    res=get_stats_with_names(timeList)
    writeStr=str(ki)+','+str(res['mean'])+','+str(res['perc5'])+','+str(res['perc95'])+"\n"
    meanWriteF.write(writeStr)
meanWriteF.close()
line = lines[index] if (line.find('Time taken') != -1): #print line parsed = line.split(' ') stime = long(parsed[2]) #sendMap[reqNum] = stime if (timeMapReqsps.has_key(key)): currList = timeMapReqsps[key] currList.append(stime) timeMapReqsps[key] = currList else: currList = [] currList.append(stime) timeMapReqsps[key] = currList Keys = sorted(timeMapReqsps.keys()) meanWriteF = open("meanQueryTime.txt", "w") for ki in Keys: currList = timeMapReqsps[ki] #currList.sort() res = get_stats_with_names(currList) writeStr = str(ki) + ',' + str(res['mean']) + ',' + str( res['perc5']) + ',' + str(res['perc95']) + "\n" meanWriteF.write(writeStr) meanWriteF.close()
# Emit a refresh-delay CDF file per key and a mean-summary file.
Keys = sorted(refreshMega.keys())
meanWriteF = open("meanRefreshTime.txt", "w")
for ki in Keys:
    currRefreshMap = refreshMega[ki]
    currValueUpdateMap = valueUpdateMega[ki]
    indKs = sorted(currRefreshMap.keys())
    # delay per id = refresh timestamp minus value-update timestamp
    timeList = [currRefreshMap[inki] - currValueUpdateMap[inki]
                for inki in indKs]
    timeList.sort()
    writef = open("RefreshTimeCDF-" + ki + ".txt", "w")
    for index, sample in enumerate(timeList):
        totalReqPerc = (1.0 * (index + 1)) / len(timeList)
        writeStr = str(sample) + ',' + str(totalReqPerc) + "\n"
        writef.write(writeStr)
    writef.close()
    res = get_stats_with_names(timeList)
    writeStr = (str(ki) + ',' + str(res['mean']) + ',' + str(res['perc5'])
                + ',' + str(res['perc95']) + "\n")
    meanWriteF.write(writeStr)
meanWriteF.close()
for ki in rIDKeys: if(sendMap.has_key(ki)): timeList.append(compMap[ki] - sendMap[ki]) if ( timeMapReqsps.has_key(reqpsString) ): currList = timeMapReqsps[reqpsString] currList.extend(timeList) timeMapReqsps[reqpsString] = currList else: currList = [] currList.extend(timeList) timeMapReqsps[reqpsString] = currList Keys=sorted(timeMapReqsps.keys()) meanWriteF = open("meanQueryTime.txt", "w") for ki in Keys: currList = timeMapReqsps[ki] currList.sort() res=get_stats_with_names(currList) writeStr=str(ki)+','+str(res['mean'])+','+str(res['perc5'])+','+str(res['perc95'])+"\n" meanWriteF.write(writeStr) meanWriteF.close()
def processFile(filePath):
    """Per-request query/query-processing latency extraction.

    Scans QUERYFROMUSER / QUERYFROMUSERREPLY log line pairs, grouping
    samples by attribute count.  A request's sample is flushed when the
    next request id appears; negative/absent durations are replaced with
    TimeOutTime.  Returns get_stats_with_names() over the per-attribute
    mean QP times.  NOTE(review): Q_dict is filled only on the first
    request per attribute count (the else-branch appends to the local
    QNumAttrList but never stores it back) -- looks unintended; confirm
    before relying on Q_dict.
    """
    lines = [line.strip() for line in open(filePath)]
    # Sentinel state for the request currently being accumulated.
    currNumAttr = int('-1')
    QStartTime = int('-1')
    QEndTime = int('-1')
    QPStartTime = long('-1')
    QPEndTime = long('-1')
    currRequestID = int('-1')
    QP_dict = {}          # numAttr -> list of query-processing times
    Q_dict = {}           # numAttr -> list of end-to-end query times
    TimeOutTime = 3000    # substitute value for requests with no valid end
    for index in range(len(lines)):
        line = lines[index]
        if ( line.find('CONTEXTSERVICE EXPERIMENT: QUERYFROMUSER REQUEST ID') != -1 ):
            parsed = line.split(' ')
            lineCurrNumAttr = int(parsed[7])
            lineQPStartTime = long(parsed[9])
            lineRequestID = int(parsed[5])
            lineQStartTime = int(parsed[12])
            # new request write previous one
            if( (lineRequestID != currRequestID) and (currRequestID != -1) ):
                if (not QP_dict.has_key(currNumAttr) ):
                    # First sample for this attribute count: create lists.
                    QPNumAttrList = []
                    QPTime = (int)(QPEndTime - QPStartTime)
                    if(QPTime > 0):
                        QPNumAttrList.append(QPTime)
                    else:
                        QPNumAttrList.append(TimeOutTime)
                    QP_dict[currNumAttr] = QPNumAttrList
                    QNumAttrList = []
                    QTime = (int)(QEndTime - QStartTime)
                    if(QTime > 0):
                        QNumAttrList.append(QTime)
                    else:
                        QNumAttrList.append(TimeOutTime)
                    Q_dict[currNumAttr] = QNumAttrList
                else:
                    # Subsequent samples: append to the existing lists.
                    QPNumAttrList = QP_dict[currNumAttr]
                    QPTime = (int)(QPEndTime - QPStartTime)
                    if(QPTime > 0):
                        QPNumAttrList.append(QPTime)
                    else:
                        QPNumAttrList.append(TimeOutTime)
                    QNumAttrList = Q_dict[currNumAttr]
                    QTime = (int)(QEndTime - QStartTime)
                    #if(reqTime > 0 and reqTime < 1000):
                    if(QTime > 0):
                        QNumAttrList.append(QTime)
                    else:
                        QNumAttrList.append(TimeOutTime)
                # Start accumulating the new request; end times reset to -1.
                currNumAttr = lineCurrNumAttr
                QPStartTime = lineQPStartTime
                currRequestID = lineRequestID
                QPEndTime = long('-1')
                QStartTime = lineQStartTime
                QEndTime = long('-1')
            elif ( currRequestID == -1 ):
                # Very first request seen in the log.
                currNumAttr = lineCurrNumAttr
                QPStartTime = lineQPStartTime
                QStartTime = lineQStartTime
                currRequestID = lineRequestID
        if ( line.find('CONTEXTSERVICE EXPERIMENT: QUERYFROMUSERREPLY REQUEST ID') != -1 ):
            parsed = line.split(' ')
            lineNumAttr = int(parsed[7])
            lineQPEndTime = long(parsed[9])
            lineQEndTime = long(parsed[11])
            lineRequestID = int(parsed[5])
            # Only accept the reply for the request currently in flight.
            if ( (lineRequestID == currRequestID) ):
                QPEndTime = lineQPEndTime
                QEndTime = lineQEndTime
    print QP_dict
    print Q_dict
    Mean = []
    for key in QP_dict:
        valList = QP_dict[key]
        valList.sort()
        #valList = valList[(len(valList)*10//100)+1:len(valList)*90//100]
        statDict = get_stats_with_names(valList)
        # Skip degenerate sample sets flagged by get_stats_with_names.
        if ( not statDict.has_key('ZEROVALUES') ):
            Mean.append(statDict['mean'])
            #QPResult_dict[key] = str(statDict['mean'])+','+str(statDict['perc5'])+','+str(statDict['perc95'])
    print Mean
    statDict = get_stats_with_names(Mean)
    return statDict
lineQPEndTime = long(parsed[9]) lineQEndTime = long(parsed[11]) lineRequestID = int(parsed[5]) if ( (lineRequestID == currRequestID) ): QPEndTime = lineQPEndTime QEndTime = lineQEndTime print QP_dict print Q_dict Mean = [] for key in QP_dict: valList = QP_dict[key] valList.sort() #valList = valList[(len(valList)*10//100)+1:len(valList)*90//100] statDict = get_stats_with_names(valList) if ( not statDict.has_key('ZEROVALUES') ): Mean.append(statDict['mean']) #QPResult_dict[key] = str(statDict['mean'])+','+str(statDict['perc5'])+','+str(statDict['perc95']) print Mean statDict = get_stats_with_names(Mean) writef = open("resultDir/contextOutputContextNetU3QP.csv", "w") #sortedKeys = sorted(QPResult_dict.iterkeys(), key=int) #for key in sortedKeys: writeStr = str(statDict['mean'])+','+str(statDict['perc5'])+','+str(statDict['perc95'])+"\n" writef.write(writeStr) writef.close()
lines = [ line.strip() for line in open( "/home/adipc/Documents/ContextServiceExperiments/WeatherCaseStudyExps/SingleNodeResults/GNSServiceOutput" ) ] contextTime = [] gnsTime = [] for index in range(len(lines)): line = lines[index] if (line.find('ContextService search results results') != -1): parsed = line.split(' ') timeTaken = int(parsed[6]) contextTime.append(timeTaken) if (line.find('GNS search results results') != -1): parsed = line.split(' ') timeTaken = int(parsed[6]) gnsTime.append(timeTaken) if (len(contextTime) > 0): statDict = get_stats_with_names(contextTime) print "context time mean " + str(statDict['mean']) + ',' + str( statDict['perc5']) + ',' + str(statDict['perc95']) if (len(gnsTime) > 0): statDict = get_stats_with_names(gnsTime) print "gns time mean " + str(statDict['mean']) + ',' + str( statDict['perc5']) + ',' + str(statDict['perc95'])
def processFile(filePath):
    """Per-request query/query-processing latency extraction (formatted twin
    of the collapsed version above).

    Groups QUERYFROMUSER / QUERYFROMUSERREPLY samples by attribute count,
    flushing a request when the next request id appears; non-positive
    durations are replaced with TimeOutTime.  Returns
    get_stats_with_names() over the per-attribute mean QP times.
    NOTE(review): in the else-branch Q_dict's list is appended to but the
    surrounding first-sample branch is the only place Q_dict is assigned --
    confirm Q_dict is populated as intended.
    """
    lines = [line.strip() for line in open(filePath)]
    # Sentinel state for the request currently being accumulated.
    currNumAttr = int('-1')
    QStartTime = int('-1')
    QEndTime = int('-1')
    QPStartTime = long('-1')
    QPEndTime = long('-1')
    currRequestID = int('-1')
    QP_dict = {}          # numAttr -> list of query-processing times
    Q_dict = {}           # numAttr -> list of end-to-end query times
    TimeOutTime = 3000    # substitute for requests with no valid end time
    for index in range(len(lines)):
        line = lines[index]
        if (line.find('CONTEXTSERVICE EXPERIMENT: QUERYFROMUSER REQUEST ID') != -1):
            parsed = line.split(' ')
            lineCurrNumAttr = int(parsed[7])
            lineQPStartTime = long(parsed[9])
            lineRequestID = int(parsed[5])
            lineQStartTime = int(parsed[12])
            # new request write previous one
            if ((lineRequestID != currRequestID) and (currRequestID != -1)):
                if (not QP_dict.has_key(currNumAttr)):
                    # First sample for this attribute count: create lists.
                    QPNumAttrList = []
                    QPTime = (int)(QPEndTime - QPStartTime)
                    if (QPTime > 0):
                        QPNumAttrList.append(QPTime)
                    else:
                        QPNumAttrList.append(TimeOutTime)
                    QP_dict[currNumAttr] = QPNumAttrList
                    QNumAttrList = []
                    QTime = (int)(QEndTime - QStartTime)
                    if (QTime > 0):
                        QNumAttrList.append(QTime)
                    else:
                        QNumAttrList.append(TimeOutTime)
                    Q_dict[currNumAttr] = QNumAttrList
                else:
                    # Subsequent samples: append to the existing lists.
                    QPNumAttrList = QP_dict[currNumAttr]
                    QPTime = (int)(QPEndTime - QPStartTime)
                    if (QPTime > 0):
                        QPNumAttrList.append(QPTime)
                    else:
                        QPNumAttrList.append(TimeOutTime)
                    QNumAttrList = Q_dict[currNumAttr]
                    QTime = (int)(QEndTime - QStartTime)
                    #if(reqTime > 0 and reqTime < 1000):
                    if (QTime > 0):
                        QNumAttrList.append(QTime)
                    else:
                        QNumAttrList.append(TimeOutTime)
                # Start accumulating the new request; end times reset to -1.
                currNumAttr = lineCurrNumAttr
                QPStartTime = lineQPStartTime
                currRequestID = lineRequestID
                QPEndTime = long('-1')
                QStartTime = lineQStartTime
                QEndTime = long('-1')
            elif (currRequestID == -1):
                # Very first request seen in the log.
                currNumAttr = lineCurrNumAttr
                QPStartTime = lineQPStartTime
                QStartTime = lineQStartTime
                currRequestID = lineRequestID
        if (line.find(
                'CONTEXTSERVICE EXPERIMENT: QUERYFROMUSERREPLY REQUEST ID') != -1):
            parsed = line.split(' ')
            lineNumAttr = int(parsed[7])
            lineQPEndTime = long(parsed[9])
            lineQEndTime = long(parsed[11])
            lineRequestID = int(parsed[5])
            # Only accept the reply for the request currently in flight.
            if ((lineRequestID == currRequestID)):
                QPEndTime = lineQPEndTime
                QEndTime = lineQEndTime
    print QP_dict
    print Q_dict
    Mean = []
    for key in QP_dict:
        valList = QP_dict[key]
        valList.sort()
        #valList = valList[(len(valList)*10//100)+1:len(valList)*90//100]
        statDict = get_stats_with_names(valList)
        # Skip degenerate sample sets flagged by get_stats_with_names.
        if (not statDict.has_key('ZEROVALUES')):
            Mean.append(statDict['mean'])
            #QPResult_dict[key] = str(statDict['mean'])+','+str(statDict['perc5'])+','+str(statDict['perc95'])
    print Mean
    statDict = get_stats_with_names(Mean)
    return statDict
if (x == '.svn'): continue if (x.find('localDir-compute-') != -1): print x processCSDirectory('/home/ayadav/contextServiceScripts/' + x) #prints writef = open("bulkGet.csv", "w") bkKeys = bulkGetMap.keys() bkKeys.sort() for key in bkKeys: valList = bulkGetMap[key] #valList.sort() #valList = valList[(len(valList)*10//100)+1:len(valList)*90//100] statDict = get_stats_with_names(valList) if (not statDict.has_key('ZEROVALUES')): writeStr = str(key) + ',' + str(statDict['mean']) + ',' + str( statDict['perc5']) + ',' + str(statDict['perc95']) + "\n" writef.write(writeStr) writef.close() writef = open("processValue.csv", "w") pvKeys = processValueNodeMap.keys() pvKeys.sort() for key in pvKeys: valList = processValueNodeMap[key] #valList.sort() #valList = valList[(len(valList)*10//100)+1:len(valList)*90//100]