def csvAggregation(self, file_string, file_name):
    """Aggregate per-worker CSV fragments in ``self.filepath`` into one file.

    Parameters
    ----------
    file_string : str
        Fragment kind to collect ('transition' or 'duration'); only files
        whose names contain this substring are read.
    file_name : str
        Name of the aggregated output file; it is excluded from the input
        scan so a previous aggregate is never re-read.

    Side effects: sets ``self.transition_all`` (transition branch) and writes
    the aggregate plus, for transitions, a row-normalised rate matrix under
    ``self.path + '/after/' + str(self.calmonth)``.
    """
    # Collect every fragment of the requested kind, skipping the output file.
    file_list = [f for f in os.listdir(self.filepath)
                 if file_string in f and f != file_name]
    print(file_list)
    if file_string == 'transition':
        # Element-wise sum of all transition-count matrices (index column 0).
        # Assumes file_list is non-empty -- TODO confirm callers guarantee this.
        for i, fname in enumerate(file_list):
            if i == 0:
                self.transition_all = utils.getCSVi(self.filepath + '/' + fname, 0)
            else:
                self.transition_all += utils.getCSVi(self.filepath + '/' + fname, 0)
        utils.saveCSVi(self.transition_all, self.path + '/after/' + str(self.calmonth) + '/' + file_name)
        # Row-normalise the counts into transition probabilities.
        # NOTE: work on a copy -- the previous version aliased
        # self.transition_all and overwrote the raw counts in place,
        # corrupting any later use of the count matrix (e.g. getGroup).
        tsum = self.transition_all.sum(axis=1)
        transition_rate = self.transition_all.copy().div(tsum, axis=0)
        transition_rate = transition_rate.fillna(0)  # rows with no outgoing transitions divide 0/0
        utils.saveCSVi(transition_rate, self.path + '/after/' + str(self.calmonth) + '/transition_rate_' + str(self.calmonth) + '.csv')
    elif file_string == 'duration':
        # Concatenate the duration fragments. Only the first write emits the
        # header: the old code appended with the default header=True, which
        # embedded a header row per fragment inside the aggregate file.
        out_path = self.path + '/after/' + str(self.calmonth) + '/' + file_name
        cols = ['client', 'from', 'to', 'duration']
        for i, fname in enumerate(file_list):
            if i == 0:
                utils.getCSV(self.filepath + '/' + fname).to_csv(out_path, columns=cols, index=False)
            else:
                utils.getCSV(self.filepath + '/' + fname).to_csv(out_path, columns=cols, index=False, mode='a', header=False)
def __init__(self, filepath, ap_file, path, calmonth):
    """Load the AP table and prepare a square group-transition accumulator."""
    # Plain configuration first.
    self.filepath = filepath
    self.path = path
    self.calmonth = calmonth
    # AP master table; the last row's buildID is used as the group count
    # (assumes buildID runs 1..N in order -- TODO confirm).
    self.df_ap = utils.getCSV(ap_file)
    n_groups = self.df_ap.iloc[-1]['buildID']
    # Accumulator for building-to-building transition counts.
    self.group_transition = np.zeros((n_groups, n_groups))
def __init__(self, ap_file, data_file, calmonth, rank, size, path):
    """Load AP and measurement data and initialise per-rank accumulators.

    ``rank``/``size`` are MPI-style worker coordinates; only rank 0 prints
    the unique-client count.
    """
    # Run parameters.
    self.calmonth = calmonth
    self.rank = rank
    self.size = size
    self.path = path
    # Input tables.
    self.df_ap = utils.getCSV(ap_file)
    self.df_data = utils.getCSV(data_file)
    # De-duplicated client list (unique users).
    self.df_unique = utils.getDuplicate(self.df_data, 'client')
    if self.rank == 0:
        print('ユニーク利用者数 : ' + str(len(self.df_unique)))
    # Per-record accumulators filled while scanning the data.
    self.transition_from = []
    self.transition_to = []
    self.duration = []
    self.client = []
    # (n_ap + 1) x (n_ap + 1) transition-count matrix of zeros.
    dim = len(self.df_ap) + 1
    self.transition = [[0] * dim for _ in range(dim)]
def getGroup(self, file_name):
    """Collapse AP-level transition counts and durations to building groups.

    Parameters
    ----------
    file_name : str
        Name of the aggregated duration CSV under
        ``self.path + '/after/' + str(self.calmonth)``.

    Side effects: accumulates into ``self.group_transition``, adds an 'ind'
    column to ``self.df_ap``, and writes the group transition and group
    duration CSVs.
    """
    # Map each AP row index to its building group once; the previous version
    # re-did a df_ap.iloc[...] row lookup per matrix cell (O(n^2) lookups).
    build_ids = self.df_ap['buildID'].to_numpy()
    transition_all_np = self.transition_all.to_numpy()
    for i in range(len(transition_all_np)):
        print('i = {0}, from_group = {1}'.format(i, build_ids[i]))
        for j in range(len(transition_all_np)):
            # buildID is 1-based; group_transition is 0-based.
            self.group_transition[build_ids[i] - 1][build_ids[j] - 1] += transition_all_np[i][j]
    utils.saveCSVi(np.nan_to_num(self.group_transition), self.path + '/after/' + str(self.calmonth) + '/group_transition_' + str(self.calmonth) + '.csv')
    # Translate per-AP 'from'/'to' indices in the duration file into building
    # IDs via a left join on a stringified row index (the duration file's
    # 'from'/'to' columns are presumably read back as strings -- TODO confirm).
    group_duration = utils.getCSV(self.path + '/after/' + str(self.calmonth) + '/' + file_name)
    self.df_ap['ind'] = [str(n) for n in range(len(self.df_ap))]
    ap_key = self.df_ap[['ind', 'buildID']]
    group_duration = (pd.merge(group_duration, ap_key, left_on='from', right_on='ind', how='left')
                      .drop(['from', 'ind'], axis=1)
                      .rename(columns={'buildID': 'from'}))
    group_duration = (pd.merge(group_duration, ap_key, left_on='to', right_on='ind', how='left')
                      .drop(['to', 'ind'], axis=1)
                      .rename(columns={'buildID': 'to'}))
    group_duration = group_duration.reindex(columns=['client', 'from', 'to', 'duration'])
    utils.saveCSV(group_duration, self.path + '/after/' + str(self.calmonth) + '/group_duration_' + str(self.calmonth) + '.csv')
def __init__(self, ap_file, data_file, calmonth, path):
    """Load the AP table and the data table and keep the run parameters."""
    self.calmonth = calmonth
    self.path = path
    # Input tables loaded via the shared CSV helper.
    self.df_ap = utils.getCSV(ap_file)
    self.df_data = utils.getCSV(data_file)
def pubsub(o):
    """Run the Throughput publisher/subscriber pair for each selected API,
    parse the subscriber output into ``results``, and write per-API CSVs
    plus a total-averages CSV.

    ``o`` is the parsed command-line options object; only the attributes
    referenced below (cppapi, isoapi, subonly, pubonly, maxpayload, ...) are
    required. Python 2 code (print statements).
    """
    texec = []
    apiselect=0
    # Platform-specific executable names.
    publisher = 'publisher'
    subscriber = 'subscriber'
    if sys.platform == 'win32':
        publisher = 'publisher.exe'
        subscriber = 'subscriber.exe'
    #if o.capi:
    #    #C
    #    texec.append([])
    #    texec[apiselect].append(os.environ['OSPL_HOME'] + '/examples/dcps/Throughput/c/' + publisher)
    #    texec[apiselect].append(os.environ['OSPL_HOME'] + '/examples/dcps/Throughput/c/' + subscriber)
    #    texec[apiselect].append('C')
    #    apiselect+=1
    # texec collects [publisher path, subscriber path, API label] per API.
    if o.cppapi:
        #SACPP
        texec.append([])
        texec[apiselect].append(os.environ['OSPL_HOME'] + '/examples/dcps/Throughput/cpp/' + publisher)
        texec[apiselect].append(os.environ['OSPL_HOME'] + '/examples/dcps/Throughput/cpp/' + subscriber)
        texec[apiselect].append('SACPP')
        apiselect+=1
    if o.isoapi:
        #ISOCPP
        texec.append([])
        texec[apiselect].append(os.environ['OSPL_HOME'] + '/examples/dcps/Throughput/isocpp/' + publisher)
        texec[apiselect].append(os.environ['OSPL_HOME'] + '/examples/dcps/Throughput/isocpp/' + subscriber)
        texec[apiselect].append('ISOCPP')
        apiselect+=1
    tafcsv = utils.getCSV(o.averagesfile)
    # Nested auto-vivifying dict: results[api][payload_size][...].
    results = utils.tree()
    for i in texec:
        #Open per exec csv
        csvfile = i[0] + ".csv"
        cw = utils.getCSV(csvfile)
        cw.writerow([str(time.strftime("%x %H:%M:%S"))])
        resultsApi = results[i[2]]
        Bsize = 0
        # Sweep payload sizes 0..maxpayload (KiB).
        while Bsize <= o.maxpayload:
            resultsBsize = resultsApi[Bsize]
            try:
                if o.subonly:
                    print ("launching Subscriber " + i[1] + " " + str(o.maxcycles) + " " + str(o.pollingdelay)+ " " + str(o.subpartition))
                    subscriber = subprocess.Popen([i[1], str(o.maxcycles), str(o.pollingdelay), str(o.subpartition)], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                    utils.setPriority(subscriber.pid, o.subnice, o.subaffinity)
                try:
                    if o.pubonly:
                        print ("launching publisher " + i[0] + " " + str((Bsize*1024)) + " " + str(o.burstI) + " " + str(o.burstS) + " " + str(o.timeout) + " " + str(o.pubpartition))
                        publisher = subprocess.Popen([i[0], str(Bsize*1024), str(o.burstI), str(o.burstS), str(o.timeout), str(o.pubpartition)], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                        utils.setPriority(publisher.pid, o.pubnice, o.pubaffinity)
                    # Wait on whichever side is running locally.
                    # NOTE(review): if subonly is false, ``subscriber`` is
                    # still the executable-name string here -- presumably
                    # these flags default to true; confirm.
                    if not o.pubonly:
                        subscriber.wait()
                    elif not o.subonly:
                        publisher.wait()
                    elif o.subonly and o.pubonly:
                        #Waiting for a return on windows is broken sometimes for some reason
                        if sys.platform == 'win32':
                            time.sleep(12)
                        else:
                            subscriber.wait()
                    # Echo and parse the subscriber output; stop both
                    # processes once the averages line has been parsed.
                    for line in subscriber.stdout:
                        sys.stdout.write(line)
                        utils.parseTP(line, resultsBsize)
                        if resultsBsize['Average Transfer']['Mbit']:
                            if o.pubonly:
                                kill(publisher)
                            if o.subonly:
                                kill(subscriber)
                    for line in zip(publisher.stderr if o.pubonly else [],subscriber.stderr if o.subonly else []):
                        print 'Publisher err: ' + line[0]
                        print 'Subscriber err: ' + line[1]
                    if o.subonly:
                        # Per-payload-size result table in this API's CSV.
                        cw.writerow([str(Bsize)+'KiB'])
                        cw.writerow(['Count'] + ['Payload Size'] + ['Received Samples'] + ['Received Bytes'] + ['Out of order'] + ['Sample Rate'] + ['Transfer Rate'])
                        for key in sorted(resultsBsize):
                            if str(key).isdigit():
                                k = resultsBsize[key]
                                cw.writerow([str(key)] + [k['Payload Size']] + [k['Received Samples']] + [k['Received Bytes']] + [k['Out of order']] + [k['Sample Rate']] + [k['Transfer Rate']])
                    #utils.jsonPrint(resultsBsize)
                    if o.pubonly:
                        kill(publisher)
                    if o.subonly:
                        kill(subscriber)
                except OSError as detail:
                    print "Cannot find publisher executable: " + i[0]
                    print detail
                    sys.exit(1)
            except OSError as detail:
                # NOTE(review): message prints i[0] (the publisher path);
                # i[1] looks intended -- confirm before changing.
                print "Cannot find subscriber executable: " + i[0]
                print detail
                sys.exit(1)
            Bsize += 1
    if o.subonly:
        ''' Create or append to total averages file '''
        tafcsv = utils.getCSV(o.averagesfile)
        tafcsv.writerow([str(time.strftime("%x %H:%M:%S"))])
        tafcsv.writerow(['Payload KiB']
        #+ ['C Total Samples'] + ['C Total Bytes'] + ['C Total out of order'] + ['C Average Samples'] + ['C Average Mbit']
        + ['SACPP Total Samples'] + ['SACPP Total Bytes'] + ['SACPP Total out of order'] + ['SACPP Average Samples'] + ['SACPP Average Mbit']
        + ['ISOCPP Total Samples'] + ['ISOCPP Total Bytes'] + ['ISOCPP Total out of order'] + ['ISOCPP Average Samples'] + ['ISOCPP Average Mbit']
        )
        #Grab existing API
        # Python 2: dict.keys() returns a list, so indexing works; the first
        # API's payload sizes drive the per-size rows.
        apis = results.keys()
        api = apis[0]
        for size in sorted(results[api]):
            tafcsv.writerow([size]
            #+ utils.is_empty(results['C'][size]['Total']['Samples'])
            #+ utils.is_empty(results['C'][size]['Total']['Bytes'])
            #+ utils.is_empty(results['C'][size]['Total out of order'])
            #+ utils.is_empty(results['C'][size]['Average Transfer']['Samples'])
            #+ utils.is_empty(results['C'][size]['Average Transfer']['Mbit'])
            + utils.is_empty(results['SACPP'][size]['Total']['Samples'])
            + utils.is_empty(results['SACPP'][size]['Total']['Bytes'])
            + utils.is_empty(results['SACPP'][size]['Total out of order'])
            + utils.is_empty(results['SACPP'][size]['Average Transfer']['Samples'])
            + utils.is_empty(results['SACPP'][size]['Average Transfer']['Mbit'])
            + utils.is_empty(results['ISOCPP'][size]['Total']['Samples'])
            + utils.is_empty(results['ISOCPP'][size]['Total']['Bytes'])
            + utils.is_empty(results['ISOCPP'][size]['Total out of order'])
            + utils.is_empty(results['ISOCPP'][size]['Average Transfer']['Samples'])
            + utils.is_empty(results['ISOCPP'][size]['Average Transfer']['Mbit'])
            )
def __init__(self, calmonth, path):
    """Remember the run parameters and load that month's group-duration CSV."""
    self.calmonth = calmonth
    self.path = path
    # Aggregated building-level durations produced by an earlier stage.
    duration_file = self.path + '/group_duration_' + str(self.calmonth) + '.csv'
    self.group_duration = utils.getCSV(duration_file)
def pingpong(o):
    """Run the RoundTrip ping/pong pair for each selected API, doubling the
    payload from 1 KB up to ``o.maxpayload`` KB, and record round-trip /
    read / write statistics in per-API CSVs plus a total-averages CSV.

    ``o`` is the parsed command-line options object. Python 2 code
    (print statements, list-returning dict.keys()).
    """
    texec = []
    # Platform-specific executable names.
    ping = 'ping'
    pong = 'pong'
    if sys.platform == 'win32':
        ping = 'ping.exe'
        pong = 'pong.exe'
    apiselect=0
    # texec collects [ping path, pong path, API label] per API.
    if o.capi:
        #C
        texec.append([])
        texec[apiselect].append(os.environ['OSPL_HOME'] + '/examples/dcps/RoundTrip/c/' + ping)
        texec[apiselect].append(os.environ['OSPL_HOME'] + '/examples/dcps/RoundTrip/c/' + pong)
        texec[apiselect].append('C')
        apiselect+=1
    if o.cppapi:
        #SACPP
        texec.append([])
        texec[apiselect].append(os.environ['OSPL_HOME'] + '/examples/dcps/RoundTrip/cpp/' + ping)
        texec[apiselect].append(os.environ['OSPL_HOME'] + '/examples/dcps/RoundTrip/cpp/' + pong)
        texec[apiselect].append('SACPP')
        apiselect+=1
    if o.isoapi:
        #ISOCPP
        texec.append([])
        texec[apiselect].append(os.environ['OSPL_HOME'] + '/examples/dcps/RoundTrip/isocpp/' + ping)
        texec[apiselect].append(os.environ['OSPL_HOME'] + '/examples/dcps/RoundTrip/isocpp/' + pong)
        texec[apiselect].append('ISOCPP')
        apiselect+=1
    ''' Create or append to total averages file '''
    tafcsv = utils.getCSV(o.averagesfile)
    #Create nested dictionary
    results = utils.tree()
    for i in texec:
        resultsApi = results[i[2]]
        #1KB
        Bsize = 1000
        try:
            if o.pongonly:
                pong = subprocess.Popen([i[1]],stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                utils.setPriority(pong.pid, o.pongnice, o.pongaffinity)
            if o.pongonly and not o.pingonly:
                #Run for 10 minutes and exit program
                time.sleep(600)
                sys.exit(0)
            time.sleep(1)
            ''' Set the CSV output file (af) '''
            csvfile = i[0] + ".csv"
            cw = utils.getCSV(csvfile)
            cw.writerow([str(time.strftime("%x %H:%M:%S"))])
            try:
                # Double the payload each pass: 1KB, 2KB, 4KB, ...
                while(Bsize <= (o.maxpayload * 1000)):
                    resultsBsize = resultsApi[int(Bsize)]
                    print "launching " + i[0] + "with args:" + str(Bsize) + " " + str(o.samples) + " " + str(o.seconds)
                    cw.writerow([str(Bsize/1000)+"KB"])
                    cw.writerow(['Seconds'] + ['RT Count'] + ['RT median'] + ['RT min'] + ['W Count'] + ['W median'] + ['W min'] + ['R Count'] + ['R mean'] + ['R min']);
                    try:
                        if o.pingonly:
                            ping = subprocess.Popen(
                                [i[0], str(Bsize), str(o.samples), str(o.seconds)],
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                            utils.setPriority(ping.pid, o.pingnice, o.pingaffinity)
                    except OSError:
                        print "Cannot find ping executable: " + str([i[0]])
                    #Wait for ping to terminate
                    ping.wait()
                    for line in ping.stderr:
                        print 'err: ' + line
                    # Parse ping's per-interval statistics into resultsBsize.
                    for line in ping.stdout:
                        utils.parseRT(line,resultsBsize)
                    for key in sorted(resultsBsize):
                        k = resultsBsize[key]
                        cw.writerow([key] + [k['RoundTrip']['Count']] + [k['RoundTrip']['Median']] + [k['RoundTrip']['Min']] + [k['Read']['Count']] + [k['Read']['Median']] + [k['Read']['Min']] + [k['Write']['Count']] + [k['Write']['Median']] + [k['Write']['Min']])
                    Bsize = Bsize*2
            except OSError:
                print "Cannot find ping executable: " + [i[0]]
            finally:
                if o.pongonly:
                    #Quit pong
                    # pong has no stdin protocol here; a 'quit' ping run tells
                    # it to stop, then it is terminated.
                    pingq = subprocess.Popen([i[0], 'quit' ], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                    pingq.wait()
                    for line in zip(pingq.stdout, pingq.stderr):
                        print line
                    pong.terminate()
        except OSError:
            print "Cannot find pong executable: " + str([i[1]])
    # Overall averages across all APIs, one row per payload size.
    tafcsv.writerow([str(time.strftime("%x %H:%M:%S"))])
    tafcsv.writerow(['Payload KB'] + ['RoundTrip C'] + ['RoundTrip SACPP'] + ['RoundTip ISOCPP'] + ['Read C'] + ['Read SACPP'] + ['Read ISOCPP'] + ['Write C'] + ['Write SACPP'] + ['Write ISOCPP'])
    Bsize = 1000
    while Bsize <= (o.maxpayload * 1000):
        # Python 2 integer division: Bsize is always a multiple of 1000.
        KB = Bsize/1000
        #pdb.set_trace()
        tafcsv.writerow([KB] + utils.is_empty(results['C'][Bsize]['Overall']['RoundTrip']['Median'])
        + utils.is_empty(results['SACPP'][Bsize]['Overall']['RoundTrip']['Median'])
        + utils.is_empty(results['ISOCPP'][Bsize]['Overall']['RoundTrip']['Median'])
        + utils.is_empty(results['C'][Bsize]['Overall']['Read']['Median'])
        + utils.is_empty(results['SACPP'][Bsize]['Overall']['Read']['Median'])
        + utils.is_empty(results['ISOCPP'][Bsize]['Overall']['Read']['Median'])
        + utils.is_empty(results['C'][Bsize]['Overall']['Write']['Median'])
        + utils.is_empty(results['SACPP'][Bsize]['Overall']['Write']['Median'])
        + utils.is_empty(results['ISOCPP'][Bsize]['Overall']['Write']['Median']))
        Bsize = Bsize*2