# Assumed imports -- the exact module paths are not shown in this file:
from django.conf import settings
import database_helper


def parse_capacity_measurements(hash):
    """Return 'Capacity Up' and 'Capacity Down' line series for a device."""
    data = []
    result = []
    device = database_helper.get_device_by_hash(hash).replace(':', '')
    filename = settings.PROJECT_ROOT + '/summary/measurements/capacity/' + device
    # garbage characters to be removed:
    remove = ')("\n'
    try:
        with open(filename, 'r') as f:
            # each line represents one measurement record:
            for record in f:
                entry = []
                for char in remove:
                    record = record.replace(char, '')
                record = record.split('|')
                # eventstamp:
                entry.append(int(record[0]))
                # average (scaled by 1000):
                entry.append(float(record[1]) * 1000)
                # direction:
                entry.append(record[2])
                data.append(entry)
    # file not found (or unparsable record):
    except:
        return result
    # sort by eventstamp:
    sorted_data = sorted(data, key=lambda x: x[0])
    sorted_up = [(x, y) for x, y, z in sorted_data if z == 'up']
    sorted_down = [(x, y) for x, y, z in sorted_data if z == 'dw']
    series_up = dict(name='Capacity Up', type='line', data=sorted_up)
    series_down = dict(name='Capacity Down', type='line', data=sorted_down)
    result.append(series_up)
    result.append(series_down)
    return result


def parse_lmrtt_measurements(hash):
    """Return a 'Last mile latency' line series for a device."""
    data = []
    result = []
    device = database_helper.get_device_by_hash(hash).replace(':', '')
    filename = settings.PROJECT_ROOT + '/summary/measurements/lmrtt/' + device
    # garbage characters to be removed:
    remove = ')("\n'
    try:
        with open(filename, 'r') as f:
            # each line represents one measurement record:
            for record in f:
                entry = []
                for char in remove:
                    record = record.replace(char, '')
                record = record.split('|')
                # eventstamp:
                entry.append(int(record[0]))
                # average:
                entry.append(float(record[1]))
                data.append(entry)
    # file not found (or unparsable record):
    except:
        return result
    # sort by eventstamp:
    sorted_data = sorted(data, key=lambda x: x[0])
    series = dict(name='Last mile latency', type='line', data=sorted_data,
                  priority=1, id=4)
    result.append(series)
    return result


def parse_underload_measurements(hash):
    """Return 'Under Load Up' and 'Under Load Down' line series for a device."""
    result = []
    data = []
    device = database_helper.get_device_by_hash(hash).replace(':', '')
    filename = settings.PROJECT_ROOT + '/summary/measurements/underload/' + device
    # garbage characters to be removed:
    remove = ')("\n'
    try:
        with open(filename, 'r') as f:
            # each line represents one measurement record:
            for record in f:
                entry = []
                for char in remove:
                    record = record.replace(char, '')
                record = record.split('|')
                # eventstamp:
                entry.append(int(record[0]))
                # average:
                entry.append(float(record[1]))
                # direction:
                direction = record[2]
                entry.append(direction)
                data.append(entry)
    # file not found (or unparsable record):
    except:
        return result
    # sort by eventstamp:
    sorted_data = sorted(data, key=lambda x: x[0])
    sorted_up = [(x, y) for x, y, z in sorted_data if z == 'up']
    sorted_dw = [(x, y) for x, y, z in sorted_data if z == 'dw']
    series_up = dict(name='Under Load Up', type='line', data=sorted_up)
    series_dw = dict(name='Under Load Down', type='line', data=sorted_dw)
    result.append(series_up)
    result.append(series_dw)
    return result


def parse_bitrate_measurements(hash, dir):
    """Return multi- and single-threaded TCP bitrate series for a device and
    direction ('up' or 'dw')."""
    result = []
    data = []
    device = database_helper.get_device_by_hash(hash).replace(':', '')
    filename = settings.PROJECT_ROOT + '/summary/measurements/bitrate/' + device
    # garbage characters to be removed:
    remove = ')("\n'
    try:
        with open(filename, 'r') as f:
            # each line represents one measurement record:
            for record in f:
                entry = []
                for char in remove:
                    record = record.replace(char, '')
                record = record.split('|')
                # eventstamp:
                entry.append(int(record[0]))
                # average (scaled by 1000):
                entry.append(float(record[1]) * 1000)
                # direction:
                direction = record[2]
                entry.append(direction)
                # measurement tool identifier:
                toolid = record[3]
                entry.append(toolid)
                data.append(entry)
    # file not found (or unparsable record):
    except:
        return result
    # sort by eventstamp:
    sorted_data = sorted(data, key=lambda x: x[0])
    sorted_multi = [(x, y) for x, y, z, t in sorted_data if z == dir and t == 'NETPERF_3']
    sorted_single = [(x, y) for x, y, z, t in sorted_data if z == dir and t != 'NETPERF_3']
    multi_series = dict(name='Multi-threaded TCP', type='line', data=sorted_multi, priority=1)
    if dir == 'dw':
        multi_series['id'] = 1
    else:
        multi_series['id'] = 2
    single_series = dict(name='Single-threaded TCP', type='line', data=sorted_single)
    result.append(multi_series)
    result.append(single_series)
    return result


def parse_rtt_measurements(hash):
    """Return one RTT line series per measurement server the device has measured against."""
    result = []
    data = []
    dstips = []
    device = database_helper.get_device_by_hash(hash).replace(':', '')
    filename = settings.PROJECT_ROOT + '/summary/measurements/rtt/' + device
    # garbage characters to be removed:
    remove = ')("\n'
    ipr = database_helper.get_server_list()
    try:
        with open(filename, 'r') as f:
            # each line represents one measurement record:
            for record in f:
                entry = []
                for char in remove:
                    record = record.replace(char, '')
                record = record.split('|')
                # eventstamp:
                entry.append(int(record[0]))
                # average:
                entry.append(float(record[1]))
                # mserver address:
                dstip = record[2]
                entry.append(dstip)
                if dstip not in dstips and dstip != '':
                    dstips.append(dstip)
                data.append(entry)
    # file not found (or unparsable record):
    except:
        return result
    # sort by eventstamp:
    sorted_data = sorted(data, key=lambda x: x[0])
    # group data into sub-series by measurement server:
    for dstip in dstips:
        mserver = ipr.filter(ip=dstip)
        if len(mserver) == 0:
            continue
        # measurements are grouped by dstip, though dstip itself is discarded:
        series_data = [(x, y) for x, y, z in sorted_data if z == dstip]
        series = dict(name=mserver[0].location, type='line', data=series_data)
        result.append(series)
    return result


def get_location(hash):
    """Return a 'City, Country' string for the device, or 'unavailable'."""
    device = database_helper.get_device_by_hash(hash)
    details = database_helper.get_details_by_deviceid(device)
    if details.count() > 0:
        return details[0].city + ", " + details[0].country
    return "unavailable"
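

# The parsers above all repeat the same read/clean/split loop. The helper
# below is only an illustrative sketch of how that logic could be shared;
# _read_records is not part of the original code.
def _read_records(filename, remove=')("\n'):
    """Yield the '|'-separated fields of each record in filename,
    with garbage characters stripped."""
    try:
        with open(filename, 'r') as f:
            for record in f:
                for char in remove:
                    record = record.replace(char, '')
                yield record.split('|')
    except IOError:
        # a missing file simply yields no records
        return


# Example use, mirroring parse_lmrtt_measurements:
#     data = []
#     for fields in _read_records(filename):
#         data.append([int(fields[0]), float(fields[1])])
#     data.sort(key=lambda x: x[0])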