def parse_rtt_compare(device, earliest, latest, name):
    """Build a line-chart series of RTT measurements for one device.

    Reads the per-device RTT summary file, keeps only records measured
    against the fixed server (8.8.8.8) whose eventstamp lies strictly
    between `earliest` and `latest`, and returns them sorted by time.
    Returns a dict shaped for the charting front end.
    """
    data = []
    #only want measurements against a particular measurement server:
    dstip = '8.8.8.8'
    earliest = datetime_helper.datetime_to_JSON(earliest)
    latest = datetime_helper.datetime_to_JSON(latest)
    #summary filenames store device ids without colons:
    device = device.replace(':', '')
    filename = settings.PROJECT_ROOT + '/summary/measurements/rtt/' + device
    #garbage characters to be removed:
    remove = ')("\n'
    #file is closed automatically after all lines are read:
    with open(filename, 'r') as f:
        #each line represents one measurement record:
        for record in f:
            for ch in remove:
                record = record.replace(ch, '')
            fields = record.split('|')
            try:
                #eventstamp, average, dstip:
                entry = [int(fields[0]), float(fields[1]), fields[2]]
            except (ValueError, IndexError):
                #skip malformed records instead of aborting the parse
                #(narrowed from the original bare except):
                continue
            data.append(entry)
    #apply filtering:
    data = [(x, y, z) for x, y, z in data
            if earliest < x < latest and z == dstip]
    data = sorted(data, key=lambda point: point[0])
    return dict(name=name + ' device', type='line', data=data)
def parse_bitrate_compare(device, earliest, latest, dir, name):
    """Build a line-chart series of bitrate measurements for one device.

    Reads the per-device bitrate summary file, keeps only multi-threaded
    NETPERF_3 runs in direction `dir` whose eventstamp lies strictly
    between `earliest` and `latest`. Averages are scaled by 1000
    (file stores kbit/s; chart expects bit/s -- TODO confirm units).
    """
    data = []
    earliest = datetime_helper.datetime_to_JSON(earliest)
    latest = datetime_helper.datetime_to_JSON(latest)
    #summary filenames store device ids without colons:
    device = device.replace(':', '')
    filename = settings.PROJECT_ROOT + '/summary/measurements/bitrate/' + device
    #garbage characters to be removed:
    remove = ')("\n'
    #file is closed automatically after all lines are read:
    with open(filename, 'r') as f:
        #each line represents one measurement record:
        for record in f:
            for ch in remove:
                record = record.replace(ch, '')
            fields = record.split('|')
            try:
                #eventstamp, scaled average, direction, toolid:
                entry = [int(fields[0]), float(fields[1]) * 1000,
                         fields[2], fields[3]]
            except (ValueError, IndexError):
                #skip malformed records instead of aborting the parse
                #(narrowed from the original bare except):
                continue
            data.append(entry)
    data = sorted(data, key=lambda point: point[0])
    #apply filtering; keep only (eventstamp, average) pairs:
    data = [(x, y) for x, y, z, t in data
            if earliest < x < latest and z == dir and t == 'NETPERF_3']
    return dict(name=name + ' device', type='line', data=data)
def parse_bitrate_compare(device, earliest, latest, dir, name):
    """Build a line-chart series of bitrate measurements for one device.

    Reads the per-device bitrate summary file, keeps only multi-threaded
    NETPERF_3 runs in direction `dir` whose eventstamp lies strictly
    between `earliest` and `latest`. Averages are scaled by 1000
    (file stores kbit/s; chart expects bit/s -- TODO confirm units).
    """
    data = []
    earliest = datetime_helper.datetime_to_JSON(earliest)
    latest = datetime_helper.datetime_to_JSON(latest)
    #summary filenames store device ids without colons:
    device = device.replace(':', '')
    filename = settings.PROJECT_ROOT + '/summary/measurements/bitrate/' + device
    #garbage characters to be removed:
    remove = ')("\n'
    #file is closed automatically after all lines are read:
    with open(filename, 'r') as f:
        #each line represents one measurement record:
        for record in f:
            for ch in remove:
                record = record.replace(ch, '')
            fields = record.split('|')
            try:
                #eventstamp, scaled average, direction, toolid:
                entry = [int(fields[0]), float(fields[1]) * 1000,
                         fields[2], fields[3]]
            except (ValueError, IndexError):
                #skip malformed records instead of aborting the parse
                #(narrowed from the original bare except):
                continue
            data.append(entry)
    data = sorted(data, key=lambda point: point[0])
    #apply filtering; keep only (eventstamp, average) pairs:
    data = [(x, y) for x, y, z, t in data
            if earliest < x < latest and z == dir and t == 'NETPERF_3']
    return dict(name=name + ' device', type='line', data=data)
def parse_rtt_compare(device, earliest, latest, name):
    """Build a line-chart series of RTT measurements for one device.

    Reads the per-device RTT summary file, keeps only records measured
    against the fixed server (8.8.8.8) whose eventstamp lies strictly
    between `earliest` and `latest`, and returns them sorted by time.
    Returns a dict shaped for the charting front end.
    """
    data = []
    #only want measurements against a particular measurement server:
    dstip = '8.8.8.8'
    earliest = datetime_helper.datetime_to_JSON(earliest)
    latest = datetime_helper.datetime_to_JSON(latest)
    #summary filenames store device ids without colons:
    device = device.replace(':', '')
    filename = settings.PROJECT_ROOT + '/summary/measurements/rtt/' + device
    #garbage characters to be removed:
    remove = ')("\n'
    #file is closed automatically after all lines are read:
    with open(filename, 'r') as f:
        #each line represents one measurement record:
        for record in f:
            for ch in remove:
                record = record.replace(ch, '')
            fields = record.split('|')
            try:
                #eventstamp, average, dstip:
                entry = [int(fields[0]), float(fields[1]), fields[2]]
            except (ValueError, IndexError):
                #skip malformed records instead of aborting the parse
                #(narrowed from the original bare except):
                continue
            data.append(entry)
    #apply filtering:
    data = [(x, y, z) for x, y, z in data
            if earliest < x < latest and z == dstip]
    data = sorted(data, key=lambda point: point[0])
    return dict(name=name + ' device', type='line', data=data)
def parse_bitrate_isp_average(start_date, end_date, isp, direction, country):
    """Per-city weighted bitrate averages for one ISP, as bar series.

    Each daily average from the summary file is weighted by its
    measurement count, so the returned value per city is the mean over
    all underlying measurements. `country == "none"` disables country
    filtering. Returns a list of bar-series dicts sorted by city name.
    """
    data = []
    ret = []
    start = int(datetime_helper.datetime_to_JSON(start_date))
    end = int(datetime_helper.datetime_to_JSON(end_date))
    cities = database_helper.get_all_cities()
    filename = settings.PROJECT_ROOT + '/summary/measurements/bitrate_averages/isp'
    #garbage characters to be removed:
    remove = ')("\n'
    #'with' closes the file; the original's extra f.close() was redundant.
    with open(filename, 'r') as f:
        for record in f:
            for ch in remove:
                record = record.replace(ch, '')
            record = record.split('|')
            #fields: 0 average (scaled to bit/s), 1 measurement count,
            #2 day, 3 isp, 4 device count, 5 direction, 6 country, 7 city
            entry = [float(record[0]) * 1000, int(record[1]),
                     int(record[2]), record[3], record[4], record[5],
                     record[6], record[7]]
            data.append(entry)
    for city in cities:
        city_name = city['geoip_city']
        if city_name is None or city_name == '':
            continue
        for ch in remove:
            city_name = city_name.replace(ch, '')
        if country == "none":
            filtered = [r for r in data
                        if r[3] == isp and start < r[2] < end
                        and r[7] == city_name and r[5] == direction]
        else:
            filtered = [r for r in data
                        if r[3] == isp and start < r[2] < end
                        and r[7] == city_name and r[5] == direction
                        and r[6] == country]
        if not filtered:
            continue
        try:
            d_count = max(r[4] for r in filtered)
        except Exception:
            #narrowed from bare except; skip cities with bad device counts
            continue
        #total number of measurements:
        n_measurements = sum(r[1] for r in filtered)
        #weight each daily average by its measurement share and sum to
        #get the overall average:
        average = sum(r[0] * r[1] / n_measurements for r in filtered)
        ret.append(dict(name=city_name, type='bar', data=average,
                        count=d_count))
    #order series alphabetically:
    ret = sorted(ret, key=lambda s: s['name'].lstrip())
    return ret
def get_countries_vis_data(request):
    """HTTP endpoint: country-level visualisation data as JSON.

    Expects GET params startdate, enddate (calendar format) and serverip.
    """
    raw_start = request.GET.get('startdate')
    raw_end = request.GET.get('enddate')
    #calendar strings -> datetimes -> integer JSON timestamps:
    start_dt = datetime_helper.format_date_from_calendar(raw_start)
    end_dt = datetime_helper.format_date_from_calendar(raw_end)
    start = int(datetime_helper.datetime_to_JSON(start_dt))
    end = int(datetime_helper.datetime_to_JSON(end_dt))
    server = request.GET.get('serverip')
    payload = data_helper.parse_countries_vis_data(start, end, server)
    return HttpResponse(json.dumps(payload))
def get_countries_vis_data(request):
    """HTTP endpoint: country-level visualisation data as JSON.

    Expects GET params startdate, enddate (calendar format) and serverip.
    """
    raw_start = request.GET.get('startdate')
    raw_end = request.GET.get('enddate')
    #calendar strings -> datetimes -> integer JSON timestamps:
    start_dt = datetime_helper.format_date_from_calendar(raw_start)
    end_dt = datetime_helper.format_date_from_calendar(raw_end)
    start = int(datetime_helper.datetime_to_JSON(start_dt))
    end = int(datetime_helper.datetime_to_JSON(end_dt))
    server = request.GET.get('serverip')
    payload = data_helper.parse_countries_vis_data(start, end, server)
    return HttpResponse(json.dumps(payload))
def parse_bitrate_country_average(start_date, end_date, country, direction):
    """Per-ISP weighted bitrate averages for one country, as bar series.

    Each daily average from the summary file is weighted by its
    measurement count, so the returned value per ISP is the mean over
    all underlying measurements. Returns a list of bar-series dicts
    sorted by ISP name.
    """
    data = []
    ret = []
    start = int(datetime_helper.datetime_to_JSON(start_date))
    end = int(datetime_helper.datetime_to_JSON(end_date))
    isps = database_helper.get_all_isps()
    filename = settings.PROJECT_ROOT + '/summary/measurements/bitrate_averages/country'
    #garbage characters to be removed:
    remove = ')("\n'
    #'with' closes the file; the original's extra f.close() was redundant.
    with open(filename, 'r') as f:
        for record in f:
            for ch in remove:
                record = record.replace(ch, '')
            record = record.split('|')
            #fields: 0 average (scaled to bit/s), 1 measurement count,
            #2 day, 3 country, 4 device count, 5 direction, 6 isp
            entry = [float(record[0]) * 1000, int(record[1]),
                     int(record[2]), record[3], record[4], record[5],
                     record[6]]
            data.append(entry)
    for isp in isps:
        provider = isp['geoip_isp']
        if provider is None or provider == '':
            continue
        for ch in remove:
            provider = provider.replace(ch, '')
        filtered = [r for r in data
                    if r[3] == country and start < r[2] < end
                    and r[6] == provider and r[5] == direction]
        if not filtered:
            continue
        try:
            d_count = max(r[4] for r in filtered)
        except Exception:
            #narrowed from bare except; skip ISPs with bad device counts
            continue
        #total number of measurements:
        n_measurements = sum(r[1] for r in filtered)
        #weight each daily average by its measurement share and sum to
        #get the overall average:
        average = sum(r[0] * r[1] / n_measurements for r in filtered)
        ret.append(dict(name=provider, type='bar', data=average,
                        count=d_count))
    #order series alphabetically:
    ret = sorted(ret, key=lambda s: s['name'].lstrip())
    return ret
def parse_bitrate_city_average(start_date, end_date, city, direction):
    """Per-ISP weighted bitrate averages for one city, as bar series.

    Each daily average from the summary file is weighted by its
    measurement count, so the returned value per ISP is the mean over
    all underlying measurements. Returns a list of bar-series dicts
    sorted by ISP name.
    """
    data = []
    ret = []
    start = int(datetime_helper.datetime_to_JSON(start_date))
    end = int(datetime_helper.datetime_to_JSON(end_date))
    #NOTE(review): the sibling country-average parser uses
    #database_helper.get_all_isps(); consider unifying if equivalent.
    isps = Devicedetails.objects.values('geoip_isp').distinct()
    filename = settings.PROJECT_ROOT + '/summary/measurements/bitrate_averages/city'
    #garbage characters to be removed:
    remove = ')("\n'
    #'with' closes the file; the original's extra f.close() was redundant.
    with open(filename, 'r') as f:
        for record in f:
            for ch in remove:
                record = record.replace(ch, '')
            record = record.split('|')
            #fields: 0 average (scaled to bit/s), 1 measurement count,
            #2 day, 3 city, 4 device count, 5 direction, 6 isp
            entry = [float(record[0]) * 1000, int(record[1]),
                     int(record[2]), record[3], record[4], record[5],
                     record[6]]
            data.append(entry)
    for isp in isps:
        provider = isp['geoip_isp']
        if provider is None or provider == '':
            continue
        for ch in remove:
            provider = provider.replace(ch, '')
        filtered = [r for r in data
                    if r[3] == city and start < r[2] < end
                    and r[6] == provider and r[5] == direction]
        if not filtered:
            continue
        try:
            d_count = max(r[4] for r in filtered)
        except Exception:
            #narrowed from bare except; skip ISPs with bad device counts
            continue
        #total number of measurements:
        n_measurements = sum(r[1] for r in filtered)
        #weight each daily average by its measurement share and sum to
        #get the overall average:
        average = sum(r[0] * r[1] / n_measurements for r in filtered)
        ret.append(dict(name=provider, type='bar', data=average,
                        count=d_count))
    #order series alphabetically:
    ret = sorted(ret, key=lambda s: s['name'].lstrip())
    return ret
def linegraph_bucket(data, bucket_size, title):
    """Downsample a measurement series into fixed-width time buckets.

    Returns a 'spline' series dict whose points are the integer mean of
    measurements whose unix time falls inside each bucket. Empty or
    malformed input returns the series shell without a 'data' key,
    matching the original error path.
    """
    result = {'name': title, 'type': "spline"}
    output = []
    #empty input: original hit IndexError on data[0] and bailed out via
    #its bare except; handle it explicitly instead.
    if not data:
        return result
    try:
        start_time = datetime_helper.datetime_format_to_unixtime(
            data[0].eventstamp)
        end_time = start_time + bucket_size
        bucket = []
        for measure in data:
            time = datetime_helper.datetime_format_to_unixtime(
                measure.eventstamp)
            if time < end_time:
                bucket.append(int(measure.average))
            else:
                #flush the finished bucket as one averaged point
                #(unused mid_time from the original removed):
                if bucket:
                    mean = sum(bucket) / len(bucket)
                    output.append([
                        datetime_helper.datetime_to_JSON(measure.eventstamp),
                        int(mean)])
                bucket = []
                #skip over empty buckets until this measurement fits:
                while time > end_time:
                    start_time = end_time + 1
                    end_time = start_time + bucket_size
                bucket.append(int(measure.average))
        #flush the final (possibly partial) bucket:
        if bucket:
            mean = sum(bucket) / len(bucket)
            output.append([
                datetime_helper.datetime_to_JSON(measure.eventstamp),
                int(mean)])
        result['data'] = output
    except Exception:
        #narrowed from bare except: best-effort, return the shell
        return result
    return result
def linegraph_bucket(data, bucket_size, title):
    """Downsample a measurement series into fixed-width time buckets.

    Returns a 'spline' series dict whose points are the integer mean of
    measurements whose unix time falls inside each bucket. Empty or
    malformed input returns the series shell without a 'data' key,
    matching the original error path.
    """
    result = {'name': title, 'type': "spline"}
    output = []
    #empty input: original hit IndexError on data[0] and bailed out via
    #its bare except; handle it explicitly instead.
    if not data:
        return result
    try:
        start_time = datetime_helper.datetime_format_to_unixtime(
            data[0].eventstamp)
        end_time = start_time + bucket_size
        bucket = []
        for measure in data:
            time = datetime_helper.datetime_format_to_unixtime(
                measure.eventstamp)
            if time < end_time:
                bucket.append(int(measure.average))
            else:
                #flush the finished bucket as one averaged point
                #(unused mid_time from the original removed):
                if bucket:
                    mean = sum(bucket) / len(bucket)
                    output.append([
                        datetime_helper.datetime_to_JSON(measure.eventstamp),
                        int(mean)])
                bucket = []
                #skip over empty buckets until this measurement fits:
                while time > end_time:
                    start_time = end_time + 1
                    end_time = start_time + bucket_size
                bucket.append(int(measure.average))
        #flush the final (possibly partial) bucket:
        if bucket:
            mean = sum(bucket) / len(bucket)
            output.append([
                datetime_helper.datetime_to_JSON(measure.eventstamp),
                int(mean)])
        result['data'] = output
    except Exception:
        #narrowed from bare except: best-effort, return the shell
        return result
    return result
def get_bitrate_measurements(device, days, direction, multi):
    """Bitrate measurements for one device over the last `days` days.

    multi == "1" keeps only multi-threaded NETPERF_3 runs; anything else
    excludes them and reports threading as "single". Returns [] when no
    rows match the initial query, otherwise a dict carrying the
    (eventstamp, average) point list and query metadata.
    """
    threading = "multi"
    data = []
    end = datetime.now()
    start = datetime_helper.get_daterange_start(int(days))
    rows = MBitrate.objects.filter(deviceid=device, eventstamp__gte=start,
                                   eventstamp__lte=end, direction=direction)
    if not rows:
        return []
    if multi == "1":
        rows = rows.filter(toolid='NETPERF_3')
    else:
        rows = rows.exclude(toolid='NETPERF_3')
        threading = "single"
    for r in rows:
        try:
            eventstamp = datetime_helper.datetime_to_JSON(r.eventstamp)
            data.append([eventstamp, float(r.average)])
        except (TypeError, ValueError):
            #skip rows with unconvertible values rather than failing
            #(narrowed from the original bare except):
            continue
    return dict(device=device, days=days, data=data, direction=direction,
                threading=threading)
def linegraph_normal(data, title, factor, roundit, priority, id):
    """Build a line series of scaled, strictly positive averages.

    `roundit` is accepted for interface compatibility but unused here.
    Each point is (JSON timestamp, average * factor).
    """
    points = [(datetime_helper.datetime_to_JSON(m.eventstamp),
               float(m.average) * factor)
              for m in data if m.average > 0]
    return dict(name=title, type='line', data=points,
                priority=priority, id=id)
def linegraph_normal(data, title, factor, roundit, priority, id):
    """Build a line series of scaled, strictly positive averages.

    `roundit` is accepted for interface compatibility but unused here.
    Each point is (JSON timestamp, average * factor).
    """
    points = [(datetime_helper.datetime_to_JSON(m.eventstamp),
               float(m.average) * factor)
              for m in data if m.average > 0]
    return dict(name=title, type='line', data=points,
                priority=priority, id=id)
def get_lmrtt_measurements(device, days):
    """Last-mile RTT measurements for one device over the last `days` days.

    Returns [] when no rows match, otherwise a dict with the
    (eventstamp, average) point list. The unused `threading` local from
    the original was removed.
    """
    data = []
    end = datetime.now()
    start = datetime_helper.get_daterange_start(int(days))
    rows = MRtt.objects.filter(deviceid=device, eventstamp__gte=start,
                               eventstamp__lte=end)
    if not rows:
        return []
    for r in rows:
        try:
            eventstamp = datetime_helper.datetime_to_JSON(r.eventstamp)
            data.append([eventstamp, float(r.average)])
        except (TypeError, ValueError):
            #skip rows with unconvertible values rather than failing
            #(narrowed from the original bare except):
            continue
    return dict(device=device, days=days, data=data)
def get_lmrtt_measurements(device, days):
    """Last-mile RTT measurements for one device over the last `days` days.

    Returns [] when no rows match, otherwise a dict with the
    (eventstamp, average) point list. The unused `threading` local from
    the original was removed.
    """
    data = []
    end = datetime.now()
    start = datetime_helper.get_daterange_start(int(days))
    rows = MRtt.objects.filter(deviceid=device, eventstamp__gte=start,
                               eventstamp__lte=end)
    if not rows:
        return []
    for r in rows:
        try:
            eventstamp = datetime_helper.datetime_to_JSON(r.eventstamp)
            data.append([eventstamp, float(r.average)])
        except (TypeError, ValueError):
            #skip rows with unconvertible values rather than failing
            #(narrowed from the original bare except):
            continue
    return dict(device=device, days=days, data=data)
def linegraph_normal_passive(data, title):
    """Column series of positive bytes_transferred values over time."""
    points = []
    for measure in data:
        #ignore zero/negative transfer records:
        if measure.bytes_transferred > 0:
            points.append([
                datetime_helper.datetime_to_JSON(measure.eventstamp),
                int(measure.bytes_transferred)])
    return {'name': title, 'type': "column", 'data': points}
def linegraph_normal(data, title, factor, roundit):
    """Line series of scaled positive averages.

    Timestamps are produced by datetime_to_JSON with `roundit`
    forwarded (presumably a rounding flag -- see datetime_helper).
    """
    points = []
    for measure in data:
        #ignore zero/negative averages:
        if measure.average > 0:
            stamp = datetime_helper.datetime_to_JSON(measure.eventstamp,
                                                     roundit)
            points.append([stamp, int(measure.average) * factor])
    return {'name': title, 'type': "line", 'data': points}
def linegraph_normal_passive(data, title):
    """Column series of positive bytes_transferred values over time."""
    points = []
    for measure in data:
        #ignore zero/negative transfer records:
        if measure.bytes_transferred > 0:
            points.append([
                datetime_helper.datetime_to_JSON(measure.eventstamp),
                int(measure.bytes_transferred)])
    return {'name': title, 'type': "column", 'data': points}
def linegraph_compare(data, factor):
    """Group measurement records by device into per-device line series.

    Records whose 'name' is None are skipped. Each output series is
    named "<device name> Device" with (eventstamp, avg * factor) points.
    """
    series_by_device = {}
    for record in data:
        if record['name'] is None:
            continue
        key = record['deviceid']
        if key not in series_by_device:
            #value is [display name, list of points]
            series_by_device[key] = [record['name'], []]
        eventstamp = datetime_helper.datetime_to_JSON(record['eventstamp'])
        series_by_device[key][1].append(
            [eventstamp, float(record['avg']) * factor])
    output = []
    for device in series_by_device:
        try:
            output.append(dict(name=series_by_device[device][0] + ' Device',
                               type='line',
                               data=series_by_device[device][1]))
        except TypeError:
            #non-string device name cannot be concatenated; skip it
            #(narrowed from the original bare except):
            continue
    return output
def get_bitrate_measurements(device, days, direction, multi):
    """Bitrate measurements for one device over the last `days` days.

    multi == "1" keeps only multi-threaded NETPERF_3 runs; anything else
    excludes them and reports threading as "single". Returns [] when no
    rows match the initial query, otherwise a dict carrying the
    (eventstamp, average) point list and query metadata.
    """
    threading = "multi"
    data = []
    end = datetime.now()
    start = datetime_helper.get_daterange_start(int(days))
    rows = MBitrate.objects.filter(deviceid=device, eventstamp__gte=start,
                                   eventstamp__lte=end, direction=direction)
    if not rows:
        return []
    if multi == "1":
        rows = rows.filter(toolid='NETPERF_3')
    else:
        rows = rows.exclude(toolid='NETPERF_3')
        threading = "single"
    for r in rows:
        try:
            eventstamp = datetime_helper.datetime_to_JSON(r.eventstamp)
            data.append([eventstamp, float(r.average)])
        except (TypeError, ValueError):
            #skip rows with unconvertible values rather than failing
            #(narrowed from the original bare except):
            continue
    return dict(device=device, days=days, data=data, direction=direction,
                threading=threading)
def throughputGraph(request):
    """Render [timestamp, average] bitrate pairs as a JSON-style array.

    graphno == 1 selects rows whose srcip is the measurement server;
    any other value selects rows whose dstip is. Averages above
    chosen_limit are discarded as implausible.
    """
    g_filter = Graph_Filter(request)
    #debug output kept from the original, via the print function so the
    #module also parses under Python 3 (same output either way):
    print("me")
    print(g_filter)
    device = request.GET.get('deviceid')  #currently unused
    graphno = int(request.GET.get('graphno'))
    filter_by = request.GET.get('filter_by')  #currently unused
    #discard implausibly large averages:
    chosen_limit = 100000000
    rows = MBitrate.objects.filter(
        average__lte=chosen_limit).order_by('eventstamp')
    if graphno == 1:
        rows = rows.filter(srcip='143.215.131.173')
    else:
        rows = rows.filter(dstip='143.215.131.173')
    #join instead of quadratic '+=' string building:
    points = ['[' + datetime_helper.datetime_to_JSON(entry.eventstamp) +
              ',' + str(entry.average) + ']'
              for entry in rows]
    return HttpResponse('[' + ','.join(points) + ']')
def linegraph_compare(data, factor):
    """Group measurement records by device into per-device line series.

    Records whose 'name' is None are skipped. Each output series is
    named "<device name> Device" with (eventstamp, avg * factor) points.
    """
    series_by_device = {}
    for record in data:
        if record['name'] is None:
            continue
        key = record['deviceid']
        if key not in series_by_device:
            #value is [display name, list of points]
            series_by_device[key] = [record['name'], []]
        eventstamp = datetime_helper.datetime_to_JSON(record['eventstamp'])
        series_by_device[key][1].append(
            [eventstamp, float(record['avg']) * factor])
    output = []
    for device in series_by_device:
        try:
            output.append(dict(name=series_by_device[device][0] + ' Device',
                               type='line',
                               data=series_by_device[device][1]))
        except TypeError:
            #non-string device name cannot be concatenated; skip it
            #(narrowed from the original bare except):
            continue
    return output
def parse_rtt_isp_average(start_date, end_date, isp, country):
    """Per-city weighted RTT averages for one ISP, as bar series.

    Each daily average from the summary file is weighted by its
    measurement count, so the returned value per city is the mean over
    all underlying measurements. `country == "none"` disables country
    filtering. Returns a list of bar-series dicts sorted by city name.
    """
    data = []
    ret = []
    start = int(datetime_helper.datetime_to_JSON(start_date))
    end = int(datetime_helper.datetime_to_JSON(end_date))
    cities = database_helper.get_all_cities()
    filename = settings.PROJECT_ROOT + '/summary/measurements/rtt_averages/isp'
    #garbage characters to be removed:
    remove = ')("\n'
    #'with' closes the file; the original's extra f.close() was redundant.
    with open(filename, 'r') as f:
        for record in f:
            for ch in remove:
                record = record.replace(ch, '')
            record = record.split('|')
            #fields: 0 average, 1 measurement count, 2 day, 3 isp,
            #4 device count, 5 country, 6 city
            entry = [float(record[0]), int(record[1]), int(record[2]),
                     record[3], record[4], record[5], record[6]]
            data.append(entry)
    for city in cities:
        city_name = city['geoip_city']
        if city_name is None or city_name == '':
            continue
        for ch in remove:
            city_name = city_name.replace(ch, '')
        if country == "none":
            filtered = [r for r in data
                        if r[3] == isp and start < r[2] < end
                        and r[6] == city_name]
        else:
            filtered = [r for r in data
                        if r[3] == isp and start < r[2] < end
                        and r[6] == city_name and r[5] == country]
        if not filtered:
            continue
        try:
            d_count = max(r[4] for r in filtered)
        except Exception:
            #narrowed from bare except; skip cities with bad device counts
            continue
        #total number of measurements:
        n_measurements = sum(r[1] for r in filtered)
        #weight each daily average by its measurement share and sum to
        #get the overall average:
        average = sum(r[0] * r[1] / n_measurements for r in filtered)
        ret.append(dict(name=city_name, type='bar', data=average,
                        count=d_count))
    #sort series alphabetically:
    ret = sorted(ret, key=lambda s: s['name'].lstrip())
    return ret