def write_underload_measurements():
    """Append new under-load RTT measurements (down and up) to one
    pipe-delimited file per device under summary/measurements/underload/.

    Each output line is "<eventstamp>|<average>|<dw|up>".  If the device
    file already exists, only rows newer than its last recorded
    eventstamp are fetched from the database.
    """
    devices = Devicedetails.objects.all()
    cursor = get_dict_cursor()
    for d in devices:
        device2 = d.deviceid.replace(':', '')
        filename = settings.PROJECT_ROOT + '/summary/measurements/underload/' + device2
        last = ''
        params = [d.deviceid]
        try:
            with open(filename, 'r') as fh:
                # Remember only the final line; it carries the newest eventstamp.
                for line in fh:
                    last = line
        except (IOError, OSError):
            # First run for this device: no file yet, fetch everything.
            pass
        SQL1 = ("SELECT m_ulrttdw.eventstamp, average, direction "
                "FROM m_ulrttdw JOIN devicedetails on devicedetails.deviceid=m_ulrttdw.deviceid "
                "WHERE m_ulrttdw.deviceid=%s")
        SQL2 = ("SELECT m_ulrttup.eventstamp, average, direction "
                "FROM m_ulrttup JOIN devicedetails on devicedetails.deviceid=m_ulrttup.deviceid "
                "WHERE m_ulrttup.deviceid=%s")
        if last != '':
            # The stored eventstamp is in milliseconds since the epoch.
            latest = datetime.fromtimestamp(int(last.split('|')[0]) / 1000)
            params.append(latest)
            SQL1 += " AND m_ulrttdw.eventstamp>%s"
            SQL2 += " AND m_ulrttup.eventstamp>%s"
        with open(filename, 'a') as f:
            for SQL, tag in ((SQL1, 'dw'), (SQL2, 'up')):
                cursor.execute(SQL, params)
                # Keep rows in chronological order.  NOTE(review): the old
                # code sorted only the downstream rows; sorting both
                # directions looks like the intended behavior.
                records = sorted(
                    cursor.fetchall(),
                    key=lambda r: datetime_helper.datetime_to_JSON(r['eventstamp']))
                for r in records:
                    eventstamp = datetime_helper.datetime_to_JSON(r['eventstamp'])
                    f.write(str(eventstamp) + '|' + str(r['average']) + '|' + tag + '\n')
    cursor.close()
    return
def write_capacity_measurements():
    """Append new capacity measurements to one pipe-delimited file per
    device under summary/measurements/capacity/.

    Each output line is "<eventstamp>|<average>|<direction>".  If the
    device file already exists, only rows newer than its last recorded
    eventstamp are fetched; rows without a direction are skipped.
    """
    devices = Devicedetails.objects.all()
    cursor = get_dict_cursor()
    for d in devices:
        device2 = d.deviceid.replace(':', '')
        filename = settings.PROJECT_ROOT + '/summary/measurements/capacity/' + device2
        last = ''
        params = [d.deviceid]
        try:
            with open(filename, 'r') as fh:
                # Remember only the final line; it carries the newest eventstamp.
                for line in fh:
                    last = line
        except (IOError, OSError):
            # First run for this device: no file yet, fetch everything.
            pass
        SQL = ("SELECT m_capacity.eventstamp, average, direction "
               "FROM m_capacity JOIN devicedetails on devicedetails.deviceid=m_capacity.deviceid "
               "WHERE m_capacity.deviceid=%s")
        if last != '':
            # The stored eventstamp is in milliseconds since the epoch.
            latest = datetime.fromtimestamp(int(last.split('|')[0]) / 1000)
            params.append(latest)
            SQL += " AND m_capacity.eventstamp>%s"
        cursor.execute(SQL, params)
        # Keep rows in chronological order before appending.
        records = sorted(
            cursor.fetchall(),
            key=lambda r: datetime_helper.datetime_to_JSON(r['eventstamp']))
        with open(filename, 'a') as f:
            for r in records:
                direction = r['direction']
                if not direction:
                    # Skip rows whose direction was never recorded ('' or NULL).
                    continue
                eventstamp = datetime_helper.datetime_to_JSON(r['eventstamp'])
                f.write(str(eventstamp) + '|' + str(r['average']) + '|' + direction + '\n')
    cursor.close()
    return
def dump_all_latencies():
    """Write per-country daily RTT averages toward each resolver server.

    One file per server IP under summary/measurements/server_averages/,
    each line "<latency>|<nmeasurements>|<day>|<country>|<ndevices>".
    Files are rewritten from scratch on every call.
    """
    cursor = get_dict_cursor()
    servers = IpResolver.objects.all()
    # The query is identical for every server; build it once.
    SQL = ("SELECT "
           "country_code AS country, "
           "m_rtt.eventstamp::date AS day, "
           "count(distinct m_rtt.deviceid) AS ndevices, "
           "count(*) AS nmeasurements, "
           "avg(m_rtt.average) AS latency "
           "FROM m_rtt "
           "JOIN devicedetails AS d ON d.deviceid = m_rtt.deviceid "
           "WHERE m_rtt.dstip = %s AND m_rtt.average>0 AND m_rtt.average<3000 AND country_code!='' "
           "GROUP BY day, d.country_code;")
    for s in servers:
        filename = settings.PROJECT_ROOT + '/summary/measurements/server_averages/' + str(s.ip)
        cursor.execute(SQL, [s.ip])
        records = cursor.fetchall()
        # 'with' guarantees the file is closed even if a row fails to format.
        with open(filename, 'w') as f:
            for r in records:
                day = datetime_helper.datetime_to_JSON(r['day'])
                f.write(str(r['latency']) + '|' + str(r['nmeasurements']) + '|' +
                        str(day) + '|' + r['country'].encode('utf-8') + '|' +
                        str(r['ndevices']) + '\n')
    cursor.close()
    return
def write_lmrtt_isp_averages():
    """Write daily last-mile RTT averages grouped by ISP/country/city.

    Output file summary/measurements/lmrtt_averages/isp is rewritten on
    every call; each line is
    "<latency>|<nmeasurements>|<day>|<isp>|<ndevices>|<country>|<city>".
    """
    cursor = get_dict_cursor()
    filename = settings.PROJECT_ROOT + '/summary/measurements/lmrtt_averages/isp'
    SQL = ("SELECT "
           "geoip_country AS country, "
           "geoip_city AS city, "
           "geoip_isp AS isp, "
           "m_lmrtt.eventstamp::date AS day, "
           "count(distinct m_lmrtt.deviceid) AS ndevices, "
           "count(*) AS nmeasurements, "
           "avg(m_lmrtt.average) AS latency "
           "FROM m_lmrtt "
           "JOIN devicedetails AS d ON d.deviceid = m_lmrtt.deviceid "
           "WHERE m_lmrtt.average>0 AND m_lmrtt.average<3000 AND geoip_isp!='' "
           "AND geoip_country!='' AND geoip_city!='' "
           "GROUP BY day, d.geoip_isp, d.geoip_country, d.geoip_city;")
    cursor.execute(SQL)
    records = cursor.fetchall()
    # 'with' guarantees the file is closed even if a row fails to format.
    with open(filename, 'w') as f:
        for r in records:
            day = datetime_helper.datetime_to_JSON(r['day'])
            f.write(str(r['latency']) + '|' + str(r['nmeasurements']) + '|' +
                    str(day) + '|' + r['isp'].encode('utf-8') + '|' +
                    str(r['ndevices']) + '|' + r['country'].encode('utf-8') + '|' +
                    r['city'].encode('utf-8') + '\n')
    cursor.close()
    return
def write_rtt_country_averages():
    """Write daily RTT averages toward 8.8.8.8 grouped by country/ISP.

    Output file summary/measurements/rtt_averages/country is rewritten on
    every call; each line is
    "<latency>|<nmeasurements>|<day>|<country>|<ndevices>|<isp>".
    """
    dstip = '8.8.8.8'  # Google public DNS is the common measurement target.
    cursor = get_dict_cursor()
    filename = settings.PROJECT_ROOT + '/summary/measurements/rtt_averages/country'
    SQL = ("SELECT "
           "geoip_isp AS isp, "
           "geoip_country AS country, "
           "m_rtt.eventstamp::date AS day, "
           "count(distinct m_rtt.deviceid) AS ndevices, "
           "count(*) AS nmeasurements, "
           "avg(m_rtt.average) AS latency "
           "FROM m_rtt "
           "JOIN devicedetails AS d ON d.deviceid = m_rtt.deviceid "
           "WHERE m_rtt.dstip = %s AND m_rtt.average>0 AND m_rtt.average<3000 "
           "AND geoip_country!='' AND geoip_isp != '' "
           "GROUP BY day, d.geoip_country, d.geoip_isp;")
    cursor.execute(SQL, [dstip])
    records = cursor.fetchall()
    # 'with' guarantees the file is closed even if a row fails to format.
    with open(filename, 'w') as f:
        for r in records:
            day = datetime_helper.datetime_to_JSON(r['day'])
            f.write(str(r['latency']) + '|' + str(r['nmeasurements']) + '|' +
                    str(day) + '|' + r['country'] + '|' +
                    str(r['ndevices']) + '|' + r['isp'] + '\n')
    cursor.close()
    return
def dump_all_latencies():
    """Write per-country daily RTT averages toward each resolver server.

    One file per server IP under summary/measurements/server_averages/,
    each line "<latency>|<nmeasurements>|<day>|<country>|<ndevices>".
    Files are rewritten from scratch on every call.
    """
    cursor = get_dict_cursor()
    servers = IpResolver.objects.all()
    # The query is identical for every server; build it once.
    SQL = ("SELECT "
           "country_code AS country, "
           "m_rtt.eventstamp::date AS day, "
           "count(distinct m_rtt.deviceid) AS ndevices, "
           "count(*) AS nmeasurements, "
           "avg(m_rtt.average) AS latency "
           "FROM m_rtt "
           "JOIN devicedetails AS d ON d.deviceid = m_rtt.deviceid "
           "WHERE m_rtt.dstip = %s AND m_rtt.average>0 AND m_rtt.average<3000 AND country_code!='' "
           "GROUP BY day, d.country_code;")
    for s in servers:
        filename = settings.PROJECT_ROOT + '/summary/measurements/server_averages/' + str(s.ip)
        cursor.execute(SQL, [s.ip])
        records = cursor.fetchall()
        # 'with' guarantees the file is closed even if a row fails to format.
        with open(filename, 'w') as f:
            for r in records:
                day = datetime_helper.datetime_to_JSON(r['day'])
                f.write(str(r['latency']) + '|' + str(r['nmeasurements']) + '|' +
                        str(day) + '|' + r['country'].encode('utf-8') + '|' +
                        str(r['ndevices']) + '\n')
    cursor.close()
    return
def write_lmrtt_isp_averages():
    """Write daily last-mile RTT averages grouped by ISP/country/city.

    Output file summary/measurements/lmrtt_averages/isp is rewritten on
    every call; each line is
    "<latency>|<nmeasurements>|<day>|<isp>|<ndevices>|<country>|<city>".
    """
    cursor = get_dict_cursor()
    filename = settings.PROJECT_ROOT + '/summary/measurements/lmrtt_averages/isp'
    SQL = ("SELECT "
           "geoip_country AS country, "
           "geoip_city AS city, "
           "geoip_isp AS isp, "
           "m_lmrtt.eventstamp::date AS day, "
           "count(distinct m_lmrtt.deviceid) AS ndevices, "
           "count(*) AS nmeasurements, "
           "avg(m_lmrtt.average) AS latency "
           "FROM m_lmrtt "
           "JOIN devicedetails AS d ON d.deviceid = m_lmrtt.deviceid "
           "WHERE m_lmrtt.average>0 AND m_lmrtt.average<3000 AND geoip_isp!='' "
           "AND geoip_country!='' AND geoip_city!='' "
           "GROUP BY day, d.geoip_isp, d.geoip_country, d.geoip_city;")
    cursor.execute(SQL)
    records = cursor.fetchall()
    # 'with' guarantees the file is closed even if a row fails to format.
    with open(filename, 'w') as f:
        for r in records:
            day = datetime_helper.datetime_to_JSON(r['day'])
            f.write(str(r['latency']) + '|' + str(r['nmeasurements']) + '|' +
                    str(day) + '|' + r['isp'].encode('utf-8') + '|' +
                    str(r['ndevices']) + '|' + r['country'].encode('utf-8') + '|' +
                    r['city'].encode('utf-8') + '\n')
    cursor.close()
    return
def write_bitrate_city_averages():
    """Write daily NETPERF_3 bitrate averages grouped by city/ISP/direction.

    Output file summary/measurements/bitrate_averages/city is rewritten on
    every call; each line is
    "<bitrate>|<nmeasurements>|<day>|<city>|<ndevices>|<direction>|<isp>".
    Rows that cannot be formatted (e.g. a NULL city, isp or direction)
    are skipped.
    """
    cursor = get_dict_cursor()
    filename = settings.PROJECT_ROOT + '/summary/measurements/bitrate_averages/city'
    SQL = ("SELECT "
           "geoip_isp AS isp, "
           "direction AS dir,"
           "geoip_city AS city, "
           "m_bitrate.eventstamp::date AS day, "
           "count(distinct m_bitrate.deviceid) AS ndevices, "
           "count(*) AS nmeasurements, "
           "avg(m_bitrate.average) AS bitrate "
           "FROM m_bitrate "
           "JOIN devicedetails AS d ON d.deviceid = m_bitrate.deviceid "
           "WHERE m_bitrate.average>0 AND geoip_city!='' AND toolid='NETPERF_3' AND geoip_isp!='' "
           "GROUP BY day, d.geoip_city, d.geoip_isp, dir;")
    cursor.execute(SQL)
    records = cursor.fetchall()
    # 'with' guarantees the file is closed even if a row fails to format.
    with open(filename, 'w') as f:
        for r in records:
            try:
                day = datetime_helper.datetime_to_JSON(r['day'])
                line = (str(r['bitrate']) + '|' + str(r['nmeasurements']) + '|' +
                        str(day) + '|' + r['city'].encode('utf-8') + '|' +
                        str(r['ndevices']) + '|' + r['dir'] + '|' +
                        r['isp'].encode('utf-8') + '\n')
            except (AttributeError, TypeError):
                # A NULL field cannot be encoded or concatenated; skip the row.
                continue
            f.write(line)
    cursor.close()
    return
def write_rtt_country_averages():
    """Write daily RTT averages toward 8.8.8.8 grouped by country/ISP.

    Output file summary/measurements/rtt_averages/country is rewritten on
    every call; each line is
    "<latency>|<nmeasurements>|<day>|<country>|<ndevices>|<isp>".
    """
    dstip = '8.8.8.8'  # Google public DNS is the common measurement target.
    cursor = get_dict_cursor()
    filename = settings.PROJECT_ROOT + '/summary/measurements/rtt_averages/country'
    SQL = ("SELECT "
           "geoip_isp AS isp, "
           "geoip_country AS country, "
           "m_rtt.eventstamp::date AS day, "
           "count(distinct m_rtt.deviceid) AS ndevices, "
           "count(*) AS nmeasurements, "
           "avg(m_rtt.average) AS latency "
           "FROM m_rtt "
           "JOIN devicedetails AS d ON d.deviceid = m_rtt.deviceid "
           "WHERE m_rtt.dstip = %s AND m_rtt.average>0 AND m_rtt.average<3000 "
           "AND geoip_country!='' AND geoip_isp != '' "
           "GROUP BY day, d.geoip_country, d.geoip_isp;")
    cursor.execute(SQL, [dstip])
    records = cursor.fetchall()
    # 'with' guarantees the file is closed even if a row fails to format.
    with open(filename, 'w') as f:
        for r in records:
            day = datetime_helper.datetime_to_JSON(r['day'])
            f.write(str(r['latency']) + '|' + str(r['nmeasurements']) + '|' +
                    str(day) + '|' + r['country'] + '|' +
                    str(r['ndevices']) + '|' + r['isp'] + '\n')
    cursor.close()
    return
def write_devices():
    """Dump one summary line per known device to summary/device_data/devices.

    Each line is "<deviceid>|<eventstamp>|<city>|<country>|<isp>|<active>",
    where active is 1 when the device reported at least one bitrate
    measurement within the last 7 days.  The file is rewritten on every call.
    """
    devices = Devicedetails.objects.all()
    filename = settings.PROJECT_ROOT + '/summary/device_data/devices'
    cursor = get_dict_cursor()
    SQL = ("SELECT deviceid, eventstamp, geoip_city, geoip_country, geoip_isp "
           "FROM devicedetails WHERE deviceid=%s")
    # The "recently active" cutoff is the same for every device.
    active_thresh = datetime_helper.get_daterange_start(7)
    with open(filename, 'w') as out:
        for d in devices:
            recent_measurement_count = MBitrate.objects.filter(
                deviceid=d.deviceid, eventstamp__gte=active_thresh).count()
            active = 1 if recent_measurement_count > 0 else 0
            cursor.execute(SQL, [d.deviceid])
            rec = cursor.fetchone()
            if rec is None:
                # Device row disappeared between the queryset and this query.
                continue
            device_id = d.deviceid if d.deviceid is not None else 'none'
            eventstamp = datetime_helper.datetime_to_JSON(rec['eventstamp'])
            city = rec['geoip_city'] if rec['geoip_city'] is not None else 'none'
            country = rec['geoip_country'] if rec['geoip_country'] is not None else 'none'
            isp = rec['geoip_isp'] if rec['geoip_isp'] is not None else 'none'
            out.write(device_id + '|' + str(eventstamp) + '|' +
                      city.encode('utf-8') + '|' + country.encode('utf-8') + '|' +
                      isp.encode('utf-8') + '|' + str(active) + '\n')
    cursor.close()
    return
def write_devices():
    """Dump one summary line per known device to summary/device_data/devices.

    Each line is "<deviceid>|<eventstamp>|<city>|<country>|<isp>|<active>",
    where active is 1 when the device reported at least one bitrate
    measurement within the last 7 days.  The file is rewritten on every call.
    """
    devices = Devicedetails.objects.all()
    filename = settings.PROJECT_ROOT + '/summary/device_data/devices'
    cursor = get_dict_cursor()
    SQL = ("SELECT deviceid, eventstamp, geoip_city, geoip_country, geoip_isp "
           "FROM devicedetails WHERE deviceid=%s")
    # The "recently active" cutoff is the same for every device.
    active_thresh = datetime_helper.get_daterange_start(7)
    with open(filename, 'w') as out:
        for d in devices:
            recent_measurement_count = MBitrate.objects.filter(
                deviceid=d.deviceid, eventstamp__gte=active_thresh).count()
            active = 1 if recent_measurement_count > 0 else 0
            cursor.execute(SQL, [d.deviceid])
            rec = cursor.fetchone()
            if rec is None:
                # Device row disappeared between the queryset and this query.
                continue
            device_id = d.deviceid if d.deviceid is not None else 'none'
            eventstamp = datetime_helper.datetime_to_JSON(rec['eventstamp'])
            city = rec['geoip_city'] if rec['geoip_city'] is not None else 'none'
            country = rec['geoip_country'] if rec['geoip_country'] is not None else 'none'
            isp = rec['geoip_isp'] if rec['geoip_isp'] is not None else 'none'
            out.write(device_id + '|' + str(eventstamp) + '|' +
                      city.encode('utf-8') + '|' + country.encode('utf-8') + '|' +
                      isp.encode('utf-8') + '|' + str(active) + '\n')
    cursor.close()
    return
def write_bitrate_city_averages():
    """Write daily NETPERF_3 bitrate averages grouped by city/ISP/direction.

    Output file summary/measurements/bitrate_averages/city is rewritten on
    every call; each line is
    "<bitrate>|<nmeasurements>|<day>|<city>|<ndevices>|<direction>|<isp>".
    Rows that cannot be formatted (e.g. a NULL city, isp or direction)
    are skipped.
    """
    cursor = get_dict_cursor()
    filename = settings.PROJECT_ROOT + '/summary/measurements/bitrate_averages/city'
    SQL = ("SELECT "
           "geoip_isp AS isp, "
           "direction AS dir,"
           "geoip_city AS city, "
           "m_bitrate.eventstamp::date AS day, "
           "count(distinct m_bitrate.deviceid) AS ndevices, "
           "count(*) AS nmeasurements, "
           "avg(m_bitrate.average) AS bitrate "
           "FROM m_bitrate "
           "JOIN devicedetails AS d ON d.deviceid = m_bitrate.deviceid "
           "WHERE m_bitrate.average>0 AND geoip_city!='' AND toolid='NETPERF_3' AND geoip_isp!='' "
           "GROUP BY day, d.geoip_city, d.geoip_isp, dir;")
    cursor.execute(SQL)
    records = cursor.fetchall()
    # 'with' guarantees the file is closed even if a row fails to format.
    with open(filename, 'w') as f:
        for r in records:
            try:
                day = datetime_helper.datetime_to_JSON(r['day'])
                line = (str(r['bitrate']) + '|' + str(r['nmeasurements']) + '|' +
                        str(day) + '|' + r['city'].encode('utf-8') + '|' +
                        str(r['ndevices']) + '|' + r['dir'] + '|' +
                        r['isp'].encode('utf-8') + '\n')
            except (AttributeError, TypeError):
                # A NULL field cannot be encoded or concatenated; skip the row.
                continue
            f.write(line)
    cursor.close()
    return