def do_process(self, influx_client, s_str, e_str):
    """Replay 'video_sent' and 'video_acked' points in timestamp order.

    Queries both measurements over [s_str, e_str], then repeatedly feeds
    whichever pending point has the smaller timestamp into
    self.video_stream, and finally expires stale sessions.
    """
    measurements = ['video_sent', 'video_acked']

    # One generator of result points per measurement.
    gen = {m: query_measurement(influx_client, m, s_str, e_str)[m]
           for m in measurements}
    # head[m]: next unconsumed point of each stream (None once drained).
    head = {m: next(gen[m], None) for m in measurements}

    while True:
        # Streams that still hold data, kept in priority order so that
        # timestamp ties favor 'video_sent' exactly as the strict '<'
        # comparison in the original scan did.
        alive = [m for m in measurements if head[m]]
        if not alive:
            break

        chosen = min(alive, key=lambda m: np.datetime64(head[m]['time']))
        self.video_stream.add_data_point(head[chosen], chosen)
        head[chosen] = next(gen[chosen], None)

    self.process_expired_sessions()
def do_process(self, influx_client, s_str, e_str):
    """Interleave 'video_sent' and 'video_acked' points by timestamp.

    Maintains one pending point per measurement and always hands the
    earlier of the two to self.add_data_point, so downstream state sees
    events in chronological order; expired sessions are purged at the end.
    """
    names = ('video_sent', 'video_acked')

    source = {}
    pending = {}
    for name in names:
        source[name] = query_measurement(influx_client, name,
                                         s_str, e_str)[name]
        pending[name] = next(source[name], None)

    def earliest():
        # Name of the pending point with the smallest timestamp, or None
        # once both streams are exhausted. Ties go to 'video_sent' (the
        # first name scanned), matching the original strict '<' test.
        best = None
        best_ts = None
        for name in names:
            point = pending[name]
            if not point:
                continue
            ts = np.datetime64(point['time'])
            if best_ts is None or ts < best_ts:
                best, best_ts = name, ts
        return best

    while True:
        name = earliest()
        if name is None:
            # no data left in either stream
            break
        self.add_data_point(pending[name], name)
        pending[name] = next(source[name], None)

    self.process_expired_sessions()
def do_count_hours_users(influx_client, s_str, e_str, state):
    """Accumulate chunk and distinct-user tallies from video_acked data.

    Mutates `state` in place: bumps state['num_chunk'] once per point in
    [s_str, e_str] and adds each point's 'user' to the
    state['distinct_users'] set. Returns None.
    """
    results = query_measurement(influx_client, 'video_acked', s_str, e_str)

    # Single pass: the result may be a one-shot generator, so count and
    # collect users in the same loop.
    for point in results['video_acked']:
        state['num_chunk'] += 1
        state['distinct_users'].add(point['user'])
def collect_rebuffer(influx_client, expt_id_cache, postgres_cursor, args):
    """Compute rebuffer statistics grouped by (abr, cc).

    Queries client_buffer over [args.time_start, args.time_end], folds the
    raw points into per-session buffer data, and returns the result of
    calculate_rebuffer_by_abr_cc over that data.
    """
    results = query_measurement(influx_client, 'client_buffer',
                                args.time_start, args.time_end)
    buffer_data = collect_buffer_data(results)

    rebuffer = calculate_rebuffer_by_abr_cc(buffer_data, expt_id_cache,
                                            postgres_cursor)
    return rebuffer
def do_process(self, influx_client, s_str, e_str):
    """Feed every client_buffer point in [s_str, e_str] into this object,
    then drop sessions that have expired."""
    points = query_measurement(influx_client, 'client_buffer',
                               s_str, e_str)['client_buffer']
    for point in points:
        self.add_data_point(point)

    self.process_expired_sessions()
def do_collect_rebuffer(s_str, e_str, buffer_stream):
    """Push client_buffer points from [s_str, e_str] into buffer_stream.

    NOTE(review): reads the module-level `influx_client` rather than
    taking it as a parameter — confirm it is initialized before calling.
    Progress is reported on stderr (flushed so it appears immediately).
    """
    sys.stderr.write('Processing client_buffer data between {} and {}\n'
                     .format(s_str, e_str))
    sys.stderr.flush()

    results = query_measurement(influx_client, 'client_buffer',
                                s_str, e_str)
    for point in results['client_buffer']:
        buffer_stream.add_data_point(point)
def do_collect_ssim(s_str, e_str, d):
    """Accumulate SSIM-index sums per (abr, cc) into the dict `d`.

    `d` maps abr_cc -> [sum, count]; entries are created on first sight
    and updated in place. Points whose SSIM index is missing or exactly 1
    are skipped — NOTE(review): presumably an index of 1 marks a
    degenerate/invalid sample; confirm against get_ssim_index's contract.
    Reads the module-level `influx_client`, `expt` and `postgres_cursor`.
    """
    sys.stderr.write('Processing video_acked data between {} and {}\n'
                     .format(s_str, e_str))

    points = query_measurement(influx_client, 'video_acked',
                               s_str, e_str)['video_acked']
    for point in points:
        expt_config = retrieve_expt_config(str(point['expt_id']),
                                           expt, postgres_cursor)
        abr_cc = get_abr_cc(expt_config)

        if abr_cc not in d:
            d[abr_cc] = [0.0, 0]  # running [sum, count]

        ssim_index = get_ssim_index(point)
        if ssim_index is not None and ssim_index != 1:
            d[abr_cc][0] += ssim_index
            d[abr_cc][1] += 1
def collect_ssim(influx_client, expt_id_cache, postgres_cursor, args):
    """Return SSIM statistics for video_acked data in
    [args.time_start, args.time_end], delegating the aggregation to
    do_collect_ssim."""
    results = query_measurement(influx_client, 'video_acked',
                                args.time_start, args.time_end)
    return do_collect_ssim(results, expt_id_cache, postgres_cursor)