def prepare_raw_data(yaml_settings_path, time_start, time_end, cc):
    """Query InfluxDB for video_sent/video_acked rows and compute chunk
    transmission times.

    Args:
        yaml_settings_path: path to the YAML settings file.
        time_start, time_end: optional bounds forwarded to
            create_time_clause (either may be None).
        cc: congestion-control filter forwarded to calculate_trans_times.

    Returns:
        Whatever calculate_trans_times returns for the queried data.

    Exits the process (sys.exit) if either query returns no results.
    """
    with open(yaml_settings_path, 'r') as fh:
        yaml_settings = yaml.safe_load(fh)

    # construct time clause after 'WHERE'
    time_clause = create_time_clause(time_start, time_end)

    # create a client connected to InfluxDB
    influx_client = connect_to_influxdb(yaml_settings)

    def query_measurement(measurement):
        # Build and run 'SELECT * FROM <measurement>' with the optional
        # time clause; abort if the query comes back empty.
        query = 'SELECT * FROM ' + measurement
        if time_clause is not None:
            query += ' WHERE ' + time_clause
        results = influx_client.query(query)
        if not results:
            sys.exit('Error: no results returned from query: ' + query)
        return results

    # perform queries in InfluxDB (identical shape for both measurements)
    video_sent_results = query_measurement('video_sent')
    video_acked_results = query_measurement('video_acked')

    # create a client connected to Postgres
    postgres_client = connect_to_postgres(yaml_settings)
    postgres_cursor = postgres_client.cursor()

    # calculate chunk transmission times
    ret = calculate_trans_times(video_sent_results, video_acked_results,
                                cc, postgres_cursor)

    postgres_cursor.close()
    return ret
def main():
    """Read the puffer_experiment table and dump it as a JSON cache file
    mapping experiment id -> experiment config."""
    parser = argparse.ArgumentParser()
    parser.add_argument('yaml_settings')
    parser.add_argument('-o', metavar='OUTPUT',
                        help='output path (default: expt_cache.json)')
    args = parser.parse_args()

    with open(args.yaml_settings, 'r') as fh:
        yaml_settings = yaml.safe_load(fh)

    output_path = args.o or 'expt_cache.json'

    # create a Postgres client and perform queries
    postgres_client = connect_to_postgres(yaml_settings)
    postgres_cursor = postgres_client.cursor()
    postgres_cursor.execute('SELECT * FROM puffer_experiment;')
    rows = postgres_cursor.fetchall()
    postgres_cursor.close()

    # read and dump expt_cache into a JSON file; each row is
    # (expt_id, expt_config_hash, expt_config) and ids must be unique
    expt_cache = {}
    for row in rows:
        expt_id, _config_hash, expt_config = row[0], row[1], row[2]
        if expt_id in expt_cache:
            sys.exit('expt_id {} already exists'.format(expt_id))
        expt_cache[expt_id] = expt_config

    with open(output_path, 'w') as fh:
        json.dump(expt_cache, fh)

    sys.stderr.write('Saved to {}\n'.format(output_path))
def main():
    """Plot SSIM vs rebuffer rate for data from the last --days days."""
    parser = argparse.ArgumentParser()
    parser.add_argument('yaml_settings')
    parser.add_argument('-o', '--output', required=True)
    parser.add_argument('-d', '--days', type=int, default=1)
    args = parser.parse_args()

    output = args.output
    days = args.days

    # reject zero or negative windows before touching any backend
    if days < 1:
        sys.exit('-d/--days must be a positive integer')

    with open(args.yaml_settings, 'r') as fh:
        yaml_settings = yaml.safe_load(fh)

    # create an InfluxDB client and perform queries
    influx_client = connect_to_influxdb(yaml_settings)

    # query video_acked and client_buffer
    video_acked_results = influx_client.query(
        'SELECT * FROM video_acked WHERE time >= now() - {}d'.format(days))
    client_buffer_results = influx_client.query(
        'SELECT * FROM client_buffer WHERE time >= now() - {}d'.format(days))

    # cache of Postgres data: experiment 'id' -> json 'data' of the experiment
    expt_id_cache = {}

    # create a Postgres client and perform queries
    postgres_client = connect_to_postgres(yaml_settings)
    postgres_cursor = postgres_client.cursor()

    # collect ssim and rebuffer, keyed per (abr, cc)
    ssim = collect_ssim(video_acked_results, expt_id_cache, postgres_cursor)
    buffer_data = collect_buffer_data(client_buffer_results)
    rebuffer = calculate_rebuffer_by_abr_cc(buffer_data, expt_id_cache,
                                            postgres_cursor)

    if not ssim or not rebuffer:
        sys.exit('Error: no data found in the queried range')

    # plot ssim vs rebuffer
    plot_ssim_rebuffer(ssim, rebuffer, output, days)

    postgres_cursor.close()
def main():
    """Collect SSIM and rebuffer stats for a time range and plot them.

    Experiment configs come either from a JSON cache (--expt) or from
    Postgres. Shares state with the collect_* helpers via module globals
    (args, expt, postgres_cursor, influx_client).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('yaml_settings')
    parser.add_argument('--from', dest='start_time', required=True,
                        help='datetime in UTC conforming to RFC3339')
    parser.add_argument('--to', dest='end_time', required=True,
                        help='datetime in UTC conforming to RFC3339')
    parser.add_argument('--expt', help='e.g., expt_cache.json')
    parser.add_argument('-o', '--output', required=True)
    global args
    args = parser.parse_args()

    with open(args.yaml_settings, 'r') as fh:
        yaml_settings = yaml.safe_load(fh)

    # BUG FIX: postgres_cursor was only assigned in the 'else' branch, so
    # when --expt was given the final 'if postgres_cursor:' raised a
    # NameError. Initialize it to None up front.
    global postgres_cursor
    postgres_cursor = None

    if args.expt is not None:
        # load the experiment cache from JSON instead of querying Postgres
        with open(args.expt, 'r') as fh:
            global expt
            expt = json.load(fh)
    else:
        # create a Postgres client and perform queries
        postgres_client = connect_to_postgres(yaml_settings)
        postgres_cursor = postgres_client.cursor()

    # create an InfluxDB client and perform queries
    global influx_client
    influx_client = connect_to_influxdb(yaml_settings)

    # collect ssim and rebuffer
    ssim = collect_ssim()
    rebuffer = collect_rebuffer()

    if not ssim or not rebuffer:
        sys.exit('Error: no data found in the queried range')

    print(ssim)
    print(rebuffer)

    # plot ssim vs rebuffer
    plot_ssim_rebuffer(ssim, rebuffer)

    if postgres_cursor:
        postgres_cursor.close()
def main():
    """Plot SSIM vs rebuffer rate for an optional --from/--to time range."""
    parser = argparse.ArgumentParser()
    parser.add_argument('yaml_settings')
    parser.add_argument('--from', dest='time_start',
                        help='datetime in UTC conforming to RFC3339')
    parser.add_argument('--to', dest='time_end',
                        help='datetime in UTC conforming to RFC3339')
    parser.add_argument('-o', '--output', required=True)
    args = parser.parse_args()
    output = args.output

    with open(args.yaml_settings, 'r') as fh:
        yaml_settings = yaml.safe_load(fh)

    # create an InfluxDB client and perform queries
    influx_client = connect_to_influxdb(yaml_settings)

    # cache of Postgres data: experiment 'id' -> json 'data' of the experiment
    expt_id_cache = {}

    # create a Postgres client and perform queries
    postgres_client = connect_to_postgres(yaml_settings)
    postgres_cursor = postgres_client.cursor()

    # collect ssim and rebuffer; both helpers share the expt cache/cursor
    ssim = collect_ssim(influx_client, expt_id_cache, postgres_cursor, args)
    rebuffer = collect_rebuffer(influx_client, expt_id_cache,
                                postgres_cursor, args)

    if not ssim or not rebuffer:
        sys.exit('Error: no data found in the queried range')

    # plot ssim vs rebuffer
    plot_ssim_rebuffer(ssim, rebuffer, output, args)

    postgres_cursor.close()
def main():
    """Drive headless Firefox to log into Grafana, create a dashboard
    snapshot (1-hour expiry, 60 s timeout), then store the snapshot URL in
    Postgres and delete all older snapshot rows."""
    parser = argparse.ArgumentParser()
    parser.add_argument('yaml_settings')
    args = parser.parse_args()

    # creation timestamp stored alongside the snapshot URL
    time = datetime.utcnow()

    with open(args.yaml_settings, 'r') as fh:
        yaml_settings = yaml.safe_load(fh)

    options = Options()
    # NOTE(review): set_headless/firefox_options are deprecated in newer
    # Selenium releases; kept as-is for the pinned Selenium version
    options.set_headless(headless=True)
    driver = webdriver.Firefox(firefox_options=options)
    driver.implicitly_wait(150)

    try:
        driver.get("https://puffer.stanford.edu/grafana/login/")

        # log in
        driver.find_element_by_name("username").click()
        driver.find_element_by_name("username").clear()
        driver.find_element_by_name("username").send_keys("puffer")
        driver.find_element_by_id("inputPassword").click()
        driver.find_element_by_id("inputPassword").clear()
        driver.find_element_by_id("inputPassword").send_keys(GRAFANA_PWD)
        xpath = ("(.//*[normalize-space(text()) and normalize-space(.)"
                 "='Help'])[1]/following::button[1]")
        driver.find_element_by_xpath(xpath).click()

        # open the share dialog and pick 'Snapshot'
        xpath = ("(.//*[normalize-space(text()) and normalize-space(.)"
                 "='Help'])[1]/following::i[5]")
        driver.find_element_by_xpath(xpath).click()
        driver.find_element_by_link_text("Snapshot").click()

        # set expiration to 1 hour (same xpath reused: open, select, close)
        xpath = ("(.//*[normalize-space(text()) and normalize-space(.)"
                 "='Expire'])[1]/following::select[1]")
        driver.find_element_by_xpath(xpath).click()
        Select(driver.find_element_by_xpath(xpath)).select_by_visible_text(
            "1 Hour")
        driver.find_element_by_xpath(xpath).click()

        # begin code to set timeout
        xpath = ("(.//*[normalize-space(text()) and normalize-space(.)"
                 "='Timeout (seconds)'])[1]/following::input[1]")
        driver.find_element_by_xpath(xpath).click()
        driver.find_element_by_xpath(xpath).clear()
        driver.find_element_by_xpath(xpath).send_keys("60")
        # end code to set timeout

        # submit the snapshot dialog
        xpath = ("(.//*[normalize-space(text()) and normalize-space(.)"
                 "='Timeout (seconds)'])[1]/following::button[1]")
        driver.find_element_by_xpath(xpath).click()

        prefix = "https://puffer.stanford.edu/grafana/dashboard/snapshot/"
        snapshot_url = driver.find_element_by_partial_link_text(prefix).text
        sys.stderr.write("Generated snapshot: {}\n".format(snapshot_url))
        driver.quit()
    except NoSuchElementException:
        driver.quit()
        sys.exit("Error generating snapshot")

    # Now, add this link to postgres, and delete old links from the table
    conn = None
    try:
        conn = connect_to_postgres(yaml_settings)
        cur = conn.cursor()

        add_snap_cmd = ("INSERT INTO puffer_grafanasnapshot "
                        "(url, created_on) VALUES (%s, %s)")
        cur.execute(add_snap_cmd, (snapshot_url, time))

        del_old_snap_cmd = ("DELETE FROM puffer_grafanasnapshot "
                            "WHERE (url) != (%s)")
        cur.execute(del_old_snap_cmd, (snapshot_url,))

        conn.commit()
        cur.close()
    except Exception as e:
        # BUG FIX: was a bare 'except:' that swallowed every exception
        # (including SystemExit/KeyboardInterrupt) with no detail; narrowed
        # to Exception and the cause is now logged. Still best-effort.
        sys.stderr.write(
            "Failed to post data to PostgreSQL: {}\n".format(e))
    finally:
        if conn is not None:
            conn.close()