Ejemplo n.º 1
0
def prepare_raw_data(yaml_settings_path, time_start, time_end, cc):
    """Fetch video_sent/video_acked rows from InfluxDB for the given time
    range and compute chunk transmission times for congestion control `cc`.

    Returns whatever calculate_trans_times() returns; exits the process if
    either InfluxDB query comes back empty.
    """
    with open(yaml_settings_path, 'r') as settings_file:
        yaml_settings = yaml.safe_load(settings_file)

    # optional clause restricting the queried time range (None means no limit)
    time_clause = create_time_clause(time_start, time_end)

    # client connected to InfluxDB
    influx_client = connect_to_influxdb(yaml_settings)

    def run_query(measurement):
        # build a SELECT over one measurement, run it, and abort on no data
        query = 'SELECT * FROM ' + measurement
        if time_clause is not None:
            query += ' WHERE ' + time_clause
        results = influx_client.query(query)
        if not results:
            sys.exit('Error: no results returned from query: ' + query)
        return results

    video_sent_results = run_query('video_sent')
    video_acked_results = run_query('video_acked')

    # client connected to Postgres (used to look up experiment settings)
    postgres_client = connect_to_postgres(yaml_settings)
    postgres_cursor = postgres_client.cursor()

    # pair sent/acked events into per-chunk transmission times
    trans_times = calculate_trans_times(video_sent_results,
                                        video_acked_results, cc,
                                        postgres_cursor)

    postgres_cursor.close()
    return trans_times
Ejemplo n.º 2
0
def main():
    """Restore daily InfluxDB backup files whose dates fall between the
    --from and --to command-line dates (aligned to the daily backup hour)."""
    parser = argparse.ArgumentParser()
    parser.add_argument('yaml_settings')
    parser.add_argument('--from', required=True, dest='start_date',
                        help='e.g., "2019-04-03" ({} AM in UTC)'.format(backup_hour))
    parser.add_argument('--to', required=True, dest='end_date',
                        help='e.g., "2019-04-05" ({} AM in UTC)'.format(backup_hour))
    parser.add_argument('--allow-skipping', action='store_true',
                        help='allow skipping a day if data already exist in InfluxDB')
    parser.add_argument('--force', action='store_true',
                        help='force restoring data even if data exist')
    parser.add_argument('--dry-run', action='store_true',
                        help='only check and print the status of InfluxDB')

    global args
    args = parser.parse_args()

    with open(args.yaml_settings, 'r') as settings_file:
        yaml_settings = yaml.safe_load(settings_file)

    # expose the destination database name to helper functions
    global DST_DB
    DST_DB = yaml_settings['influxdb_connection']['dbname']

    # connect to InfluxDB and verify it is in a usable state
    influx_client = connect_to_influxdb(yaml_settings)
    sanity_check_influxdb(influx_client)

    # append the backup hour so the dates align with backup boundaries
    start_date = '{}T{}'.format(args.start_date, backup_hour)
    end_date = '{}T{}'.format(args.end_date, backup_hour)
    files_to_restore = get_files_to_restore(start_date, end_date, influx_client)

    if args.dry_run:
        return

    for backup_file in files_to_restore:
        restore(backup_file, influx_client)
Ejemplo n.º 3
0
def send_to_influx(status, yaml_settings):
    """Write one 'channel_status' point per channel in `status` to InfluxDB.

    status: dict whose values are per-channel dicts; entries without a
        truthy 'channel' key are skipped.  Missing 'snr' or 'selected_rate'
        values default to 0.0.
    yaml_settings: parsed settings passed to connect_to_influxdb().
    """
    curr_ts = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    json_body = []

    # keys of `status` are unused; only the per-channel dicts matter
    for v in status.values():
        # skip entries with a missing or empty channel
        if not v.get('channel'):
            continue

        snr = v.get('snr', 0.0)
        selected_rate = v.get('selected_rate', 0.0)

        json_body.append({
            'time': curr_ts,
            'measurement': 'channel_status',
            'tags': {
                'channel': v['channel']
            },
            'fields': {
                'snr': snr,
                'selected_rate': selected_rate
            }
        })

        sys.stderr.write('channel {}, SNR {}, bitrate {}\n'.format(
            v['channel'], snr, selected_rate))

    client = connect_to_influxdb(yaml_settings)
    client.write_points(json_body, time_precision='ms')
Ejemplo n.º 4
0
def main():
    """Plot SSIM against rebuffer rate using the last --days days of data."""
    parser = argparse.ArgumentParser()
    parser.add_argument('yaml_settings')
    parser.add_argument('-o', '--output', required=True)
    parser.add_argument('-d', '--days', type=int, default=1)
    args = parser.parse_args()

    if args.days < 1:
        sys.exit('-d/--days must be a positive integer')

    with open(args.yaml_settings, 'r') as settings_file:
        yaml_settings = yaml.safe_load(settings_file)

    # InfluxDB client used for both measurement queries below
    influx_client = connect_to_influxdb(yaml_settings)

    query_fmt = 'SELECT * FROM {} WHERE time >= now() - {}d'
    video_acked_results = influx_client.query(
        query_fmt.format('video_acked', args.days))
    client_buffer_results = influx_client.query(
        query_fmt.format('client_buffer', args.days))

    # cache of Postgres data: experiment 'id' -> json 'data' of the experiment
    expt_id_cache = {}

    # Postgres client used to resolve experiment settings
    postgres_client = connect_to_postgres(yaml_settings)
    postgres_cursor = postgres_client.cursor()

    # collect per-scheme SSIM and rebuffer statistics
    ssim = collect_ssim(video_acked_results, expt_id_cache, postgres_cursor)
    buffer_data = collect_buffer_data(client_buffer_results)
    rebuffer = calculate_rebuffer_by_abr_cc(buffer_data, expt_id_cache,
                                            postgres_cursor)

    if not ssim or not rebuffer:
        sys.exit('Error: no data found in the queried range')

    # plot ssim vs rebuffer
    plot_ssim_rebuffer(ssim, rebuffer, args.output, args.days)

    postgres_cursor.close()
Ejemplo n.º 5
0
def send_to_influx(status, yaml_settings):
    """Write a 'channel_status' point (SNR and selected bitrate) for every
    entry of `status` into the 'puffer' database on InfluxDB."""
    timestamp = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')

    points = []
    # keys of `status` are not used; each value carries the channel data
    for entry in status.values():
        points.append({
          'measurement': 'channel_status',
          'tags': {'channel': entry['channel']},
          'time': timestamp,
          'fields': {'snr': entry['snr'],
                     'selected_rate': entry['selected_rate']}
        })

        sys.stderr.write('channel {}, SNR {}, bitrate {}\n'.format(
            entry['channel'], entry['snr'], entry['selected_rate']))

    client = connect_to_influxdb(yaml_settings)
    client.write_points(points, time_precision='s', database='puffer')
def main():
    """Collect SSIM and rebuffer statistics for a UTC time range and plot
    them, reading experiment settings either from a JSON cache (--expt) or
    from Postgres."""
    parser = argparse.ArgumentParser()
    parser.add_argument('yaml_settings')
    parser.add_argument('--from', dest='start_time', required=True,
                        help='datetime in UTC conforming to RFC3339')
    parser.add_argument('--to', dest='end_time', required=True,
                        help='datetime in UTC conforming to RFC3339')
    parser.add_argument('--expt', help='e.g., expt_cache.json')
    parser.add_argument('-o', '--output', required=True)
    global args
    args = parser.parse_args()

    with open(args.yaml_settings, 'r') as fh:
        yaml_settings = yaml.safe_load(fh)

    # Ensure the global exists even when the Postgres branch is skipped:
    # with --expt given, the original code never assigned postgres_cursor,
    # so the `if postgres_cursor:` check at the end could raise NameError
    # (unless a module-level default exists, which is not visible here).
    global postgres_cursor
    postgres_cursor = None

    if args.expt is not None:
        # use the pre-dumped experiment cache instead of querying Postgres
        with open(args.expt, 'r') as fh:
            global expt
            expt = json.load(fh)
    else:
        # create a Postgres client and perform queries
        postgres_client = connect_to_postgres(yaml_settings)
        postgres_cursor = postgres_client.cursor()

    # create an InfluxDB client and perform queries
    global influx_client
    influx_client = connect_to_influxdb(yaml_settings)

    # collect ssim and rebuffer
    ssim = collect_ssim()
    rebuffer = collect_rebuffer()

    if not ssim or not rebuffer:
        sys.exit('Error: no data found in the queried range')

    print(ssim)
    print(rebuffer)

    # plot ssim vs rebuffer
    plot_ssim_rebuffer(ssim, rebuffer)

    if postgres_cursor:
        postgres_cursor.close()
Ejemplo n.º 7
0
def main():
    """Parse an optional UTC time range and count hours/users in InfluxDB."""
    parser = argparse.ArgumentParser()
    parser.add_argument('yaml_settings')
    parser.add_argument('--from', dest='start_time',
                        help='datetime in UTC conforming to RFC3339')
    parser.add_argument('--to', dest='end_time',
                        help='datetime in UTC conforming to RFC3339')

    global args
    args = parser.parse_args()

    with open(args.yaml_settings, 'r') as settings_file:
        yaml_settings = yaml.safe_load(settings_file)

    # connect to InfluxDB and run the counting queries
    influx_client = connect_to_influxdb(yaml_settings)
    count_hours_users(influx_client)
Ejemplo n.º 8
0
def main():
    """Convert backed-up data one full day at a time between the --from and
    --to dates (both aligned to the daily backup hour)."""
    parser = argparse.ArgumentParser()
    parser.add_argument('yaml_settings')
    parser.add_argument(
        '--from',
        required=True,
        dest='start_date',
        help='e.g., "2019-04-03" ({} AM in UTC)'.format(backup_hour))
    parser.add_argument(
        '--to',
        required=True,
        dest='end_date',
        help='e.g., "2019-04-05" ({} AM in UTC)'.format(backup_hour))
    args = parser.parse_args()

    with open(args.yaml_settings, 'r') as settings_file:
        yaml_settings = yaml.safe_load(settings_file)

    # connect to InfluxDB
    influx_client = connect_to_influxdb(yaml_settings)

    # append the backup hour and parse both endpoints into datetimes
    start_date = datetime.strptime(
        '{}T{}'.format(args.start_date, backup_hour), date_format)
    end_date = datetime.strptime(
        '{}T{}'.format(args.end_date, backup_hour), date_format)

    if end_date <= start_date:
        sys.exit('END_DATE precedes START_DATE')

    # convert one full day per iteration; a trailing partial day is skipped
    one_day = timedelta(days=1)
    day_start = start_date
    while day_start + one_day <= end_date:
        convert(day_start, day_start + one_day, influx_client)
        day_start = day_start + one_day
Ejemplo n.º 9
0
def main():
    """Plot SSIM against rebuffer rate for an optional UTC time range."""
    parser = argparse.ArgumentParser()
    parser.add_argument('yaml_settings')
    parser.add_argument('--from',
                        dest='time_start',
                        help='datetime in UTC conforming to RFC3339')
    parser.add_argument('--to',
                        dest='time_end',
                        help='datetime in UTC conforming to RFC3339')
    parser.add_argument('-o', '--output', required=True)
    args = parser.parse_args()

    with open(args.yaml_settings, 'r') as settings_file:
        yaml_settings = yaml.safe_load(settings_file)

    # InfluxDB client for the measurement queries
    influx_client = connect_to_influxdb(yaml_settings)

    # cache of Postgres data: experiment 'id' -> json 'data' of the experiment
    expt_id_cache = {}

    # Postgres client for resolving experiment settings
    postgres_client = connect_to_postgres(yaml_settings)
    postgres_cursor = postgres_client.cursor()

    # collect ssim and rebuffer
    ssim = collect_ssim(influx_client, expt_id_cache, postgres_cursor, args)
    rebuffer = collect_rebuffer(influx_client, expt_id_cache, postgres_cursor,
                                args)

    if not ssim or not rebuffer:
        sys.exit('Error: no data found in the queried range')

    # plot ssim vs rebuffer
    plot_ssim_rebuffer(ssim, rebuffer, args.output, args)

    postgres_cursor.close()
Ejemplo n.º 10
0
def main():
    """Download a backup archive from Google cloud and restore it into
    InfluxDB."""
    parser = argparse.ArgumentParser()
    parser.add_argument('yaml_settings')
    parser.add_argument('file_to_restore',
                        help='e.g., 2018-12-04T11_2018-12-05T11.tar.gz')
    args = parser.parse_args()

    archive = args.file_to_restore
    # basename is everything before the first '.' (drops '.tar.gz');
    # str.index deliberately raises ValueError if there is no extension
    dot = archive.index('.')
    filename = archive[:dot]

    with open(args.yaml_settings, 'r') as settings_file:
        yaml_settings = yaml.safe_load(settings_file)

    # connect to InfluxDB and verify it is in a usable state
    influx_client = connect_to_influxdb(yaml_settings)
    sanity_check_influxdb(influx_client)

    # fetch and unpack the archive, then load its contents into InfluxDB
    download_untar(archive)
    restore(filename, influx_client)