Exemplo n.º 1
0
def test_generate_single_success_noscale(dotsbws_success_result, caplog,
                                         parser, capfd):
    """Run ``sbws generate`` unscaled over a datadir holding one successful
    result and check the single emitted bandwidth line matches the raw
    measurement."""
    cmdline = '-d {} --log-level DEBUG generate --output /dev/stdout'.format(
        dotsbws_success_result.name)
    args = parser.parse_args(cmdline.split())
    conf = get_config(args)
    sbws.core.generate.main(args, conf)
    datadir = conf['paths']['datadir']
    # The loader returns a fingerprint -> list-of-results mapping
    results = load_recent_results_in_datadir(1, datadir, success_only=False)
    assert isinstance(results, dict)
    total = sum(len(value) for value in results.values())
    assert total == 1, 'There should be one result in the datadir'
    # Flatten the mapping into a plain list of results
    flattened = [res for res_list in results.values() for res in res_list]
    result = flattened[0]
    assert isinstance(result, ResultSuccess), 'The one existing result '\
        'should be a success'
    captured = capfd.readouterr()
    stdout_lines = captured.out.strip().split('\n')
    assert len(stdout_lines) == 1 + NUM_LINES_HEADER

    # Expected bw is the median download speed in KiB/s; rtt is median ms.
    speeds = [dl['amount'] / dl['duration'] / 1024 for dl in result.downloads]
    bw = round(median(speeds))
    rtt = median([round(r * 1000) for r in result.rtts])
    bw_line = 'node_id=${} bw={} nick={} rtt={} time={}'.format(
        result.fingerprint, bw, result.nickname, rtt, round(result.time))
    assert stdout_lines[NUM_LINES_HEADER] == bw_line
Exemplo n.º 2
0
def main(args, conf):
    """Entry point for the ``generate`` command.

    Load recent measurement results from the datadir, build a
    ``V3BWFile`` (optionally scaled) and write it to the output path.

    :param argparse.Namespace args: command line arguments
    :param configparser.ConfigParser conf: parsed config files
    """
    os.makedirs(conf.getpath('paths', 'v3bw_dname'), exist_ok=True)

    datadir = conf.getpath('paths', 'datadir')
    if not os.path.isdir(datadir):
        fail_hard('%s does not exist', datadir)
    if args.scale_constant < 1:
        fail_hard('--scale-constant must be positive')
    if args.torflow_bw_margin < 0:
        # Fixed message: it previously read 'toflow-bw-margin must be major
        # than 0.' — the option name was misspelled and the constraint
        # misstated (0 is accepted; only negative values are rejected).
        fail_hard('--torflow-bw-margin must be non-negative.')
    if args.scale_sbws:
        scaling_method = SBWS_SCALING
    elif args.raw:
        scaling_method = None
    else:
        # sbws will scale as torflow until we have a better algorithm for
        # scaling (#XXX)
        scaling_method = TORFLOW_SCALING
    if args.secs_recent:
        # Round seconds up to whole days for the results loader.
        fresh_days = ceil(args.secs_recent / 24 / 60 / 60)
    else:
        fresh_days = conf.getint('general', 'data_period')
    reset_bw_ipv4_changes = conf.getboolean('general', 'reset_bw_ipv4_changes')
    reset_bw_ipv6_changes = conf.getboolean('general', 'reset_bw_ipv6_changes')
    results = load_recent_results_in_datadir(
        fresh_days,
        datadir,
        on_changed_ipv4=reset_bw_ipv4_changes,
        on_changed_ipv6=reset_bw_ipv6_changes)
    if len(results) < 1:
        log.warning('No recent results, so not generating anything. (Have you '
                    'ran sbws scanner recently?)')
        return
    state_fpath = conf.getpath('paths', 'state_fname')
    consensus_path = os.path.join(conf.getpath('tor', 'datadir'),
                                  "cached-consensus")
    # Accept None as scanner_country to be compatible with older versions.
    scanner_country = conf['scanner'].get('country')
    destinations_countries = destination.parse_destinations_countries(conf)
    bw_file = V3BWFile.from_results(results,
                                    scanner_country,
                                    destinations_countries,
                                    state_fpath,
                                    args.scale_constant,
                                    scaling_method,
                                    torflow_cap=args.torflow_bw_margin,
                                    round_digs=args.round_digs,
                                    secs_recent=args.secs_recent,
                                    secs_away=args.secs_away,
                                    min_num=args.min_num,
                                    consensus_path=consensus_path)

    # Fall back to the timestamped default filename when --output not given.
    output = args.output or \
        conf.getpath('paths', 'v3bw_fname').format(now_fname())
    bw_file.write(output)
    # NOTE(review): bare attribute access — presumably a property with a
    # logging side effect; confirm, otherwise this line is a no-op.
    bw_file.info_stats
Exemplo n.º 3
0
def test_generate_two_relays_success_noscale(dotsbws_success_result_two_relays,
                                             parser, capfd):
    """Run ``sbws generate`` unscaled over a datadir with two relays' results
    and verify both emitted bandwidth lines."""
    dotsbws = dotsbws_success_result_two_relays
    args = parser.parse_args(
        '-d {} --log-level DEBUG generate --output /dev/stdout'.format(
            dotsbws.name).split())
    conf = get_config(args)
    sbws.core.generate.main(args, conf)
    datadir = conf['paths']['datadir']
    # The loader returns a fingerprint -> list-of-results mapping
    results = load_recent_results_in_datadir(1, datadir, success_only=False)
    assert isinstance(results, dict)
    total = sum(len(value) for value in results.values())
    assert total == 4, 'There should be 4 results in the datadir'
    # Flatten into a single list of results
    results = [res for res_list in results.values() for res in res_list]
    for result in results:
        assert isinstance(result, ResultSuccess), 'All existing results '\
            'should be a success'
    captured = capfd.readouterr()
    stdout_lines = captured.out.strip().split('\n')
    assert len(stdout_lines) == 2 + NUM_LINES_HEADER

    def expected_line(fingerprint):
        # Build the bw line we expect for one relay from its raw results:
        # median speed (KiB/s), median rtt (ms), newest timestamp.
        relay_results = [r for r in results if r.fingerprint == fingerprint]
        newest = round(max(r.time for r in relay_results))
        nick = relay_results[0].nickname
        speeds = [
            dl['amount'] / dl['duration'] / 1024 for r in relay_results
            for dl in r.downloads
        ]
        speed = round(median(speeds))
        rtt = round(
            median([round(rtt * 1000) for r in relay_results
                    for rtt in r.rtts]))
        return 'node_id=${} bw={} nick={} rtt={} time={}'.format(
            fingerprint, speed, nick, rtt, newest)

    # Relay 'A'*40 is expected on the second data line, 'B'*40 on the first
    # (same line positions the original assertions checked).
    assert stdout_lines[1 + NUM_LINES_HEADER] == expected_line('A' * 40)
    assert stdout_lines[NUM_LINES_HEADER] == expected_line('B' * 40)
Exemplo n.º 4
0
def test_results(conf):
    """Every relay's recent results should record exactly one entry in each
    of the three 'recent activity' lists."""
    results = load_recent_results_in_datadir(5, conf["paths"]["datadir"])
    monitored_attrs = (
        "relay_recent_measurement_attempt",
        "relay_in_recent_consensus",
        "relay_recent_priority_list",
    )
    for fp, values in results.items():
        for attr in monitored_attrs:
            # Missing attribute counts as an empty list.
            longest = max(len(getattr(r, attr, [])) for r in values)
            assert longest == 1
Exemplo n.º 5
0
def main(args, conf):
    """Entry point for the ``generate`` command (older variant).

    Load recent successful results and write a ``V3BWFile`` built from them.

    :param argparse.Namespace args: command line arguments
    :param configparser.ConfigParser conf: parsed config files
    """
    os.makedirs(conf.getpath('paths', 'v3bw_dname'), exist_ok=True)

    datadir = conf.getpath('paths', 'datadir')
    if not os.path.isdir(datadir):
        fail_hard('%s does not exist', datadir)
    if args.scale_constant < 1:
        fail_hard('--scale-constant must be positive')
    if args.torflow_bw_margin < 0:
        # Fixed message: it previously read 'toflow-bw-margin must be major
        # than 0.' — the option name was misspelled and the constraint
        # misstated (0 is accepted; only negative values are rejected).
        fail_hard('--torflow-bw-margin must be non-negative.')
    if args.scale_sbws:
        scaling_method = SBWS_SCALING
    elif args.raw:
        scaling_method = None
    else:
        scaling_method = TORFLOW_SCALING

    fresh_days = conf.getint('general', 'data_period')
    reset_bw_ipv4_changes = conf.getboolean('general', 'reset_bw_ipv4_changes')
    reset_bw_ipv6_changes = conf.getboolean('general', 'reset_bw_ipv6_changes')
    results = load_recent_results_in_datadir(
        fresh_days,
        datadir,
        success_only=True,
        on_changed_ipv4=reset_bw_ipv4_changes,
        on_changed_ipv6=reset_bw_ipv6_changes)
    if len(results) < 1:
        log.warning('No recent results, so not generating anything. (Have you '
                    'ran sbws scanner recently?)')
        return
    state_fpath = conf.getpath('paths', 'state_fname')
    bw_file = V3BWFile.from_results(results,
                                    state_fpath,
                                    args.scale_constant,
                                    scaling_method,
                                    torflow_cap=args.torflow_bw_margin,
                                    torflow_round_digs=args.torflow_round_digs,
                                    secs_recent=args.secs_recent,
                                    secs_away=args.secs_away,
                                    min_num=args.min_num)
    # Fall back to the timestamped default filename when --output not given.
    output = args.output or \
        conf.getpath('paths', 'v3bw_fname').format(now_fname())
    bw_file.write(output)
    # NOTE(review): bare attribute access — presumably a property with a
    # logging side effect; confirm, otherwise this line is a no-op.
    bw_file.info_stats
Exemplo n.º 6
0
def main(args, conf):
    """Legacy ``generate`` entry point: read recent successful results and
    write a bandwidth-sorted, scaled v3bw file.

    :param argparse.Namespace args: command line arguments
    :param configparser.ConfigParser conf: parsed config files
    """
    if not is_initted(args.directory):
        fail_hard('Sbws isn\'t initialized.  Try sbws init')

    datadir = conf['paths']['datadir']
    if not os.path.isdir(datadir):
        fail_hard('%s does not exist', datadir)
    if args.scale_constant < 1:
        fail_hard('--scale-constant must be positive')

    fresh_days = conf.getint('general', 'data_period')
    results = load_recent_results_in_datadir(fresh_days,
                                             datadir,
                                             success_only=True)
    if results:
        # Naive datetime assumed UTC; formatted with strftime so the output
        # matches TIMESTAMP_DT_FRMT rather than isoformat().
        oldest_ts = min(r.time for fp in results for r in results[fp])
        earliest_bandwidth = datetime.utcfromtimestamp(
            oldest_ts).strftime(TIMESTAMP_DT_FRMT)
    if len(results) < 1:
        log.warning('No recent results, so not generating anything. (Have you '
                    'ran sbws scanner recently?)')
        return
    # One v3bw line per relay, highest bandwidth first, then scaled.
    data_lines = sorted(
        (result_data_to_v3bw_line(results, fp) for fp in results),
        key=lambda line: line.bw, reverse=True)
    data_lines = scale_lines(args, data_lines)
    generator_started = read_started_ts(conf)
    if results:
        header = V3BwHeader(earliest_bandwidth=earliest_bandwidth,
                            generator_started=generator_started)
    else:
        header = V3BwHeader(generator_started=generator_started)
    log_stats(data_lines)
    output = args.output if args.output else conf['paths']['v3bw_fname']
    log.info('Writing v3bw file to %s', output)
    with open(output, 'wt') as fd:
        fd.write(str(header))
        for line in data_lines:
            fd.write('{}\n'.format(str(line)))
Exemplo n.º 7
0
def main(args, conf):
    '''
    Main entry point into the stats command.

    :param argparse.Namespace args: command line arguments
    :param configparser.ConfigParser conf: parsed config files
    '''
    datadir = conf.getpath('paths', 'datadir')
    if not os.path.isdir(datadir):
        fail_hard('%s does not exist', datadir)

    period_days = conf.getint('general', 'data_period')
    results = load_recent_results_in_datadir(period_days,
                                             datadir,
                                             success_only=False)
    # Empty dict means nothing fresh enough to report on.
    if not results:
        log.warning('No fresh results')
        return
    print_stats(args, results)