Example 1
def test_cleanup_delete_barely_rotten(time_mock, empty_dotsbws_datadir, caplog,
                                      parser):
    caplog.set_level(logging.DEBUG)
    dotsbws = empty_dotsbws_datadir
    args = parser.parse_args('-d {} --log-level DEBUG cleanup'.format(
        dotsbws.name).split())
    conf = get_config(args)
    conf['general']['data_period'] = '1'
    conf['cleanup']['stale_days'] = '5'
    conf['cleanup']['rotten_days'] = '20'
    now = 1443571200  # 1,443,571,200 is 30 Sep 2015 00:00:00 UTC
    time_mock.side_effect = monotonic_time(start=now)
    dd = os.path.join(dotsbws.name, 'datadir')
    fname_rotten1 = os.path.join(dd, '2015-09-09.txt')
    fname_rotten2 = os.path.join(dd, '2015-09-10.txt')
    fname_leave = os.path.join(dd, '2015-09-11.txt')
    touch_file(fname_rotten1)
    touch_file(fname_rotten2)
    touch_file(fname_leave)
    sbws.core.cleanup.main(args, conf)
    expected_fnames = [
        fname_leave + '.gz',
        os.path.join(dd, '.lockfile'),
    ]
    existing_fnames = []
    for root, dirs, files in os.walk(dd):
        for fname in files:
            existing_fnames.append(os.path.join(root, fname))
    expected_fnames.sort()
    existing_fnames.sort()
    assert expected_fnames == existing_fnames
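This example (and the other cleanup tests below) leans on two small helpers that are not shown here: touch_file, which creates an empty file for cleanup to find, and monotonic_time, whose generator is installed as the side_effect of the mocked time.time(). The following is only a minimal sketch of what such helpers could look like, not the project's actual code:

def touch_file(fname):
    # Create an empty file so the cleanup code has something to inspect.
    with open(fname, 'w'):
        pass


def monotonic_time(start=0, increment=0.000001):
    # Infinite generator: every call to the mocked time.time() yields a
    # slightly later timestamp, starting at *start*.
    while True:
        yield start
        start += increment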
Example 2
def test_generate_single_success_noscale(dotsbws_success_result, caplog,
                                         parser, capfd):
    dotsbws = dotsbws_success_result
    args = parser.parse_args(
        '-d {} --log-level DEBUG generate --output /dev/stdout'.format(
            dotsbws.name).split())
    conf = get_config(args)
    sbws.core.generate.main(args, conf)
    dd = conf['paths']['datadir']
    # Here results is a dict
    results = load_recent_results_in_datadir(1, dd, success_only=False)
    assert isinstance(results, dict)
    res_len = sum([len(results[fp]) for fp in results])
    assert res_len == 1, 'There should be one result in the datadir'
    # And here we change it to a list
    results = [r for fp in results for r in results[fp]]
    result = results[0]
    assert isinstance(result, ResultSuccess), 'The one existing result '\
        'should be a success'
    captured = capfd.readouterr()
    stdout_lines = captured.out.strip().split('\n')
    assert len(stdout_lines) == 1 + NUM_LINES_HEADER

    bw = round(
        median(
            [dl['amount'] / dl['duration'] / 1024 for dl in result.downloads]))
    rtt = median([round(r * 1000) for r in result.rtts])
    bw_line = 'node_id=${} bw={} nick={} rtt={} time={}'.format(
        result.fingerprint, bw, result.nickname, rtt, round(result.time))
    assert stdout_lines[NUM_LINES_HEADER] == bw_line
Example 3
def test_stats_fresh_result(tmpdir, capsys, caplog):
    '''
    An initialized .sbws directory with a fresh error result should have some
    boring stats and exit cleanly
    '''
    caplog.set_level(logging.DEBUG)
    init_directory(tmpdir)
    add_single_fresh_result(tmpdir)
    p = create_parser()
    args = p.parse_args(
        '-d {} --log-level DEBUG stats --error-types'.format(tmpdir).split())
    conf = get_config(args)
    sbws.core.stats.main(args, conf)
    captured = capsys.readouterr()
    lines = captured.out.strip().split('\n')
    needed_output_lines = [
        '1 relays have recent results',
        'Mean 0.00 successful measurements per relay',
        '0 success results and 1 error results',
    ]
    for needed_line in needed_output_lines:
        assert needed_line in lines
    lines = [l.getMessage() for l in caplog.records]
    needed_log_lines = [
        'Keeping 1/1 read lines from {}/{}/{}.txt'.format(
            tmpdir, 'datadir',
            datetime.utcfromtimestamp(time.time()).date()),
        'Keeping 1/1 results after removing old ones',
    ]
    for needed_line in needed_log_lines:
        assert needed_line in lines
Example 4
def dotsbws_success_result(empty_dotsbws_datadir):
    '''
    Creates a ~/.sbws with a single fresh ResultSuccess in it
    '''
    fp1 = 'A' * 40
    fp2 = 'B' * 40
    circ = [fp1, fp2]
    nick = 'CowSayWhat'
    relay_ip = '169.254.100.1'
    server_ip = '169.254.100.2'
    scanner_nick = 'SBWSscanner'
    rtts = [4.242]
    downloads = [{'duration': 4, 'amount': 40 * 1024}]
    t = time.time()
    relay = Result.Relay(fp1, nick, relay_ip)
    result = ResultSuccess(rtts,
                           downloads,
                           relay,
                           circ,
                           server_ip,
                           scanner_nick,
                           t=t)
    args = _PseudoArguments(directory=empty_dotsbws_datadir.name)
    conf = get_config(args)
    dd = conf['paths']['datadir']
    write_result_to_datadir(result, dd)
    return empty_dotsbws_datadir
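The fixtures in these examples hand get_config() a _PseudoArguments object instead of a real parse_args() result. Assuming all it needs to do is expose keyword arguments as attributes (as a parsed argparse.Namespace would), a minimal stand-in could look like this:

import argparse


class _PseudoArguments(argparse.Namespace):
    # Hypothetical stand-in for the Namespace returned by parse_args(), so
    # fixtures can build a config without going through the CLI parser.
    def __init__(self, **kwargs):
        for key in kwargs:
            setattr(self, key, kwargs[key])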
Example 5
def main():
    parser = create_parser()
    args = parser.parse_args()
    conf = get_config(args)
    _adjust_log_level(args, conf)
    conf_valid, conf_errors = validate_config(conf)
    if not conf_valid:
        for e in conf_errors:
            log.critical(e)
        exit(1)
    configure_logging(conf)
    def_args = [args, conf]
    def_kwargs = {}
    known_commands = {
        'cleanup': {'f': sbws.core.cleanup.main,
                    'a': def_args, 'kw': def_kwargs},
        'scanner': {'f': sbws.core.scanner.main,
                    'a': def_args, 'kw': def_kwargs},
        'generate': {'f': sbws.core.generate.main,
                     'a': def_args, 'kw': def_kwargs},
        'init': {'f': sbws.core.init.main,
                 'a': def_args, 'kw': def_kwargs},
        'stats': {'f': sbws.core.stats.main,
                  'a': def_args, 'kw': def_kwargs},
    }
    try:
        if args.command not in known_commands:
            parser.print_help()
        else:
            comm = known_commands[args.command]
            exit(comm['f'](*comm['a'], **comm['kw']))
    except KeyboardInterrupt:
        print('')
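Whatever a subcommand's main() returns is passed straight to exit(), so a non-zero return (or a direct call to exit(), as the fail-hard paths exercised in the tests below do) becomes the process exit status. A hypothetical subcommand honoring that contract might look like:

import logging
import os

log = logging.getLogger(__name__)


def main(args, conf):
    # Hypothetical subcommand: return None/0 on success; a non-zero return
    # value is turned into the exit status by the dispatcher above.
    datadir = conf['paths']['datadir']
    if not os.path.isdir(datadir):
        log.critical('%s does not exist', datadir)
        return 1
    return 0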
Example 6
def test_generate_empty_datadir(empty_dotsbws_datadir, caplog, parser):
    dotsbws = empty_dotsbws_datadir
    args = parser.parse_args(
        '-d {} --log-level DEBUG generate --output /dev/stdout'.format(
            dotsbws.name).split())
    conf = get_config(args)
    sbws.core.generate.main(args, conf)
    assert 'No recent results' in caplog.records[-1].getMessage()
Example 7
def empty_dotsbws_datadir(empty_dotsbws):
    '''
    Creates a ~/.sbws with nothing in it but a config.ini and an empty datadir
    '''
    args = _PseudoArguments(directory=empty_dotsbws.name)
    conf = get_config(args)
    dd = conf['paths']['datadir']
    os.makedirs(dd, exist_ok=False)
    return empty_dotsbws
Example 8
def empty_dotsbws(parser):
    '''
    Creates a ~/.sbws with nothing in it but a config.ini
    '''
    d = TemporaryDirectory()
    args = parser.parse_args('-d {} --log-level DEBUG init'.format(
        d.name).split())
    conf = get_config(args)
    sbws.core.init.main(args, conf)
    return d
Example 9
def test_cleanup_nothing_to_do(empty_dotsbws_datadir, caplog, parser):
    caplog.set_level(logging.DEBUG)
    dotsbws = empty_dotsbws_datadir
    args = parser.parse_args('-d {} --log-level DEBUG cleanup'.format(
        dotsbws.name).split())
    conf = get_config(args)
    try:
        sbws.core.cleanup.main(args, conf)
    except Exception as e:
        assert None, 'Nothing bad should have happened, but this did: {}'\
            .format(e)
Example 10
def test_cleanup_medium_stale(empty_dotsbws_datadir, caplog, parser):
    dotsbws = empty_dotsbws_datadir
    args = parser.parse_args('-d {} --log-level DEBUG cleanup'.format(
        dotsbws.name).split())
    conf = get_config(args)
    conf['general']['data_period'] = '10'
    conf['cleanup']['stale_days'] = '19'
    conf['cleanup']['rotten_days'] = '50'
    sbws.core.cleanup.main(args, conf)
    assert 'cleanup/stale_days (19) is less than twice ' +\
        'general/data_period (10).' in caplog.records[-1].getMessage()
Example 11
def test_stats_stale_result(tmpdir, caplog):
    '''
    An initialized .sbws directory with no fresh results should say so and
    exit cleanly
    '''
    init_directory(tmpdir)
    add_single_stale_result(tmpdir)
    p = create_parser()
    args = p.parse_args('-d {} --log-level DEBUG stats'.format(tmpdir).split())
    conf = get_config(args)
    sbws.core.stats.main(args, conf)
    assert 'No fresh results' == caplog.records[-1].getMessage()
Example 12
def test_generate_two_relays_success_noscale(dotsbws_success_result_two_relays,
                                             parser, capfd):
    dotsbws = dotsbws_success_result_two_relays
    args = parser.parse_args(
        '-d {} --log-level DEBUG generate --output /dev/stdout'.format(
            dotsbws.name).split())
    conf = get_config(args)
    sbws.core.generate.main(args, conf)
    dd = conf['paths']['datadir']
    # Here results is a dict
    results = load_recent_results_in_datadir(1, dd, success_only=False)
    assert isinstance(results, dict)
    res_len = sum([len(results[fp]) for fp in results])
    assert res_len == 4, 'There should be 4 results in the datadir'
    # And here we change it to a list
    results = [r for fp in results for r in results[fp]]
    for result in results:
        assert isinstance(result, ResultSuccess), 'All existing results '\
            'should be a success'
    captured = capfd.readouterr()
    stdout_lines = captured.out.strip().split('\n')
    assert len(stdout_lines) == 2 + NUM_LINES_HEADER

    r1_results = [r for r in results if r.fingerprint == 'A' * 40]
    r1_time = round(max([r.time for r in r1_results]))
    r1_name = r1_results[0].nickname
    r1_fingerprint = r1_results[0].fingerprint
    r1_speeds = [
        dl['amount'] / dl['duration'] / 1024 for r in r1_results
        for dl in r.downloads
    ]
    r1_speed = round(median(r1_speeds))
    r1_rtt = round(
        median([round(rtt * 1000) for r in r1_results for rtt in r.rtts]))
    bw_line = 'node_id=${} bw={} nick={} rtt={} time={}'.format(
        r1_fingerprint, r1_speed, r1_name, r1_rtt, r1_time)
    assert stdout_lines[1 + NUM_LINES_HEADER] == bw_line

    r2_results = [r for r in results if r.fingerprint == 'B' * 40]
    r2_time = round(max([r.time for r in r2_results]))
    r2_name = r2_results[0].nickname
    r2_fingerprint = r2_results[0].fingerprint
    r2_speeds = [
        dl['amount'] / dl['duration'] / 1024 for r in r2_results
        for dl in r.downloads
    ]
    r2_speed = round(median(r2_speeds))
    r2_rtt = round(
        median([round(rtt * 1000) for r in r2_results for rtt in r.rtts]))
    bw_line = 'node_id=${} bw={} nick={} rtt={} time={}'.format(
        r2_fingerprint, r2_speed, r2_name, r2_rtt, r2_time)
    assert stdout_lines[NUM_LINES_HEADER] == bw_line
Example 13
def test_cleanup_no_datadir(empty_dotsbws, caplog, parser):
    dotsbws = empty_dotsbws
    args = parser.parse_args('-d {} --log-level DEBUG cleanup'.format(
        dotsbws.name).split())
    conf = get_config(args)
    try:
        sbws.core.cleanup.main(args, conf)
    except SystemExit as e:
        assert e.code == 1
    else:
        assert None, 'Should have failed'
    dd = conf['paths']['datadir']
    assert '{} does not exist'.format(dd) in caplog.records[-1].getMessage()
Example 14
def test_cleanup_no_dotsbws(tmpdir, caplog, parser):
    caplog.set_level(logging.DEBUG)
    dotsbws = tmpdir
    args = parser.parse_args(
        '-d {} --log-level DEBUG cleanup'.format(dotsbws).split())
    conf = get_config(args)
    try:
        sbws.core.cleanup.main(args, conf)
    except SystemExit as e:
        assert e.code == 1
    else:
        assert None, 'Should have failed'
    assert 'Try sbws init' in caplog.records[-1].getMessage()
Example 15
def test_generate_bad_scale_constant(empty_dotsbws_datadir, caplog, parser):
    dotsbws = empty_dotsbws_datadir
    args = parser.parse_args(
        '-d {} --log-level DEBUG generate --scale-constant -1 '
        '--output /dev/stdout'.format(dotsbws.name).split())
    conf = get_config(args)
    try:
        sbws.core.generate.main(args, conf)
    except SystemExit as e:
        assert e.code == 1
    else:
        assert None, 'Should have failed'
    assert '--scale-constant must be positive' in \
        caplog.records[-1].getMessage()
Example 16
def test_stats_uninitted(tmpdir, caplog):
    '''
    An un-initialized .sbws directory should fail hard and exit immediately
    '''
    p = create_parser()
    args = p.parse_args('-d {} --log-level DEBUG stats'.format(tmpdir).split())
    conf = get_config(args)
    try:
        sbws.core.stats.main(args, conf)
    except SystemExit as e:
        assert e.code == 1
    else:
        assert None, 'Should have failed'
    assert 'Sbws isn\'t initialized. Try sbws init' == \
        caplog.records[-1].getMessage()
Example 17
def test_generate_single_error(dotsbws_error_result, caplog, parser):
    caplog.set_level(logging.DEBUG)
    dotsbws = dotsbws_error_result
    args = parser.parse_args(
        '-d {} --log-level DEBUG generate --output /dev/stdout'.format(
            dotsbws.name).split())
    conf = get_config(args)
    sbws.core.generate.main(args, conf)
    dd = conf['paths']['datadir']
    for record in caplog.records:
        if 'Keeping 0/1 read lines from {}'.format(dd) in record.getMessage():
            break
    else:
        assert None, 'Unable to find log line indicating 0 success results '\
            'in data file'
    assert 'No recent results' in caplog.records[-1].getMessage()
Example 18
def test_cleanup_small_rotten(empty_dotsbws_datadir, caplog, parser):
    dotsbws = empty_dotsbws_datadir
    args = parser.parse_args('-d {} --log-level DEBUG cleanup'.format(
        dotsbws.name).split())
    conf = get_config(args)
    conf['general']['data_period'] = '1'
    conf['cleanup']['stale_days'] = '5'
    conf['cleanup']['rotten_days'] = '4'
    try:
        sbws.core.cleanup.main(args, conf)
    except SystemExit as e:
        assert e.code == 1
    else:
        assert None, 'Should have failed'
    assert 'cleanup/rotten_days (4) must be the same or larger than ' +\
        'cleanup/stale_days (5)' in caplog.records[-1].getMessage()
Example 19
def test_cleanup_only_delete_rotten(time_mock, empty_dotsbws_datadir, caplog,
                                    parser):
    caplog.set_level(logging.DEBUG)
    dotsbws = empty_dotsbws_datadir
    args = parser.parse_args('-d {} --log-level DEBUG cleanup'.format(
        dotsbws.name).split())
    conf = get_config(args)
    conf['general']['data_period'] = '1'
    conf['cleanup']['stale_days'] = '10'
    conf['cleanup']['rotten_days'] = str(365 * 2)
    now = 1041379200  # 1,041,379,200 is 1 Jan 2003 00:00:00 UTC
    time_mock.side_effect = monotonic_time(start=now)
    j = os.path.join
    dd = j(dotsbws.name, 'datadir')
    sub_a = j(dd, 'a')
    sub_b = j(dd, 'b')
    sub_ab = j(dd, 'a', 'b')
    for dname in [sub_a, sub_b, sub_ab]:
        os.makedirs(dname, exist_ok=True)
    should_delete_fnames = [
        j(dd, '2000-01-01aaaa.txt'),
        j(sub_a, '2000-10-01bbbb.txt'),
        j(sub_b, '2000-10-10-cccc.txt'),
        j(sub_a, '2000-10-10.dddd.txt'),
        j(sub_a, '2000-10-11.eeee.txt.gz'),
        j(dd, '2000-10-12.txt.gz'),
        j(sub_ab, '2000-11-30.txt'),
    ]
    should_ignore_fnames = [
        j(dd, '2002-12-31.txt'),  # too new, should be ignored
        j(dd, '2003-01-01.txt'),  # today, should be ignored
        j(dd, '2003-02-10.txt'),  # in the future, should be ignored
        j(sub_b, '2000-10-10.nottxt'),  # wrong ext, should be ignored
        j(sub_a, '200j-10-10.txt'),  # not YYYY-MM-DD*.txt, should be ignored
        j(dd, '1999-1*-11.txt.gz'),  # not YYYY-MM-DD*.txt.gz, should ignore
    ]
    for fname in should_ignore_fnames + should_delete_fnames:
        touch_file(fname)
    sbws.core.cleanup.main(args, conf)
    expected_fnames = should_ignore_fnames + [os.path.join(dd, '.lockfile')]
    existing_fnames = []
    for root, dirs, files in os.walk(dd):
        for fname in files:
            existing_fnames.append(os.path.join(root, fname))
    expected_fnames.sort()
    existing_fnames.sort()
    assert expected_fnames == existing_fnames
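This test implies that cleanup only treats names of the form YYYY-MM-DD*.txt or YYYY-MM-DD*.txt.gz (anywhere under the datadir) as data files; the entries that are merely too new, dated today, or dated in the future match the name pattern but are spared by the age check instead. A filter along these lines, offered only as a guess at the predicate rather than the project's actual code, would accept every name in should_delete_fnames and reject the malformed ones in should_ignore_fnames:

import os
import re

# Four digits, dash, two digits, dash, two digits, then anything, ending in
# .txt or .txt.gz.
_DATA_FNAME_RE = re.compile(r'^\d{4}-\d{2}-\d{2}.*\.txt(\.gz)?$')


def is_data_fname(path):
    return _DATA_FNAME_RE.match(os.path.basename(path)) is not None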
Example 20
def test_stats_initted(tmpdir, caplog):
    '''
    An initialized but otherwise empty .sbws directory should fail, complaining
    about the missing ~/.sbws/datadir
    '''
    init_directory(tmpdir)
    p = create_parser()
    args = p.parse_args('-d {} --log-level DEBUG stats'.format(tmpdir).split())
    conf = get_config(args)
    try:
        sbws.core.stats.main(args, conf)
    except SystemExit as e:
        assert e.code == 1
    else:
        assert None, 'Should have failed'
    assert '{}/datadir does not exist'.format(tmpdir) == \
        caplog.records[-1].getMessage()
Example 21
def dotsbws_error_result(empty_dotsbws_datadir):
    '''
    Creates a ~/.sbws with a single fresh ResultError in it
    '''
    fp1 = 'A' * 40
    fp2 = 'B' * 40
    circ = [fp1, fp2]
    nick = 'CowSayWhat'
    relay_ip = '169.254.100.1'
    server_ip = '169.254.100.2'
    scanner_nick = 'SBWSscanner'
    msg = 'UnitTest error message'
    t = time.time()
    relay = Result.Relay(fp1, nick, relay_ip)
    result = ResultError(relay, circ, server_ip, scanner_nick, t=t, msg=msg)
    args = _PseudoArguments(directory=empty_dotsbws_datadir.name)
    conf = get_config(args)
    dd = conf['paths']['datadir']
    write_result_to_datadir(result, dd)
    return empty_dotsbws_datadir
Example 22
def test_stats_fresh_results(time_mock, tmpdir, capsys, caplog):
    '''
    An initialized .sbws directory with a fresh error and fresh success should
    have some exciting stats and exit cleanly
    '''
    caplog.set_level(logging.DEBUG)
    init_directory(tmpdir)
    start = 1524769441
    time_mock.side_effect = monotonic_time(start=start)
    add_two_fresh_results(tmpdir, start - 1)
    p = create_parser()
    args = p.parse_args(
        '-d {} --log-level DEBUG stats --error-types'.format(tmpdir).split())
    conf = get_config(args)
    sbws.core.stats.main(args, conf)
    needed_output_lines = [
        '1 relays have recent results',
        '1 success results and 1 error results',
        'Mean 1.00 successful measurements per relay',
        '1/2 (50.00%) results were error-misc',
    ]
    captured = capsys.readouterr()
    lines = captured.out.strip().split('\n')
    for needed_line in needed_output_lines:
        assert needed_line in lines
    lines = [l.getMessage() for l in caplog.records]
    needed_log_lines = [
        'Keeping 2/2 read lines from {}/{}/{}.txt'.format(
            tmpdir, 'datadir',
            datetime.utcfromtimestamp(time.time()).date()),
        'Keeping 2/2 results after removing old ones',
        'Found a _ResultType.Error for the first time',
        'Found a _ResultType.Success for the first time',
    ]
    for needed_line in needed_log_lines:
        assert needed_line in lines
Example 23
def init_directory(dname):
    p = create_parser()
    args = p.parse_args('-d {} --log-level DEBUG init'.format(dname).split())
    conf = get_config(args)
    sbws.core.init.main(args, conf)
Example 24
def dotsbws_success_result_two_relays(empty_dotsbws_datadir):
    '''
    Creates a ~/.sbws with a couple of fresh ResultSuccess results for a
    couple of relays
    '''
    args = _PseudoArguments(directory=empty_dotsbws_datadir.name)
    conf = get_config(args)
    dd = conf['paths']['datadir']
    fp1 = 'A' * 40
    fp2 = 'C' * 40
    circ = [fp1, fp2]
    nick = 'CowSayWhat1'
    relay_ip = '169.254.100.1'
    server_ip = '169.254.100.3'
    scanner_nick = 'SBWSscanner'
    rtts = [5, 25]
    downloads = [{'duration': 4, 'amount': 40 * 1024}]
    t = time.time()
    relay = Result.Relay(fp1, nick, relay_ip)
    result = ResultSuccess(rtts,
                           downloads,
                           relay,
                           circ,
                           server_ip,
                           scanner_nick,
                           t=t)
    write_result_to_datadir(result, dd)

    rtts = [10, 20]
    downloads = [{'duration': 4, 'amount': 80 * 1024}]
    t = time.time()
    result = ResultSuccess(rtts,
                           downloads,
                           relay,
                           circ,
                           server_ip,
                           scanner_nick,
                           t=t)
    write_result_to_datadir(result, dd)

    fp1 = 'B' * 40
    circ = [fp1, fp2]
    nick = 'CowSayWhat2'
    relay_ip = '169.254.100.2'
    rtts = [50, 250]
    downloads = [{'duration': 4, 'amount': 400 * 1024}]
    t = time.time()
    relay = Result.Relay(fp1, nick, relay_ip)
    result = ResultSuccess(rtts,
                           downloads,
                           relay,
                           circ,
                           server_ip,
                           scanner_nick,
                           t=t)
    write_result_to_datadir(result, dd)

    rtts = [100, 200]
    downloads = [{'duration': 4, 'amount': 800 * 1024}]
    t = time.time()
    result = ResultSuccess(rtts,
                           downloads,
                           relay,
                           circ,
                           server_ip,
                           scanner_nick,
                           t=t)
    write_result_to_datadir(result, dd)

    return empty_dotsbws_datadir
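write_result_to_datadir is the last helper these fixtures assume. A minimal sketch of it, assuming sbws appends one serialized result per line to a file named after the measurement's UTC date (which is what the YYYY-MM-DD.txt names in the cleanup and stats tests suggest):

import os
from datetime import datetime


def write_result_to_datadir(result, dd):
    # Hypothetical helper: append the serialized result to the datadir file
    # for the UTC date of the measurement.
    day = datetime.utcfromtimestamp(result.time).date()
    fname = os.path.join(dd, '{}.txt'.format(day))
    with open(fname, 'a') as fd:
        fd.write('{}\n'.format(str(result)))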