def test_ResultSuccess_from_dict(time_mock):
    t = 2000
    time_mock.side_effect = monotonic_time(start=t)
    fp1 = 'A' * 40
    fp2 = 'Z' * 40
    circ = [fp1, fp2]
    dest_url = 'http://example.com/sbws.bin'
    scanner_nick = 'sbwsscanner'
    nick = 'Mooooooo'
    relay_ip = '169.254.100.1'
    relay = Result.Relay(fp1, nick, relay_ip)
    rtts = [5, 25]
    downloads = [{'duration': 4, 'amount': 40}]
    r1 = ResultSuccess(rtts, downloads, relay, circ, dest_url, scanner_nick)
    d = {
        'rtts': rtts,
        'downloads': downloads,
        'fingerprint': fp1,
        'nickname': nick,
        'address': relay_ip,
        'circ': circ,
        'dest_url': dest_url,
        'scanner': scanner_nick,
        'version': RESULT_VERSION,
        'type': _ResultType.Success,
        'time': t,
    }
    r2 = Result.from_dict(d)
    assert isinstance(r1, ResultSuccess)
    assert isinstance(r2, ResultSuccess)
    assert str(r1) == str(r2)


def test_ResultErrorAuth_from_dict(time_mock):
    t = 2000
    time_mock.side_effect = monotonic_time(start=t)
    fp1 = 'A' * 40
    fp2 = 'Z' * 40
    circ = [fp1, fp2]
    dest_url = 'http://example.com/sbws.bin'
    scanner_nick = 'sbwsscanner'
    nick = 'Mooooooo'
    relay_ip = '169.254.100.1'
    relay = Result.Relay(fp1, nick, relay_ip)
    msg = 'aaaaayyyyyy bb'
    r1 = ResultErrorAuth(relay, circ, dest_url, scanner_nick, msg=msg)
    d = {
        'msg': msg,
        'fingerprint': fp1,
        'nickname': nick,
        'address': relay_ip,
        'circ': circ,
        'dest_url': dest_url,
        'scanner': scanner_nick,
        'version': RESULT_VERSION,
        'type': _ResultType.ErrorAuth,
        'time': t,
    }
    r2 = Result.from_dict(d)
    assert isinstance(r1, ResultErrorAuth)
    assert isinstance(r2, ResultErrorAuth)
    assert str(r1) == str(r2)


def test_ResultSuccess(time_mock):
    t = 2000
    time_mock.side_effect = monotonic_time(start=t)
    fp1 = 'A' * 40
    fp2 = 'Z' * 40
    circ = [fp1, fp2]
    dest_url = 'http://example.com/sbws.bin'
    scanner_nick = 'sbwsscanner'
    nick = 'Mooooooo'
    relay_ip = '169.254.100.1'
    relay = Result.Relay(fp1, nick, relay_ip)
    rtts = [5, 25]
    downloads = [{'duration': 4, 'amount': 40}]
    r1 = ResultSuccess(rtts, downloads, relay, circ, dest_url, scanner_nick)
    r2 = ResultSuccess(rtts, downloads, relay, circ, dest_url, scanner_nick,
                       t=t)
    assert r1.downloads == downloads
    assert r1.rtts == rtts
    assert r1.nickname == nick
    assert r1.time == t
    assert r1.fingerprint == fp1
    assert r1.scanner == scanner_nick
    assert r1.type == _ResultType.Success
    assert r1.address == relay_ip
    assert r1.circ == circ
    assert r1.dest_url == dest_url
    assert r1.version == RESULT_VERSION
    assert str(r1) == str(r2)


def test_ResultErrorAuth(time_mock):
    t = 2000
    time_mock.side_effect = monotonic_time(start=t)
    fp1 = 'A' * 40
    fp2 = 'Z' * 40
    circ = [fp1, fp2]
    dest_url = 'http://example.com/sbws.bin'
    scanner_nick = 'sbwsscanner'
    nick = 'Mooooooo'
    relay_ip = '169.254.100.1'
    relay = Result.Relay(fp1, nick, relay_ip)
    msg = 'aaaaayyyyyy bb'
    r1 = ResultErrorAuth(relay, circ, dest_url, scanner_nick, msg=msg)
    r2 = ResultErrorAuth(relay, circ, dest_url, scanner_nick, msg=msg, t=t)
    assert r1.msg == msg
    assert r1.nickname == nick
    assert r1.time == t
    assert r1.fingerprint == fp1
    assert r1.scanner == scanner_nick
    assert r1.type == _ResultType.ErrorAuth
    assert r1.address == relay_ip
    assert r1.circ == circ
    assert r1.dest_url == dest_url
    assert r1.version == RESULT_VERSION
    assert str(r1) == str(r2)
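

# These tests drive the mocked clock with a monotonic_time() helper that is
# assumed to be provided elsewhere in the test suite (e.g. a shared conftest
# or test utility module). The sketch below is hypothetical and named
# _monotonic_time_sketch so it does not shadow the real helper: a generator
# suitable for Mock.side_effect, so every call to the patched time function
# returns a strictly increasing timestamp starting at ``start``.
def _monotonic_time_sketch(start=2000.0, interval=1.0):
    while True:
        yield start
        start += interval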


def test_cleanup_delete_barely_rotten(time_mock, empty_dotsbws_datadir,
                                      caplog, parser):
    caplog.set_level(logging.DEBUG)
    dotsbws = empty_dotsbws_datadir
    args = parser.parse_args('-d {} --log-level DEBUG cleanup'.format(
        dotsbws.name).split())
    conf = get_config(args)
    conf['general']['data_period'] = '1'
    conf['cleanup']['stale_days'] = '5'
    conf['cleanup']['rotten_days'] = '20'
    now = 1443571200  # 1,443,571,200 is 30 Sep 2015 00:00:00 UTC
    time_mock.side_effect = monotonic_time(start=now)
    dd = os.path.join(dotsbws.name, 'datadir')
    fname_rotten1 = os.path.join(dd, '2015-09-09.txt')
    fname_rotten2 = os.path.join(dd, '2015-09-10.txt')
    fname_leave = os.path.join(dd, '2015-09-11.txt')
    touch_file(fname_rotten1)
    touch_file(fname_rotten2)
    touch_file(fname_leave)
    sbws.core.cleanup.main(args, conf)
    expected_fnames = [
        fname_leave + '.gz',
        os.path.join(dd, '.lockfile'),
    ]
    existing_fnames = []
    for root, dirs, files in os.walk(dd):
        for fname in files:
            existing_fnames.append(os.path.join(root, fname))
    expected_fnames.sort()
    existing_fnames.sort()
    assert expected_fnames == existing_fnames


def test_cleanup_only_delete_rotten(time_mock, empty_dotsbws_datadir,
                                    caplog, parser):
    caplog.set_level(logging.DEBUG)
    dotsbws = empty_dotsbws_datadir
    args = parser.parse_args('-d {} --log-level DEBUG cleanup'.format(
        dotsbws.name).split())
    conf = get_config(args)
    conf['general']['data_period'] = '1'
    conf['cleanup']['stale_days'] = '10'
    conf['cleanup']['rotten_days'] = str(365 * 2)
    now = 1041379200  # 1,041,379,200 is 1 Jan 2003 00:00:00 UTC
    time_mock.side_effect = monotonic_time(start=now)
    j = os.path.join
    dd = j(dotsbws.name, 'datadir')
    sub_a = j(dd, 'a')
    sub_b = j(dd, 'b')
    sub_ab = j(dd, 'a', 'b')
    for dname in [sub_a, sub_b, sub_ab]:
        os.makedirs(dname, exist_ok=True)
    should_delete_fnames = [
        j(dd, '2000-01-01aaaa.txt'),
        j(sub_a, '2000-10-01bbbb.txt'),
        j(sub_b, '2000-10-10-cccc.txt'),
        j(sub_a, '2000-10-10.dddd.txt'),
        j(sub_a, '2000-10-11.eeee.txt.gz'),
        j(dd, '2000-10-12.txt.gz'),
        j(sub_ab, '2000-11-30.txt'),
    ]
    should_ignore_fnames = [
        j(dd, '2002-12-31.txt'),  # too new, should be ignored
        j(dd, '2003-01-01.txt'),  # today, should be ignored
        j(dd, '2003-02-10.txt'),  # in the future, should be ignored
        j(sub_b, '2000-10-10.nottxt'),  # wrong ext, should be ignored
        j(sub_a, '200j-10-10.txt'),  # not YYYY-MM-DD*.txt, should be ignored
        j(dd, '1999-1*-11.txt.gz'),  # not YYYY-MM-DD*.txt.gz, should ignore
    ]
    for fname in should_ignore_fnames + should_delete_fnames:
        touch_file(fname)
    sbws.core.cleanup.main(args, conf)
    expected_fnames = should_ignore_fnames + [os.path.join(dd, '.lockfile')]
    existing_fnames = []
    for root, dirs, files in os.walk(dd):
        for fname in files:
            existing_fnames.append(os.path.join(root, fname))
    expected_fnames.sort()
    existing_fnames.sort()
    assert expected_fnames == existing_fnames
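

# The cleanup tests above call a touch_file() helper that is assumed to come
# from the sbws test utilities. A hypothetical stand-in is sketched below,
# named _touch_file_sketch so it does not shadow the real helper: it simply
# creates an empty file at the given path (the tests create any needed parent
# directories themselves with os.makedirs()).
def _touch_file_sketch(fname):
    with open(fname, 'a'):
        pass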


def test_Result(time_mock):
    '''
    A standard Result should not be convertible to a string because
    Result.type is not implemented.
    '''
    time_mock.side_effect = monotonic_time()
    fp1 = 'A' * 40
    fp2 = 'Z' * 40
    circ = [fp1, fp2]
    dest_url = 'http://example.com/sbws.bin'
    scanner_nick = 'sbwsscanner'
    nick = 'Mooooooo'
    relay_ip = '169.254.100.1'
    relay = Result.Relay(fp1, nick, relay_ip)
    r = Result(relay, circ, dest_url, scanner_nick)
    try:
        str(r)
    except NotImplementedError:
        pass
    else:
        assert None, 'Should have failed'


def test_stats_fresh_results(time_mock, tmpdir, capsys, caplog):
    '''
    An initialized .sbws directory with a fresh error and fresh success
    should have some exciting stats and exit cleanly
    '''
    caplog.set_level(logging.DEBUG)
    init_directory(tmpdir)
    start = 1524769441
    time_mock.side_effect = monotonic_time(start=start)
    add_two_fresh_results(tmpdir, start - 1)
    p = create_parser()
    args = p.parse_args(
        '-d {} --log-level DEBUG stats --error-types'.format(tmpdir).split())
    conf = get_config(args)
    sbws.core.stats.main(args, conf)
    needed_output_lines = [
        '1 relays have recent results',
        '1 success results and 1 error results',
        'Mean 1.00 successful measurements per relay',
        '1/2 (50.00%) results were error-misc',
    ]
    captured = capsys.readouterr()
    lines = captured.out.strip().split('\n')
    for needed_line in needed_output_lines:
        assert needed_line in lines
    lines = [record.getMessage() for record in caplog.records]
    needed_log_lines = [
        'Keeping 2/2 read lines from {}/{}/{}.txt'.format(
            tmpdir, 'datadir',
            datetime.utcfromtimestamp(time.time()).date()),
        'Keeping 2/2 results after removing old ones',
        'Found a _ResultType.Error for the first time',
        'Found a _ResultType.Success for the first time',
    ]
    for needed_line in needed_log_lines:
        assert needed_line in lines