def test_cached_microdesc_consensus(self):
    """
    Parses the cached-microdesc-consensus file in our data directory, counting
    the relays it contains and registering any flags or lines we don't
    recognize.
    """

    consensus_path = test.runner.get_runner().get_test_dir('cached-microdesc-consensus')

    if not os.path.exists(consensus_path):
        test.runner.skip(self, '(no cached-microdesc-consensus)')
        return
    elif stem.util.system.is_windows():
        test.runner.skip(self, '(unavailable on windows)')
        return

    count, reported_flags = 0, []

    with open(consensus_path, 'rb') as descriptor_file:
        for router in stem.descriptor.parse_file(
                descriptor_file,
                'network-status-microdesc-consensus-3 1.0',
                validate=True):
            count += 1

            for flag in router.flags:
                # Report each unrecognized flag only once, matching the
                # dedup behavior of test_cached_consensus.
                if flag not in stem.Flag and flag not in reported_flags:
                    register_new_capability('Flag (microdescriptor)', flag)
                    reported_flags.append(flag)

            for line in router.get_unrecognized_lines():
                register_new_capability('Microdescriptor Consensus Line', line)

    # Sanity check that there's at least a hundred relays. If not, this
    # probably isn't a real, complete tor consensus.
    self.assertTrue(count > 100)
def test_cached_microdesc_consensus(self):
    """
    Parses the cached-microdesc-consensus file in our data directory.

    Counts the relays and registers any unrecognized flags or lines.
    """

    consensus_path = test.runner.get_runner().get_test_dir('cached-microdesc-consensus')

    if not os.path.exists(consensus_path):
        test.runner.skip(self, '(no cached-microdesc-consensus)')
        return
    elif stem.util.system.is_windows():
        test.runner.skip(self, '(unavailable on windows)')
        return

    count, reported_flags = 0, []

    with open(consensus_path, 'rb') as descriptor_file:
        for router in stem.descriptor.parse_file(descriptor_file, 'network-status-microdesc-consensus-3 1.0', validate = True):
            count += 1

            for flag in router.flags:
                # Only register a given unrecognized flag once — without the
                # reported_flags check it would be re-registered for every
                # relay that carries it.
                if flag not in stem.Flag and flag not in reported_flags:
                    register_new_capability('Flag (microdescriptor)', flag)
                    reported_flags.append(flag)

            for line in router.get_unrecognized_lines():
                register_new_capability('Microdescriptor Consensus Line', line)

    # A real, complete consensus should have well over a hundred relays.
    self.assertTrue(count > 100)
def test_cached_descriptor(self):
    """
    Parses the cached descriptor file in our data directory, checking that it
    doesn't raise any validation issues and looking for unrecognized descriptor
    additions.
    """

    descriptor_path = test.runner.get_runner().get_test_dir('cached-descriptors')

    if not os.path.exists(descriptor_path):
        test.runner.skip(self, '(no cached descriptors)')
        return

    with open(descriptor_path, 'rb') as descriptor_file:
        for desc in stem.descriptor.parse_file(descriptor_file, 'server-descriptor 1.0', validate=True):
            # the following attributes should be deprecated, and not appear in the wild

            for deprecated_attr in ('read_history_end', 'write_history_end', 'eventdns', 'socks_port'):
                self.assertEqual(None, getattr(desc, deprecated_attr))

            for unrecognized_line in desc.get_unrecognized_lines():
                register_new_capability('Server Descriptor Line', unrecognized_line)
def test_cached_consensus(self):
    """
    Parses the cached-consensus file in our data directory.
    """

    consensus_path = test.runner.get_runner().get_test_dir('cached-consensus')

    if not os.path.exists(consensus_path):
        test.runner.skip(self, '(no cached-consensus)')
        return

    if stem.util.system.is_windows():
        # Unable to check memory usage on windows, so can't prevent hanging the
        # system if things go bad.

        test.runner.skip(self, '(unavailable on windows)')
        return

    relay_count = 0
    reported_flags = []

    with open(consensus_path, 'rb') as descriptor_file:
        router_iter = stem.descriptor.parse_file(descriptor_file, 'network-status-consensus-3 1.0', validate = True)

        for router in router_iter:
            relay_count += 1

            for flag in router.flags:
                if flag in stem.Flag or flag in reported_flags:
                    continue

                register_new_capability('Flag', flag)
                reported_flags.append(flag)

            for line in router.get_unrecognized_lines():
                register_new_capability('Consensus Line', line)

    # Sanity test that there's at least a hundred relays. If that's not the
    # case then this probably isn't a real, complete tor consensus.

    self.assertTrue(relay_count > 100)
def test_cached_descriptor(self):
    """
    Parses the cached extra-info descriptor file in our data directory,
    checking that it doesn't raise any validation issues and looking for
    unrecognized descriptor additions.
    """

    descriptor_path = test.runner.get_runner().get_test_dir('cached-extrainfo')

    if not os.path.exists(descriptor_path):
        test.runner.skip(self, '(no cached descriptors)')
        return

    with open(descriptor_path, 'rb') as descriptor_file:
        for desc in stem.descriptor.parse_file(descriptor_file, 'extra-info 1.0', validate = True):
            for line in desc.get_unrecognized_lines():
                register_new_capability('Extra-info Line', line)

            if desc.dir_v2_responses_unknown:
                self.fail('Unrecognized statuses on dirreq-v2-resp lines: %s' % desc.dir_v2_responses_unknown)
            elif desc.dir_v3_responses_unknown:
                self.fail('Unrecognized statuses on dirreq-v3-resp lines: %s' % desc.dir_v3_responses_unknown)
            elif desc.dir_v2_direct_dl_unknown:
                self.fail('Unrecognized stats on dirreq-v2-direct-dl lines: %s' % desc.dir_v2_direct_dl_unknown)
            elif desc.dir_v3_direct_dl_unknown:
                # BUG FIX: this previously reported dir_v2_direct_dl_unknown,
                # showing the wrong data in the failure message.
                self.fail('Unrecognized stats on dirreq-v3-direct-dl lines: %s' % desc.dir_v3_direct_dl_unknown)
            elif desc.dir_v2_tunneled_dl_unknown:
                self.fail('Unrecognized stats on dirreq-v2-tunneled-dl lines: %s' % desc.dir_v2_tunneled_dl_unknown)
def test_cached_microdescriptors(self):
    """
    Parses the cached microdescriptor file in our data directory, checking
    that it doesn't raise any validation issues and looking for unrecognized
    descriptor additions.
    """

    descriptor_path = test.runner.get_runner().get_test_dir('cached-microdescs')

    if not os.path.exists(descriptor_path):
        test.runner.skip(self, '(no cached microdescriptors)')
        return

    with open(descriptor_path, 'rb') as descriptor_file:
        descriptors = stem.descriptor.parse_file(descriptor_file, 'microdescriptor 1.0', validate = True)

        for desc in descriptors:
            for unrecognized_line in desc.get_unrecognized_lines():
                register_new_capability('Microdescriptor Line', unrecognized_line)
def test_cached_microdescriptors(self):
    """
    Parses the cached microdescriptor file in our data directory.

    Checks that parsing raises no validation issues and registers any
    unrecognized descriptor additions.
    """

    runner = test.runner.get_runner()
    descriptor_path = runner.get_test_dir('cached-microdescs')

    if not os.path.exists(descriptor_path):
        test.runner.skip(self, '(no cached microdescriptors)')
        return

    with open(descriptor_path, 'rb') as descriptor_file:
        for microdesc in stem.descriptor.parse_file(descriptor_file, 'microdescriptor 1.0', validate=True):
            for line in microdesc.get_unrecognized_lines():
                register_new_capability('Microdescriptor Line', line)
def test_cached_descriptor(self):
    """
    Parses the cached extra-info descriptor file in our data directory,
    checking that it doesn't raise any validation issues and looking for
    unrecognized descriptor additions.
    """

    descriptor_path = test.runner.get_runner().get_test_dir('cached-extrainfo')

    if not os.path.exists(descriptor_path):
        test.runner.skip(self, '(no cached descriptors)')
        return

    # Attributes that collect values stem couldn't interpret, paired with the
    # message to emit if any show up. Checked in the same order as the
    # original elif chain; self.fail() aborts at the first hit.
    #
    # BUG FIX: the dirreq-v3-direct-dl message previously interpolated the
    # v2 attribute's value rather than the v3 one.
    unknown_stat_checks = (
        ('dir_v2_responses_unknown', 'Unrecognized statuses on dirreq-v2-resp lines: %s'),
        ('dir_v3_responses_unknown', 'Unrecognized statuses on dirreq-v3-resp lines: %s'),
        ('dir_v2_direct_dl_unknown', 'Unrecognized stats on dirreq-v2-direct-dl lines: %s'),
        ('dir_v3_direct_dl_unknown', 'Unrecognized stats on dirreq-v3-direct-dl lines: %s'),
        ('dir_v2_tunneled_dl_unknown', 'Unrecognized stats on dirreq-v2-tunneled-dl lines: %s'),
    )

    with open(descriptor_path, 'rb') as descriptor_file:
        for desc in stem.descriptor.parse_file(descriptor_file, 'extra-info 1.0', validate=True):
            for line in desc.get_unrecognized_lines():
                register_new_capability('Extra-info Line', line)

            for attr, failure_msg in unknown_stat_checks:
                unknown_values = getattr(desc, attr)

                if unknown_values:
                    self.fail(failure_msg % unknown_values)
def test_cached_consensus(self):
    """
    Parses the cached-consensus file in our data directory.
    """

    consensus_path = test.runner.get_runner().get_test_dir('cached-consensus')

    if not os.path.exists(consensus_path):
        test.runner.skip(self, '(no cached-consensus)')
        return
    elif stem.util.system.is_windows():
        # Unable to check memory usage on windows, so can't prevent hanging the
        # system if things go bad.

        test.runner.skip(self, '(unavailable on windows)')
        return

    relay_count = 0
    reported_flags = []

    with open(consensus_path, 'rb') as descriptor_file:
        consensus_entries = stem.descriptor.parse_file(
            descriptor_file,
            'network-status-consensus-3 1.0',
            validate=True,
        )

        for router in consensus_entries:
            relay_count += 1

            for flag in router.flags:
                if flag in stem.Flag or flag in reported_flags:
                    continue

                register_new_capability('Flag', flag)
                reported_flags.append(flag)

            for line in router.get_unrecognized_lines():
                # Suppress repeats by the line's keyword rather than its full content.
                register_new_capability('Consensus Line', line, suppression_token=line.split()[0])

    # Sanity test that there's at least a hundred relays. If that's not the
    # case then this probably isn't a real, complete tor consensus.

    self.assertTrue(relay_count > 100)
def test_cached_descriptor(self):
    """
    Parses the cached descriptor file in our data directory.

    Checks that parsing raises no validation issues, that deprecated
    attributes are absent, and registers unrecognized descriptor additions.
    """

    runner = test.runner.get_runner()
    descriptor_path = runner.get_test_dir("cached-descriptors")

    if not os.path.exists(descriptor_path):
        test.runner.skip(self, "(no cached descriptors)")
        return

    with open(descriptor_path, "rb") as descriptor_file:
        for server_desc in stem.descriptor.parse_file(descriptor_file, "server-descriptor 1.0", validate=True):
            # the following attributes should be deprecated, and not appear in the wild
            self.assertEqual(None, server_desc.read_history_end)
            self.assertEqual(None, server_desc.write_history_end)
            self.assertEqual(None, server_desc.eventdns)
            self.assertEqual(None, server_desc.socks_port)

            unrecognized = server_desc.get_unrecognized_lines()

            for line in unrecognized:
                register_new_capability("Server Descriptor Line", line)