Пример #1
0
    def test_parse_descriptors_parseExtraInfoFiles_unparseable(self):
        """Test parsing three extrainfo descriptors: one is a valid descriptor,
        one is an older duplicate, and one is unparseable (it has a bad
        geoip-db-digest line). The parsing should cause
        _copyUnparseableDescriptorFile() to be called and create a copy
        based on the filename.
        """
        # Give it a bad geoip-db-digest:
        unparseable = BRIDGE_EXTRA_INFO_DESCRIPTOR.replace(
            b"MiserLandfalls E08B324D20AD0A13E114F027AB9AC3F32CA696A0",
            b"DontParseMe F373CC1D86D82267F1F1F5D39470F0E0A022122E").replace(
                b"geoip-db-digest 09A0E093100B279AD9CFF47A67B13A21C6E1483F",
                b"geoip-db-digest FOOOOOOOOOOOOOOOOOOBAAAAAAAAAAAAAAAAAARR")

        descFileOne = self.writeTestDescriptorsToFile(
            'cached-extrainfo', BRIDGE_EXTRA_INFO_DESCRIPTOR)
        descFileTwo = self.writeTestDescriptorsToFile(
            'cached-extrainfo.2',
            BRIDGE_EXTRA_INFO_DESCRIPTOR_NEWEST_DUPLICATE)
        descFileThree = self.writeTestDescriptorsToFile(
            'copy-unparseable-test', unparseable)

        descriptors.parseExtraInfoFiles(descFileOne, descFileTwo,
                                        descFileThree)

        matchingFiles = glob.glob("*copy-unparseable-test.unparseable")
        self.assertEqual(len(matchingFiles), 1)

        newFile = matchingFiles[-1]
        self.assertTrue(os.path.isfile(newFile))

        timestamp = datetime.datetime.strptime(
            newFile.split("_")[0], "%Y-%m-%d-%H:%M:%S")
        # Capture "now" once, via the class (not via the parsed instance,
        # which confusingly also exposes the now() classmethod), so all
        # three comparisons below use the same reference moment:
        now = datetime.datetime.now()

        # The timestamp should be roughly today (unless we just passed
        # midnight, then it might be +/- 1):
        self.assertApproximates(now.day, timestamp.day, 1)

        # The timestamp should be roughly this hour (+/- 1):
        self.assertApproximates(now.hour, timestamp.hour, 1)

        # The timestamp should be roughly this minute (+/- 2):
        self.assertApproximates(now.minute, timestamp.minute, 2)

        self.assertTrue(self.removeTestDescriptorsFile(descFileOne))
        self.assertTrue(self.removeTestDescriptorsFile(descFileTwo))
        self.assertTrue(self.removeTestDescriptorsFile(descFileThree))
        self.assertTrue(
            self.removeTestDescriptorsFile(os.path.abspath(newFile)))
 def test_parse_descriptors_parseExtraInfoFiles_benchmark_100_bridges(self):
     """Benchmark test for ``b.p.descriptors.parseExtraInfoFiles``."""
     print()
     # Time the parser over 100 bridges with 1..5 duplicates apiece:
     for duplicates in range(1, 6):
         benchFiles = self.createDuplicatesForBenchmark(b=100, n=duplicates)
         with Benchmarker():
             routers = descriptors.parseExtraInfoFiles(*benchFiles)
Пример #3
0
 def test_parse_descriptors_parseExtraInfoFiles_empty_file(self):
     """Test parsing an empty extrainfo descriptors file."""
     # An empty file should parse cleanly into an empty mapping:
     emptyDescFile = self.writeTestDescriptorsToFile('cached-extrainfo', b'')
     parsed = descriptors.parseExtraInfoFiles(emptyDescFile)
     self.assertIsInstance(parsed, dict)
     self.assertEqual(len(parsed), 0)
     self.assertTrue(self.removeTestDescriptorsFile(emptyDescFile))
Пример #4
0
    def test_parse_descriptors_parseExtraInfoFiles_invalid(self):
        """Test parsing three extrainfo descriptors: one is a valid descriptor,
        and the other is completely invalid (lacking a signature).
        BridgeDB should parse the valid descriptor if it preceeds the mangled
        one and then terminate.
        """
        # A lone fingerprint line with no signature is completely invalid:
        invalidDescriptor = b'DontParseMe F373CC1D86D82267F1F1F5D39470F0E0A022122E'

        descFile = self.writeTestDescriptorsToFile(
            'cached-extrainfo', BRIDGE_EXTRA_INFO_DESCRIPTOR_NEWEST_DUPLICATE,
            invalidDescriptor, BRIDGE_EXTRA_INFO_DESCRIPTOR)
        routers = descriptors.parseExtraInfoFiles(descFile)

        self.assertIsInstance(routers, dict)
        self.assertEqual(len(routers), 2, (
            "There were three extrainfo descriptors: one lacks a signature, "
            "and one was duplicate. Given our trust-but-verify policy that should "
            "give us two descriptors"))

        # Check that the valid bridge (not the mangled one) was kept:
        firstBridge = list(routers.values())[0]
        self.assertEqual(
            firstBridge.fingerprint, "E08B324D20AD0A13E114F027AB9AC3F32CA696A0",
            ("It looks like the (supposedly) unparseable bridge was returned "
             "instead of the valid one!"))
        self.assertTrue(self.removeTestDescriptorsFile(descFile))
 def test_parse_descriptors_parseExtraInfoFiles_return_type(self):
     """The return type of ``b.p.descriptors.parseExtraInfoFiles``
     should be a dictionary (after deduplication).
     """
     rawDescriptor = io.BytesIO(BRIDGE_EXTRA_INFO_DESCRIPTOR)
     parsed = descriptors.parseExtraInfoFiles(rawDescriptor)
     self.assertIsInstance(parsed, dict)
Пример #6
0
 def test_parse_descriptors_parseExtraInfoFiles_return_type(self):
     """The return type of ``b.p.descriptors.parseExtraInfoFiles``
     should be a dictionary (after deduplication).
     """
     # Feed the parser an in-memory descriptor and inspect the result type:
     descriptorStream = io.BytesIO(BRIDGE_EXTRA_INFO_DESCRIPTOR)
     result = descriptors.parseExtraInfoFiles(descriptorStream)
     self.assertIsInstance(result, dict)
Пример #7
0
 def test_parse_descriptors_parseExtraInfoFiles_benchmark_100_bridges(self):
     """Benchmark test for ``b.p.descriptors.parseExtraInfoFiles``."""
     print()
     # Measure parse time as the duplicate count grows from 1 to 5:
     for n in range(1, 6):
         files = self.createDuplicatesForBenchmark(b=100, n=n)
         with Benchmarker():
             routers = descriptors.parseExtraInfoFiles(*files)
Пример #8
0
 def test_parse_descriptors_parseExtraInfoFiles_no_validate(self):
     """Test for ``b.p.descriptors.parseExtraInfoFiles`` with
     descriptor validation disabled.
     """
     # With validate=False, at least the one descriptor should come back:
     rawFile = io.BytesIO(BRIDGE_EXTRA_INFO_DESCRIPTOR)
     parsed = descriptors.parseExtraInfoFiles(rawFile, validate=False)
     self.assertGreaterEqual(len(parsed), 1)
 def test_parse_descriptors_parseExtraInfoFiles_no_validate(self):
     """Test for ``b.p.descriptors.parseExtraInfoFiles`` with
     descriptor validation disabled.
     """
     descriptorStream = io.BytesIO(BRIDGE_EXTRA_INFO_DESCRIPTOR)
     result = descriptors.parseExtraInfoFiles(descriptorStream,
                                              validate=False)
     self.assertGreaterEqual(len(result), 1)
Пример #10
0
    def test_parse_descriptors_parseExtraInfoFiles_three_files(self):
        """Test for ``b.p.descriptors.parseExtraInfoFiles`` with three
        bridge extrainfo files, and check that only the newest extrainfo
        descriptor is used.
        """
        firstFile = self.writeTestDescriptorsToFile(
            'cached-extrainfo', BRIDGE_EXTRA_INFO_DESCRIPTOR_NEWER_DUPLICATE)
        secondFile = self.writeTestDescriptorsToFile(
            'cached-extrainfo.2', BRIDGE_EXTRA_INFO_DESCRIPTOR)
        thirdFile = self.writeTestDescriptorsToFile(
            'cached-extrainfo.3',
            BRIDGE_EXTRA_INFO_DESCRIPTOR_NEWEST_DUPLICATE)
        routers = descriptors.parseExtraInfoFiles(firstFile, secondFile,
                                                  thirdFile)

        # We shouldn't have duplicates:
        self.assertEqual(len(routers), 1,
                         "We shouldn't have any duplicate descriptors.")

        # We should only have the newest descriptor:
        newestBridge = list(routers.values())[0]
        expectedPublished = datetime.datetime.strptime("2014-12-04 03:10:25",
                                                       "%Y-%m-%d %H:%M:%S")
        self.assertEqual(
            newestBridge.published, expectedPublished,
            "We should have the newest available descriptor for this router.")
        self.assertTrue(self.removeTestDescriptorsFile(firstFile))
        self.assertTrue(self.removeTestDescriptorsFile(secondFile))
        self.assertTrue(self.removeTestDescriptorsFile(thirdFile))
Пример #11
0
 def test_parse_descriptors_parseExtraInfoFiles_ed25519(self):
     """Test parsing an extrainfo descriptor with Ed25519 keys/certificates.
     """
     # Ed25519 material should not trip up the parser:
     ed25519File = self.writeTestDescriptorsToFile(
         'cached-extrainfo', BRIDGE_EXTRA_INFO_DESCRIPTOR_ED25519)
     parsed = descriptors.parseExtraInfoFiles(ed25519File)
     self.assertEqual(len(parsed), 1)
     self.assertTrue(self.removeTestDescriptorsFile(ed25519File))
 def test_parse_descriptors_parseExtraInfoFiles_has_BridgeExtraInfoDescriptor(self):
     """The return of ``b.p.descriptors.parseExtraInfoFiles`` should
     contain ``BridgeExtraInfoDescriptor``s.
     """
     descFile = io.BytesIO(BRIDGE_EXTRA_INFO_DESCRIPTOR)
     routers = descriptors.parseExtraInfoFiles(descFile)
     # dict.values() is a non-subscriptable view under Python 3, so it must
     # be materialised as a list before indexing (as the sibling tests do):
     bridge = list(routers.values())[0]
     self.assertIsInstance(bridge, RelayExtraInfoDescriptor)
Пример #13
0
 def test_parse_descriptors_parseExtraInfoFiles_return_type(self):
     """The return type of ``b.p.descriptors.parseExtraInfoFiles``
     should be a dictionary (after deduplication).
     """
     # Write the descriptor to a real file, parse it, and check the type:
     cachedFile = self.writeTestDescriptorsToFile(
         'cached-extrainfo', BRIDGE_EXTRA_INFO_DESCRIPTOR)
     parsed = descriptors.parseExtraInfoFiles(cachedFile)
     self.assertIsInstance(parsed, dict)
     self.assertTrue(self.removeTestDescriptorsFile(cachedFile))
    def test_parse_descriptors_parseExtraInfoFiles_benchmark_1000_bridges(self):
        """Benchmark test for ``b.p.descriptors.parseExtraInfoFiles``."""
        # Skipped by default; everything below the raise never runs unless
        # the skip is removed by hand:
        raise SkipTest(("This test can take several minutes to complete. "
                        "Run it on your own free time."))

        print()
        for duplicates in range(1, 6):
            benchFiles = self.createDuplicatesForBenchmark(b=1000,
                                                           n=duplicates)
            with Benchmarker():
                routers = descriptors.parseExtraInfoFiles(*benchFiles)
Пример #15
0
    def test_parse_descriptors_deduplicate_identical_timestamps(self):
        """Parsing two descriptors for the same bridge with identical
        timestamps should log a ``b.p.descriptors.DescriptorWarning``
        and retain only one copy of the descriptor.
        """
        # Two in-memory copies of the very same descriptor:
        firstCopy = io.BytesIO(BRIDGE_EXTRA_INFO_DESCRIPTOR)
        secondCopy = io.BytesIO(BRIDGE_EXTRA_INFO_DESCRIPTOR)
        parsed = descriptors.parseExtraInfoFiles(firstCopy, secondCopy)

        self.assertEqual(len(parsed), 1)
    def test_parse_descriptors_deduplicate_identical_timestamps(self):
        """Parsing two descriptors for the same bridge with identical
        timestamps should log a ``b.p.descriptors.DescriptorWarning``
        and retain only one copy of the descriptor.
        """
        duplicateStreams = (io.BytesIO(BRIDGE_EXTRA_INFO_DESCRIPTOR),
                            io.BytesIO(BRIDGE_EXTRA_INFO_DESCRIPTOR))
        routers = descriptors.parseExtraInfoFiles(*duplicateStreams)

        self.assertEqual(len(routers), 1)
Пример #17
0
 def test_parse_descriptors_parseExtraInfoFiles_has_BridgeExtraInfoDescriptor(
         self):
     """The return of ``b.p.descriptors.parseExtraInfoFiles`` should
     contain ``BridgeExtraInfoDescriptor``s.
     """
     cachedFile = self.writeTestDescriptorsToFile(
         'cached-extrainfo', BRIDGE_EXTRA_INFO_DESCRIPTOR)
     parsed = descriptors.parseExtraInfoFiles(cachedFile)
     # Materialise the values view so the first element can be taken:
     firstBridge = list(parsed.values())[0]
     self.assertIsInstance(firstBridge, RelayExtraInfoDescriptor)
     self.assertTrue(self.removeTestDescriptorsFile(cachedFile))
    def test_parse_descriptors_parseExtraInfoFiles_benchmark_10000_bridges(self):
        """Benchmark test for ``b.p.descriptors.parseExtraInfoFiles``.
        The algorithm should grow linearly in the number of duplicates.
        """
        # Skipped by default; the benchmark below only runs if the skip is
        # removed by hand:
        raise SkipTest(("This test takes ~7 minutes to complete. "
                        "Run it on your own free time."))

        print()
        for duplicates in range(1, 6):
            benchFiles = self.createDuplicatesForBenchmark(b=10000,
                                                           n=duplicates)
            with Benchmarker():
                routers = descriptors.parseExtraInfoFiles(*benchFiles)
Пример #19
0
    def test_parse_descriptors_deduplicate_identical_timestamps(self):
        """Parsing two descriptors for the same bridge with identical
        timestamps should log a ``b.p.descriptors.DescriptorWarning``
        and retain only one copy of the descriptor.
        """
        # Write the same descriptor twice into one cache file:
        cachedFile = self.writeTestDescriptorsToFile(
            'cached-extrainfo', BRIDGE_EXTRA_INFO_DESCRIPTOR,
            BRIDGE_EXTRA_INFO_DESCRIPTOR)
        parsed = descriptors.parseExtraInfoFiles(cachedFile)

        self.assertEqual(len(parsed), 1)
        self.assertTrue(self.removeTestDescriptorsFile(cachedFile))
Пример #20
0
    def test_parse_descriptors_parseExtraInfoFiles_unparseable_and_parseable(
            self):
        """Test parsing four extrainfo descriptors: two are valid descriptors,
        one is an older duplicate of one of the valid descriptors, and fails
        Stem's validation. Since the bridge authority already does validation,
        we should except the unparseable descriptor. There should be three
        descriptors returned after parsing.
        """
        # Mess up the bridge-ip-transports line (and swap in a distinct
        # nickname/fingerprint so this descriptor can be told apart):
        unparseable = BRIDGE_EXTRA_INFO_DESCRIPTOR.replace(
            b"MiserLandfalls E08B324D20AD0A13E114F027AB9AC3F32CA696A0",
            b"DontParseMe F373CC1D86D82267F1F1F5D39470F0E0A022122E").replace(
                b"bridge-ip-transports <OR>=8", b"bridge-ip-transports <OR>")

        # A second valid descriptor with its own nickname/fingerprint:
        parseable = BRIDGE_EXTRA_INFO_DESCRIPTOR.replace(
            b"MiserLandfalls E08B324D20AD0A13E114F027AB9AC3F32CA696A0",
            b"ImOkWithBeingParsed 2B5DA67FBA13A6449DE625673B7AE9E3AA7DF75F")

        # This must be a "real" file or _copyUnparseableDescriptorFile() will
        # raise an AttributeError saying:
        # '_io.BytesIO' object has no attribute 'rpartition'"
        descFile = self.writeTestDescriptorsToFile(
            "unparseable-descriptor.new", BRIDGE_EXTRA_INFO_DESCRIPTOR,
            BRIDGE_EXTRA_INFO_DESCRIPTOR_NEWEST_DUPLICATE, unparseable,
            parseable)
        routers = descriptors.parseExtraInfoFiles(descFile)

        self.assertIsInstance(routers, dict)
        self.assertEqual(
            len(routers), 3,
            ("There were four extrainfo descriptors: one was a duplicate, "
             "and one will throw a validation error, but all three "
             "descriptors should be returned."))

        # The validation-failing descriptor is still accepted:
        self.assertIn(
            "F373CC1D86D82267F1F1F5D39470F0E0A022122E", routers.keys(),
            "The 'unparseable' descriptor was not returned by the parser.")

        self.assertIn(
            "E08B324D20AD0A13E114F027AB9AC3F32CA696A0", routers.keys(),
            ("A bridge extrainfo which had duplicates was completely missing "
             "from the data which the parser returned."))
        # Of the two duplicates, only the newest publication should survive:
        self.assertEqual(
            routers["E08B324D20AD0A13E114F027AB9AC3F32CA696A0"].published,
            datetime.datetime.strptime("2014-12-04 03:10:25",
                                       "%Y-%m-%d %H:%M:%S"),
            "We should have the newest available descriptor for this router.")

        self.assertIn(
            "2B5DA67FBA13A6449DE625673B7AE9E3AA7DF75F", routers.keys(),
            "The 'parseable' descriptor wasn't returned by the parser.")
        self.assertTrue(self.removeTestDescriptorsFile(descFile))
    def test_parse_descriptors_parseExtraInfoFiles_one_file(self):
        """Test for ``b.p.descriptors.parseExtraInfoFiles`` with only one
        bridge extrainfo file.
        """
        descFile = io.BytesIO(BRIDGE_EXTRA_INFO_DESCRIPTOR)
        routers = descriptors.parseExtraInfoFiles(descFile)
        # dict.values() is a non-subscriptable view under Python 3; it must
        # be turned into a list before indexing:
        bridge = list(routers.values())[0]

        # The number of transports we parsed should be equal to the number of
        # 'transport' lines in the descriptor.  The descriptor constant is
        # bytes, so count() needs a bytes needle (a str argument would raise
        # TypeError):
        self.assertEqual(len(bridge.transport),
                         BRIDGE_EXTRA_INFO_DESCRIPTOR.count(b'transport '))

        self.assertEqual(bridge.fingerprint, self.expectedFprBridge0)
    def test_parse_descriptors_parseExtraInfoFiles_benchmark_10000_bridges(
            self):
        """Benchmark test for ``b.p.descriptors.parseExtraInfoFiles``.
        The algorithm should grow linearly in the number of duplicates.
        """
        # Always skipped; remove the raise to actually run the benchmark:
        raise SkipTest(("This test takes ~7 minutes to complete. "
                        "Run it on your own free time."))

        print()
        for n in range(1, 6):
            files = self.createDuplicatesForBenchmark(b=10000, n=n)
            with Benchmarker():
                routers = descriptors.parseExtraInfoFiles(*files)
    def test_parse_descriptors_parseExtraInfoFiles_missing_signature(self):
        """Calling parseExtraInfoFiles() with a descriptor which is
        missing the signature should return zero parsed descriptors.
        """
        # Remove the signature.  BRIDGE_EXTRA_INFO_DESCRIPTOR is bytes, so
        # the split() separator must be bytes as well (a str separator would
        # raise TypeError under Python 3):
        BEGIN_SIG = b'-----BEGIN SIGNATURE-----'
        unparseable, _ = BRIDGE_EXTRA_INFO_DESCRIPTOR.split(BEGIN_SIG)
        # This must be a "real" file or _copyUnparseableDescriptorFile() will
        # raise an AttributeError saying:
        # '_io.BytesIO' object has no attribute 'rpartition'"
        descFileOne = self.writeTestDescriptorsToFile("missing-signature", unparseable)
        routers = descriptors.parseExtraInfoFiles(descFileOne)

        self.assertEqual(len(routers), 0)
Пример #24
0
    def test_parse_descriptors_parseExtraInfoFiles_one_file(self):
        """Test for ``b.p.descriptors.parseExtraInfoFiles`` with only one
        bridge extrainfo file.
        """
        singleFile = io.BytesIO(BRIDGE_EXTRA_INFO_DESCRIPTOR)
        parsed = descriptors.parseExtraInfoFiles(singleFile)
        firstBridge = list(parsed.values())[0]

        # The number of transports we parsed should be equal to the number of
        # 'transport' lines in the descriptor:
        expectedTransports = BRIDGE_EXTRA_INFO_DESCRIPTOR.count(b'transport ')
        self.assertEqual(len(firstBridge.transport), expectedTransports)

        self.assertEqual(firstBridge.fingerprint, self.expectedFprBridge0)
Пример #25
0
    def test_parse_descriptors_parseExtraInfoFiles_bad_signature_footer(self):
        """Calling parseExtraInfoFiles() with a descriptor which has a
        signature with a bad "-----END SIGNATURE-----" footer should return
        zero parsed descriptors.
        """
        # Corrupt only the signature footer line:
        corrupted = BRIDGE_EXTRA_INFO_DESCRIPTOR.replace(
            b'-----END SIGNATURE-----',
            b'-----END SIGNATURE FOR REALZ-----')
        # This must be a "real" file or _copyUnparseableDescriptorFile() will
        # raise an AttributeError saying:
        # '_io.BytesIO' object has no attribute 'rpartition'"
        badFooterFile = self.writeTestDescriptorsToFile("bad-signature-footer",
                                                        corrupted)
        parsed = descriptors.parseExtraInfoFiles(badFooterFile)

        self.assertEqual(len(parsed), 0)
    def test_parse_descriptors_parseExtraInfoFiles_bad_signature_footer(self):
        """Calling parseExtraInfoFiles() with a descriptor which has a
        signature with a bad "-----END SIGNATURE-----" footer should return
        zero parsed descriptors.
        """
        # BRIDGE_EXTRA_INFO_DESCRIPTOR is bytes, so the replace() arguments
        # must be bytes as well (str arguments would raise TypeError under
        # Python 3):
        unparseable = BRIDGE_EXTRA_INFO_DESCRIPTOR.replace(
            b"-----END SIGNATURE-----", b"-----END SIGNATURE FOR REALZ-----"
        )
        # This must be a "real" file or _copyUnparseableDescriptorFile() will
        # raise an AttributeError saying:
        # '_io.BytesIO' object has no attribute 'rpartition'"
        descFileOne = self.writeTestDescriptorsToFile("bad-signature-footer", unparseable)
        routers = descriptors.parseExtraInfoFiles(descFileOne)

        self.assertEqual(len(routers), 0)
    def test_parse_descriptors_parseExtraInfoFiles_bad_signature_too_short(self):
        """Calling _verifyExtraInfoSignature() with a descriptor which has a
        bad signature should raise an InvalidExtraInfoSignature exception.
        """
        # Truncate the signature to 50 bytes.  BRIDGE_EXTRA_INFO_DESCRIPTOR
        # is bytes, so the split/join marker must be bytes too (a str marker
        # would raise TypeError under Python 3):
        BEGIN_SIG = b'-----BEGIN SIGNATURE-----'
        doc, sig = BRIDGE_EXTRA_INFO_DESCRIPTOR.split(BEGIN_SIG)
        unparseable = BEGIN_SIG.join([doc, sig[:50]])
        # This must be a "real" file or _copyUnparseableDescriptorFile() will
        # raise an AttributeError saying:
        # '_io.BytesIO' object has no attribute 'rpartition'"
        descFileOne = self.writeTestDescriptorsToFile("truncated-signature", unparseable)
        routers = descriptors.parseExtraInfoFiles(descFileOne)

        self.assertEqual(len(routers), 0)
Пример #28
0
    def test_parse_descriptors_parseExtraInfoFiles_missing_signature(self):
        """Calling parseExtraInfoFiles() with a descriptor which is
        missing the signature should return zero parsed descriptors.
        """
        # Strip everything from the signature header onwards:
        sigMarker = b'-----BEGIN SIGNATURE-----'
        truncated = BRIDGE_EXTRA_INFO_DESCRIPTOR.split(sigMarker)[0]
        # This must be a "real" file or _copyUnparseableDescriptorFile() will
        # raise an AttributeError saying:
        # '_io.BytesIO' object has no attribute 'rpartition'"
        noSigFile = self.writeTestDescriptorsToFile("missing-signature",
                                                    truncated)
        parsed = descriptors.parseExtraInfoFiles(noSigFile)

        self.assertEqual(len(parsed), 0)
    def test_parse_descriptors_parseExtraInfoFiles_unparseable_and_parseable(self):
        """Test parsing four extrainfo descriptors: two are valid descriptors,
        one is an older duplicate of one of the valid descriptors, and one is
        unparseable (it has a line we shouldn't recognise). There should be
        only two descriptors returned after parsing.
        """
        # Mess up the bridge-ip-transports line.  The descriptor constants
        # are bytes, so the replace() arguments must be bytes as well (str
        # arguments would raise TypeError under Python 3):
        unparseable = BRIDGE_EXTRA_INFO_DESCRIPTOR.replace(
            b"MiserLandfalls E08B324D20AD0A13E114F027AB9AC3F32CA696A0",
            b"DontParseMe F373CC1D86D82267F1F1F5D39470F0E0A022122E").replace(
                b"bridge-ip-transports <OR>=8",
                b"bridge-ip-transports <OR>")

        parseable = BRIDGE_EXTRA_INFO_DESCRIPTOR.replace(
            b"MiserLandfalls E08B324D20AD0A13E114F027AB9AC3F32CA696A0",
            b"ImOkWithBeingParsed 2B5DA67FBA13A6449DE625673B7AE9E3AA7DF75F")

        descFileOne = io.BytesIO(BRIDGE_EXTRA_INFO_DESCRIPTOR)
        descFileTwo = io.BytesIO(BRIDGE_EXTRA_INFO_DESCRIPTOR_NEWEST_DUPLICATE)
        # This must be a "real" file or _copyUnparseableDescriptorFile() will
        # raise an AttributeError saying:
        # '_io.BytesIO' object has no attribute 'rpartition'"
        descFileThree = self.writeTestDescriptorsToFile(
            "unparseable-descriptor.new", unparseable)
        descFileFour = io.BytesIO(parseable)
        routers = descriptors.parseExtraInfoFiles(descFileOne,
                                                  descFileTwo,
                                                  descFileThree,
                                                  descFileFour)
        self.assertIsInstance(routers, dict)
        self.assertEqual(len(routers), 2, (
            "There were four extrainfo descriptors: one was a duplicate, "
            "and one was unparseable, so that should only leave two "
            "descriptors remaining."))

        self.assertNotIn("F373CC1D86D82267F1F1F5D39470F0E0A022122E", routers.keys(),
                         "The 'unparseable' descriptor was returned by the parser.")

        self.assertIn("E08B324D20AD0A13E114F027AB9AC3F32CA696A0", routers.keys(),
            ("A bridge extrainfo which had duplicates was completely missing "
             "from the data which the parser returned."))
        self.assertEqual(
            routers["E08B324D20AD0A13E114F027AB9AC3F32CA696A0"].published,
            datetime.datetime.strptime("2014-12-04 03:10:25", "%Y-%m-%d %H:%M:%S"),
            "We should have the newest available descriptor for this router.")

        self.assertIn("2B5DA67FBA13A6449DE625673B7AE9E3AA7DF75F", routers.keys(),
                      "The 'parseable' descriptor wasn't returned by the parser.")
    def test_parse_descriptors_parseExtraInfoFiles_bad_signature_too_short(self):
        """Calling _verifyExtraInfoSignature() with a descriptor which has a
        bad signature should raise an InvalidExtraInfoSignature exception.
        """
        # Truncate the signature to 50 bytes.  The descriptor constant is
        # bytes, so the marker must be a bytes literal (a str marker would
        # raise TypeError in split()/join() under Python 3):
        BEGIN_SIG = b'-----BEGIN SIGNATURE-----'
        doc, sig = BRIDGE_EXTRA_INFO_DESCRIPTOR.split(BEGIN_SIG)
        unparseable = BEGIN_SIG.join([doc, sig[:50]])
        # This must be a "real" file or _copyUnparseableDescriptorFile() will
        # raise an AttributeError saying:
        # '_io.BytesIO' object has no attribute 'rpartition'"
        descFileOne = self.writeTestDescriptorsToFile(
            "truncated-signature", unparseable)
        routers = descriptors.parseExtraInfoFiles(descFileOne)

        self.assertEqual(len(routers), 0)
    def test_parse_descriptors_parseExtraInfoFiles_two_files_reverse(self):
        """Test for ``b.p.descriptors.parseExtraInfoFiles`` with two bridge
        extrainfo files. This time, they are processed in reverse to ensure
        that we only keep the newer duplicates of descriptors, no matter what
        order they appeared in the files.
        """
        descFileOne = io.BytesIO(BRIDGE_EXTRA_INFO_DESCRIPTOR_NEWER_DUPLICATE)
        descFileTwo = io.BytesIO(BRIDGE_EXTRA_INFO_DESCRIPTOR)
        routers = descriptors.parseExtraInfoFiles(descFileOne, descFileTwo)

        self.assertEqual(len(routers), 1,
                         "We shouldn't have any duplicate descriptors.")

        # dict.values() is a non-subscriptable view under Python 3, so it
        # must be materialised as a list before indexing:
        bridge = list(routers.values())[0]
        self.assertEqual(
            bridge.published,
            datetime.datetime.strptime("2014-11-04 08:10:25", "%Y-%m-%d %H:%M:%S"),
            "We should have the newest available descriptor for this router.")
    def test_parse_descriptors_parseExtraInfoFiles_unparseable(self):
        """Test parsing three extrainfo descriptors: one is a valid descriptor,
        one is an older duplicate, and one is unparseable (it has a bad
        geoip-db-digest line). There should be only one descriptor returned
        after parsing.
        """
        # Give it a bad geoip-db-digest.  The descriptor constant is bytes,
        # so the replace() arguments must be bytes too (str arguments would
        # raise TypeError under Python 3):
        unparseable = BRIDGE_EXTRA_INFO_DESCRIPTOR.replace(
            b"MiserLandfalls E08B324D20AD0A13E114F027AB9AC3F32CA696A0",
            b"DontParseMe F373CC1D86D82267F1F1F5D39470F0E0A022122E",
        ).replace(
            b"geoip-db-digest 09A0E093100B279AD9CFF47A67B13A21C6E1483F",
            b"geoip-db-digest FOOOOOOOOOOOOOOOOOOBAAAAAAAAAAAAAAAAAARR",
        )

        descFileOne = io.BytesIO(BRIDGE_EXTRA_INFO_DESCRIPTOR_NEWEST_DUPLICATE)
        descFileTwo = io.BytesIO(BRIDGE_EXTRA_INFO_DESCRIPTOR)
        # This must be a "real" file or _copyUnparseableDescriptorFile() will
        # raise an AttributeError saying:
        # '_io.BytesIO' object has no attribute 'rpartition'"
        descFileThree = self.writeTestDescriptorsToFile("unparseable-descriptor", unparseable)
        routers = descriptors.parseExtraInfoFiles(descFileOne, descFileTwo, descFileThree)
        self.assertIsInstance(routers, dict)
        self.assertEqual(
            len(routers),
            1,
            (
                "There were three extrainfo descriptors: one was a duplicate, "
                "and one was unparseable, so that should only leave one "
                "descriptor remaining."
            ),
        )

        # dict.values() is a non-subscriptable view under Python 3; turn it
        # into a list before taking the first element:
        bridge = list(routers.values())[0]
        self.assertEqual(
            bridge.fingerprint,
            "E08B324D20AD0A13E114F027AB9AC3F32CA696A0",
            ("It looks like the (supposedly) unparseable bridge was returned " "instead of the valid one!"),
        )
        self.assertEqual(
            bridge.published,
            datetime.datetime.strptime("2014-12-04 03:10:25", "%Y-%m-%d %H:%M:%S"),
            "We should have the newest available descriptor for this router.",
        )
Пример #33
0
    def test_parse_descriptors_parseExtraInfoFiles_unparseable(self):
        """Test parsing three extrainfo descriptors: one is a valid descriptor,
        one is an older duplicate, and one is unparseable (it has a bad
        geoip-db-digest line). There should be only one descriptor returned
        after parsing.
        """
        # Give it a bad geoip-db-digest (and a distinct nickname/fingerprint
        # so the unparseable descriptor can be told apart from the valid one):
        unparseable = BRIDGE_EXTRA_INFO_DESCRIPTOR.replace(
            b"MiserLandfalls E08B324D20AD0A13E114F027AB9AC3F32CA696A0",
            b"DontParseMe F373CC1D86D82267F1F1F5D39470F0E0A022122E").replace(
                b"geoip-db-digest 09A0E093100B279AD9CFF47A67B13A21C6E1483F",
                b"geoip-db-digest FOOOOOOOOOOOOOOOOOOBAAAAAAAAAAAAAAAAAARR")

        descFileOne = self.writeTestDescriptorsToFile(
            'cached-extrainfo', BRIDGE_EXTRA_INFO_DESCRIPTOR_NEWEST_DUPLICATE)
        descFileTwo = self.writeTestDescriptorsToFile(
            'cached-extrainfo.2', BRIDGE_EXTRA_INFO_DESCRIPTOR)
        # This must be a "real" file or _copyUnparseableDescriptorFile() will
        # raise an AttributeError saying:
        # '_io.BytesIO' object has no attribute 'rpartition'"
        descFileThree = self.writeTestDescriptorsToFile(
            "unparseable-descriptor", unparseable)
        routers = descriptors.parseExtraInfoFiles(descFileOne, descFileTwo,
                                                  descFileThree)
        self.assertIsInstance(routers, dict)
        self.assertEqual(
            len(routers), 1,
            ("There were three extrainfo descriptors: one was a duplicate, "
             "and one was unparseable, so that should only leave one "
             "descriptor remaining."))

        # The sole surviving descriptor must be the valid bridge...
        bridge = list(routers.values())[0]
        self.assertEqual(
            bridge.fingerprint, "E08B324D20AD0A13E114F027AB9AC3F32CA696A0",
            ("It looks like the (supposedly) unparseable bridge was returned "
             "instead of the valid one!"))
        # ...and specifically the newest of its two duplicates:
        self.assertEqual(
            bridge.published,
            datetime.datetime.strptime("2014-12-04 03:10:25",
                                       "%Y-%m-%d %H:%M:%S"),
            "We should have the newest available descriptor for this router.")
        self.assertTrue(self.removeTestDescriptorsFile(descFileOne))
        self.assertTrue(self.removeTestDescriptorsFile(descFileTwo))
        self.assertTrue(self.removeTestDescriptorsFile(descFileThree))
    def test_parse_descriptors_parseExtraInfoFiles_two_files(self):
        """Test for ``b.p.descriptors.parseExtraInfoFiles`` with two
        bridge extrainfo files, and check that only the newest extrainfo
        descriptor is used.
        """
        descFileOne = io.BytesIO(BRIDGE_EXTRA_INFO_DESCRIPTOR)
        descFileTwo = io.BytesIO(BRIDGE_EXTRA_INFO_DESCRIPTOR_NEWER_DUPLICATE)
        routers = descriptors.parseExtraInfoFiles(descFileOne, descFileTwo)

        # We shouldn't have duplicates:
        self.assertEqual(len(routers), 1,
                         "We shouldn't have any duplicate descriptors.")

        # We should only have the newest descriptor.  dict.values() is a
        # non-subscriptable view under Python 3, so materialise it as a list
        # before indexing:
        bridge = list(routers.values())[0]
        self.assertEqual(
            bridge.published,
            datetime.datetime.strptime("2014-11-04 08:10:25", "%Y-%m-%d %H:%M:%S"),
            "We should have the newest available descriptor for this router.")
 def test_parse_descriptors_parseExtraInfoFiles_ed25519(self):
     """Test parsing an extrainfo descriptor with Ed25519 keys/certificates.
     """
     # Ed25519 material should not trip up the parser:
     ed25519Stream = io.BytesIO(BRIDGE_EXTRA_INFO_DESCRIPTOR_ED25519)
     parsed = descriptors.parseExtraInfoFiles(ed25519Stream)
     self.assertEqual(len(parsed), 1)
 def test_parse_descriptors_parseExtraInfoFiles_empty_file(self):
     """Test parsing an empty extrainfo descriptors file."""
     # io.BytesIO requires a bytes initializer; passing '' (str) raises
     # TypeError under Python 3, so use an empty bytes literal:
     routers = descriptors.parseExtraInfoFiles(io.BytesIO(b''))
     self.assertIsInstance(routers, dict)
     self.assertEqual(len(routers), 0)
Пример #37
0
def load(state, hashring, clear=False):
    """Read and parse all descriptors, and load into a bridge hashring.

    Read all the appropriate bridge files from the saved
    :class:`~bridgedb.persistent.State`, parse and validate them, and then
    store them into our ``state.hashring`` instance. The ``state`` will be
    saved again at the end of this function.

    :type state: :class:`~bridgedb.persistent.State`
    :param state: The current global state, holding the file paths and
        configuration values read below (``STATUS_FILE``, ``BRIDGE_FILES``,
        ``EXTRA_INFO_FILES``, etc.).
    :type hashring: :class:`~bridgedb.Bridges.BridgeSplitter`
    :param hashring: A class which provides a mechanism for HMACing
        Bridges in order to assign them to hashrings.
    :param boolean clear: If True, clear all previous bridges from the
        hashring before parsing for new ones.
    """
    if not state:
        logging.fatal("bridgedb.main.load() could not retrieve state!")
        sys.exit(2)

    if clear:
        logging.info("Clearing old bridges...")
        hashring.clear()

    logging.info("Loading bridges...")

    ignoreNetworkstatus = state.IGNORE_NETWORKSTATUS
    if ignoreNetworkstatus:
        logging.info("Ignoring BridgeAuthority networkstatus documents.")

    # Each configured bridge authority directory is processed independently;
    # the bridges/timestamps accumulators are reset per authority.
    for auth in state.BRIDGE_AUTHORITY_DIRECTORIES:
        logging.info("Processing descriptors in %s directory..." % auth)

        bridges = {}
        timestamps = {}

        fn = expandBridgeAuthDir(auth, state.STATUS_FILE)
        logging.info("Opening networkstatus file: %s" % fn)
        networkstatuses = descriptors.parseNetworkStatusFile(fn)
        logging.debug("Closing networkstatus file: %s" % fn)

        # Pass 1: seed the ``bridges`` dict from networkstatus entries;
        # bridges failing assertOK() are logged and dropped.
        logging.info("Processing networkstatus descriptors...")
        for router in networkstatuses:
            bridge = Bridge()
            bridge.updateFromNetworkStatus(router, ignoreNetworkstatus)
            try:
                bridge.assertOK()
            except MalformedBridgeInfo as error:
                logging.warn(str(error))
            else:
                bridges[bridge.fingerprint] = bridge

        # Pass 2: fold in server descriptors, matching on fingerprint.
        for filename in state.BRIDGE_FILES:
            fn = expandBridgeAuthDir(auth, filename)
            logging.info("Opening bridge-server-descriptor file: '%s'" % fn)
            serverdescriptors = descriptors.parseServerDescriptorsFile(fn)
            logging.debug("Closing bridge-server-descriptor file: '%s'" % fn)

            for router in serverdescriptors:
                try:
                    bridge = bridges[router.fingerprint]
                except KeyError:
                    logging.warn(
                        ("Received server descriptor for bridge '%s' which wasn't "
                         "in the networkstatus!") % router.fingerprint)
                    if ignoreNetworkstatus:
                        # Without networkstatus, accept the bridge anyway by
                        # starting from a fresh Bridge object.
                        bridge = Bridge()
                    else:
                        continue

                try:
                    bridge.updateFromServerDescriptor(router, ignoreNetworkstatus)
                except (ServerDescriptorWithoutNetworkstatus,
                        MissingServerDescriptorDigest,
                        ServerDescriptorDigestMismatch) as error:
                    logging.warn(str(error))
                    # Reject any routers whose server descriptors didn't pass
                    # :meth:`~bridges.Bridge._checkServerDescriptor`, i.e. those
                    # bridges who don't have corresponding networkstatus
                    # documents, or whose server descriptor digests don't check
                    # out:
                    bridges.pop(router.fingerprint)
                    continue

                if state.COLLECT_TIMESTAMPS:
                    # Update timestamps from server descriptors, not from network
                    # status descriptors (because networkstatus documents and
                    # descriptors aren't authenticated in any way):
                    if bridge.fingerprint in timestamps.keys():
                        timestamps[bridge.fingerprint].append(router.published)
                    else:
                        timestamps[bridge.fingerprint] = [router.published]

        # Pass 3: fold in extrainfo descriptors (transports, stats).
        eifiles = [expandBridgeAuthDir(auth, fn) for fn in state.EXTRA_INFO_FILES]
        extrainfos = descriptors.parseExtraInfoFiles(*eifiles)
        for fingerprint, router in extrainfos.items():
            try:
                bridges[fingerprint].updateFromExtraInfoDescriptor(router)
            except MalformedBridgeInfo as error:
                logging.warn(str(error))
            except KeyError as error:
                logging.warn(("Received extrainfo descriptor for bridge '%s', "
                              "but could not find bridge with that fingerprint.")
                             % router.fingerprint)

        blacklist = parseBridgeBlacklistFile(state.NO_DISTRIBUTION_FILE)

        inserted = 0
        logging.info("Inserting %d bridges into hashring..." % len(bridges))
        for fingerprint, bridge in bridges.items():
            # Skip insertion of bridges which are geolocated to be in one of the
            # NO_DISTRIBUTION_COUNTRIES, a.k.a. the countries we don't distribute
            # bridges from:
            if bridge.country in state.NO_DISTRIBUTION_COUNTRIES:
                logging.warn("Not distributing Bridge %s %s:%s in country %s!" %
                             (bridge, bridge.address, bridge.orPort, bridge.country))
            # Skip insertion of blacklisted bridges.
            # NOTE(review): this membership test uses the Bridge object itself
            # as the blacklist key; confirm parseBridgeBlacklistFile() keys on
            # Bridge objects rather than on fingerprint strings.
            elif bridge in blacklist.keys():
                logging.warn("Not distributing blacklisted Bridge %s %s:%s: %s" %
                             (bridge, bridge.address, bridge.orPort, blacklist[bridge]))
            else:
                # If the bridge is not running, then it is skipped during the
                # insertion process.
                hashring.insert(bridge)
                inserted += 1
        logging.info("Done inserting %d bridges into hashring." % inserted)

        if state.COLLECT_TIMESTAMPS:
            reactor.callInThread(updateBridgeHistory, bridges, timestamps)

        state.save()
Пример #38
0
 def test_parse_descriptors_parseExtraInfoFiles_empty_file(self):
     """Parsing an empty extrainfo descriptors file yields an empty dict."""
     emptyFile = io.BytesIO(b'')
     parsed = descriptors.parseExtraInfoFiles(emptyFile)
     self.assertIsInstance(parsed, dict)
     self.assertEqual(len(parsed), 0)
Пример #39
0
 def test_parse_descriptors_parseExtraInfoFiles_ed25519(self):
     """Test parsing an extrainfo descriptor with Ed25519 keys/certificates.
     """
     descFile = io.BytesIO(BRIDGE_EXTRA_INFO_DESCRIPTOR_ED25519)
     parsed = descriptors.parseExtraInfoFiles(descFile)
     # Exactly one router should come out of the single descriptor:
     self.assertEqual(len(parsed), 1)
Пример #40
0
def load(state, hashring, clear=False):
    """Read and parse all descriptors, and load into a bridge hashring.

    Read all the appropriate bridge files from the saved
    :class:`~bridgedb.persistent.State`, parse and validate them, and then
    store them into our ``state.hashring`` instance. The ``state`` will be
    saved again at the end of this function.

    :type state: :class:`~bridgedb.persistent.State`
    :param state: The current global state, holding the file paths and
        configuration values read below (``STATUS_FILE``, ``BRIDGE_FILES``,
        ``EXTRA_INFO_FILES``, ``BLACKLISTED_TOR_VERSIONS``, etc.).
    :type hashring: :class:`~bridgedb.bridgerings.BridgeSplitter`
    :param hashring: A class which provides a mechanism for HMACing
        Bridges in order to assign them to hashrings.
    :param boolean clear: If True, clear all previous bridges from the
        hashring before parsing for new ones.
    """
    if not state:
        logging.fatal("bridgedb.main.load() could not retrieve state!")
        sys.exit(2)

    if clear:
        logging.info("Clearing old bridges...")
        hashring.clear()

    logging.info("Loading bridges...")

    ignoreNetworkstatus = state.IGNORE_NETWORKSTATUS
    if ignoreNetworkstatus:
        logging.info("Ignoring BridgeAuthority networkstatus documents.")

    # Each configured bridge authority directory is processed independently;
    # the bridges/timestamps accumulators are reset per authority.
    for auth in state.BRIDGE_AUTHORITY_DIRECTORIES:
        logging.info("Processing descriptors in %s directory..." % auth)

        bridges = {}
        timestamps = {}

        fn = expandBridgeAuthDir(auth, state.STATUS_FILE)
        logging.info("Opening networkstatus file: %s" % fn)
        networkstatuses = descriptors.parseNetworkStatusFile(fn)
        logging.debug("Closing networkstatus file: %s" % fn)

        # Pass 1: seed the ``bridges`` dict from networkstatus entries;
        # bridges failing assertOK() are logged and dropped.
        logging.info("Processing networkstatus descriptors...")
        for router in networkstatuses:
            bridge = Bridge()
            bridge.updateFromNetworkStatus(router, ignoreNetworkstatus)
            try:
                bridge.assertOK()
            except MalformedBridgeInfo as error:
                logging.warn(str(error))
            else:
                bridges[bridge.fingerprint] = bridge

        # Pass 2: fold in server descriptors, matching on fingerprint.
        for filename in state.BRIDGE_FILES:
            fn = expandBridgeAuthDir(auth, filename)
            logging.info("Opening bridge-server-descriptor file: '%s'" % fn)
            serverdescriptors = descriptors.parseServerDescriptorsFile(fn)
            logging.debug("Closing bridge-server-descriptor file: '%s'" % fn)

            for router in serverdescriptors:
                try:
                    bridge = bridges[router.fingerprint]
                except KeyError:
                    logging.warn((
                        "Received server descriptor for bridge '%s' which wasn't "
                        "in the networkstatus!") % router.fingerprint)
                    if ignoreNetworkstatus:
                        # Without networkstatus, accept the bridge anyway by
                        # starting from a fresh Bridge object.
                        bridge = Bridge()
                    else:
                        continue

                try:
                    bridge.updateFromServerDescriptor(router,
                                                      ignoreNetworkstatus)
                except (ServerDescriptorWithoutNetworkstatus,
                        MissingServerDescriptorDigest,
                        ServerDescriptorDigestMismatch) as error:
                    logging.warn(str(error))
                    # Reject any routers whose server descriptors didn't pass
                    # :meth:`~bridges.Bridge._checkServerDescriptor`, i.e. those
                    # bridges who don't have corresponding networkstatus
                    # documents, or whose server descriptor digests don't check
                    # out:
                    bridges.pop(router.fingerprint)
                    continue

                if state.COLLECT_TIMESTAMPS:
                    # Update timestamps from server descriptors, not from network
                    # status descriptors (because networkstatus documents and
                    # descriptors aren't authenticated in any way):
                    if bridge.fingerprint in timestamps.keys():
                        timestamps[bridge.fingerprint].append(router.published)
                    else:
                        timestamps[bridge.fingerprint] = [router.published]

        # Pass 3: fold in extrainfo descriptors (transports, stats).
        eifiles = [
            expandBridgeAuthDir(auth, fn) for fn in state.EXTRA_INFO_FILES
        ]
        extrainfos = descriptors.parseExtraInfoFiles(*eifiles)
        for fingerprint, router in extrainfos.items():
            try:
                bridges[fingerprint].updateFromExtraInfoDescriptor(router)
            except MalformedBridgeInfo as error:
                logging.warn(str(error))
            except KeyError:
                logging.warn(
                    ("Received extrainfo descriptor for bridge '%s', "
                     "but could not find bridge with that fingerprint.") %
                    router.fingerprint)

        blacklist = parseBridgeBlacklistFile(state.NO_DISTRIBUTION_FILE)

        inserted = 0
        logging.info(
            "Trying to insert %d bridges into hashring, %d of which "
            "have the 'Running' flag..." %
            (len(bridges),
             len(list(filter(lambda b: b.flags.running, bridges.values())))))

        for fingerprint, bridge in bridges.items():
            # Skip insertion of bridges which are geolocated to be in one of the
            # NO_DISTRIBUTION_COUNTRIES, a.k.a. the countries we don't distribute
            # bridges from:
            if bridge.country in state.NO_DISTRIBUTION_COUNTRIES:
                logging.warn(
                    "Not distributing Bridge %s %s:%s in country %s!" %
                    (bridge, bridge.address, bridge.orPort, bridge.country))
            # Skip insertion of blacklisted bridges.
            # NOTE(review): this membership test uses the Bridge object itself
            # as the blacklist key; confirm parseBridgeBlacklistFile() keys on
            # Bridge objects rather than on fingerprint strings.
            elif bridge in blacklist.keys():
                logging.warn(
                    "Not distributing blacklisted Bridge %s %s:%s: %s" %
                    (bridge, bridge.address, bridge.orPort, blacklist[bridge]))
            # Skip bridges that are running a blacklisted version of Tor.
            elif bridge.runsVersion(state.BLACKLISTED_TOR_VERSIONS):
                # Bugfix: this previously logged ``router.fingerprint``, where
                # ``router`` was a stale leftover from the extrainfo loop above,
                # so the wrong bridge could be reported.  Use the loop's own
                # ``fingerprint`` instead.
                logging.warn(
                    "Not distributing bridge %s because it runs blacklisted "
                    "Tor version %s." % (fingerprint, bridge.software))
            else:
                # If the bridge is not running, then it is skipped during the
                # insertion process.
                hashring.insert(bridge)
                inserted += 1
        logging.info("Tried to insert %d bridges into hashring.  Resulting "
                     "hashring is of length %d." % (inserted, len(hashring)))

        if state.COLLECT_TIMESTAMPS:
            reactor.callInThread(updateBridgeHistory, bridges, timestamps)

        state.save()
Пример #41
0
def load(state, splitter, clear=False):
    """Read and parse all descriptors, and load into a bridge splitter.

    Read all the appropriate bridge files from the saved
    :class:`~bridgedb.persistent.State`, parse and validate them, and then
    store them into our ``state.splitter`` instance. The ``state`` will be
    saved again at the end of this function.

    :type state: :class:`~bridgedb.persistent.State`
    :param state: The current global state, holding the file paths and
        configuration values read below (``STATUS_FILE``, ``BRIDGE_FILES``,
        ``EXTRA_INFO_FILES``, etc.).
    :type splitter: :class:`BridgeSplitter <bridgedb.Bridges.BridgeHolder>`
    :param splitter: A class which provides a mechanism for HMACing
        Bridges in order to assign them to hashrings.
    :param boolean clear: If True, clear all previous bridges from the
        splitter before parsing for new ones.
    """
    if not state:
        logging.fatal("bridgedb.Main.load() could not retrieve state!")
        sys.exit(2)

    if clear:
        logging.info("Clearing old bridges...")
        splitter.clear()

    logging.info("Loading bridges...")

    ignoreNetworkstatus = state.IGNORE_NETWORKSTATUS
    if ignoreNetworkstatus:
        logging.info("Ignoring BridgeAuthority networkstatus documents.")

    bridges = {}
    timestamps = {}

    logging.info("Opening networkstatus file: %s" % state.STATUS_FILE)
    networkstatuses = descriptors.parseNetworkStatusFile(state.STATUS_FILE)
    logging.debug("Closing networkstatus file: %s" % state.STATUS_FILE)

    # Pass 1: seed the ``bridges`` dict from networkstatus entries; bridges
    # failing assertOK() are logged and dropped.
    logging.info("Processing networkstatus descriptors...")
    for router in networkstatuses:
        bridge = Bridge()
        bridge.updateFromNetworkStatus(router, ignoreNetworkstatus)
        try:
            bridge.assertOK()
        except MalformedBridgeInfo as error:
            logging.warn(str(error))
        else:
            bridges[bridge.fingerprint] = bridge

    # Pass 2: fold in server descriptors, matching on fingerprint.
    for filename in state.BRIDGE_FILES:
        logging.info("Opening bridge-server-descriptor file: '%s'" % filename)
        serverdescriptors = descriptors.parseServerDescriptorsFile(filename)
        logging.debug("Closing bridge-server-descriptor file: '%s'" % filename)

        for router in serverdescriptors:
            try:
                bridge = bridges[router.fingerprint]
            except KeyError:
                logging.warn(
                    ("Received server descriptor for bridge '%s' which wasn't "
                     "in the networkstatus!") % router.fingerprint)
                if ignoreNetworkstatus:
                    # Without networkstatus, accept the bridge anyway by
                    # starting from a fresh Bridge object.
                    bridge = Bridge()
                else:
                    continue

            try:
                bridge.updateFromServerDescriptor(router, ignoreNetworkstatus)
            except (ServerDescriptorWithoutNetworkstatus,
                    MissingServerDescriptorDigest,
                    ServerDescriptorDigestMismatch) as error:
                logging.warn(str(error))
                # Reject any routers whose server descriptors didn't pass
                # :meth:`~bridges.Bridge._checkServerDescriptor`, i.e. those
                # bridges who don't have corresponding networkstatus
                # documents, or whose server descriptor digests don't check
                # out:
                bridges.pop(router.fingerprint)
                continue

            if state.COLLECT_TIMESTAMPS:
                # Update timestamps from server descriptors, not from network
                # status descriptors (because networkstatus documents and
                # descriptors aren't authenticated in any way):
                if bridge.fingerprint in timestamps.keys():
                    timestamps[bridge.fingerprint].append(router.published)
                else:
                    timestamps[bridge.fingerprint] = [router.published]

    # Pass 3: fold in extrainfo descriptors (transports, stats).
    extrainfos = descriptors.parseExtraInfoFiles(*state.EXTRA_INFO_FILES)
    for fingerprint, router in extrainfos.items():
        try:
            bridges[fingerprint].updateFromExtraInfoDescriptor(router)
        except MalformedBridgeInfo as error:
            logging.warn(str(error))
        except KeyError as error:
            logging.warn(("Received extrainfo descriptor for bridge '%s', "
                          "but could not find bridge with that fingerprint.") %
                         router.fingerprint)

    inserted = 0
    logging.info("Inserting %d bridges into splitter..." % len(bridges))
    for fingerprint, bridge in bridges.items():
        # Skip insertion of bridges which are geolocated to be in one of the
        # NO_DISTRIBUTION_COUNTRIES, a.k.a. the countries we don't distribute
        # bridges from:
        if bridge.country in state.NO_DISTRIBUTION_COUNTRIES:
            logging.warn(
                "Not distributing Bridge %s %s:%s in country %s!" %
                (bridge, bridge.address, bridge.orPort, bridge.country))
        else:
            # If the bridge is not running, then it is skipped during the
            # insertion process.
            splitter.insert(bridge)
            inserted += 1
    logging.info("Done inserting %d bridges into splitter." % inserted)

    if state.COLLECT_TIMESTAMPS:
        reactor.callInThread(updateBridgeHistory, bridges, timestamps)

    state.save()