Example 1
import base64
import io
import struct
import unittest

# The database, detokenize, and tokens modules are presumably Pigweed's
# pw_tokenizer package, which these tests exercise.
from pw_tokenizer import database, detokenize, tokens

# JELLO_WORLD_TOKEN and ELF_WITH_TOKENIZER_SECTIONS are binary test
# fixtures defined elsewhere in the original test module.


class DetokenizeBase64(unittest.TestCase):
    """Tests detokenizing Base64 messages."""

    JELLO = b'$' + base64.b64encode(JELLO_WORLD_TOKEN)

    RECURSION_STRING = f'The secret message is "{JELLO.decode()}"'
    RECURSION = b'$' + base64.b64encode(
        struct.pack('I', tokens.default_hash(RECURSION_STRING)))

    RECURSION_STRING_2 = f"'{RECURSION.decode()}', said the spy."
    RECURSION_2 = b'$' + base64.b64encode(
        struct.pack('I', tokens.default_hash(RECURSION_STRING_2)))

    TEST_CASES = (
        (b'', b''),
        (JELLO, b'Jello, world!'),
        (b'Hello ' + JELLO + b'?', b'Hello Jello, world!?'),
        (b'$' + JELLO, b'$Jello, world!'),
        (JELLO + JELLO, b'Jello, world!Jello, world!'),
        (JELLO + b'$' + JELLO, b'Jello, world!$Jello, world!'),
        (b'$3141', b'$3141'),
        (JELLO + b'$3141', b'Jello, world!$3141'),
        (RECURSION, b'The secret message is "Jello, world!"'),
        (RECURSION_2,
         b'\'The secret message is "Jello, world!"\', said the spy.'),
    )

    def setUp(self):
        super().setUp()
        db = database.load_token_database(
            io.BytesIO(ELF_WITH_TOKENIZER_SECTIONS))
        db.add([self.RECURSION_STRING, self.RECURSION_STRING_2])
        self.detok = detokenize.Detokenizer(db)

    def test_detokenize_base64_live(self):
        for data, expected in self.TEST_CASES:
            output = io.BytesIO()
            detokenize.detokenize_base64_live(self.detok, io.BytesIO(data),
                                              output, '$')

            self.assertEqual(expected, output.getvalue())

    def test_detokenize_base64_to_file(self):
        for data, expected in self.TEST_CASES:
            output = io.BytesIO()
            detokenize.detokenize_base64_to_file(self.detok, data, output, '$')

            self.assertEqual(expected, output.getvalue())

    def test_detokenize_base64(self):
        for data, expected in self.TEST_CASES:
            self.assertEqual(
                expected, detokenize.detokenize_base64(self.detok, data, b'$'))
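
For context, a minimal standalone sketch of the API these tests exercise. It assumes the same pw_tokenizer modules plus the Database.from_strings constructor shown in Example 3; setUp above already demonstrates that Detokenizer accepts a token database directly.

import base64
import struct

from pw_tokenizer import detokenize, tokens

# from_strings assigns each string its default hash as the token
# (see the _entries helper in Example 4).
db = tokens.Database.from_strings(['Jello, world!'])
detok = detokenize.Detokenizer(db)

# Encode the token the same way the tests do: the 4-byte packed hash,
# Base64-encoded, behind a '$' prefix.
token = struct.pack('I', tokens.default_hash('Jello, world!'))
message = b'Log: $' + base64.b64encode(token)

# Should print b'Log: Jello, world!'.
print(detokenize.detokenize_base64(detok, message, b'$'))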
Example 2
    def setUp(self):
        super().setUp()
        db = database.load_token_database(
            io.BytesIO(ELF_WITH_TOKENIZER_SECTIONS))
        db.add(
            tokens.TokenizedStringEntry(tokens.default_hash(s), s)
            for s in [self.RECURSION_STRING, self.RECURSION_STRING_2])
        self.detok = detokenize.Detokenizer(db)
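
This setUp variant constructs TokenizedStringEntry objects explicitly rather than passing raw strings to db.add, matching a Database.add signature that takes entries. A hedged one-line equivalence, assuming default_hash is the same hash from_strings applies internally:

# 'spam' is an arbitrary placeholder; the entry's token is its default hash.
entry = tokens.TokenizedStringEntry(tokens.default_hash('spam'), 'spam')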
Example 3
    # Relies on `import datetime`, on `default_hash` (presumably from
    # pw_tokenizer.tokens), and on the `_entries` helper shown in Example 4.
    def test_mark_removals(self):
        """Tests that date_removed field is set by mark_removals."""
        db = tokens.Database.from_strings(
            ['MILK', 'apples', 'oranges', 'CHEESE', 'pears'])

        self.assertTrue(
            all(entry.date_removed is None for entry in db.entries()))
        date_1 = datetime.datetime(1, 2, 3)

        db.mark_removals(_entries('apples', 'oranges', 'pears'), date_1)

        self.assertEqual(
            db.token_to_entries[default_hash('MILK')][0].date_removed, date_1)
        self.assertEqual(
            db.token_to_entries[default_hash('CHEESE')][0].date_removed,
            date_1)

        now = datetime.datetime.now()
        db.mark_removals(_entries('MILK', 'CHEESE', 'pears'))

        # Entries already marked as removed keep their original removal
        # date; mark_removals() does not add or re-add strings.
        self.assertGreaterEqual(
            db.token_to_entries[default_hash('MILK')][0].date_removed, date_1)
        self.assertGreaterEqual(
            db.token_to_entries[default_hash('CHEESE')][0].date_removed,
            date_1)

        # 'apples' and 'oranges' were absent from the second call, so they
        # were marked as removed at or after `now`.
        self.assertGreaterEqual(
            db.token_to_entries[default_hash('apples')][0].date_removed, now)
        self.assertGreaterEqual(
            db.token_to_entries[default_hash('oranges')][0].date_removed, now)
        # 'pears' was present in both calls, so it was never marked removed.
        self.assertIsNone(
            db.token_to_entries[default_hash('pears')][0].date_removed)
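
A minimal sketch of the semantics this test pins down, assuming the mark_removals and Database.from_strings APIs shown above: every database entry absent from the passed collection gets its date_removed stamped, while entries that are present are left alone.

import datetime

from pw_tokenizer import tokens
from pw_tokenizer.tokens import default_hash  # assumed import path

db = tokens.Database.from_strings(['keep me', 'drop me'])
keep = [tokens.TokenizedStringEntry(default_hash('keep me'), 'keep me')]

db.mark_removals(keep, datetime.datetime(2020, 1, 1))

# Only the entry missing from `keep` is stamped with the removal date.
assert db.token_to_entries[default_hash('drop me')][0].date_removed is not None
assert db.token_to_entries[default_hash('keep me')][0].date_removed is None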
Example 4
from typing import Iterator

# `tokens` and `default_hash` as imported in the examples above
# (presumably from Pigweed's pw_tokenizer package).


def _entries(*strings: str) -> Iterator[tokens.TokenizedStringEntry]:
    for string in strings:
        yield tokens.TokenizedStringEntry(default_hash(string), string)
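
For illustration, a hypothetical use of the helper: it lazily yields one TokenizedStringEntry per string, with each token computed by default_hash.

for entry in _entries('MILK', 'CHEESE'):
    # Assumes TokenizedStringEntry exposes `token` and `string` attributes.
    print(f'{entry.token:#010x} -> {entry.string}')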