Example #1
    def test_create_detokenizer_with_token_database(self):
        detok = detokenize.Detokenizer(io.BytesIO(ELF_WITH_TOKENIZER_SECTIONS))
        expected_tokens = frozenset(detok.database.token_to_entries.keys())

        detok = detokenize.Detokenizer(detok.database)
        self.assertEqual(expected_tokens,
                         frozenset(detok.database.token_to_entries.keys()))
Example #2
    def test_decode_from_elf_file(self):
        detok = detokenize.Detokenizer(io.BytesIO(ELF_WITH_TOKENIZER_SECTIONS))
        expected_tokens = frozenset(detok.database.token_to_entries.keys())

        elf = tempfile.NamedTemporaryFile('wb', delete=False)
        try:
            elf.write(ELF_WITH_TOKENIZER_SECTIONS)
            elf.close()

            # Open ELF by file object
            with open(elf.name, 'rb') as fd:
                detok = detokenize.Detokenizer(fd)

            self.assertEqual(expected_tokens,
                             frozenset(detok.database.token_to_entries.keys()))

            # Open ELF by path
            detok = detokenize.Detokenizer(elf.name)
            self.assertEqual(expected_tokens,
                             frozenset(detok.database.token_to_entries.keys()))

            # Open ELF by elf_reader.Elf
            with open(elf.name, 'rb') as fd:
                detok = detokenize.Detokenizer(elf_reader.Elf(fd))

            self.assertEqual(expected_tokens,
                             frozenset(detok.database.token_to_entries.keys()))
        finally:
            os.unlink(elf.name)
Example #3
    def test_decode_from_csv_file(self):
        detok = detokenize.Detokenizer(io.BytesIO(ELF_WITH_TOKENIZER_SECTIONS))
        expected_tokens = frozenset(detok.database.token_to_entries.keys())

        csv_database = str(detok.database)
        self.assertEqual(len(csv_database.splitlines()), TOKENS_IN_ELF)

        csv_file = tempfile.NamedTemporaryFile('w', delete=False)
        try:
            csv_file.write(csv_database)
            csv_file.close()

            # Open CSV by path
            detok = detokenize.Detokenizer(csv_file.name)
            self.assertEqual(expected_tokens,
                             frozenset(detok.database.token_to_entries.keys()))

            # Open CSV by file object
            with open(csv_file.name) as fd:
                detok = detokenize.Detokenizer(fd)

            self.assertEqual(expected_tokens,
                             frozenset(detok.database.token_to_entries.keys()))
        finally:
            os.unlink(csv_file.name)
Example #4
    def setUp(self):
        super().setUp()
        db = database.load_token_database(
            io.BytesIO(ELF_WITH_TOKENIZER_SECTIONS))
        db.add(
            tokens.TokenizedStringEntry(tokens.default_hash(s), s)
            for s in [self.RECURSION_STRING, self.RECURSION_STRING_2])
        self.detok = detokenize.Detokenizer(db)
Example #5
    def test_simple(self):
        detok = detokenize.Detokenizer(
            tokens.Database([
                tokens.TokenizedStringEntry(0xcdab, '%02d %s %c%%',
                                            dt.datetime.now())
            ]))
        self.assertEqual(str(detok.detokenize(b'\xab\xcd\0\0\x02\x03Two\x66')),
                         '01 Two 3%')
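
The payload in this example is easier to read split into its parts: the first four bytes are the token 0xcdab in little-endian order, and the rest are the encoded arguments. The sketch below rebuilds the same bytes; the argument encoding (zigzag varints for integer and %c arguments, a length byte plus data for %s) is pw_tokenizer's documented wire format and is an assumption layered on top of the test itself.

import struct

# Sketch only: reconstructs the payload passed to detok.detokenize() above.
token = struct.pack('<I', 0xcdab)  # little-endian 32-bit token -> b'\xab\xcd\x00\x00'
int_arg = b'\x02'                  # zigzag varint that decodes to 1, so '%02d' prints '01'
str_arg = b'\x03Two'               # length byte plus data, so '%s' prints 'Two'
char_arg = b'\x66'                 # zigzag varint that decodes to 51, so '%c' prints '3'
assert token + int_arg + str_arg + char_arg == b'\xab\xcd\0\0\x02\x03Two\x66'
# The trailing '%%' in the format string renders as a literal '%', giving '01 Two 3%'.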
Example #6
    def test_empty_db_show_errors(self):
        detok = detokenize.Detokenizer(io.BytesIO(EMPTY_ELF), show_errors=True)
        self.assertFalse(detok.detokenize(b'\x12\x34\0\0').ok())
        self.assertIn('unknown token',
                      detok.detokenize(b'1234').error_message())
        self.assertIn('unknown token', repr(detok.detokenize(b'1234')))
        self.assertIn('unknown token', str(detok.detokenize(b'1234')))

        self.assertIsNone(detok.detokenize(b'').token)
Example #7
    def setUp(self):
        super().setUp()
        self.detok = detokenize.Detokenizer(
            tokens.Database([
                tokens.TokenizedStringEntry(0, '$AAAAAA=='),  # token for 0
                tokens.TokenizedStringEntry(1, '$AgAAAA=='),  # token for 2
                tokens.TokenizedStringEntry(2, '$AwAAAA=='),  # token for 3
                tokens.TokenizedStringEntry(3, '$AgAAAA=='),  # token for 2
            ]))
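
These entries are deliberately self-referential: each stored string is the '$' prefix followed by the Base64 of another 4-byte token, which is what the recursive Base64 detokenization tests exercise. The inline comments can be checked with the standard library alone; this standalone sketch is separate from the test code above.

import base64
import struct

# Each '$...' string is '$' plus the Base64 of a 4-byte little-endian token.
for text in ('$AAAAAA==', '$AgAAAA==', '$AwAAAA=='):
    raw = base64.b64decode(text[1:])
    print(text, '->', struct.unpack('<I', raw)[0])  # prints 0, 2 and 3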
Example #8
    def test_decode_from_csv_file(self):
        detok = detokenize.Detokenizer(io.BytesIO(ELF_WITH_TOKENIZER_SECTIONS))
        expected_tokens = frozenset(detok.database.token_to_entries.keys())

        csv_database = str(detok.database)
        self.assertEqual(len(csv_database.splitlines()), 16)

        with tempfile.NamedTemporaryFile('r+') as csv_file:
            csv_file.write(csv_database)
            csv_file.seek(0)

            # Open CSV by path
            detok = detokenize.Detokenizer(csv_file.name)
            self.assertEqual(expected_tokens,
                             frozenset(detok.database.token_to_entries.keys()))

            # Open CSV by file object
            detok = detokenize.Detokenizer(csv_file)
            self.assertEqual(expected_tokens,
                             frozenset(detok.database.token_to_entries.keys()))
Example #9
    def test_unparsed_data(self):
        detok = detokenize.Detokenizer(
            tokens.Database([
                tokens.TokenizedStringEntry(1, 'no args',
                                            dt.datetime(100, 1, 1)),
            ]))
        result = detok.detokenize(b'\x01\0\0\0o_o')
        self.assertFalse(result.ok())
        self.assertEqual('no args', str(result))
        self.assertIn('o_o', repr(result))
        self.assertIn('decoding failed', result.error_message())
Example #10
    def test_empty_db(self):
        detok = detokenize.Detokenizer(io.BytesIO(EMPTY_ELF))
        self.assertFalse(detok.detokenize(b'\x12\x34\0\0').ok())
        self.assertIn('unknown token',
                      detok.detokenize(b'1234').error_message())
        self.assertIn('unknown token', repr(detok.detokenize(b'1234')))

        self.assertEqual('$' + base64.b64encode(b'1234').decode(),
                         str(detok.detokenize(b'1234')))

        self.assertIsNone(detok.detokenize(b'').token)
Example #11
    def test_detokenize_missing_data_with_errors_is_unsuccessful(self):
        detok = detokenize.Detokenizer(tokens.Database(
            [tokens.TokenizedStringEntry(2, '%s', dt.datetime(1, 1, 1))]),
                                       show_errors=True)

        result = detok.detokenize(b'\x02\0\0\0')
        string, args, remaining = result.failures[0]
        self.assertIn('%s MISSING', string)
        self.assertEqual(len(args), 1)
        self.assertEqual(b'', remaining)
        self.assertEqual(len(result.failures), 1)
        self.assertIn('%s MISSING', str(result))
Example #12
    def test_decode_from_elf_data(self):
        detok = detokenize.Detokenizer(io.BytesIO(ELF_WITH_TOKENIZER_SECTIONS))

        self.assertTrue(detok.detokenize(JELLO_WORLD_TOKEN).ok())
        self.assertEqual(str(detok.detokenize(JELLO_WORLD_TOKEN)),
                         'Jello, world!')

        undecoded_args = detok.detokenize(JELLO_WORLD_TOKEN + b'some junk')
        self.assertFalse(undecoded_args.ok())
        self.assertEqual(str(undecoded_args), 'Jello, world!')

        self.assertTrue(detok.detokenize(b'\0\0\0\0').ok())
        self.assertEqual(str(detok.detokenize(b'\0\0\0\0')), '')
Example #13
    def test_detokenize_extra_data_is_unsuccessful(self):
        detok = detokenize.Detokenizer(
            tokens.Database([
                tokens.TokenizedStringEntry(1, 'no args', dt.datetime(1, 1, 1))
            ]))

        result = detok.detokenize(b'\x01\0\0\0\x04args')
        self.assertEqual(len(result.failures), 1)
        string, args, remaining = result.failures[0]
        self.assertEqual('no args', string)
        self.assertFalse(args)
        self.assertEqual(b'\x04args', remaining)
        self.assertEqual('no args', string)
        self.assertEqual('no args', str(result))
Example #14
    def test_decode_from_elf_file(self):
        detok = detokenize.Detokenizer(io.BytesIO(ELF_WITH_TOKENIZER_SECTIONS))
        expected_tokens = frozenset(detok.database.token_to_entries.keys())

        with tempfile.NamedTemporaryFile() as elf:
            elf.write(ELF_WITH_TOKENIZER_SECTIONS)
            elf.seek(0)

            # Open ELF by file object
            detok = detokenize.Detokenizer(elf)
            self.assertEqual(expected_tokens,
                             frozenset(detok.database.token_to_entries.keys()))

            # Open ELF by path
            detok = detokenize.Detokenizer(elf.name)
            self.assertEqual(expected_tokens,
                             frozenset(detok.database.token_to_entries.keys()))

            # Open ELF by elf_reader.Elf
            elf.seek(0)
            detok = detokenize.Detokenizer(elf_reader.Elf(elf))
            self.assertEqual(expected_tokens,
                             frozenset(detok.database.token_to_entries.keys()))
Example #15
    def test_missing_token(self):
        detok = detokenize.Detokenizer(io.BytesIO(EMPTY_ELF))
        self.assertIn('missing token', detok.detokenize(b'').error_message())
        self.assertEqual('', str(detok.detokenize(b'')))
        self.assertIn('missing token', repr(detok.detokenize(b'123')))

        self.assertIn('missing token', detok.detokenize(b'1').error_message())
        self.assertEqual('', str(detok.detokenize(b'1')))
        self.assertIn('missing token', repr(detok.detokenize(b'1')))

        self.assertIn('missing token',
                      detok.detokenize(b'123').error_message())
        self.assertEqual('', str(detok.detokenize(b'123')))
        self.assertIn('missing token', repr(detok.detokenize(b'123')))
Example #16
    def test_missing_token_show_errors(self):
        detok = detokenize.Detokenizer(io.BytesIO(EMPTY_ELF), show_errors=True)
        self.assertIn('missing token', detok.detokenize(b'').error_message())
        self.assertIn('missing token', str(detok.detokenize(b'')))
        self.assertIn('missing token', repr(detok.detokenize(b'123')))

        self.assertIn('missing token', detok.detokenize(b'1').error_message())
        self.assertIn('missing token', str(detok.detokenize(b'1')))
        self.assertIn('missing token', repr(detok.detokenize(b'1')))

        self.assertIn('missing token',
                      detok.detokenize(b'123').error_message())
        self.assertIn('missing token', str(detok.detokenize(b'123')))
        self.assertIn('missing token', repr(detok.detokenize(b'123')))
Example #17
    def test_detokenize_missing_data_is_unsuccessful(self):
        detok = detokenize.Detokenizer(
            tokens.Database([
                tokens.TokenizedStringEntry(2,
                                            '%s',
                                            date_removed=dt.datetime(1, 1, 1))
            ]))

        result = detok.detokenize(b'\x02\0\0\0')
        string, args, remaining = result.failures[0]
        self.assertEqual('%s', string)
        self.assertEqual(len(args), 1)
        self.assertEqual(b'', remaining)
        self.assertEqual(len(result.failures), 1)
        self.assertEqual('%s', str(result))
Example #18
def _detokenize_serial(databases: Iterable, device: serial.Serial,
                       baudrate: int, show_errors: bool, output: BinaryIO,
                       prefix: str) -> None:
    if output is sys.stdout:
        output = sys.stdout.buffer

    detokenizer = detokenize.Detokenizer(tokens.Database.merged(*databases),
                                         show_errors=show_errors)
    serial_device = serial.Serial(port=device, baudrate=baudrate)

    try:
        detokenize.detokenize_base64_live(detokenizer, serial_device, output,
                                          prefix)
    except KeyboardInterrupt:
        output.flush()
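
_detokenize_serial merges the given token databases, opens the named serial port, and feeds it to detokenize_base64_live, which rewrites prefixed Base64 messages in the stream and flushes the output on Ctrl-C. A hypothetical invocation is sketched below; the port name, baud rate, and CSV path are placeholders, the imports assume the modules live in the pw_tokenizer package, and it assumes database.load_token_database accepts a file path the same way Detokenizer does in the earlier examples.

import sys

from pw_tokenizer import database

# Hypothetical call; '/dev/ttyUSB0' and 'tokens.csv' are illustrative placeholders.
_detokenize_serial(
    databases=[database.load_token_database('tokens.csv')],
    device='/dev/ttyUSB0',  # forwarded to serial.Serial(port=...)
    baudrate=115200,
    show_errors=False,
    output=sys.stdout,      # replaced with sys.stdout.buffer inside the function
    prefix='$',
)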
Example #19
    def setUp(self):
        super().setUp()
        token = 0xbaad

        # Database with several conflicting tokens.
        self.detok = detokenize.Detokenizer(tokens.Database([
            tokens.TokenizedStringEntry(token, 'REMOVED', dt.datetime(9, 1, 1)),
            tokens.TokenizedStringEntry(token, 'newer'),
            tokens.TokenizedStringEntry(token, 'A: %d', dt.datetime(30, 5, 9)),
            tokens.TokenizedStringEntry(token, 'B: %c', dt.datetime(30, 5, 10)),
            tokens.TokenizedStringEntry(token, 'C: %s'),
            tokens.TokenizedStringEntry(token, '%d%u'),
            tokens.TokenizedStringEntry(token, '%s%u %d'),
            tokens.TokenizedStringEntry(1, '%s'),
            tokens.TokenizedStringEntry(1, '%d'),
            tokens.TokenizedStringEntry(2, 'Three %s %s %s'),
            tokens.TokenizedStringEntry(2, 'Five %d %d %d %d %s'),
        ]))  # yapf: disable
Example #20
    def setUp(self):
        super().setUp()
        db = database.load_token_database(
            io.BytesIO(ELF_WITH_TOKENIZER_SECTIONS))
        db.add([self.RECURSION_STRING, self.RECURSION_STRING_2])
        self.detok = detokenize.Detokenizer(db)
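
Putting the pieces together, a minimal end-to-end sketch would look like the following. It combines tokens.default_hash from Example #4 with a hand-built database as in Example #5; the pw_tokenizer package name and the one-length-byte '%s' argument encoding are assumptions, not something shown in the examples above.

import struct

from pw_tokenizer import detokenize, tokens

fmt = 'Hello, %s!'
token = tokens.default_hash(fmt)  # default hash used to derive the token
detok = detokenize.Detokenizer(
    tokens.Database([tokens.TokenizedStringEntry(token, fmt)]))

# Little-endian 32-bit token followed by a length-prefixed string argument.
payload = struct.pack('<I', token) + b'\x05world'
result = detok.detokenize(payload)
print(result.ok(), str(result))  # expected: True Hello, world!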