Example 1
    def test_token_identifier_with_non_alphanum_chars(self):
        lexer = Lexer(FileReader(io.StringIO('id@as')))
        token = lexer.read_next_token()

        self.assertEqual(token.token_type, TokenType.IDENTIFIER)
        self.assertEqual(token.token_value, 'id')
        self.assertRaises(UndefinedSymbolException, lexer.read_next_token)
Example 2
    def test_read_token_with_white_space_at_beginning(self):
        lexer = Lexer(FileReader(io.StringIO('        \t\n id')))
        token = lexer.read_next_token()

        self.assertIsInstance(token, Token)
        self.assertEqual(token.token_type, TokenType.IDENTIFIER)
        self.assertEqual(token.token_value, 'id')
Example 3
    def test_read_token(self):
        lexer = Lexer(FileReader(io.StringIO('id')))
        token = lexer.read_next_token()

        self.assertIsInstance(token, Token)
        self.assertEqual(token.token_type, TokenType.IDENTIFIER)
        self.assertEqual(token.token_value, 'id')
Example 4
    def assertParserTest(self, data_structure, result):
        file = io.StringIO(data_structure)
        lexer = Lexer(FileReader(file))
        parser = Parser(lexer)
        tree = parser.parse()

        self.assertEqual(''.join(str(tree).split()), ''.join(result.split()))
Example 5
    def test_read_token_all_tokens(self):
        for token_type in TokenType:
            with self.subTest(token=token_type):
                lexer = Lexer(FileReader(io.StringIO(token_type.value)))
                token = lexer.read_next_token()

                self.assertEqual(token.token_type, token_type)
                self.assertEqual(token.token_value, token_type.value)
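Example 5 round-trips every TokenType member through the lexer, which only works if each member's value is the literal lexeme it stands for. A minimal sketch of what such an enum could look like, as an assumption inferred from this test rather than the project's actual definition:

    from enum import Enum


    class TokenType(Enum):
        SQUARE_LEFT_BRACKET = '['
        SQUARE_RIGHT_BRACKET = ']'
        LESS = '<'
        END_OF_FILE = ''
        # ...plus IDENTIFIER, NUMBER_LITERAL, keywords, and the other
        # lexemes exercised by the tests, one representative value each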
Example 6
    def test_two_token_with_required_space(self):
        lexer = Lexer(FileReader(io.StringIO('123 asd')))
        token_1 = lexer.read_next_token()
        token_2 = lexer.read_next_token()

        self.assertEqual(token_1.token_type, TokenType.NUMBER_LITERAL)
        self.assertEqual(token_1.token_value, '123')
        self.assertEqual(token_2.token_type, TokenType.IDENTIFIER)
        self.assertEqual(token_2.token_value, 'asd')
Example 7
    def test_two_token_without_required_space_with_space(self):
        lexer = Lexer(FileReader(io.StringIO(' [ ] ')))
        token_1 = lexer.read_next_token()
        token_2 = lexer.read_next_token()

        self.assertEqual(token_1.token_type, TokenType.SQUARE_LEFT_BRACKET)
        self.assertEqual(token_1.token_value,
                         TokenType.SQUARE_LEFT_BRACKET.value)
        self.assertEqual(token_2.token_type, TokenType.SQUARE_RIGHT_BRACKET)
        self.assertEqual(token_2.token_value,
                         TokenType.SQUARE_RIGHT_BRACKET.value)
Example 8
def main(args):
    try:
        if len(args) < 3:
            print(
                "Missing arguments! (structure.miniasn data.bin name [params])")
            return

        # Parse the structure definition into a syntax tree.
        with open(args[0]) as structure_file:
            lexer = Lexer(FileReader(structure_file))
            parser = Parser(lexer)
            tree = parser.parse()

        # ByteReader consumes raw bytes, so open the data file in binary mode.
        with open(args[1], 'rb') as data_file:
            reader = ByteReader(data_file)
            print(tree.read_value(reader, args[2], arguments=args[3:]))
    except Exception as e:
        print(e)
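Since args[0] is opened as the structure file, main presumably receives sys.argv with the script name already stripped. A hypothetical entry point; the file and structure names here are placeholders, not taken from the project:

    import sys

    if __name__ == '__main__':
        # e.g.: python miniasn.py structure.miniasn data.bin Person
        main(sys.argv[1:])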
Example 9
    def assertMiniASNTest(self,
                          data_structure,
                          hex_data,
                          result,
                          name,
                          params=None):
        if params is None:
            params = []

        file = io.StringIO(data_structure)
        lexer = Lexer(FileReader(file))
        parser = Parser(lexer)
        tree = parser.parse()
        reader = ByteReader(io.BytesIO(bytearray.fromhex(hex_data)))

        self.assertEqual(
            ''.join(tree.read_value(reader, name, params).split()),
            ''.join(result.split()))
Example 10
    def test_instance(self):
        file_reader = FileReader(io.StringIO(''))

        self.assertIsInstance(file_reader, FileReader)
Example 11
    def test_preview_next_char_when_end_of_file(self):
        file_reader = FileReader(io.StringIO(''))
        preview_char = file_reader.preview_next_char()

        self.assertEqual(preview_char, '')
Example 12
    def test_preview_next_char(self):
        file_reader = FileReader(io.StringIO('a'))
        preview_char = file_reader.preview_next_char()
        char = file_reader.read_char()

        self.assertEqual(char, preview_char)
Example 13
    def test_update_position_with_newline(self):
        file_reader = FileReader(io.StringIO("\n"))
        file_reader.read_char()

        self.assertEqual(file_reader.current_line, 2)
        self.assertEqual(file_reader.current_column, 1)
Example 14
    def test_read_char_when_end_of_file(self):
        file_reader = FileReader(io.StringIO(''))
        char = file_reader.read_char()

        self.assertEqual(char, '')
Example 15
    def test_token_identifier_with_digits_at_end(self):
        lexer = Lexer(FileReader(io.StringIO('id123')))
        token = lexer.read_next_token()

        self.assertEqual(token.token_type, TokenType.IDENTIFIER)
Example 16
    def assertParserRaiseTest(self, data_structure, exception):
        file = io.StringIO(data_structure)
        lexer = Lexer(FileReader(file))
        parser = Parser(lexer)

        self.assertRaises(exception, parser.parse)
Example 17
    def test_undefined_symbol_after_token(self):
        lexer = Lexer(FileReader(io.StringIO('123@')))
        token = lexer.read_next_token()

        self.assertEqual(token.token_type, TokenType.NUMBER_LITERAL)
        self.assertRaises(UndefinedSymbolException, lexer.read_next_token)
Example 18
    def test_two_token_with_required_space_without_space(self):
        lexer = Lexer(FileReader(io.StringIO('123asd')))

        self.assertRaises(RequiredSpaceException, lexer.read_next_token)
Example 19
    def test_token_number(self):
        lexer = Lexer(FileReader(io.StringIO('123')))
        token = lexer.read_next_token()

        self.assertEqual(token.token_type, TokenType.NUMBER_LITERAL)
Example 20
    def test_read_token_when_no_token(self):
        lexer = Lexer(FileReader(io.StringIO('')))
        token = lexer.read_next_token()

        self.assertEqual(token.token_type, TokenType.END_OF_FILE)
Example 21
    def test_token_identifier_with_digits_at_beginning(self):
        lexer = Lexer(FileReader(io.StringIO('123ar')))

        self.assertRaises(RequiredSpaceException, lexer.read_next_token)
Example 22
    def test_when_file_does_not_exist(self):
        file_reader = FileReader(None)

        self.assertRaises(AttributeError, file_reader.read_char)
Example 23
    def test_undefined_symbol_after_end_of_token_which_may_be_sub_token(self):
        lexer = Lexer(FileReader(io.StringIO('<@')))
        token = lexer.read_next_token()

        self.assertEqual(token.token_type, TokenType.LESS)
        self.assertRaises(UndefinedSymbolException, lexer.read_next_token)
Example 24
    def test_read_char(self):
        file_reader = FileReader(io.StringIO('a'))
        char = file_reader.read_char()

        self.assertEqual(char, 'a')
Example 25
    def test_instance(self):
        lexer = Lexer(FileReader(io.StringIO('')))
        parser = Parser(lexer)

        self.assertIsInstance(parser, Parser)
Example 26
    def test_get_token(self):
        lexer = Lexer(FileReader(io.StringIO('id')))
        token = lexer.read_next_token()
        get_token = lexer.get_token()

        self.assertEqual(token, get_token)
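Taken together, the FileReader tests (Examples 10-14, 22, and 24) pin down a small interface: read_char consumes one character and returns '' at end of file, preview_next_char peeks without consuming, current_line and current_column track the reading position, and a None file makes read_char fail with AttributeError. A minimal sketch consistent with that behavior, reconstructed from the tests as an illustration rather than the project's actual implementation:

    class FileReader:
        def __init__(self, file):
            self.file = file  # any file-like object; None fails on first read
            # 1-based position; the tests only pin down the values after '\n'
            self.current_line = 1
            self.current_column = 1
            self._peeked = None  # one-character lookahead buffer

        def read_char(self):
            # Consume the buffered preview first, if there is one.
            if self._peeked is not None:
                char, self._peeked = self._peeked, None
            else:
                char = self.file.read(1)  # AttributeError when file is None
            if char == '\n':
                self.current_line += 1
                self.current_column = 1
            elif char:
                self.current_column += 1
            return char  # '' at end of file

        def preview_next_char(self):
            # Look at the next character without consuming it.
            if self._peeked is None:
                self._peeked = self.file.read(1)
            return self._peeked  # '' at end of file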