예제 #1
0
    def test_throw_exception_with_has_error_characters(self):
        """An invalid operand spelling ('#V0') must flip the error flag."""
        source = "ADD #V0, 0xFF"

        lexical.tokenize(source)

        self.assertTrue(logger.has_error)
예제 #2
0
    def test_tokenize_sound_timer(self):
        """The ST operand of an LD instruction tokenizes as T_SOUND."""
        source = "LD ST, V0"

        result = lexical.tokenize(source)

        self.assertEqual('T_SOUND', result[2]['class'])
예제 #3
0
    def test_analyze_entire_ast(self):
        """Semantic analysis of a valid two-instruction program returns truthy."""

        # Arrange:
        code = "LD VA, #02\nLD VB, #02\n"
        tokens = lexical.tokenize(code)
        ast = syntactic.Ast(tokens)

        # Act / Assert:
        # (the assertion section was previously mislabeled "# Arrange:")
        self.assertTrue(semantic.analyze(ast))
예제 #4
0
    def test_tokenize_delay_timer(self):
        """The DT operand of an LD instruction tokenizes as T_DELAY."""
        source = "LD V0, DT"

        result = lexical.tokenize(source)

        self.assertEqual('T_DELAY', result[5]['class'])
예제 #5
0
    def test_tokenize_keyboard(self):
        """The K operand of an LD instruction tokenizes as T_KEYBOARD."""
        source = "LD V0, K"

        result = lexical.tokenize(source)

        self.assertEqual('T_KEYBOARD', result[5]['class'])
예제 #6
0
    def test_tokenize_font(self):
        """The F operand of an LD instruction tokenizes as T_FONT."""
        source = "LD F, V0"

        result = lexical.tokenize(source)

        self.assertEqual('T_FONT', result[2]['class'])
예제 #7
0
    def test_tokenize_binary(self):
        """The B operand of an LD instruction tokenizes as T_BINARY."""
        source = "LD B, V0"

        result = lexical.tokenize(source)

        self.assertEqual('T_BINARY', result[2]['class'])
예제 #8
0
    def test_tokenize_value(self):
        """A bare decimal literal tokenizes as T_NUMBER."""
        source = "LD V1, 2"

        result = lexical.tokenize(source)

        self.assertEqual('T_NUMBER', result[5]['class'])
예제 #9
0
    def test_tokenize_registerI(self):
        """The I operand of an LD instruction tokenizes as T_REGISTER_I."""
        source = "LD I, V0"

        result = lexical.tokenize(source)

        self.assertEqual('T_REGISTER_I', result[2]['class'])
예제 #10
0
    def test_should_generate_simple_ast(self):
        """Building an Ast from a single valid instruction yields at least one node."""
        source = "LD VA, 0x02\n"
        stream = lexical.tokenize(source)

        tree = syntactic.Ast(stream)

        self.assertTrue(len(tree.nodes) > 0)
예제 #11
0
    def test_tokenize_label(self):
        """A trailing-colon identifier at line start tokenizes as T_LABEL."""
        source = "Start:\n    ADD V0, 0xFF"

        result = lexical.tokenize(source)

        label = result[0]
        self.assertEqual('T_LABEL', label['class'])
        self.assertEqual('Start:', label['lexeme'])
예제 #12
0
    def test_tokenize_register(self):
        """A Vx operand tokenizes as T_REGISTER with its lexeme preserved."""
        source = "ADD V0, #EF"

        result = lexical.tokenize(source)

        register = result[2]
        self.assertEqual('T_REGISTER', register['class'])
        self.assertEqual('V0', register['lexeme'])
예제 #13
0
    def test_tokenize_nibble(self):
        """The nibble argument of DRW tokenizes as a T_NUMBER."""
        source = "DRW V0, V1, 15"

        result = lexical.tokenize(source)

        nibble = result[8]
        self.assertEqual('T_NUMBER', nibble['class'])
        self.assertEqual('15', nibble['lexeme'])
예제 #14
0
    def test_log_when_repeat_symbol(self):
        """Defining the same label twice must flip the error flag."""
        source = "Draw:\n   DRW V0, V1, 0x1\nDraw:    LD, V0, 0x40\n    LD DT, V0"
        stream = lexical.tokenize(source)

        syntactic.Ast(stream)

        self.assertTrue(logger.has_error)
예제 #15
0
    def test_construct_symbolic_table_in_ast(self):
        """Two distinct labels in the source yield two symbol-table entries."""

        # Arrange:
        code = "Draw:\n   DRW V0, V1, 0x1\nPlay:    LD, V0, 0x40\n    LD DT, V0"
        tokens = lexical.tokenize(code)

        # Act:
        ast = syntactic.Ast(tokens)

        # Assert:
        # assertEquals is a deprecated alias (removed in Python 3.12); use
        # assertEqual, expected value first, matching the rest of this file.
        self.assertEqual(2, len(ast.symbols))
예제 #16
0
    def test_throws_syntactic_error_when_initialize_with_has_error_value(self):
        """A statement beginning with registers (no command) must flip the error flag."""
        source = "V0, V1"
        stream = lexical.tokenize(source)

        syntactic.Ast(stream)

        self.assertTrue(logger.has_error)
예제 #17
0
    def test_throws_syntactic_error_when_sequence_is_has_error(self):
        """An LD with a dangling comma and no operands must flip the error flag."""
        source = "LD ,"
        stream = lexical.tokenize(source)

        syntactic.Ast(stream)

        self.assertTrue(logger.has_error)
예제 #18
0
    def test_tokenize_byte(self):
        """A decimal byte operand tokenizes as T_NUMBER with its lexeme preserved."""
        source = "ADD V0, 239"

        result = lexical.tokenize(source)

        byte = result[5]
        self.assertEqual('T_NUMBER', byte['class'])
        self.assertEqual('239', byte['lexeme'])
예제 #19
0
    def test_tokenize_asm(self):
        """Tokenizing a full fixture program produces a non-empty token stream."""
        fixture = helpers.FIXTURES_PATH + '/pong.asm'
        with open(fixture) as fd:
            source = fd.read()

        result = lexical.tokenize(source)

        self.assertTrue(len(result) > 0)
예제 #20
0
    def test_tokenize_command(self):
        """The mnemonic at the start of a statement tokenizes as T_COMMAND."""
        source = "ADD V0, #EF"

        result = lexical.tokenize(source)

        command = result[0]
        self.assertEqual('T_COMMAND', command['class'])
        self.assertEqual('ADD', command['lexeme'])
예제 #21
0
    def test_tokenize_addr(self):
        """A '#'-prefixed hex address tokenizes as T_CONSTANT."""
        source = "JMP #FFF"

        result = lexical.tokenize(source)

        addr = result[2]
        self.assertEqual('T_CONSTANT', addr['class'])
        self.assertEqual('#FFF', addr['lexeme'])
예제 #22
0
    def test_tokenize_eol(self):
        """A newline after a label tokenizes as T_EOL."""
        source = "Start:\n  ADD V0, #EF"

        result = lexical.tokenize(source)

        eol = result[1]
        self.assertEqual('T_EOL', eol['class'])
        self.assertEqual('\n', eol['lexeme'])
예제 #23
0
    def test_tokenize_name(self):
        """A bare identifier operand (jump target) tokenizes as T_NAME."""
        source = "JMP Draw"

        result = lexical.tokenize(source)

        name = result[2]
        self.assertEqual('T_NAME', name['class'])
        self.assertEqual('Draw', name['lexeme'])
예제 #24
0
    def test_show_line_from_asm(self):
        """Tokens carry the 1-based source line they were read from."""
        source = "ADD V0, #FF\nLD V0, #FE"

        result = lexical.tokenize(source)

        self.assertEqual(1, result[0]['line'])
        self.assertEqual(2, result[7]['line'])
예제 #25
0
    def test_tokenize_whitespace(self):
        """The space between mnemonic and operand tokenizes as T_WHITESPACE."""
        source = "ADD V0, #EF"

        result = lexical.tokenize(source)

        blank = result[1]
        self.assertEqual('T_WHITESPACE', blank['class'])
        self.assertEqual(' ', blank['lexeme'])
예제 #26
0
    def test_show_column_from_asm(self):
        """Tokens carry the 1-based source column they start at."""
        source = "ADD V0, #FF\nLD V0, #FE"

        result = lexical.tokenize(source)

        self.assertEqual(1, result[0]['column'])
        self.assertEqual(5, result[2]['column'])
        self.assertEqual(1, result[7]['column'])
예제 #27
0
    def test_validate_if_name_exists_in_symbol_table(self):
        """Jumping to a label defined in the program raises no semantic error."""

        # Arrange:
        code = "Play: LD VA, 2\nJP Args\nArgs: DRW V0, V1, 1"
        tokens = lexical.tokenize(code)
        ast = syntactic.Ast(tokens)

        # Act:
        semantic.analyze(ast)

        # Assert:  (was mislabeled "# Arrange:")
        self.assertFalse(logger.has_error)
예제 #28
0
    def test_has_error_analyze_entire_ast(self):
        """Referencing an undefined name / invalid jump target flips the error flag."""

        # Arrange:
        code = "LD VA, Play\nJP 409\n"
        tokens = lexical.tokenize(code)
        ast = syntactic.Ast(tokens)

        # Act:
        semantic.analyze(ast)

        # Assert:  (was mislabeled "# Arrange:")
        self.assertTrue(logger.has_error)
예제 #29
0
    def test_convert_SE_3XNN_node_to_opcode(self):
        """'SE V0, 255' assembles to the 3XNN opcode 0x30FF."""

        # Arrange:
        code = "SE V0, 255"
        tokens = lexical.tokenize(code)
        ast = syntactic.Ast(tokens)
        semantic.analyze(ast)

        # Act:
        opcodes = assembler.generate(ast)

        # Assert:  (was mislabeled "# Arrange:")
        self.assertEqual(self.pack('30FF'), opcodes[0])
예제 #30
0
    def test_tokenize_comment(self):
        """A ';' line tokenizes as T_COMMENT (newline excluded), then T_EOL."""
        source = "; CHIP8 Assembler\n"

        result = lexical.tokenize(source)

        comment = result[0]
        self.assertEqual('T_COMMENT', comment['class'])
        self.assertEqual('; CHIP8 Assembler', comment['lexeme'])

        self.assertEqual('T_EOL', result[1]['class'])