def test_assignment2(self):
        """A compact assignment ``a=1337;`` parses to a single Assignment node."""
        source = "a=1337;"
        result = Parser(Lexer(source, lexer.STRING_INPUT_FILE).tokenize()).parse()
        self.assertEqual(1, len(result))

        # Expected 1-based columns in "a=1337;": a=1, '='=2, 1337=3, ';'=7.
        identifier = Identifier(
            Token(TokenType.WORD, lexer.STRING_INPUT_FILE, 1, 1, value='a'))
        constant = Constant(
            Token(TokenType.NUMBER, lexer.STRING_INPUT_FILE, 1, 3, value='1337'))
        expected = Assignment(
            identifier,
            Token(TokenType.EQUALS, lexer.STRING_INPUT_FILE, 1, 2),
            constant,
            Token(TokenType.SEMICOLON, lexer.STRING_INPUT_FILE, 1, 7))
        self.assertEqual([expected], result)
    def test_array_simple5(self):
        """An empty array assignment ``array[] = {};`` parses to one Assignment."""
        source = "array[] = {};"
        result = Parser(Lexer(source, lexer.STRING_INPUT_FILE).tokenize()).parse()
        self.assertEqual(1, len(result))

        # Expected 1-based columns: array=1, '['=6, ']'=7, '='=9,
        # '{'=11, '}'=12, ';'=13.
        declaration = ArrayDeclaration(
            Identifier(
                Token(TokenType.WORD, lexer.STRING_INPUT_FILE, 1, 1,
                      value='array')),
            Token(TokenType.L_SQUARE, lexer.STRING_INPUT_FILE, 1, 6),
            Token(TokenType.R_SQUARE, lexer.STRING_INPUT_FILE, 1, 7))
        empty_array = Array(
            Token(TokenType.L_CURLY, lexer.STRING_INPUT_FILE, 1, 11),
            [],
            Token(TokenType.R_CURLY, lexer.STRING_INPUT_FILE, 1, 12))
        expected = Assignment(
            declaration,
            Token(TokenType.EQUALS, lexer.STRING_INPUT_FILE, 1, 9),
            empty_array,
            Token(TokenType.SEMICOLON, lexer.STRING_INPUT_FILE, 1, 13))

        self.assertEqual([expected], result)
    def test_assignment4(self):
        """A double-quoted string assignment parses to a single Assignment."""
        source = 'author = "Schwaggot";'
        result = Parser(Lexer(source, lexer.STRING_INPUT_FILE).tokenize()).parse()
        self.assertEqual(1, len(result))

        # Expected 1-based columns: author=1, '='=8, '"'=10,
        # Schwaggot=11, '"'=20, ';'=21.
        identifier = Identifier(
            Token(TokenType.WORD, lexer.STRING_INPUT_FILE, 1, 1,
                  value='author'))
        literal = StringLiteral([
            Token(TokenType.DOUBLE_QUOTES, lexer.STRING_INPUT_FILE, 1, 10),
            Token(TokenType.WORD, lexer.STRING_INPUT_FILE, 1, 11,
                  value='Schwaggot'),
            Token(TokenType.DOUBLE_QUOTES, lexer.STRING_INPUT_FILE, 1, 20),
        ])
        expected = Assignment(
            identifier,
            Token(TokenType.EQUALS, lexer.STRING_INPUT_FILE, 1, 8),
            literal,
            Token(TokenType.SEMICOLON, lexer.STRING_INPUT_FILE, 1, 21))
        self.assertEqual([expected], result)
    def test_assignment3(self):
        """A single-quoted string assignment parses to a single Assignment."""
        source = "a = 'Hello';"
        result = Parser(Lexer(source, lexer.STRING_INPUT_FILE).tokenize()).parse()
        self.assertEqual(1, len(result))

        # Expected 1-based columns: a=1, '='=3, "'"=5, Hello=6, "'"=11, ';'=12.
        identifier = Identifier(
            Token(TokenType.WORD, lexer.STRING_INPUT_FILE, 1, 1, value='a'))
        literal = StringLiteral([
            Token(TokenType.QUOTE, lexer.STRING_INPUT_FILE, 1, 5),
            Token(TokenType.WORD, lexer.STRING_INPUT_FILE, 1, 6,
                  value='Hello'),
            Token(TokenType.QUOTE, lexer.STRING_INPUT_FILE, 1, 11),
        ])
        expected = Assignment(
            identifier,
            Token(TokenType.EQUALS, lexer.STRING_INPUT_FILE, 1, 3),
            literal,
            Token(TokenType.SEMICOLON, lexer.STRING_INPUT_FILE, 1, 12))
        self.assertEqual([expected], result)
    def test_identifier3(self):
        """A bare word with a trailing digit parses to a single Identifier."""
        source = "class0"
        tokens = Lexer(source, lexer.STRING_INPUT_FILE).tokenize()
        # The whole input is a single WORD token.
        self.assertEqual(1, len(tokens))

        result = Parser(tokens).parse()
        self.assertEqual(1, len(result))

        expected = Identifier(
            Token(TokenType.WORD, lexer.STRING_INPUT_FILE, 1, 1,
                  value="class0"))
        self.assertEqual([expected], result)
    def test_array_declaration1(self):
        """``array[]`` without an assignment parses to one ArrayDeclaration."""
        source = "array[]"
        result = Parser(Lexer(source, lexer.STRING_INPUT_FILE).tokenize()).parse()
        self.assertEqual(1, len(result))

        # Expected 1-based columns: array=1, '['=6, ']'=7.
        expected = ArrayDeclaration(
            Identifier(
                Token(TokenType.WORD, lexer.STRING_INPUT_FILE, 1, 1,
                      value='array')),
            Token(TokenType.L_SQUARE, lexer.STRING_INPUT_FILE, 1, 6),
            Token(TokenType.R_SQUARE, lexer.STRING_INPUT_FILE, 1, 7))

        self.assertEqual([expected], result)