Example no. 1
0
    def test_multiline_not_closed(self):
        '''Test that lexer fails when a multiline string isn't closed'''
        # Input opens a triple-quoted string on line 2 and never closes it.
        data = '\n"""one\n\n\n\ntwo'
        lexer.input(data)

        # assertRaisesRegexp was deprecated in Python 3.2 in favour of the
        # behaviourally identical assertRaisesRegex.
        with self.assertRaisesRegex(lex.LexError, 'Multiline string not closed at line 2 col 1'):
            list(lexer)
Example no. 2
0
    def test_bad_indentation(self):
        'Test lexer failing on bad dedentation'
        # Line 3 dedents to col 2, which matches no enclosing indent level.
        data = 'one\n    two\n  three'
        lexer.input(data)

        # assertRaisesRegexp was deprecated in Python 3.2 in favour of the
        # behaviourally identical assertRaisesRegex.
        with self.assertRaisesRegex(lex.LexError, 'Invalid indentation at line 3 col 2'):
            list(lexer)
Example no. 3
0
    def test_multiline_not_closed(self):
        '''Test that lexer fails when a multiline string isn't closed'''
        # Input opens a triple-quoted string on line 2 and never closes it.
        data = '\n"""one\n\n\n\ntwo'
        lexer.input(data)

        # assertRaisesRegexp is a deprecated alias (since Python 3.2);
        # use assertRaisesRegex instead.
        with self.assertRaisesRegex(
                lex.LexError, 'Multiline string not closed at line 2 col 1'):
            list(lexer)
Example no. 4
0
    def test_bad_indentation(self):
        'Test lexer failing on bad dedentation'
        # Line 3 dedents to col 2, which matches no enclosing indent level.
        data = 'one\n    two\n  three'
        lexer.input(data)

        # assertRaisesRegexp is a deprecated alias (since Python 3.2);
        # use assertRaisesRegex instead.
        with self.assertRaisesRegex(lex.LexError,
                                    'Invalid indentation at line 3 col 2'):
            list(lexer)
Example no. 5
0
    def test_basic(self):
        'Basic lexing test'
        # Lex a single call expression and compare the emitted token kinds.
        source = 'print("It\'s Never Lupus!")'
        lexer.input(source)

        observed = [token.type for token in lexer]
        self.assertEqual(observed, ['ID', '(', 'STRING', ')'])
Example no. 6
0
    def test_whitespace(self):
        'Test that lexer ignores in/trailing whitespace, but not leading whitespace'
        # Trailing blanks after ':' and ')' must not produce tokens, while the
        # leading 4-space indent must yield INDENT/DEDENT.
        lexer.input('if a>0: \n    print(a + 10) \n    return a')

        expected = ['IF', 'ID', 'LCOMP', 'NUMBER', ':', 'INDENT', 'ID', '(',
                    'ID', '+', 'NUMBER', ')', 'RETURN', 'ID', 'DEDENT']
        self.assertEqual([token.type for token in lexer], expected)
Example no. 7
0
    def test_indentation(self):
        'Test lexing indentation handling'
        # Mixes blank-ish lines, multi-level indents, and a final dedent.
        lexer.input('one\n  \n    two\n    three\n        four\n          five\n\n    six')

        expected = ['ID', 'INDENT', 'ID', 'ID', 'INDENT', 'ID', 'INDENT', 'ID',
                    'DEDENT', 'DEDENT', 'ID', 'DEDENT']
        self.assertEqual([token.type for token in lexer], expected)
Example no. 8
0
    def test_basic(self):
        'Basic lexing test'
        lexer.input('print("It\'s Never Lupus!")')

        # Materialise the token stream, then compare only the token kinds.
        stream = tuple(lexer)
        self.assertEqual([tok.type for tok in stream],
                         ['ID', '(', 'STRING', ')'])
Example no. 9
0
    def test_whitespace(self):
        'Test that lexer ignores in/trailing whitespace, but not leading whitespace'
        source = 'if a>0: \n    print(a + 10) \n    return a'
        lexer.input(source)

        # Trailing spaces yield no tokens; the leading indent yields INDENT/DEDENT.
        self.assertEqual(
            [t.type for t in lexer],
            ['IF', 'ID', 'LCOMP', 'NUMBER', ':', 'INDENT', 'ID', '(', 'ID',
             '+', 'NUMBER', ')', 'RETURN', 'ID', 'DEDENT'])
Example no. 10
0
    def test_types(self):
        'Basic type lexing test'
        lexer.input('1\n1.5\nTrue\nFalse\nNone')

        # Check both token kind and the Python value each token carries.
        types, values = [], []
        for token in lexer:
            types.append(token.type)
            values.append(token.value)
        self.assertEqual(types, ['NUMBER', 'NUMBER', 'TRUE', 'FALSE', 'NONE'])
        self.assertEqual(values, [1, 1.5, True, False, None])
Example no. 11
0
    def test_numbers(self):
        'Basic number test'
        lexer.input('1\n1.0\n1.1\n')

        stream = list(lexer)
        self.assertEqual([tok.type for tok in stream],
                         ['NUMBER', 'NUMBER', 'NUMBER'])
        # Compare reprs so int 1 and float 1.0 stay distinguishable (== would
        # treat them as equal).
        self.assertEqual([repr(tok.value) for tok in stream],
                         ['1', '1.0', '1.1'])
Example no. 12
0
    def test_indentation(self):
        'Test lexing indentation handling'
        source = 'one\n  \n    two\n    three\n        four\n          five\n\n    six'
        lexer.input(source)

        # Three indent levels open; two close before 'six', one at EOF.
        expected_types = [
            'ID', 'INDENT', 'ID', 'ID', 'INDENT', 'ID', 'INDENT', 'ID',
            'DEDENT', 'DEDENT', 'ID', 'DEDENT',
        ]
        self.assertEqual([token.type for token in lexer], expected_types)
Example no. 13
0
    def test_types(self):
        'Basic type lexing test'
        source = '1\n1.5\nTrue\nFalse\nNone'
        lexer.input(source)

        # One pass over the stream; assert kinds and carried Python values.
        stream = list(lexer)
        self.assertEqual([tok.type for tok in stream],
                         ['NUMBER', 'NUMBER', 'TRUE', 'FALSE', 'NONE'])
        self.assertEqual([tok.value for tok in stream],
                         [1, 1.5, True, False, None])
Example no. 14
0
    def test_numbers(self):
        'Basic number test'
        lexer.input('1\n1.0\n1.1\n')

        # Collect kind and repr per token; repr keeps 1 and 1.0 distinct.
        types, reprs = [], []
        for token in lexer:
            types.append(token.type)
            reprs.append(repr(token.value))
        self.assertEqual(types, ['NUMBER', 'NUMBER', 'NUMBER'])
        self.assertEqual(reprs, ['1', '1.0', '1.1'])
Example no. 15
0
    def test_strings(self):
        'Test lexing string/multiline-string handling'
        # Single-, double-, and triple-quoted strings, some spanning lines.
        source = '"str1" id1\n  \'str2\'\n  id2\n  """str3\nstr4"""\nid3\n\'\'\'\n\nstr5\n\'\'\'\nid4'
        lexer.input(source)

        stream = list(lexer)
        self.assertEqual(
            [tok.type for tok in stream],
            ['STRING', 'ID', 'INDENT', 'STRING', 'ID', 'STRING', 'DEDENT',
             'ID', 'STRING', 'ID'])
        self.assertEqual(
            [tok.value for tok in stream],
            ['str1', 'id1', '', 'str2', 'id2', 'str3\nstr4', '', 'id3',
             '\n\nstr5\n', 'id4'])
        # Line numbers must track the *start* of each token, including
        # tokens that follow a multiline string.
        self.assertEqual(
            [tok.lineno for tok in stream],
            [1, 1, 2, 2, 3, 4, 6, 6, 7, 11])
Example no. 16
0
    def test_strings(self):
        'Test lexing string/multiline-string handling'
        lexer.input('"str1" id1\n  \'str2\'\n  id2\n  """str3\nstr4"""\nid3\n\'\'\'\n\nstr5\n\'\'\'\nid4')

        # Compare kind, value, and starting line number per token.
        stream = list(lexer)
        for attr, expected in [
                ('type', ['STRING', 'ID', 'INDENT', 'STRING', 'ID', 'STRING',
                          'DEDENT', 'ID', 'STRING', 'ID']),
                ('value', ['str1', 'id1', '', 'str2', 'id2', 'str3\nstr4', '',
                           'id3', '\n\nstr5\n', 'id4']),
                ('lineno', [1, 1, 2, 2, 3, 4, 6, 6, 7, 11])]:
            self.assertEqual([getattr(tok, attr) for tok in stream], expected)