import unittest

# NOTE: the original listing shows only the test methods. The module path in
# the import and the test-class name below are assumptions; adjust them to
# match the package under test.
from lexer import Lexer, Source, SyntaxKind


class LexerTests(unittest.TestCase):
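    # Inferred from the assertions below (not a documented contract):
    # Lexer.tokenize() yields (kind, text) pairs, so tokens[i][0] is a
    # SyntaxKind member and tokens[i][1] is the token's source text.
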
    def test_number_one_digit(self):
        for digit in "0123456789":
            with self.subTest(digit):
                tokens = list(Lexer(Source(digit)).tokenize())
                self.assertEqual(1, len(tokens))
                self.assertEqual(SyntaxKind.NumericLiteralToken, tokens[0][0])
                self.assertEqual(digit, tokens[0][1])

    def test_number_dot_number_dot(self):
        # "3.14" is consumed as a single numeric literal; the second dot
        # cannot extend it and comes back as a bad token.
        tokens = list(Lexer(Source("3.14.")).tokenize())
        self.assertEqual(2, len(tokens))
        self.assertEqual(SyntaxKind.NumericLiteralToken, tokens[0][0])
        self.assertEqual("3.14", tokens[0][1])
        self.assertEqual(SyntaxKind.BadToken, tokens[1][0])
        self.assertEqual(".", tokens[1][1])

    def test_number_plus_number_equals_number_expression(self):
        tokens = list(Lexer(Source("2 + 3 = 5;")).tokenize())
        self.assertEqual(6, len(tokens))

        self.assertEqual(SyntaxKind.NumericLiteralToken, tokens[0][0])
        self.assertEqual("2", tokens[0][1])

        self.assertEqual(SyntaxKind.PlusToken, tokens[1][0])

        self.assertEqual(SyntaxKind.NumericLiteralToken, tokens[2][0])
        self.assertEqual("3", tokens[2][1])

        self.assertEqual(SyntaxKind.EqualsToken, tokens[3][0])

        self.assertEqual(SyntaxKind.NumericLiteralToken, tokens[4][0])
        self.assertEqual("5", tokens[4][1])

        self.assertEqual(SyntaxKind.SemicolonToken, tokens[5][0])

    def test_less_than_token(self):
        tokens = list(Lexer(Source("<")).tokenize())
        self.assertEqual(1, len(tokens))
        self.assertEqual(SyntaxKind.LessThanToken, tokens[0][0])

    def test_not_date(self):
        # A double-quoted string that merely contains date-like text must not
        # be mistaken for a date literal.
        tokens = list(
            Lexer(Source('''"ДФ=dd.MM.yyyy; ДП='Нет даты'"''')).tokenize())
        self.assertNotEqual(SyntaxKind.DateLiteralToken, tokens[0][0])

    def test_empty_date(self):
        # Single quotes mark a date literal; '00010101' (year 1, month 1,
        # day 1) is the conventional "empty" date.
        tokens = list(Lexer(Source("'00010101'")).tokenize())
        self.assertEqual(1, len(tokens))
        self.assertEqual(SyntaxKind.DateLiteralToken, tokens[0][0])
        self.assertEqual("00010101", tokens[0][1])

    def test_question_token(self):
        tokens = list(Lexer(Source("?")).tokenize())
        self.assertEqual(1, len(tokens))
        self.assertEqual(SyntaxKind.QuestionToken, tokens[0][0])

    def test_slash_token(self):
        tokens = list(Lexer(Source("/")).tokenize())
        self.assertEqual(1, len(tokens))
        self.assertEqual(SyntaxKind.SlashToken, tokens[0][0])

    def test_tilde_token(self):
        tokens = list(Lexer(Source("~")).tokenize())
        self.assertEqual(1, len(tokens))
        self.assertEqual(SyntaxKind.TildeToken, tokens[0][0])

    def test_dot_token(self):
        tokens = list(Lexer(Source(".")).tokenize())
        self.assertEqual(1, len(tokens))
        self.assertEqual(SyntaxKind.DotToken, tokens[0][0])

    def test_percent_token(self):
        tokens = list(Lexer(Source("%")).tokenize())
        self.assertEqual(1, len(tokens))
        self.assertEqual(SyntaxKind.PercentToken, tokens[0][0])

    def test_multiline_string(self):
        # A string continues onto the next line when that line starts with
        # "|"; the continuation marker stays in the token text.
        tokens = list(Lexer(Source('"многострочная\n|строка"')).tokenize())
        self.assertEqual(1, len(tokens))
        self.assertEqual(SyntaxKind.StringLiteralToken, tokens[0][0])
        self.assertEqual('многострочная\n|строка', tokens[0][1])

    def test_semicolon_token(self):
        tokens = list(Lexer(Source(";")).tokenize())
        self.assertEqual(1, len(tokens))
        self.assertEqual(SyntaxKind.SemicolonToken, tokens[0][0])

    def test_number_many_digits(self):
        tokens = list(Lexer(Source("554433")).tokenize())
        self.assertEqual(1, len(tokens))
        self.assertEqual(SyntaxKind.NumericLiteralToken, tokens[0][0])
        self.assertEqual("554433", tokens[0][1])

    def test_close_bracket_token(self):
        tokens = list(Lexer(Source("]")).tokenize())
        self.assertEqual(1, len(tokens))
        self.assertEqual(SyntaxKind.CloseBracketToken, tokens[0][0])

    def test_open_bracket_token(self):
        tokens = list(Lexer(Source("[")).tokenize())
        self.assertEqual(1, len(tokens))
        self.assertEqual(SyntaxKind.OpenBracketToken, tokens[0][0])

    def test_equals_token(self):
        tokens = list(Lexer(Source("=")).tokenize())
        self.assertEqual(1, len(tokens))
        self.assertEqual(SyntaxKind.EqualsToken, tokens[0][0])

    def test_close_paren_token(self):
        tokens = list(Lexer(Source(")")).tokenize())
        self.assertEqual(1, len(tokens))
        self.assertEqual(SyntaxKind.CloseParenToken, tokens[0][0])

    def test_asterisk_token(self):
        tokens = list(Lexer(Source("*")).tokenize())
        self.assertEqual(1, len(tokens))
        self.assertEqual(SyntaxKind.AsteriskToken, tokens[0][0])

    def test_comma_token(self):
        tokens = list(Lexer(Source(",")).tokenize())
        self.assertEqual(1, len(tokens))
        self.assertEqual(SyntaxKind.CommaToken, tokens[0][0])

    def test_empty_list(self):
        # Empty source yields an empty token stream, never None.
        tokens = list(Lexer(Source("")).tokenize())
        self.assertIsNotNone(tokens)
        self.assertFalse(tokens)

    def test_string_with_quotation_mark(self):
        # A doubled quotation mark inside a string escapes a literal quote.
        tokens = list(Lexer(Source('"какая-то ""строка"""')).tokenize())
        self.assertEqual(1, len(tokens))
        self.assertEqual(SyntaxKind.StringLiteralToken, tokens[0][0])
        self.assertEqual('какая-то "строка"', tokens[0][1])

    def test_greater_than_token(self):
        tokens = list(Lexer(Source(">")).tokenize())
        self.assertEqual(1, len(tokens))
        self.assertEqual(SyntaxKind.GreaterThanToken, tokens[0][0])

    def test_empty_string(self):
        tokens = list(Lexer(Source('""')).tokenize())
        self.assertEqual(1, len(tokens))
        self.assertEqual(SyntaxKind.StringLiteralToken, tokens[0][0])
        self.assertEqual("", tokens[0][1])

    def test_ampersand_token(self):
        tokens = list(Lexer(Source("&")).tokenize())
        self.assertEqual(1, len(tokens))
        self.assertEqual(SyntaxKind.AmpersandToken, tokens[0][0])
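

# Standard unittest entry point so the suite can be run directly.
if __name__ == "__main__":
    unittest.main()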