Example #1
 def testParsingFuncDef(self):
     self.assertEqual(
         ParseToAst('写九九表咋整:整完了。'),
         [
             Statement(
                 STMT_FUNC_DEF,
                 (
                     IdentifierToken('写九九表'),
                     [],  # Formal parameters.
                     []  # Function body.
                 ))
         ])
     self.assertEqual(
         ParseToAst('写九九表咋整:唠唠:1。整完了。'),
         [
             Statement(
                 STMT_FUNC_DEF,
                 (
                     IdentifierToken('写九九表'),
                     [],  # Formal parameters.
                     # Function body.
                     [
                         Statement(
                             STMT_SAY,
                             LiteralExpr(Token(TK_INTEGER_LITERAL, 1)))
                     ]))
         ])
Example #2
 def testParsingFuncDefWithParam(self):
   self.assertEqual(
       ParseToAst('【阶乘】(那啥)咋整:整完了。'),
       [Statement(STMT_FUNC_DEF,
                  (IdentifierToken('阶乘'),
                   [IdentifierToken('那啥')],  # Formal parameters.
                   []  # Function body.
                  ))])
Example #3
 def testParsingDecrements(self):
     self.assertEqual(ParseToAst('老王稍稍。'), [
         Statement(STMT_DEC_BY,
                   (IdentifierToken('老王'), IntegerLiteralExpr(1)))
     ])
     self.assertEqual(ParseToAst('老王稍三步。'), [
         Statement(STMT_DEC_BY,
                   (IdentifierToken('老王'), IntegerLiteralExpr(3)))
     ])
Example #4
 def testParsingIncrements(self):
     self.assertEqual(ParseToAst('老王走走。'), [
         Statement(STMT_INC_BY,
                   (IdentifierToken('老王'), IntegerLiteralExpr(1)))
     ])
     self.assertEqual(ParseToAst('老王走两步。'), [
         Statement(STMT_INC_BY,
                   (IdentifierToken('老王'), IntegerLiteralExpr(2)))
     ])
Example #5
 def testTokenizingDecrements(self):
     self.assertEqual(list(Tokenize('老王稍稍')), [
         IdentifierToken('老王'),
         Keyword('稍稍'),
     ])
     self.assertEqual(list(Tokenize('老王稍三步')), [
         IdentifierToken('老王'),
         Keyword('稍'),
         Token(TK_INTEGER_LITERAL, 3),
         Keyword('步'),
     ])
Example #6
 def testTokenizingIncrements(self):
     self.assertEqual(list(Tokenize('老王走走')), [
         IdentifierToken('老王'),
         Keyword('走走'),
     ])
     self.assertEqual(list(Tokenize('老王走两步')), [
         IdentifierToken('老王'),
         Keyword('走'),
         Token(TK_INTEGER_LITERAL, 2),
         Keyword('步'),
     ])
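Examples #4 and #6 exercise the same increment statement at the token level and at the AST level. The short sketch below runs both helpers on that statement to show how the token stream lines up with the parsed Statement; it is only a sketch, and the `dongbei` import path is an assumption (Tokenize and ParseToAst are the helpers used throughout these tests).

 # Minimal sketch; the module path `dongbei` is an assumption.
 from dongbei import Tokenize, ParseToAst

 source = '老王走两步。'             # "老王 walks two steps", i.e. increment 老王 by 2
 print(list(Tokenize(source)))      # identifier, Keyword('走'), integer literal 2, Keyword('步'), Keyword('。'); cf. Example #6
 print(ParseToAst(source))          # a single STMT_INC_BY statement; cf. Example #4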
Example #7
 def testTokenizingFuncDef(self):
     self.assertEqual(list(Tokenize('写九九表咋整:整完了。')), [
         IdentifierToken('写九九表'),
         Keyword('咋整:'),
         Keyword('整完了'),
         Keyword('。'),
     ])
Example #8
 def testParsingLoop(self):
   self.assertEqual(
       ParseToAst('老王从1到9磨叽:磨叽完了。'),
       [Statement(
           STMT_LOOP,
           (IdentifierToken('老王'),
            IntegerLiteralExpr(1),
            IntegerLiteralExpr(9),
            []))])
Example #9
 def testTokenizeCompound(self):
     self.assertEqual(list(Tokenize('开整:\n  唠唠:老王。\n整完了。')), [
         Keyword('开整:'),
         Keyword('唠唠'),
         Keyword(':'),
         IdentifierToken('老王'),
         Keyword('。'),
         Keyword('整完了'),
         Keyword('。'),
     ])
Example #10
 def testTokenizeLoop(self):
     self.assertEqual(list(Tokenize('老王从1到9磨叽:磨叽完了。')), [
         IdentifierToken('老王'),
         Keyword('从'),
         Token(TK_INTEGER_LITERAL, 1),
         Keyword('到'),
         Token(TK_INTEGER_LITERAL, 9),
         Keyword('磨叽:'),
         Keyword('磨叽完了'),
         Keyword('。'),
     ])
Example #11
 def testTokenizingFuncCall(self):
     self.assertEqual(list(Tokenize('整写九九表')), [
         Keyword('整'),
         IdentifierToken('写九九表'),
     ])
Example #12
 def testTokenizingConcat(self):
     self.assertEqual(list(Tokenize('老刘、二')), [
         IdentifierToken('老刘'),
         Keyword('、'),
         Token(TK_INTEGER_LITERAL, 2),
     ])
Example #13
 def testTokenize(self):
     self.assertEqual(list(BasicTokenize('【阶乘】')), [
         IdentifierToken('阶乘'),
     ])
     self.assertEqual(list(BasicTokenize('【 阶  乘   】')), [
         IdentifierToken('阶乘'),
     ])
     self.assertEqual(list(BasicTokenize('【阶乘】(那啥)')), [
         IdentifierToken('阶乘'),
         Keyword('('),
         Token(TK_CHAR, '那'),
         Token(TK_CHAR, '啥'),
         Keyword(')'),
     ])
     self.assertEqual(list(BasicTokenize('“ ”')), [
         Keyword('“'),
         Token(TK_STRING_LITERAL, ' '),
         Keyword('”'),
     ])
     self.assertEqual(list(BasicTokenize('“”')), [
         Keyword('“'),
         Token(TK_STRING_LITERAL, ''),
         Keyword('”'),
     ])
     self.assertEqual(list(BasicTokenize('“ A B ”')), [
         Keyword('“'),
         Token(TK_STRING_LITERAL, ' A B '),
         Keyword('”'),
     ])
     self.assertEqual(list(BasicTokenize('老张')), [
         Token(TK_CHAR, '老'),
         Token(TK_CHAR, '张'),
     ])
     self.assertEqual(list(BasicTokenize('  老 张   ')), [
         Token(TK_CHAR, '老'),
         Token(TK_CHAR, '张'),
     ])
     self.assertEqual(list(Tokenize('# 123456\n老张')),
                      [IdentifierToken('老张')])
     self.assertEqual(list(Tokenize('老张')), [IdentifierToken('老张')])
     self.assertEqual(ParseInteger('老张'), (None, '老张'))
     self.assertEqual(list(ParseChars('老张')), [IdentifierToken('老张')])
     self.assertEqual(
         list(Tokenize('老张是活雷锋')),
         [IdentifierToken('老张'), Keyword('是活雷锋')])
     self.assertEqual(list(Tokenize('老张是 活雷\n锋 。 ')), [
         IdentifierToken('老张'),
         Keyword('是活雷锋'),
         Keyword('。'),
     ])
     self.assertEqual(list(Tokenize('老张是活雷锋。\n老王是活雷锋。\n')), [
         IdentifierToken('老张'),
         Keyword('是活雷锋'),
         Keyword('。'),
         IdentifierToken('老王'),
         Keyword('是活雷锋'),
         Keyword('。'),
     ])
     self.assertEqual(list(Tokenize('老张装250。\n老王装老张。\n')), [
         IdentifierToken('老张'),
         Keyword('装'),
         Token(TK_INTEGER_LITERAL, 250),
         Keyword('。'),
         IdentifierToken('老王'),
         Keyword('装'),
         IdentifierToken('老张'),
         Keyword('。')
     ])
     self.assertEqual(list(Tokenize('唠唠:“你好”。')), [
         Keyword('唠唠'),
         Keyword(':'),
         Keyword('“'),
         Token(TK_STRING_LITERAL, '你好'),
         Keyword('”'),
         Keyword('。')
     ])
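The final example highlights the split between the two tokenizer layers: BasicTokenize emits raw TK_CHAR tokens for characters it does not otherwise recognize, while Tokenize groups those characters into identifiers and keywords. A minimal sketch of that contrast, again assuming the helpers are importable from a `dongbei` module (the path is an assumption):

 # Minimal sketch; the module path `dongbei` is an assumption.
 from dongbei import BasicTokenize, Tokenize

 print(list(BasicTokenize('老张')))   # two TK_CHAR tokens: '老' and '张' (see Example #13)
 print(list(Tokenize('老张')))        # a single IdentifierToken('老张')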