def testTokenizingDecrements(self):
    """Decrement statements: bare '稍稍' and '稍 <n> 步' forms.

    NOTE(review): another testTokenizingDecrements appears later in this
    file; if both are in the same class, the later one shadows this one —
    confirm and rename if both should run.
    """
    self.assertEqual(
        list(Tokenize('老王稍稍')),
        [IdentifierToken('老王'),
         Keyword('稍稍')])
    self.assertEqual(
        list(Tokenize('老王稍三步')),
        [IdentifierToken('老王'),
         Keyword('稍'),
         Token(TK_INTEGER_LITERAL, 3),
         Keyword('步')])
def testTokenizingIncrements(self):
    """Increment statements: bare '走走' and '走 <n> 步' forms.

    NOTE(review): another testTokenizingIncrements appears later in this
    file; if both are in the same class, the later one shadows this one.
    """
    self.assertEqual(
        list(Tokenize('老王走走')),
        [IdentifierToken('老王'),
         Keyword('走走')])
    self.assertEqual(
        list(Tokenize('老王走两步')),
        [IdentifierToken('老王'),
         Keyword('走'),
         Token(TK_INTEGER_LITERAL, 2),
         Keyword('步')])
def testTokenizingDecrements(self):
    """Decrement statements ('退退' / '退 <n> 步'), expected as raw Tokens."""
    expected_short = [
        Token(TK_IDENTIFIER, u'老王'),
        Keyword(u'退退'),
    ]
    self.assertEqual(list(Tokenize(u'老王退退')), expected_short)
    expected_counted = [
        Token(TK_IDENTIFIER, u'老王'),
        Keyword(u'退'),
        Token(TK_INTEGER_LITERAL, 3),
        Keyword(u'步'),
    ]
    self.assertEqual(list(Tokenize(u'老王退三步')), expected_counted)
def testTokenizingIncrements(self):
    """Increment statements ('走走' / '走 <n> 步'), expected as raw Tokens."""
    expected_short = [
        Token(TK_IDENTIFIER, u'老王'),
        Keyword(u'走走'),
    ]
    self.assertEqual(list(Tokenize(u'老王走走')), expected_short)
    expected_counted = [
        Token(TK_IDENTIFIER, u'老王'),
        Keyword(u'走'),
        Token(TK_INTEGER_LITERAL, 2),
        Keyword(u'步'),
    ]
    self.assertEqual(list(Tokenize(u'老王走两步')), expected_counted)
def testTokenizingFuncDef(self):
    """A function definition: name, '咋整:' opener, '整完了' closer, period.

    NOTE(review): a second testTokenizingFuncDef exists in this file; if
    both are in one class, the later definition shadows this one.
    """
    expected = [
        Token(TK_IDENTIFIER, '写九九表'),
        Keyword('咋整:'),
        Keyword('整完了'),
        Keyword('。'),
    ]
    self.assertEqual(list(Tokenize('写九九表咋整:整完了。')), expected)
def testTokenizingFuncDef(self):
    """A function definition, with the name expected as an IdentifierToken."""
    expected = [
        IdentifierToken('写九九表'),
        Keyword('咋整:'),
        Keyword('整完了'),
        Keyword('。'),
    ]
    self.assertEqual(list(Tokenize('写九九表咋整:整完了。')), expected)
def testTokenizeCompound(self):
    """A compound block '开整: ... 整完了。' containing one statement.

    NOTE(review): a second testTokenizeCompound exists in this file; if
    both are in one class, the later definition shadows this one.
    """
    expected = [
        Keyword('开整:'),
        Keyword('唠唠'),
        Keyword(':'),
        Token(TK_IDENTIFIER, '老王'),
        Keyword('。'),
        Keyword('整完了'),
        Keyword('。'),
    ]
    self.assertEqual(
        list(Tokenize('开整:\n 唠唠:老王。\n整完了。')), expected)
def testTokenizeCompound(self):
    """A compound block, with the identifier expected as an IdentifierToken."""
    expected = [
        Keyword('开整:'),
        Keyword('唠唠'),
        Keyword(':'),
        IdentifierToken('老王'),
        Keyword('。'),
        Keyword('整完了'),
        Keyword('。'),
    ]
    self.assertEqual(
        list(Tokenize('开整:\n 唠唠:老王。\n整完了。')), expected)
def testTokenizeLoop(self):
    """A '从 <a> 到 <b> 磨叽:' loop header and its '磨叽完了' terminator."""
    expected = [
        Token(TK_IDENTIFIER, '老王'),
        Keyword('从'),
        Token(TK_INTEGER_LITERAL, 1),
        Keyword('到'),
        Token(TK_INTEGER_LITERAL, 9),
        Keyword('磨叽:'),
        Keyword('磨叽完了'),
        Keyword('。'),
    ]
    self.assertEqual(list(Tokenize('老王从1到9磨叽:磨叽完了。')), expected)
def testTokenizeArithmetic(self):
    """Mixed Arabic and Chinese numerals with the four arithmetic keywords."""
    expected = [
        Token(TK_INTEGER_LITERAL, 250),
        Keyword('加'),
        Token(TK_INTEGER_LITERAL, 13),
        Keyword('减'),
        Token(TK_INTEGER_LITERAL, 2),
        Keyword('乘'),
        Token(TK_INTEGER_LITERAL, 5),
        Keyword('除以'),
        Token(TK_INTEGER_LITERAL, 9),
    ]
    self.assertEqual(list(Tokenize('250加13减二乘五除以九')), expected)
def testTokenizingFuncCall(self):
    """A call: the '整' keyword followed by the callee's identifier.

    NOTE(review): a second testTokenizingFuncCall exists in this file; if
    both are in one class, the later definition shadows this one.
    """
    self.assertEqual(
        list(Tokenize('整写九九表')),
        [Keyword('整'),
         Token(TK_IDENTIFIER, '写九九表')])
def testTokenizingConcat(self):
    """Concatenation with '、' between an identifier and a numeral.

    NOTE(review): a second testTokenizingConcat exists in this file; if
    both are in one class, the later definition shadows this one.
    """
    self.assertEqual(
        list(Tokenize('老刘、二')),
        [Token(TK_IDENTIFIER, '老刘'),
         Keyword('、'),
         Token(TK_INTEGER_LITERAL, 2)])
def testTokenize(self):
    """Exercises BasicTokenize, Tokenize, ParseInteger and ParseChars.

    NOTE(review): a second testTokenize exists in this file; if both are
    in one class, the later definition shadows this one.
    """
    def basic(code):
        return list(BasicTokenize(code))

    def full(code):
        return list(Tokenize(code))

    # Identifier quoting with 【】, with and without interior spaces.
    self.assertEqual(basic('【阶乘】'), [Token(TK_IDENTIFIER, '阶乘')])
    self.assertEqual(basic('【 阶 乘 】'), [Token(TK_IDENTIFIER, '阶乘')])
    self.assertEqual(
        basic('【阶乘】(那啥)'),
        [Token(TK_IDENTIFIER, '阶乘'),
         Keyword('('),
         Token(TK_CHAR, '那'),
         Token(TK_CHAR, '啥'),
         Keyword(')')])
    # String literals keep interior whitespace verbatim.
    self.assertEqual(
        basic('“ ”'),
        [Keyword('“'), Token(TK_STRING_LITERAL, ' '), Keyword('”')])
    self.assertEqual(
        basic('“”'),
        [Keyword('“'), Token(TK_STRING_LITERAL, ''), Keyword('”')])
    self.assertEqual(
        basic('“ A B ”'),
        [Keyword('“'), Token(TK_STRING_LITERAL, ' A B '), Keyword('”')])
    # Unquoted characters come out one TK_CHAR each; whitespace is skipped.
    self.assertEqual(
        basic('老张'), [Token(TK_CHAR, '老'), Token(TK_CHAR, '张')])
    self.assertEqual(
        basic(' 老 张 '), [Token(TK_CHAR, '老'), Token(TK_CHAR, '张')])
    # '#' comments are dropped by the full tokenizer.
    self.assertEqual(full('# 123456\n老张'), [Token(TK_IDENTIFIER, '老张')])
    self.assertEqual(full('老张'), [Token(TK_IDENTIFIER, '老张')])
    # Low-level parsers on a non-numeric string.
    self.assertEqual(ParseInteger('老张'), (None, '老张'))
    self.assertEqual(list(ParseChars('老张')), [Token(TK_IDENTIFIER, '老张')])
    self.assertEqual(
        full('老张是活雷锋'),
        [Token(TK_IDENTIFIER, '老张'), Keyword('是活雷锋')])
    # Whitespace (including a newline) inside a keyword is tolerated.
    self.assertEqual(
        full('老张是 活雷\n锋 。 '),
        [Token(TK_IDENTIFIER, '老张'),
         Keyword('是活雷锋'),
         Keyword('。')])
    self.assertEqual(
        full('老张是活雷锋。\n老王是活雷锋。\n'),
        [Token(TK_IDENTIFIER, '老张'),
         Keyword('是活雷锋'),
         Keyword('。'),
         Token(TK_IDENTIFIER, '老王'),
         Keyword('是活雷锋'),
         Keyword('。')])
    self.assertEqual(
        full('老张装250。\n老王装老张。\n'),
        [Token(TK_IDENTIFIER, '老张'),
         Keyword('装'),
         Token(TK_INTEGER_LITERAL, 250),
         Keyword('。'),
         Token(TK_IDENTIFIER, '老王'),
         Keyword('装'),
         Token(TK_IDENTIFIER, '老张'),
         Keyword('。')])
    self.assertEqual(
        full('唠唠:“你好”。'),
        [Keyword('唠唠'),
         Keyword(':'),
         Keyword('“'),
         Token(TK_STRING_LITERAL, '你好'),
         Keyword('”'),
         Keyword('。')])
def testTokenizingFuncCall(self):
    """A call, with the callee expected as an IdentifierToken."""
    self.assertEqual(
        list(Tokenize('整写九九表')),
        [Keyword('整'),
         IdentifierToken('写九九表')])
def testTokenizingConcat(self):
    """Concatenation, with the operand expected as an IdentifierToken."""
    self.assertEqual(
        list(Tokenize('老刘、二')),
        [IdentifierToken('老刘'),
         Keyword('、'),
         Token(TK_INTEGER_LITERAL, 2)])
def testTokenize(self):
    """Exercises BasicTokenize, Tokenize, ParseInteger and ParseChars
    (u''-literal variant)."""
    def basic(code):
        return list(BasicTokenize(code))

    def full(code):
        return list(Tokenize(code))

    # Identifier quoting with 【】, with and without interior spaces.
    self.assertEqual(basic(u'【阶乘】'), [Token(TK_IDENTIFIER, u'阶乘')])
    self.assertEqual(basic(u'【 阶 乘 】'), [Token(TK_IDENTIFIER, u'阶乘')])
    self.assertEqual(
        basic(u'【阶乘】(几)'),
        [Token(TK_IDENTIFIER, u'阶乘'),
         Keyword(u'('),
         Token(TK_CHAR, u'几'),
         Keyword(u')')])
    # String literals keep interior whitespace verbatim.
    self.assertEqual(
        basic(u'“ ”'),
        [Keyword(u'“'), Token(TK_STRING_LITERAL, u' '), Keyword(u'”')])
    self.assertEqual(
        basic(u'“”'),
        [Keyword(u'“'), Token(TK_STRING_LITERAL, u''), Keyword(u'”')])
    self.assertEqual(
        basic(u'“ A B ”'),
        [Keyword(u'“'), Token(TK_STRING_LITERAL, u' A B '), Keyword(u'”')])
    # Unquoted characters come out one TK_CHAR each; whitespace is skipped.
    self.assertEqual(
        basic(u'老张'), [Token(TK_CHAR, u'老'), Token(TK_CHAR, u'张')])
    self.assertEqual(
        basic(u' 老 张 '), [Token(TK_CHAR, u'老'), Token(TK_CHAR, u'张')])
    # '#' comments are dropped by the full tokenizer.
    self.assertEqual(full(u'# 123456\n老张'), [Token(TK_IDENTIFIER, u'老张')])
    self.assertEqual(full(u'老张'), [Token(TK_IDENTIFIER, u'老张')])
    # Low-level parsers on a non-numeric string.
    self.assertEqual(ParseInteger(u'老张'), (None, u'老张'))
    self.assertEqual(list(ParseChars(u'老张')), [Token(TK_IDENTIFIER, u'老张')])
    self.assertEqual(
        full(u'老张是活雷锋'),
        [Token(TK_IDENTIFIER, u'老张'), Keyword(u'是活雷锋')])
    # Whitespace (including a newline) inside a keyword is tolerated.
    self.assertEqual(
        full(u'老张是 活雷\n锋 。 '),
        [Token(TK_IDENTIFIER, u'老张'),
         Keyword(u'是活雷锋'),
         Keyword(u'。')])
    self.assertEqual(
        full(u'老张是活雷锋。\n老王是活雷锋。\n'),
        [Token(TK_IDENTIFIER, u'老张'),
         Keyword(u'是活雷锋'),
         Keyword(u'。'),
         Token(TK_IDENTIFIER, u'老王'),
         Keyword(u'是活雷锋'),
         Keyword(u'。')])
    self.assertEqual(
        full(u'老张装250。\n老王装老张。\n'),
        [Token(TK_IDENTIFIER, u'老张'),
         Keyword(u'装'),
         Token(TK_INTEGER_LITERAL, 250),
         Keyword(u'。'),
         Token(TK_IDENTIFIER, u'老王'),
         Keyword(u'装'),
         Token(TK_IDENTIFIER, u'老张'),
         Keyword(u'。')])
    self.assertEqual(
        full(u'唠唠:“你好”。'),
        [Keyword(u'唠唠'),
         Keyword(u':'),
         Keyword(u'“'),
         Token(TK_STRING_LITERAL, u'你好'),
         Keyword(u'”'),
         Keyword(u'。')])