Example #1
def test_TokenizeKeyWords(self):
    # Mix known keywords with non-keywords, shuffle them, and check that
    # TokenizeKeywords picks out exactly the keywords.
    wrong = ["some", "of", "these", "words", "are", "not", "keywords"]
    keywords = ["make", "if", "else", "return", "class", "method"]
    words = wrong + keywords
    random.shuffle(words)
    output = [word for word in words if Tokenize.TokenizeKeywords(word)]
    # assertItemsEqual is the Python 2 name; on Python 3 use assertCountEqual
    self.assertItemsEqual(output, keywords, "find the keywords")
Example #2
def classifyToken(word):
    # Classify a word by trying each Tokenize predicate in turn and
    # wrapping the resulting description in a Token.
    if Tokenize.TokenizeKeywords(word):
        # keyword_dict maps each keyword to its keyword type
        return Token.Token(word + ": is a keyword of type ~:~ " +
                           keyword_dict[word])
    elif Tokenize.TokenizeStrings(word):
        return Token.Token(word + ": is a string")
    elif Tokenize.TokenizeOperators(word):
        return Token.Token(word + ": is an operator")
    elif Tokenize.TokenizeEndOfStatment(word):
        return Token.Token("; : is an end of statement")
    elif Tokenize.TokenizeDigits(word):
        return Token.Token(word + ": is a number")
    elif Tokenize.TokenizeIdentifiers(word):
        return Token.Token(word + ": is an identifier")
    else:
        return Token.Token(word)
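A minimal usage sketch for the function above, assuming the Tokenize and Token modules and keyword_dict from the examples are importable; the word list here is purely illustrative and the input is assumed to be already split into words:

# Illustrative only: classify a hand-written list of words one at a time.
words = ["if", "count", "=", "42", ";"]
tokens = [classifyToken(word) for word in words]
for token in tokens:
    print(token)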