def test_TokenizeIdentifiers(self):
    """TokenizeIdentifiers must accept identifiers and reject operators,
    keywords, and invalid tokens, regardless of input order."""
    operators = ["+", "-", "*", "=", "+=", "-=", ">", "<", "!=", ">=", "<="]
    keywords = ["make", "if", "else", "return", "class", "method"]
    invalids = ["2er4", ",sdf", "@sd"]
    identifiers = ["x", "y", "count", "total", "r3", "R2", "totalMoney", "i"]
    # Renamed from ``input`` so we do not shadow the builtin.
    words = operators + keywords + invalids + identifiers
    # Shuffle to make the test order-independent.
    random.shuffle(words)
    accepted = [word for word in words if Tokenize.TokenizeIdentifiers(word)]
    # assertItemsEqual was removed in Python 3; assertCountEqual is the
    # order-insensitive replacement with identical semantics.
    self.assertCountEqual(accepted, identifiers, "find the indentifiers")
def classifyToken(word):
    """Classify *word* and wrap a human-readable description in a Token.Token.

    Checks are tried in priority order (keyword, string, operator,
    end-of-statement, digit, identifier); the first match wins. Words that
    match nothing are wrapped unchanged.
    """
    checks = [
        (Tokenize.TokenizeKeywords,
         lambda w: w + ": is a keyword of type ~:~ " + keyword_dict[w]),
        (Tokenize.TokenizeStrings,
         lambda w: w + ": is a string"),
        (Tokenize.TokenizeOperators,
         lambda w: w + ": is an operator"),
        (Tokenize.TokenizeEndOfStatment,
         # End-of-statement tokens always render as the literal ";".
         lambda w: "; : is an end of statment"),
        (Tokenize.TokenizeDigits,
         lambda w: w + ": is a number"),
        (Tokenize.TokenizeIdentifiers,
         lambda w: w + ": is an identefier"),
    ]
    for predicate, describe in checks:
        if predicate(word):
            return Token.Token(describe(word))
    return Token.Token(word)