def test_TokenizeStrings(self):
    """TokenizeStrings must match only quoted string literals.

    Shuffles a mix of strings, operators, keywords, and invalid tokens,
    filters with Tokenize.TokenizeStrings, and checks that exactly the
    quoted strings survive (order-insensitive comparison).
    """
    strings = ["\"string with spaces\"", "\"stringWithNoSpaces\""]
    operators = ["+", "-", "*", "=", "+=", "-=", ">", "<", "!=", ">=", "<="]
    keywords = ["make", "if", "else", "return", "class", "method"]
    invalids = ["2er4", ",sdf", "@sd"]
    # Named 'words' rather than 'input' so the builtin input() is not shadowed.
    words = strings + operators + keywords + invalids
    # Shuffle so the result cannot depend on input ordering.
    random.shuffle(words)
    output = [word for word in words if Tokenize.TokenizeStrings(word)]
    # NOTE(review): assertItemsEqual exists only on Python 2's unittest;
    # on Python 3 this method was renamed assertCountEqual — confirm the
    # target interpreter before switching.
    self.assertItemsEqual(output, strings, "find strings")
def classifyToken(word):
    """Classify *word* and wrap the description in a Token.Token.

    Each recognizer from Tokenize is tried in priority order (keyword,
    string, operator, end-of-statement, digit, identifier); the first
    match determines the description. An unrecognized word is wrapped
    as-is.
    """
    # (recognizer, description builder) pairs, checked in priority order.
    checks = (
        (Tokenize.TokenizeKeywords,
         lambda w: w + ": is a keyword of type ~:~ " + keyword_dict[w]),
        (Tokenize.TokenizeStrings,
         lambda w: w + ": is a string"),
        (Tokenize.TokenizeOperators,
         lambda w: w + ": is an operator"),
        (Tokenize.TokenizeEndOfStatment,
         lambda w: "; : is an end of statment"),
        (Tokenize.TokenizeDigits,
         lambda w: w + ": is a number"),
        (Tokenize.TokenizeIdentifiers,
         lambda w: w + ": is an identefier"),
    )
    for recognize, describe in checks:
        if recognize(word):
            return Token.Token(describe(word))
    # No recognizer matched: pass the raw word through unchanged.
    return Token.Token(word)