Example #1
0
    # NOTE(review): fragment — the `def classifyToken(word)` header and its
    # first `if` branch lie above this excerpt and are not visible here.
    # Each branch wraps a human-readable description in a Token object.
    elif (Tokenize.TokenizeEndOfStatment(word)):
        # Semicolon terminates a statement. ("statment" typo is in the
        # runtime string and is preserved as-is.)
        return Token.Token("; : is an end of statment")
    elif (Tokenize.TokenizeDigits(word)):
        # All-digit word classifies as a numeric literal.
        return Token.Token(word + ": is a number")
    elif (Tokenize.TokenizeIdentifiers(word)):
        # ("identefier" typo is in the runtime string; preserved.)
        return Token.Token(word + ": is an identefier")
    else:
        # Unrecognized word: pass it through untagged.
        return Token.Token(word)


# Driver script: read a source file, report and strip comments, then
# tokenize the remaining words and print each token's description.

contents = ""
try:
    # Context manager guarantees the file is closed even if read() raises;
    # the original opened/closed manually and leaked on failure.
    with open("myCode.txt", "r") as codeFile:
        contents = codeFile.read()
except OSError:
    # Narrowed from a bare `except:` — only file-system errors are expected
    # here; anything else should surface as a real traceback.
    print("there was an issue reading the file")

# Report each comment found before it is removed from the source.
comments = Tokenize.TokenizeComments(contents)
for word in comments:
    print(word)
    print("is a comment and is now removed")

# Blank separator lines. The original used bare `print` (a Python 2
# leftover), which in Python 3 evaluates the function object and prints
# nothing — it must be called.
print()
print()

# Strip comments, split into words, and classify each word into a token.
contents = Tokenize.cleanComments(contents)
words = Tokenize.TokenizeOutWords(contents)
tokens = [classifyToken(word) for word in words]
for token in tokens:
    print(token.description)
Example #2
0
 def test_cleanCommentsOut(self):
     """cleanComments removes // comments and leaves non-comment text intact."""
     notAComment = '\nNot a comment'
     # Renamed from `input`, which shadowed the builtin of the same name.
     source = "//here is a comment" + notAComment + "//asdfasdf"
     output = Tokenize.cleanComments(source)
     # No comment marker may survive, and only the non-comment text remains.
     self.assertTrue("//" not in output)
     self.assertEqual(output, notAComment)