Example #1
import Tokenize
import Token


def classifyToken(word):
    # NOTE: the opening branch of this chain is cut off in the original
    # snippet; only the branches shown there are reproduced here.
    if Tokenize.TokenizeEndOfStatment(word):
        return Token.Token("; : is an end of statement")
    elif Tokenize.TokenizeDigits(word):
        return Token.Token(word + ": is a number")
    elif Tokenize.TokenizeIdentifiers(word):
        return Token.Token(word + ": is an identifier")
    else:
        return Token.Token(word)


contents = ""
try:
    codeFile = open("myCode.txt", "r")
    contents = codeFile.read()
    codeFile.close()
except:
    print("there was an issue reading the file")

comments = Tokenize.TokenizeComments(contents)
for word in comments:
    print(word)
    print("is a comment and is now removed")

print()
print()

contents = Tokenize.cleanComments(contents)
words = Tokenize.TokenizeOutWords(contents)
tokens = [classifyToken(word) for word in words]
for token in tokens:
    print(token.description)
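
Example #1 depends on a Tokenize module and a Token class that are not shown on this page. The sketch below is only an assumption about what that interface could look like: the function names and the description attribute are taken from the calls above, while the regex-based bodies are invented here for illustration.

# Token.py -- minimal sketch: the example only ever reads token.description.
class Token:
    def __init__(self, description):
        self.description = description


# Tokenize.py -- hypothetical implementations of the functions the example calls.
import re

def TokenizeComments(text):
    # Collect every "//" line comment found in the source text.
    return re.findall(r'//[^\n]*', text)

def cleanComments(text):
    # Remove the "//" line comments from the source text.
    return re.sub(r'//[^\n]*', '', text)

def TokenizeOutWords(text):
    # Split the comment-free source into whitespace-separated words.
    return text.split()

def TokenizeEndOfStatment(word):
    # A lone semicolon marks the end of a statement.
    return word == ";"

def TokenizeDigits(word):
    # True when the word consists only of digits.
    return word.isdigit()

def TokenizeIdentifiers(word):
    # True when the word looks like an identifier: a letter or underscore
    # followed by letters, digits, or underscores.
    return re.fullmatch(r"[A-Za-z_]\w*", word) is not None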
Example #2
    def test_TokenizeComments(self):
        expectedOutput = "//TEST"
        source = expectedOutput + '\n the rest of the file'
        commentArray = Tokenize.TokenizeComments(source)
        output = commentArray[0]
        self.assertEqual(output, expectedOutput, "first element was not the comment")
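
The method in Example #2 is cut from the middle of a test class. A minimal harness it could sit in, using the standard unittest module, might look like the following; the class name TokenizeTests is invented here, and the Tokenize import refers to the same local module as in Example #1.

import unittest

import Tokenize  # the local module under test


class TokenizeTests(unittest.TestCase):
    def test_TokenizeComments(self):
        expectedOutput = "//TEST"
        source = expectedOutput + '\n the rest of the file'
        commentArray = Tokenize.TokenizeComments(source)
        output = commentArray[0]
        self.assertEqual(output, expectedOutput, "first element was not the comment")


if __name__ == "__main__":
    unittest.main()

Running python -m unittest on the file containing this class will discover and run the test automatically.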