Example #1
0
 def __init__(self, rfile, wfile, wVmFile):
     """Set up I/O streams, the tokenizer, symbol tables, and counters."""
     # I/O handles: source input, XML output, and the VM-code writer.
     self.rfile = rfile
     self.wfile = wfile                               # XML output stream
     self.vmWriter = VMWriter.VMwriter(wVmFile)       # VM output stream
     self.tokenizer = JackTokenizer.Tokenizer(self.rfile)
     # Class-scope and subroutine-scope symbol tables.
     self.class_symbol = SymbolTable.SymbolTable()
     self.sub_symbol = SymbolTable.SymbolTable()
     # Identifier bookkeeping: its type, its kind, and the enclosing
     # class name (ClassName is used to build qualified subroutine names).
     self.Stype = self.Skind = self.ClassName = ''
     # Counters: expressions seen in an expression list, plus unique
     # indices for while/if labels so generated labels never collide.
     self.expressionListNum = self.WHILEFLAG = self.IFFLAG = 0
Example #2
0
def tokenize_file(input, output=None):
    """Tokenize a Jack source file and write its token stream as XML.

    Walks every token produced by the project's JackTokenizer and emits one
    XML element per token under a <tokens> root.

    Args:
        input: path to the Jack source file.  (NOTE: the name shadows the
            ``input`` builtin; kept for backward compatibility with callers
            passing it by keyword.)
        output: optional explicit output path; when omitted the result is
            written to '<input>T.xml'.
    """
    root = ET.Element("tokens")

    tk = JackTokenizer.Tokenizer(input)
    while tk.hasMoreTokens():
        tk.advance()

        tk_type = tk.tokenType()
        if tk_type == JackTokenizer.TYPE_KEYWORD:
            ET.SubElement(root, 'keyword').text = pretty_token(tk.keyword())

        elif tk_type == JackTokenizer.TYPE_SYMBOL:
            ET.SubElement(root, 'symbol').text = pretty_token(tk.symbol())

        elif tk_type == JackTokenizer.TYPE_IDENTIFIER:
            # TODO: output symbol table metadata.
            ET.SubElement(root, 'identifier').text = pretty_token(tk.identifier())

        elif tk_type == JackTokenizer.TYPE_INT_CONST:
            ET.SubElement(root, 'integerConstant').text = pretty_token(tk.intVal())

        elif tk_type == JackTokenizer.TYPE_STRING_CONT:
            ET.SubElement(root, 'stringConstant').text = pretty_token(tk.stringVal())

    tree = ET.ElementTree(root)
    lib.indent(root)  # pretty-print in place before serialization

    if output:
        fn = output
    else:
        fn = '%sT.xml' % input

    tree.write(fn)
Example #3
0
 def __init__(self, rfile, wfile):
     """Store the input/output streams and build a tokenizer over the input."""
     # NOTE(review): this definition may continue past the visible excerpt.
     self.rfile = rfile  # Jack source input stream
     self.wfile = wfile  # output stream (presumably XML — confirm against callers)
     self.tokenizer = JackTokenizer.Tokenizer(self.rfile)