Example #1
0
def generateFile(foldername, filename):  # (Square/mine/, Main.jack)
    """Compile a single .jack source file into a .vm file.

    foldername -- output folder path ending in '/' (e.g. "Square/mine/")
    filename   -- bare .jack filename (e.g. "Main.jack")
    """
    # "Main.jack" -> "Main"; reused for both the .vm filename and the
    # class name (the original computed this split twice).
    class_name = filename.split(".")[0]

    # The .jack source lives one directory above the output folder:
    # "Square/mine/" -> "Square" + "/" + "Main.jack"
    jack_filename = foldername.rsplit("/", 2)[0] + "/" + filename
    tokens = JackTokenizer.tokenize(
        jack_filename)  # returns tokens generated from .jack file

    # Let VMWriter know what file to write to, e.g. "Square/mine/Main.vm"
    vm_filename = foldername + class_name + ".vm"
    VMWriter.initializeFile(
        vm_filename)  # open .vm file to begin writing to it

    # pass tokens from Tokenizer to CompilationEngine
    CompilationEngine.compileTokens(tokens, class_name)
Example #2
0
    def __processFile__(self, filePath):
        ''' processes a single file, first feeding the file to JackTokenizer to generate a list of tokens
            (output as T.xml files for debugging use) and that token list is fed through
            CompilationEngine to generate a final result list of XML tokens which is output into an .xml file. '''

        # Phase 1: Tokenize/Analyze
        tokenizer = JackTokenizer(filePath)
        print(filePath)

        xmlTokenList = ["<tokens>"]
        taggedTokenList = [("listStart", "tokens", xmlTokenList[0])]

        # Pull tokens until the tokenizer is exhausted (advance() returns a
        # falsy value at end of input).
        token = tokenizer.advance()
        while token:
            taggedToken = self.__wrapTokenInXML__(token)
            taggedTokenList.append(taggedToken)
            xmlTokenList.append(taggedToken[TT_XML])
            token = tokenizer.advance()

        xmlTokenList.append("</tokens>")
        # The closing tag just appended is the last element — index it
        # directly instead of computing len() - 1.
        taggedTokenList.append(("listEnd", "tokens", xmlTokenList[-1]))

        # Debug output: raw token stream as <name>T.xml
        Tfilename = str(filePath.parent) + '/' + filePath.stem + "T.xml"
        self.__output__(Tfilename, xmlTokenList)

        # Phase 2: Compile/Translate
        compiler = CompilationEngine(taggedTokenList)
        compiledXMLList = compiler.compileTokens()

        # Final output: full parse tree as <name>.xml
        Cfilename = str(filePath.parent) + '/' + filePath.stem + ".xml"
        self.__output__(Cfilename, compiledXMLList)
Example #3
0
    def __processFile__(self, filePath):
        ''' processes a single file, first feeding the file to JackTokenizer to generate a list of tokens
            (output as T.xml files for debugging use) and that token list is fed through
            CompilationEngine to generate a final result list of XML tokens which is output into an .xml file. '''

        # 1. Tokenize: wrap each token from the .jack file in XML tags,
        #    bracketed by <tokens>...</tokens>.
        tokens = ["<tokens>"]
        tokenizer = JackTokenizer(filePath)

        # advance() returns a falsy value once the input is exhausted.
        line = tokenizer.advance()
        while line:
            tokens.append(self.__wrapTokenInXML__(line))
            line = tokenizer.advance()

        tokens.append("</tokens>")

        # 2. Compile the token stream into the full parse-tree XML.
        #    list(...) replaces the original "start empty, then +=" dance
        #    and still copies whatever iterable compileTokens() returns.
        compEngine = CompilationEngine(tokens)
        compiledTokens = list(compEngine.compileTokens())

        # Build the output paths. Path.__truediv__ already yields a Path,
        # so the original's extra Path(...) wrapper was redundant.
        xml_T_FilePath = filePath.parent / (filePath.stem + 'T.xml')
        finalTokenPath = filePath.parent / (filePath.stem + '.xml')

        # Write out the raw tokens, then the compiled XML.
        self.__output__(xml_T_FilePath, tokens)
        self.__output__(finalTokenPath, compiledTokens)
Example #4
0
def generateXML(foldername, filename):  # (Square/mine/, Main.jack)
    """Tokenize and parse a .jack file, writing the parse tree as XML.

    foldername -- output folder path ending in '/' (e.g. "Square/mine/")
    filename   -- bare .jack filename (e.g. "Main.jack")
    """
    # The .jack source lives one directory above the output folder:
    # "Square/mine/" -> "Square" + "/" + "Main.jack"
    jack_filename = foldername.rsplit(
        "/", 2)[0] + "/" + filename
    tokens = JackTokenizer.tokenize(
        jack_filename)  # returns tokens generated from .jack file

    xml_filename = foldername + filename.split(
        ".")[0] + ".xml"  # Square/mine/Main.xml

    parsed_tokens = CompilationEngine.compileTokens(
        tokens)  # return parsed tokens

    # Write one parsed token per line. Using `with` guarantees the file is
    # closed — the original opened the handle and never closed it (leak).
    with open(xml_filename, "w") as xml_file:
        xml_file.writelines(token + "\n" for token in parsed_tokens)