def run(self):
    """Compile every file in ``self.input_filenames``.

    For each input file: run JackTokenizer over it to produce a
    ``<stem>T.xml`` token file, then feed that token file through
    CompilationEngine to produce the final ``<stem>.xml`` output.
    """
    import os  # local import: the file-level import block is not visible here

    for input_filename in self.input_filenames:
        # BUG FIX: the old `input_filename.split('.')[0]` truncated at the
        # FIRST dot, so paths like './dir/file.jack' produced an empty stem.
        # os.path.splitext strips only the final extension.
        stem = os.path.splitext(input_filename)[0]
        self.tokenizer_output_filename = stem + 'T.xml'
        self.final_output_filename = stem + '.xml'
        with open(input_filename) as in_f:
            print('Compiling ' + input_filename)
            with open(self.tokenizer_output_filename, 'w') as out_f:
                jt = JackTokenizer.JackTokenizer(in_f, out_f)
                while jt.has_more_tokens():
                    # Dispatch on the token type; each accessor records the
                    # current token into the tokenizer's XML output.
                    token_type = jt.token_type()
                    if token_type == 'KEYWORD':
                        jt.keyword()
                    elif token_type == 'SYMBOL':
                        jt.symbol()
                    elif token_type == 'IDENTIFIER':
                        jt.identifier()
                    elif token_type == 'INT_CONST':
                        jt.int_val()
                    elif token_type == 'STRING_CONST':
                        jt.string_val()
                    jt.advance()
                jt.save_xml()

        # CompilationEngine appears to do all its work in the constructor;
        # the instance itself is not used afterwards.
        with open(self.final_output_filename, 'w') as out_f:
            ce = CompilationEngine.CompilationEngine(
                self.tokenizer_output_filename, out_f)
    print('Compiling finished.')
    def __processFile__(self, filePath):
        """Compile a single .jack file.

        Runs the file through JackTokenizer to build a raw token list
        (written out as a ``<stem>T.xml`` debug file), then feeds that
        list through CompilationEngine and writes the compiled XML to
        ``<stem>.xml``.
        """
        #TODO  make it work

        tokenizer = JackTokenizer(filePath)

        # 1. collect every token, wrapped in its XML tag, under a
        #    <tokens> root element
        tokens = ["<tokens>"]
        while True:
            token_line = tokenizer.advance()
            if not token_line:
                break
            tokens.append(self.__wrapTokenInXML__(token_line))
        tokens.append("</tokens>")

        # 2. run the token list through the compilation engine
        compEngine = CompilationEngine(tokens)
        compiledTokens = []
        compiledTokens += compEngine.compileTokens()

        # 3. derive the two output paths next to the source file
        raw_token_path = Path(filePath.parent / (filePath.stem + 'T.xml'))
        compiled_path = Path(filePath.parent / (filePath.stem + '.xml'))

        # 4. write out the raw tokens, then the compiled XML
        self.__output__(raw_token_path, tokens)
        self.__output__(compiled_path, compiledTokens)
Exemple #3
0
    def __init__(self, input_file, output_file):
        """Create a new compilation engine over *input_file*.

        Input tokens come from a JackTokenizer; the parsed structure is
        emitted into *output_file* by a series of compilexxx() routines,
        one per syntactic element xxx of the Jack grammar. Each
        compilexxx() routine reads the construct xxx from the input,
        advance()s the tokenizer exactly beyond xxx, and outputs its
        parse — so it may only be called when xxx is indeed the next
        syntactic element of the input.

        :param input_file: source handed to JackTokenizer
        :param output_file: path of the file opened for writing the parse
        """
        self.tokenizer = JackTokenizer.JackTokenizer(input_file)
        # Bookkeeping for open/close <> statements, plus current indent.
        self.indent = ""
        self.parsed_rules = []
        # NOTE: these deliberately replace the same-named helper *methods*
        # with the expression sets they return (a one-shot initialisation).
        self.binary_op = self.binary_op()
        self.unary_op = self.unary_op()
        self.keyword_constant = self.keyword_constant()
        # Opened here; presumably closed elsewhere when compilation ends.
        self.output_file = open(output_file, "w")
Exemple #4
0
 def __init__(self, inputString, outputFile):
     """Set up the compilation engine.

     Builds a JackTokenizer over *inputString*, the dispatch tables used
     while compiling, and the XML escape table; *outputFile* receives
     the generated XML.

     :param inputString: source handed to JackTokenizer
     :param outputFile: destination for the generated output
     """
     self.tk = JackTokenizer.JackTokenizer(inputString)
     self.outputString = ''
     self.outputFile = outputFile
     # class-level declaration keyword -> its compile routine
     self.compClassDict = {
         'field': self.compileClassVarDec,
         'static': self.compileClassVarDec,
         'constructor': self.compileSubroutineDec,
         'function': self.compileSubroutineDec,
         'method': self.compileSubroutineDec
     }
     # statement keyword -> its compile routine
     self.compStatementDict = {
         'let': self.compileLet,
         'if': self.compileIf,
         'while': self.compileWhile,
         'do': self.compileDo,
         'return': self.compileReturn
     }
     # token type -> tokenizer accessor for the token's value
     self.compTokenDict = {
         'symbol': self.tk.symbol,
         'keyword': self.tk.keyword,
         'identifier': self.tk.identifier,
         'integerConstant': self.tk.intVal,
         'stringConstant': self.tk.stringVal
     }
     # XML escapes for characters that are markup.
     # BUG FIX: the escape for '"' was '&quot' (missing the trailing
     # semicolon); the predefined XML entity is '&quot;'.
     self.XMLSymDict = {
         '<': '&lt;',
         '>': '&gt;',
         '&': '&amp;',
         '"': '&quot;'
     }
     self.multiTermExpression = False
     self.indent = ''
 def __init__(self, file_in, file_out):
     """Create a new compilation engine.

     :param file_in: input file name (string), handed to JackTokenizer
     :param file_out: already-open output stream (_io.TextIOWrapper)
     """
     self.tokenizer = JackTokenizer.JackTokenizer(file_in)
     self.file_out = file_out
     self.indent = 0
Exemple #6
0
 def __init__(self, filename):
     """
     Constructor: create a compilation engine for the given input file
     and open the corresponding output file for writing.
     :param filename: the input file name
     """
     import os  # local import: the file-level import block is not visible here

     self._tokenizer = JackTokenizer.JackTokenizer(filename)
     # BUG FIX: `filename.split('.')[0]` truncated at the FIRST dot, so a
     # path such as './some.dir/file.jack' lost its directory part.
     # os.path.splitext strips only the final extension.
     self._filename = os.path.splitext(filename)[0] + '.' + co.NEW_SUFFIX
     self._writer = open(self._filename, 'w')
Exemple #7
0
 def __init__(self, file):
     """Create a compilation engine and immediately compile *file*.

     The whole pipeline runs inside the constructor: build the
     tokenizer, open the output file, prime the first token, compile
     the top-level class, then close the output.
     """
     self.tokenizer = JackTokenizer.JackTokenizer(file)
     self.parsed_rules = []  # keeps track of open and close <> statements
     self.open_outfile(file)
     # Load the first token before parsing begins.
     self.tokenizer.advance()
     self.compile_class()
     self.close_outfile()
Exemple #8
0
 def __init__(self, input_file, output_file):
     """
     Create a new compilation engine for the given input and output.
     The next routine called must be compileClass().
     str, str -> void
     """
     self.j = jt.JackTokenizer(input_file)
     self.fout = open(output_file, 'wt')
 def __init__(self, inputPath, outputPath):
     """Set up the compiler front end: a tokenizer over *inputPath*,
     a VM writer targeting *outputPath*, and a fresh symbol table."""
     self._jackTokenizer = JackTokenizer.JackTokenizer(inputPath)
     # Prime the tokenizer with the first token, if there is one.
     if self._jackTokenizer.hasMoreTokens():
         self._jackTokenizer.advance()
     self._vmWriter = VMWriter.VMWriter(outputPath)
     self._symbolTable = SymbolTable.SymbolTable()
     self._currentClassName = ''
     # Counters start at zero (presumably used to number generated
     # while/if labels — confirm against the compile routines).
     self._whileCount = 0
     self._ifCount = 0
 def __init__(self, in_file, out_file):
     """Construct a compilation engine.

     :param in_file: the file being compiled
     :param out_file: path of the file where the output is saved
     """
     self._indent_count = 0
     self.tokenizer = JackTokenizer(in_file)
     self.out_file = open(out_file, 'w')
    def __init__(self, input_file, output_file):
        """Create the engine and compile *input_file* immediately.

        The constructor drives the whole process: it primes the
        tokenizer, compiles the top-level class into *output_file*,
        and closes the output.
        """
        self.tokenizer = JackTokenizer(input_file)
        self.xml_file = open(output_file, "w")
        self.space_depth = 0

        # starts the process: load the first token, compile, close output
        self.tokenizer.advance()
        self.compile_class()
        self.xml_file.close()