Example #1
 def __init__(self, in_address):
     self.tokenizer = Tokenizer(in_address)
     self.curr_token = self.tokenizer.get_current_token()
     self.out_address = in_address.replace(".jack", ".xml")
     self.output = ""
     self.indent = 0
     self.compile_class()
Example #2
import os

def tokens_to_xml(path):
    """Write the tokens into an xml file with the token type as tags. The
    outpath is the dirpath of a new directory inside the module path, to
    avoid name clashes."""
    paths = retrive_files(path)
    out_dirpath = os.path.join(path, 'Xmlresult')
    os.makedirs(out_dirpath, exist_ok=True)  # the output directory may not exist yet
    for filepath in paths:  # renamed from `path`, which shadowed the argument
        outfile = os.path.basename(filepath).replace('.jack', 'T.xml')
        outpath = os.path.join(out_dirpath, outfile)
        tokenizer = Tokenizer(filepath)
        analyzer = TokenAnalyzer(outpath)
        while tokenizer.has_more_tokens():
            tokenizer.advance()
            t_type = tokenizer.token_type
            tag = token_tags[t_type]
            if t_type == T_KEYWORD:
                analyzer.write_info(tokenizer.keyword, tag)
            elif t_type == T_SYMBOL:
                analyzer.write_info(tokenizer.symbol, tag)
            elif t_type == T_ID:
                analyzer.write_info(tokenizer.identifier, tag)
            elif t_type == T_INTEGER:
                analyzer.write_info(tokenizer.intval, tag)
            elif t_type == T_STRING:
                analyzer.write_info(tokenizer.stringval, tag)
        analyzer.close()
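
All of the examples on this page lean on the nand2tetris Tokenizer interface. For reference, here is a minimal sketch of such a class, using the attribute names Example #2 reads (token_type, keyword, symbol, identifier, intval, stringval) together with the T_* constants and token_tags map it assumes. The exact names vary between implementations, so treat this as an illustration rather than the canonical module.

import re

# Token-type constants and the tag map read by Example #2 (names are illustrative).
T_KEYWORD, T_SYMBOL, T_ID, T_INTEGER, T_STRING = range(5)
token_tags = {T_KEYWORD: 'keyword', T_SYMBOL: 'symbol', T_ID: 'identifier',
              T_INTEGER: 'integerConstant', T_STRING: 'stringConstant'}

KEYWORDS = {'class', 'constructor', 'function', 'method', 'field', 'static',
            'var', 'int', 'char', 'boolean', 'void', 'true', 'false', 'null',
            'this', 'let', 'do', 'if', 'else', 'while', 'return'}

class Tokenizer:
    """Bare-bones Jack tokenizer: strip comments, then split the source into
    string/integer/identifier-or-keyword/symbol tokens."""
    def __init__(self, path):
        with open(path) as f:
            # Drop // line comments and /* ... */ block comments.
            src = re.sub(r'//[^\n]*|/\*.*?\*/', ' ', f.read(), flags=re.S)
        self.tokens = re.findall(
            r'"[^"\n]*"|\d+|[A-Za-z_]\w*|[{}()\[\].,;+\-*/&|<>=~]', src)
        self.pos = -1

    def has_more_tokens(self):
        return self.pos + 1 < len(self.tokens)

    def advance(self):
        self.pos += 1
        tok = self.tokens[self.pos]
        if tok.startswith('"'):
            self.token_type, self.stringval = T_STRING, tok.strip('"')
        elif tok.isdigit():
            self.token_type, self.intval = T_INTEGER, int(tok)
        elif tok in KEYWORDS:
            self.token_type, self.keyword = T_KEYWORD, tok
        elif tok[0].isalpha() or tok[0] == '_':
            self.token_type, self.identifier = T_ID, tok
        else:
            self.token_type, self.symbol = T_SYMBOL, tok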
Example #3
 def __init__(self, inpath, outpath):
     self.tokenizer = Tokenizer(inpath)
     self.symboltable = SymbolTable()
     self.vmwriter = VMWriter(outpath)
     self._class_name = None
     if self.tokenizer.has_more_tokens():
         self.compile_class()
     self.vmwriter.close()
     print("{0} completed.".format(outpath))
Example #4
 def __init__(self, filepath):
     self.jackfiles = []
     self.fileOrDir(filepath)
     for file in self.jackfiles:
         print(file)
         tokenizer = Tokenizer(file)
         outputname = file.split('.')[0] + ".0000.xml"  # derive the name from each file, not the input path
         print(outputname)
         print(tokenizer.file)
Example #5
 def __init__(self, in_address):
     self.tokenizer = Tokenizer(in_address)
     self.symbol_table = SymbolTable()
     self.vm_writer = VMWriter(in_address.replace(".jack", ".vm"))
     self.curr_token = self.tokenizer.get_current_token()
     self.out_address = in_address.replace(".jack", ".xml")
     self.output = ""
     self.indent = 0
     self.label_count = -1
     self.class_name = ""
     self.compile_class()
Example #6
 def __init__(self, inFile):
     self.t = Tokenizer(inFile)
     self.symTable = SymbolTable()
     self.vmName = inFile.replace('.jack', '.vm')  # rstrip('.jack') strips characters, not the suffix
     self.vm = VMWriter(self.vmName)
     self.className = ''
     self.types = ['int', 'char', 'boolean', 'void']
     self.stmnt = ['do', 'let', 'if', 'while', 'return']
     self.subroutType = ''
     self.whileIndex = 0
     self.ifIndex = 0
     self.fieldNum = 0
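
The SymbolTable that Examples #3, #5, and #6 instantiate is likewise specified by the course: a class-level scope for static/field variables and a subroutine-level scope for arguments and locals, with each kind indexed from 0. A minimal sketch with snake_case method names (an assumption; the originals may differ):

class SymbolTable:
    """Two-scope symbol table (nand2tetris chapter 11)."""
    def __init__(self):
        self.class_scope = {}   # name -> (type, kind, index) for static/field
        self.sub_scope = {}     # name -> (type, kind, index) for arg/var
        self.counts = {'static': 0, 'field': 0, 'arg': 0, 'var': 0}

    def start_subroutine(self):
        # Reset the subroutine scope when compilation enters a new subroutine.
        self.sub_scope.clear()
        self.counts['arg'] = self.counts['var'] = 0

    def define(self, name, type_, kind):
        scope = self.class_scope if kind in ('static', 'field') else self.sub_scope
        scope[name] = (type_, kind, self.counts[kind])
        self.counts[kind] += 1

    def var_count(self, kind):
        return self.counts[kind]

    def _lookup(self, name):
        return self.sub_scope.get(name) or self.class_scope.get(name)

    def kind_of(self, name):
        entry = self._lookup(name)
        return entry[1] if entry else None

    def type_of(self, name):
        return self._lookup(name)[0]

    def index_of(self, name):
        return self._lookup(name)[2]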
Example #7
from os import listdir

def run_jack_compiler(file_path):
    jack_files = []
    if file_path.endswith(".jack"):
        jack_files.append(file_path)
    else:
        file_list = listdir(file_path)
        for file in file_list:
            if file.endswith(".jack"):
                jack_files.append(file_path + '/' + file)

    for jack_file in jack_files:
        # print("   Now processing: ", jack_file)
        curr_tokenizer = Tokenizer(jack_file)
        curr_compilation_engine = CompilationEngine(curr_tokenizer, jack_file)
        curr_compilation_engine.compile_class()
Example #8
from os import listdir

def run_jack_tokenizer(file_path):
    jack_files = []
    if file_path.endswith(".jack"):
        jack_files.append(file_path)
    else:
        file_list = listdir(file_path)
        for file in file_list:
            if file.endswith(".jack"):
                jack_files.append(file_path + '/' + file)

    for jack_file in jack_files:
        print("   Now processing: ", jack_file)
        curr_tokenizer = Tokenizer(jack_file)
        print("writing into", jack_file[:-5]+"test.xml")
        xml_stream = open(jack_file[:-5]+"test.xml", "w+")
        xml_stream.write("<tokens>\n")
        single_file_tokenize(curr_tokenizer, xml_stream)
        xml_stream.write("</tokens>\n")
        xml_stream.close()
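
Example #8 relies on a single_file_tokenize helper whose body is not shown. A plausible reconstruction, assuming the Tokenizer attributes from Example #2 and escaping the four XML-special symbols the way the course's comparison files expect:

XML_ESCAPES = {'<': '&lt;', '>': '&gt;', '&': '&amp;', '"': '&quot;'}

def single_file_tokenize(tokenizer, xml_stream):
    # Hypothetical helper: emit one "<tag> value </tag>" line per token.
    while tokenizer.has_more_tokens():
        tokenizer.advance()
        t_type = tokenizer.token_type
        value = {T_KEYWORD: lambda: tokenizer.keyword,
                 T_SYMBOL: lambda: tokenizer.symbol,
                 T_ID: lambda: tokenizer.identifier,
                 T_INTEGER: lambda: str(tokenizer.intval),
                 T_STRING: lambda: tokenizer.stringval}[t_type]()
        value = XML_ESCAPES.get(value, value)
        xml_stream.write('<{0}> {1} </{0}>\n'.format(token_tags[t_type], value))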
Example #9
# Developed for project 10 of nand2tetris course
from JackParser import Parser
from JackTokenizer import Tokenizer

from pathlib import Path
import sys

if __name__ == '__main__':
    input_arg = sys.argv[1]

    input_path = Path(input_arg).resolve()

    files = input_path.glob('*.jack')
    for file in files:
        print(file)
        output_path = str(file.with_name(file.stem + '.xml'))

        tokenizer = Tokenizer(input_file=str(file)).tokenize()

        parser = Parser(tokenizer)
        parser.parse_to_file(output_path)
Example #10
            token.text = token.text.strip()
        value, type = token.text, token.tag
        return value, type

    def _format_string(self, s):
        if s[0] == '<':
            if s[1] == '/':
                return s
            else:
                return '\n' + s
        elif s[0] == '>':
            return s
        else:
            return ' {} '.format(s)

    def _dump_tree(self, filename='/Users/rsenseman/Desktop/dump.xml'):
        with open(filename, 'w') as f:
            f.write(tostring(self.tree.getroot(), encoding='utf8').decode('utf8'))
        return None

if __name__ == '__main__':
    input_arg = sys.argv[1]

    input_path = Path(input_arg).resolve()
    output_path = input_path.with_name(input_path.stem + '_parsed.xml')

    tokenizer = Tokenizer(input_file=input_path).tokenize()

    parser = Parser(tokenizer)
    parser.parse_to_file(output_path)
Example #11
 def __init__(self, inpath, outpath):
     self.tokenizer = Tokenizer(inpath)
     XMLWriter.set_filepath(outpath)
     if self.tokenizer.has_more_tokens():
         self.compile_class()
     XMLWriter.close()
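
Example #11 drives its XMLWriter through class-level calls (set_filepath, close) rather than an instance. A minimal sketch of an interface compatible with that usage; the write method is a guess, since compile_class's output calls are not shown:

class XMLWriter:
    """Sketch of a module-wide writer matching Example #11's usage."""
    _file = None

    @classmethod
    def set_filepath(cls, outpath):
        cls._file = open(outpath, 'w')

    @classmethod
    def write(cls, text):  # hypothetical: the actual write calls are not shown
        cls._file.write(text)

    @classmethod
    def close(cls):
        cls._file.close()
        cls._file = None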