def __init__(self, jack_file):
        """Set up the compilation pipeline for *jack_file*.

        Wires together the VM writer, the tokenizer and the symbol
        table, and resets the label counters used for flow control.
        """
        self.vm_writer = VMWriter(jack_file)
        self.tokenizer = JackTokenizer(jack_file)
        self.symbol_table = SymbolTable()

        # Label counters start at -1 (presumably incremented before
        # first use so labels begin at 0 — confirm against callers).
        self.if_index = -1
        self.while_index = -1
Exemple #2
0
 def __init__(self, tokens, filepath):
     """Build the compilation engine and run it immediately.

     Compiles the whole class from *tokens*, writes VM code to
     *filepath*, and closes the output when done.
     """
     self.lex = tokens
     self.symbols = SymbolTable()
     self.vm = VMWriter(filepath)
     # Eager compilation: constructing the engine does all the work.
     self.compile_class()
     self.vm.closeout()
Exemple #3
0
 def compile(self, out_fname: str) -> None:
     """Compile the stored .jack source into VM code at *out_fname*.

     Raises:
         CompilationException: if any token remains after the class
             body has been consumed.
     """
     tokenizer = Tokenizer(self._jack_fname)
     with VMWriter(out_fname) as writer:
         self._writer = writer
         leftover = self._compile_class(tokenizer, tokenizer.next_token())
         if leftover:
             raise CompilationException(
                 f"Expected end of file, found {leftover}")
Exemple #4
0
    def __init__(self, input_file, output_file):
        """Prepare the tokenizer, output stream and code-gen machinery.

        NOTE(review): ``output_file`` is opened here directly *and*
        handed to ``VMWriter`` — if VMWriter opens the same path this
        is a double open; confirm against VMWriter's implementation.
        """
        self.tokenizer = JackTokenizer(input_file)
        self.out = open(output_file, 'w')

        # Parser state, filled in while compiling.
        self.token = None
        self.class_name = None

        # Project 11 additions: symbol tracking and VM code emission.
        self.symbol_table = SymbolTable()
        self.vm_writer = VMWriter(output_file)
Exemple #5
0
    def __init__(self, token_stream, out_file, xml_name):
        """Create a compilation engine over *token_stream*.

        Output VM code goes to *out_file*; an XML parse tree rooted at
        'class' is also built. The caller must invoke compileClass()
        next.
        """
        self.stream = token_stream
        self.symbols = SymbolTable()
        self.writer = VMWriter(out_file)
        self.xml_name = xml_name
        # Root node of the XML parse tree mirroring the compiled class.
        self.root = ET.Element('class')

        # Prime the stream: the very first token must be 'class'.
        self.stream.advance()
        assert self.stream.keyword() == 'class'
Exemple #6
0
    def __init__(self, jack_tokenizer: JackTokenizer, output_path: str):
        """Wire the tokenizer to a VM writer and compile the class.

        Advances past the first token (when one exists) and then runs
        compile_class() immediately.
        """
        super().__init__()
        self.tokenizer = jack_tokenizer
        self.writer = VMWriter(output_path)
        self.table = SymbolTable()

        # Context filled in during compilation, plus label counters
        # for generating unique if/while labels.
        self.class_name = ''
        self.curr_func_name = ''
        self._if_count = 0
        self._while_count = 0

        if self.tokenizer.has_more_tokens():
            self.tokenizer.advance()

        self.compile_class()
def main(argv):
    """Main flow: validate args, then compile .jack source(s) to .vm files.

    ``argv[1]`` may name a single .jack file or a directory; every
    .jack source gets a sibling ``<name>.vm`` output file.
    """
    if not check_args(argv):
        return

    #  extracting jack file to be processed
    jack_files_path = argv[1]

    #  creating a .vm file to contain jack files translation to vm language
    if os.path.isdir(jack_files_path):
        for file in os.listdir(jack_files_path):
            if file.endswith(".jack"):
                # os.listdir already returns bare names, so no
                # basename() is needed; os.path.join is portable
                # where manual "/" concatenation was not.
                base = os.path.splitext(file)[0]
                vm_file_name = os.path.join(jack_files_path, base + ".vm")
                vm_writer = VMWriter(vm_file_name)
                CompilationEngine(os.path.join(jack_files_path, file),
                                  vm_writer)
    else:
        vm_file_name = "{0}.vm".format(os.path.splitext(jack_files_path)[0])
        vm_writer = VMWriter(vm_file_name)
        CompilationEngine(jack_files_path, vm_writer)
Exemple #8
0
    def __init__(self, tokenizer: JackTokenizer, out_path: Path):
        """Set up symbol tables, the output stream and the VM writer.

        NOTE(review): ``out_path`` is opened for writing here while the
        VM writer targets ``out_path.with_suffix(".vm")``; if out_path
        already ends in .vm both handles hit the same file — confirm.
        """
        self.tokenizer = tokenizer

        # Two-level symbol tables: class scope and subroutine scope.
        self.class_level_st = SymbolTable()
        self.subroutine_level_st = SymbolTable()

        # Current compilation context, filled in while compiling.
        self.class_name = None
        self.func_name = None
        self.sub_type = None

        # Output stream and VM writer.
        self.out_stream = out_path.open('w')
        self.vm_writer = VMWriter(out_path.with_suffix(".vm"))

        # Running counters used to generate unique if/while labels.
        self.label_count = {"if": 0, "while": 0}
Exemple #9
0
def main():
    """Read the .jack source named on the command line, clean it up and
    compile it, emitting ``<name>.xml`` (tokenizer output) and
    ``<name>.vm`` next to the input file.

    Raises:
        ValueError: if exactly one command-line argument is not given.
    """
    import os  # local import: file's top-level import block not in view

    # Input
    if len(sys.argv) != 2:
        raise ValueError('Invalid file name.')
    input_file_path = sys.argv[1]
    input_texts = get_file_text(input_file_path)

    # Output paths live beside the input. splitext/os.path.join handle
    # file names containing extra dots and non-'/' separators, which
    # the old split('.')[0] / '/'.join approach got wrong.
    directory, input_file_name = os.path.split(input_file_path)
    base_name = os.path.splitext(input_file_name)[0]
    output_tokenizer_file_name = '{}.xml'.format(base_name)
    output_vm_file_name = '{}.vm'.format(base_name)
    output_tokenizer_file_path = os.path.join(directory, output_tokenizer_file_name)
    output_vm_file_path = os.path.join(directory, output_vm_file_name)

    # Text processing: drop newlines, discard comment-only lines, strip
    # trailing '// ...' comments plus surrounding whitespace, and drop
    # lines that end up blank.
    update_input_texts = []
    for raw_line in input_texts:
        line = raw_line.replace('\n', '')
        if not remove_comments(line):
            continue
        line = re.sub(r'//\s.*', '', line).strip()
        if not line:
            continue
        # Block comments (/** */) survive the pass above, so the lines
        # are filtered through remove_comments once more.
        if remove_comments(line):
            update_input_texts.append(line)

    print('output_tokenizer_file_name: {}'.format(output_tokenizer_file_name))
    print('output_vm_file_name: {}'.format(output_vm_file_name))
    with VMWriter(output_vm_file_path) as vmw:
        with CompilationEngine(update_input_texts, output_tokenizer_file_path, vmw) as engine:
            engine.compile()
 def build_vm_writer(self, jack_file):
     """Attach a fresh VMWriter for *jack_file* to this engine."""
     writer = VMWriter(jack_file)
     self.vm_writer = writer
Exemple #11
0
 def __init__(self, source, destination):
     """Remember the source/destination paths and build the pipeline:
     a VM writer for output plus a lookahead token iterator over the
     source, backed by an empty symbol table.
     """
     self.src = source
     self.dst = destination
     self._symbol_table = SymbolTable()
     self.writer = VMWriter(destination)
     self.iter = Lookahead(tokenizor.newTokenizor(self.src))