def __init__(self, tokenizer, vm_writer):
    """Set up the compilation engine around a tokenizer and a VM writer.

    The JackTokenizer supplies the token stream to parse; the VMWriter
    (whose .vm output file is expected to exist already) receives the
    compiled VM code.
    """
    self.tokenizer = tokenizer
    self.vm_writer = vm_writer

    # Jack declarations live in two scopes, so two symbol tables are kept:
    # one for the class scope and one for the subroutine scope. A single
    # subroutine table suffices for the whole class — it is simply reset
    # each time a new subroutine is encountered.
    self.class_symbol_table = SymbolTable()
    self.subroutine_symbol_table = SymbolTable()

    # Counters used to mint distinct labels for each compiled if/while.
    self.if_counter = 0
    self.while_counter = 0

    # Names of the class and subroutine currently being parsed.
    self.current_class_name = None
    self.current_subroutine_name = None

    # One of "function", "method", "constructor" while parsing a subroutine.
    self.subroutine_type = None
def __init__(self):
    """Initialise the global function/class tables and register built-ins."""
    self.functionTable = SymbolTable()
    self.classTable = SymbolTable()
    self.currentFunctionId = ''
    # Class currently being processed, if any.
    self.currentClass: ClassSymbol = None
    # Points either at the global function table or at the method table of
    # the current class while one is being processed.
    self.currentFunctionTable = self.functionTable
    self.__defineBuiltInFunctions()
    self.__defineBuiltInClasses()
def __init__(self, functionDefinitionTable, classTable):
    """Semantic-analysis state for one compilation unit.

    functionDefinitionTable: SymbolTable of function definitions.
    classTable: SymbolTable of class definitions; checked immediately.
    """
    self.localSymbolTable = SymbolTable()
    # BUG FIX: the annotation was written as ``SymbolTable()`` (a call).
    # For a non-simple target like ``self.functionTable`` the annotation
    # expression IS evaluated at runtime (PEP 526), so a throwaway table
    # was constructed and discarded on every call. Annotate with the type.
    self.functionTable: SymbolTable = functionDefinitionTable
    self.preemptiveFunctionCallTable = SymbolTable()
    self.semanticsChecker = SemanticsChecker()
    self.codeGenerator = CodeGenerator()
    # Operand stack used while evaluating expressions.
    self.expressionStack = deque()
    self.currentFunctionId = ''
    self.currentFunction = None
    self.classTable = classTable
    # Class-level semantics are validated eagerly during construction.
    self.checkClassDefinitionsSemantics()
    self.currentClass = None
    self.currentFunctionReturn = False
    self.functionCallStack = list()
def main():
    """Entry point: lex, parse and evaluate a .su source file.

    The file name comes from argv[1]; without one, a bundled test file is
    used (the notice printed is user-facing and left as-is).
    """
    try:
        filename = sys.argv[1]
    except IndexError:
        print(
            "Este programa aceita um arquivo .su como entrada. Executando arquivo teste..."
        )
        filename = "test_file.su"

    with open(filename, 'r') as file:
        raw_code = file.read()

    # Pre-processing: strip ' comments to end of line, drop leading blank
    # lines, and guarantee a trailing newline.
    # BUG FIX: the patterns are now raw strings — "\s" in a plain string is
    # an invalid escape (DeprecationWarning) even though it happened to
    # produce the same pattern text.
    code = re.sub(r"^(\s*(\r\n|\n|\r))", '', re.sub(r"'.*\n", "\n", raw_code)) + "\n"

    # Lexer
    lexer = Lexer().createLexer()
    tokens = lexer.lex(code)

    # Global symbol table used during evaluation.
    st = SymbolTable(None)

    # Parser: build the grammar, then parse the token stream and evaluate.
    parser = Parser()
    parser.parse()
    parser.get_parser().parse(tokens).eval(st)
def __init__(self, problem_file, tokens, separators):
    """Scanner state: input file, token/separator lists, fresh PIF and ST."""
    self.__problem_file = problem_file
    self.__tokens = tokens
    self.__separators = separators
    # Program Internal Form and symbol table produced by the scan.
    self.__pif = HashTable()
    self.__st = SymbolTable()
def test_compile_single_multi_line_comments(self):
    # Both comment kinds should be passed through to the generated C file.
    # NOTE(review): the multi-line comment payload is written with explicit
    # escapes; its exact content is pinned by the expected output below.
    ops = [
        OpCode("single_line_comment", " single line", ""),
        OpCode("multi_line_comment", " \n Multi line\n ", ""),
    ]
    sym_tab = SymbolTable()
    compile(ops, "testing.c", sym_tab)

    with open("testing.c", "r") as out:
        produced = out.read().split("\n")
    os.remove("testing.c")

    self.assertEqual(
        produced,
        [
            "",
            "\t// single line ",
            "/* ",
            " Multi line",
            " */",
            "",
        ],
    )
def test_compile_while(self):
    # A while loop containing a single print statement.
    ops = [
        OpCode("var_assign", "i---0", "int"),
        OpCode("while", "i < 10", None),
        OpCode("scope_begin", "", ""),
        OpCode("print", '"%d", i', None),
        OpCode("scope_over", "", ""),
    ]
    sym_tab = SymbolTable()
    sym_tab.symbol_table = {
        1: ["i", "int", "variable"],
        2: ["0", "int", "constant"],
        3: ["10", "int", "constant"],
    }
    compile(ops, "testing.c", sym_tab)

    with open("testing.c", "r") as out:
        produced = out.read().split("\n")
    os.remove("testing.c")

    self.assertEqual(
        produced,
        [
            "#include <stdio.h>",
            "\tint i = 0;",
            "\twhile(i < 10) {",
            '\tprintf("%d", i);',
            "}",
            "",
        ],
    )
def test_lexical_analyze_divide_single_line_multi_line(self):
    # Exercises the divide, single_line_comment and multi_line_comment tokens.
    # Line numbers in the expected tokens pin the source layout below.
    src = "var a = 1 / 2\n// Hello World\n/*\nBye World\n*/\n"
    with open("testing.simc", "w") as f:
        f.write(src)

    sym_tab = SymbolTable()
    produced = lexical_analyze("testing.simc", sym_tab)

    self.assertEqual(produced[4], Token("divide", "", 1))
    self.assertEqual(produced[7], Token("single_line_comment", " Hello World", 2))
    self.assertEqual(produced[-4], Token("multi_line_comment", "\nBye World\n", 3))
def test_compile_func_call(self):
    # Declares a function, prints inside it, then calls it.
    # Also covers the scope_begin / scope_over opcodes.
    ops = [
        OpCode("func_decl", "hello---", ""),
        OpCode("scope_begin", "", ""),
        OpCode("print", '"World"', None),
        OpCode("scope_over", "", ""),
        OpCode("func_call", "hello---", ""),
    ]
    sym_tab = SymbolTable()
    sym_tab.symbol_table = {
        1: ["hello", "var", "function"],
        2: ['"World"', "string", "constant"],
    }
    compile(ops, "testing.c", sym_tab)

    with open("testing.c", "r") as out:
        produced = out.read().split("\n")
    os.remove("testing.c")

    self.assertEqual(
        produced,
        [
            "#include <stdio.h>",
            "",
            "void hello(void) {",
            '\tprintf("World");',
            "}",
            "\thello();",
            "",
        ],
    )
def __init__(self, modules, module):
    """Code-gen context: module set, an implicit 'main', fresh symbols."""
    self.modules = modules
    self.module = module
    # Compilation starts inside an implicit main of type () -> unit.
    entry = self.module.new_function('main', types.Function([], types.UNIT))
    self.functions = [entry]
    self.symbols = SymbolTable()
def __init__(self, filepath, vm_writer):
    """Open the XML dump, build the tokenizer/table, keep the VM writer."""
    # assumes filepath ends in ".jack" (5 chars) — TODO confirm at call site
    self.wf = open(filepath[:-5] + ".myImpl.xml", 'w')
    self.tokenizer = JackTokenizer(filepath)
    self.symbol_table = SymbolTable()
    self.vmw = vm_writer
    self.compiled_class_name = None
    # Counter used to generate unique VM labels.
    self.label_num = 0
def __init__(self) -> None:
    """Create the syntax table and seed it with the language built-ins."""
    self.syntax_table = SymbolTable()
    # (name, definition) pairs registered in the same order as before.
    builtins = (
        (LangBool.true_name(), LangBool.true_def()),
        (LangBool.false_name(), LangBool.false_def()),
        (LangVariantTypeDefinition.some_type_name(),
         LangVariantTypeDefinition.some_type_def()),
        (LangVariantTypeDefinition.none_type_name(),
         LangVariantTypeDefinition.none_type_def()),
        (LangTypeDefinition.option_type_name(),
         LangTypeDefinition.option_type_def()),
        (LangVariantTypeDefinition.list_type_name(),
         LangVariantTypeDefinition.list_type_def()),
        (LangVariantTypeDefinition.empty_list_type_name(),
         LangVariantTypeDefinition.empty_list_type_def()),
        (LangTypeDefinition.linked_list_type_name(),
         LangTypeDefinition.linked_list_type_def()),
    )
    for name, definition in builtins:
        self.syntax_table.set(name, definition)
def __init__(self, block: Block, prev=None):
    """Per-function compilation state for *block*.

    prev: optional enclosing function state; when given, the new symbol
    table chains to the parent scope.

    BUG FIX: the original assigned ``self.prev = None`` and immediately
    tested ``if self.prev is not None``, so the chained-symbol-table branch
    was unreachable dead code. ``prev`` is now a parameter; its default of
    None preserves the old observable behavior for existing callers.
    """
    self.proto = Protocol()
    self.prev = prev
    self.block = block
    self.pc = 0
    self.number_constant = 0
    self.number_active_var = 0
    self.number_up_value = 0
    # Symbol table stack: chain to the enclosing scope when one exists.
    if self.prev is not None:
        self.symbol_table = SymbolTable(self.prev.symbol_table)
    else:
        self.symbol_table = SymbolTable()
    self.constant_pool = ConstantPool()
def visit(self, context: Context) -> LangType:
    """Try to match this case arm against the previously bound value.

    Returns LangNoMatchType when the arm does not match; otherwise binds
    the arm's pattern variables and evaluates the arm body.
    """
    assert self.var is not None
    # First pattern entry: (type-name token, argument tokens).
    type_name = self.types[0][0].value
    arg_tokens = self.types[0][1]
    # Child scope in which the arm body expression is evaluated.
    new_ctx = Context(
        f"Case {type_name}",
        SymbolTable(),
        context,
        self.pos_start,
    )
    # "_" is the wildcard arm: matches anything, accepts no arguments.
    every = type_name == "_"
    if every:
        if arg_tokens:
            return self._fail_with("Unexpected arguments", context)
    else:
        # Tuples are matched structurally by a dedicated handler.
        if isinstance(self.var, LangTuple):
            return self._handle_tuple(context)
        # Type mismatch: signal "no match" so the next arm can be tried.
        if not self.var.is_of_type(type_name):
            return LangNoMatchType.instance()
    # Bind the arm's argument names to the matched value, then run the body.
    node = self._convert_nth_value_to_assignment_node(self.types[0])
    node.set_value(self.var)
    # NOTE(review): the binding is visited in the OUTER context while the
    # body runs in new_ctx — confirm this scoping is intentional.
    node.visit(context)
    return self.expr_node.visit(new_ctx)
def __init__(self, tokens, filepath):
    """Eager compilation engine: wires everything up and compiles at once."""
    self.symbols = SymbolTable()
    self.vm = VMWriter(filepath)
    self.lex = tokens
    # The whole class is compiled as part of construction, after which the
    # VM writer is closed.
    self.compile_class()
    self.vm.closeout()
def test_compile_for(self):
    # for loop: i from 1 to 10 (exclusive), step +1, printing i each pass.
    ops = [
        OpCode("for", "i&&&1&&&10&&&+&&&<&&&1", None),
        OpCode("scope_begin", "", ""),
        OpCode("print", '"%d", i', None),
        OpCode("scope_over", "", ""),
    ]
    sym_tab = SymbolTable()
    sym_tab.symbol_table = {
        1: ["i", "int", "variable"],
        2: ["1", "int", "constant"],
        3: ["10", "int", "constant"],
        4: ["1", "int", "constant"],
    }
    compile(ops, "testing.c", sym_tab)

    with open("testing.c", "r") as out:
        produced = out.read().split("\n")
    os.remove("testing.c")

    self.assertEqual(
        produced,
        [
            "#include <stdio.h>",
            "\tfor(int i = 1; i < 10; i+=1) {",
            '\tprintf("%d", i);',
            "}",
            "",
        ],
    )
def test_assn6():
    # Constant propagation should converge to c=1, b=2, a=3 and the last
    # assignment to x (x = 2).
    prog = '''
x = a + x
a = b + 1
b = c + 1
c = 1
x = 1
x = 2
'''
    g = gen.Generation(prog)
    symtab = g.symbol_pass(SymbolTable())
    count = symtab.num_symbols()
    # Iterate replace/resolve passes until the symbol count stabilises.
    while True:
        g.replace_idents(symtab)
        symtab = g.symbol_pass(symtab)
        if symtab.num_symbols() == count:
            break
        count = symtab.num_symbols()
    assert symtab.resolveName('c') == ast.Number(1)
    assert symtab.resolveName('b') == ast.Number(2)
    assert symtab.resolveName('a') == ast.Number(3)
    assert symtab.resolveName('x') == ast.Number(2)
    assert symtab.num_symbols() == 4
def __init__(self, jack_file):
    """Compilation engine for one .jack file: writer, tokenizer, tables."""
    self.symbol_table = SymbolTable()
    self.vm_writer = VMWriter(jack_file)
    self.tokenizer = JackTokenizer(jack_file)
    # Label indices start at -1 so the first pre-incremented use yields 0.
    self.if_index = -1
    self.while_index = -1
def __init__(self, symbol_dir, classes):
    """Load symbols from *symbol_dir* and record the built-in known types.

    symbol_dir: directory from which the SymbolTable loads symbols.
    classes: class metadata consulted during generation.
    """
    self.symbol_table = SymbolTable()
    self.symbol_table.LoadSymbolsFromDirectory(symbol_dir)
    self.classes = classes
    # Types that never need a generated type-identifier function.
    # BUG FIX: 'unsigned int' was listed twice; the duplicate is removed
    # (membership semantics are unchanged).
    self.known_symbols = [
        '',  # Typeless 'types', e.g. return value of ctor is parsed to an empty string.
        'void',
        'bool',
        'char',
        'signed char',
        'unsigned char',
        'short',
        'signed short',
        'unsigned short',
        'int',
        'signed int',
        'unsigned int',
        'long',
        'signed long',
        'unsigned long',
        'float',
        'double',
        'std::string',
        'emscripten::val',
    ]
    self.generated_function_selectors = []
    # Lists the types for which we need to generate type identifier functions for.
    self.require_type_function = []
def __init__(self, asm_filename):
    """Assembler driver state for one .asm input file."""
    self.asm_filename = asm_filename
    self.symbol_table = SymbolTable()
    self.parser = Parser(self.symbol_table)
    # NOTE: class name spelled "BinaryTraslator" elsewhere in the project.
    self.binary_translator = BinaryTraslator(self.parser)
    # RAM 0-15 is reserved; user variables are allocated from 16 upward.
    self.next_open_memory_address = 16
    self.labels_parsed = 0
    self.output_string = ''
def __init__(self, path):
    """Assembler setup: parser, code mapper, symbol table, output file.

    The output name is derived by slicing *path* between its first '/'
    and first '.' (assumes exactly that layout) and appending '2.hack'.
    """
    self.parser = Parser(path)
    self.code = Code()
    self.symb_table = SymbolTable()
    slash_at = path.find('/')
    dot_at = path.find('.')
    out_base = path[:slash_at] + "/" + path[slash_at + 1:dot_at]
    self.file = open(out_base + '2.hack', 'w')
def assemble(infile, outfile):
    """Assemble *infile*, writing the result to *outfile*.

    Still a stub: the translation loop is not implemented. The output file
    is now managed with a context manager so it is always closed, even if
    the (future) translation code raises.
    """
    p = Parser(infile)
    st = SymbolTable()
    with open(outfile, 'w') as out:
        # Insert code here
        pass
def __init__(self, jack_fname):
    """Per-file compiler state; the writer is attached later."""
    self._jack_fname = jack_fname
    self._s_table = SymbolTable()
    self._writer = None
    self._class_name = None
    # Set while compiling a void subroutine (affects return handling).
    self._is_writing_void_func = None
    self._current_func_name = None
    # Counter for generating unique labels.
    self._n_labels = 0
def __init__(self, source):
    """Lexer state over *source*, positioned at the first character.

    BUG FIX: an empty source used to raise IndexError when priming
    current_char; it is now None (the usual end-of-input sentinel) instead.
    """
    self.source = source
    self.line = 1
    self.col = 0
    self.position = 0
    self.current_token = None
    self.symbol_table = SymbolTable()
    # Guard against empty input instead of indexing past the end.
    self.current_char = self.source[self.position] if self.source else None
    self.current_mode = None
def setUp(self):
    # Fresh temporary-variable and label counters for every test.
    TemporaryVariables.reset()
    Label.reset()
    table = SymbolTable()
    # Symbols are added in the same order as before.
    for name, sym_type, width in (
        ("a_int", Types.INT, 1),
        ("b_int", Types.INT, 1),
        ("c_float", Types.FLOAT, 2),
        ("d_float", Types.FLOAT, 2),
    ):
        table.add_symbol(name, sym_type, width)
    self.transformer = CPLTransformer(table)
def __init__(self, file_address, compile_address, vm=False):
    """Open the source/output binary streams and prime the first line."""
    self.here = False
    self.file_object = open(file_address, 'rb')
    self.compiled = open(compile_address, 'wb')
    # Read one line ahead so current_line is valid from the start.
    self.current_line = self.advance()
    self.nest_level = 0
    self.vm = vm
    self.SYMBOL_TABLE = SymbolTable()
def __init__(self, f):
    """Eagerly parse *f*: tokenize, emit VM code, then close the writer."""
    self.in_name, self.out_name = f
    self.output = []
    self.tokeniser = Tokeniser(f)
    self.st_handler = SymbolTable()
    self.writer = VMCodeWriter(f)
    # Shared per-parse state for the recursive-descent routines.
    self.local_state = {'labeler': labeler()}
    self.parse()
    self.writer.close()
def __init__(self, filename):
    """Assembler setup; assumes *filename* ends in 'asm' — TODO confirm."""
    self.asm_filename = filename
    # Replace the trailing 'asm' with 'hack' for the output path.
    self.hack_filename = filename[:-3] + 'hack'
    self.outfile = open(self.hack_filename, 'w')
    self.parser = Parser(filename)
    self.code = Code()
    self.symbol_table = SymbolTable()
    # RAM 0-15 is reserved; user variables are allocated from 16 upward.
    self.next_available_address = 16
def main():
    """Read input.txt, parse it, and evaluate against a root symbol table."""
    source = read_file("input.txt")
    try:
        parser = Parser(source)
        root_scope = SymbolTable(None)
        tree = parser.parseProgram()
        tree.Evaluate(root_scope)
    except ValueError as err:
        # Parse/evaluation errors are reported, not propagated.
        print(err)
def __init__(self, file_name):
    """Scanner over *file_name*: opens the file and initialises tracking."""
    self.last_read_char = None
    self.current_line = 1
    self.current_column = 1
    # Pre-set so the attribute exists even if open() below raises.
    self.file = None
    self.file_name = file_name
    self.symbol_table = SymbolTable()
    self.was_error_raised = False
    # NOTE: the handle stays open for the scanner's lifetime.
    self.file = open(self.file_name, 'r')