def optimize(self, code, expected):
    tokenizer1 = Tokenizer()
    tokenizer2 = Tokenizer()
    tokens1 = tokenizer1.tokenize('var i = ' + code)
    tokens2 = tokenizer2.tokenize('var i = ' + expected)
    parser1 = Parser()
    parser2 = Parser()
    ast1 = parser1.parse(tokens1)
    ast2 = parser2.parse(tokens2)
    optimizer = Optimizer()
    optimizer.optimize(ast1)
    self.assertEqual(ast1, ast2)
def interpret_calculation(self, calculation, expected=0):
    tokenizer = Tokenizer()
    tokens = tokenizer.tokenize('var i = ' + calculation)
    parser = Parser()
    ast = parser.parse(tokens)
    interpreter = Interpreter()
    interpreter.interpret(ast)
    self.assertEqual(interpreter.vars['i'], expected)
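# Usage sketch: the two helpers above are assumed to be methods of a
# unittest.TestCase subclass, with test methods like the ones below defined
# on the same class. The expressions, expected values, and the assumption
# that the optimizer folds constant expressions are illustrative only.
def test_constant_folding(self):
    # optimize() compares the optimized AST of the first argument against
    # the parsed AST of the second.
    self.optimize('1 + 2 * 3', '7')

def test_simple_addition(self):
    # interpret_calculation() checks the value the interpreter leaves in 'i'.
    self.interpret_calculation('3 + 4', 7)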
def compile(self, string, *, debug=False):
    self.debug = debug
    # Tokenize the source string, parse it, and assemble the result.
    t = Tokenizer()
    t.load(string)
    self.tokens = t.tokenize()
    if self.debug:
        print("\nTokens:")
        for token in self.tokens:
            print("  {0}\t\t{1}".format(str(token.value), str(token.token)))
    (exprs, asm) = self._parse(self.tokens)
    a = Assembler(mem_size=100, testing=self.testing)
    output = a.load(asm)
    return output
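# Hypothetical usage sketch: `Compiler` stands in for whatever class defines
# compile() above (it is assumed to set self.testing and to provide _parse());
# the source string is illustrative only.
compiler = Compiler()
machine_code = compiler.compile('var i = 1 + 2', debug=True)
print(machine_code)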
import sys

from compiler.tokenizer import Tokenizer
# Parser and Generator are assumed to live in the same compiler package;
# adjust the module paths to match the project layout.
from compiler.parser import Parser
from compiler.generator import Generator

if len(sys.argv) < 2:
    sys.exit()

debug_enabled = False
files = sys.argv[1:]
if sys.argv[1] == '--debug':
    debug_enabled = True
    files = files[1:]

generated = ""
for filename in files:
    tokenizer = Tokenizer(open(filename).read())
    tokens = tokenizer.tokenize()
    if debug_enabled:
        print('>>> parsed tokens:\n%s\n' % list(map(lambda x: x.value, tokens)))
    parser = Parser(tokens)
    tree = parser.parse()
    if debug_enabled:
        print('>>> parse tree:\n%s\n' % tree)
    generator = Generator(tree)
    generated = generated + '\n' + generator.generate(tree)

if debug_enabled:
    print('>>> generated code:\n%s' % generated)

exit()
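# Example invocation (assumption: the driver script above is saved as main.py;
# examples/vars is a source file that exists in the project):
#
#     python main.py --debug examples/vars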
import os

from compiler.tokenizer import Tokenizer
from compiler.parser import Parser
from compiler.interpreter import Interpreter
from compiler.code_generator import CodeGenerator
from compiler.optimizer import Optimizer
#from compiler.symbols import TOKENS

f = open('../examples/vars', 'r')
code = f.read()
f.close()

tokenizer = Tokenizer()
tokens = tokenizer.tokenize(code)

"""
for line in tokens:
    for token in line:
        sys.stdout.write(TOKENS[token[0]])
        if len(token) == 2:
            sys.stdout.write('(%s)' % token[1])
        sys.stdout.write(' ')
    print
"""

parser = Parser()
ast = parser.parse(tokens)

"""
for a in ast: