Example No. 1
    def compile(self):
        # Each stage's run() output feeds the next stage's constructor:
        # source -> tokens -> parse tree -> transformed tree -> target code.
        tokenizer = Tokenizer(self.input_code)
        parser = Parser(tokenizer.run())
        transformer = Transformer(parser.run())
        code_generator = CodeGenerator(transformer.run())

        return code_generator.run()
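A minimal harness around this method, assuming only that the constructor stores the source text in self.input_code (the class name and the input syntax are assumptions):

class Compiler:
    def __init__(self, input_code):
        # Source text consumed by compile() above
        self.input_code = input_code

    # ... the compile() method from the example goes here ...

output = Compiler('var i = 1 + 2').compile()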
Example No. 2
 def interpret_calculation(self, calculation, expected=0):
     # Interpret 'var i = <calculation>' and check the value bound to i.
     tokenizer = Tokenizer()
     tokens = tokenizer.tokenize('var i = ' + calculation)
     parser = Parser()
     ast = parser.parse(tokens)
     interpreter = Interpreter()
     interpreter.interpret(ast)
     self.assertEqual(interpreter.vars['i'], expected)
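Individual test cases would call this helper with an expression and its expected result; a hedged example (the exact expression syntax the toy language accepts is an assumption):

def test_multiplication(self):
    self.interpret_calculation('2 * 3', expected=6)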
Example No. 3
 def optimize(self, code, expected):
     # After optimization, the AST of `code` must equal the AST of `expected`.
     tokenizer1 = Tokenizer()
     tokenizer2 = Tokenizer()
     tokens1 = tokenizer1.tokenize('var i = ' + code)
     tokens2 = tokenizer2.tokenize('var i = ' + expected)
     parser1 = Parser()
     parser2 = Parser()
     ast1 = parser1.parse(tokens1)
     ast2 = parser2.parse(tokens2)
     optimizer = Optimizer()
     optimizer.optimize(ast1)
     self.assertEqual(ast1, ast2)
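The same pattern drives optimizer tests: optimizing the AST of the first argument should yield the AST of the second. A hedged constant-folding check, assuming the optimizer folds constant arithmetic (the expressions are illustrative only):

def test_constant_folding(self):
    self.optimize('1 + 2', '3')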
Example No. 4
    def compile(self, string, *, debug=False):
        self.debug = debug

        # Tokenize the source string
        t = Tokenizer()
        t.load(string)

        self.tokens = t.tokenize()

        # Dump the token stream only when debugging
        if self.debug:
            print("\nTokens:")
            for token in self.tokens:
                print("   {0}\t\t{1}".format(str(token.value), str(token.token)))

        (exprs, asm) = self._parse(self.tokens)

        a = Assembler(mem_size=100, testing=self.testing)
        output = a.load(asm)

        return output
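A hedged invocation of this method, assuming the enclosing object exposes the testing flag and the _parse helper referenced in the body (the wrapper class name is hypothetical):

c = PipelineCompiler()  # hypothetical class providing self.testing and self._parse
machine_code = c.compile('var i = 1', debug=True)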
Example No. 5
from compiler.tokenizer import Tokenizer
from compiler.tokens import TOKEN_TAGS
import sys
import os
import html

ARG = sys.argv[1]
if os.path.isfile(ARG):
    FILES = [ARG]
else:
    FILES = [
        os.path.join(os.path.abspath(ARG), f) for f in os.listdir(ARG)
        if f.endswith('.jack')
    ]

for f in FILES:
    # splitext handles paths that contain other dots; e.g. Main.jack -> MainZ.xml
    output_filename = os.path.splitext(os.path.abspath(f))[0] + 'Z.xml'
    output_file = open(output_filename, "w")
    tokenizer = Tokenizer(f)
    output_file.write("<tokens>\n")

    while tokenizer.has_more_tokens():
        token = tokenizer.pop_next_token()
        tag = TOKEN_TAGS[token.token_type]
        output_file.write(f"<{tag}> {html.escape(token.value)} </{tag}>\n")

    output_file.write("</tokens>\n")
    tokenizer.close_file()
    output_file.close()
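For reference, the loop writes one line per token between the <tokens> tags. The exact tag names come from TOKEN_TAGS, which is not shown here; with the standard Jack token categories, a statement like let x = 5; would come out roughly as:

<tokens>
<keyword> let </keyword>
<identifier> x </identifier>
<symbol> = </symbol>
<integerConstant> 5 </integerConstant>
<symbol> ; </symbol>
</tokens>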
Example No. 6
import sys

from compiler.parser import Parser
from compiler.tokenizer import Tokenizer
# Assumed location of Generator, which is used below
from compiler.generator import Generator

if len(sys.argv) < 2:
    sys.exit()

debug_enabled = False
files = sys.argv[1:]

if sys.argv[1] == '--debug':
    debug_enabled = True
    files = files[1:]

generated = ""
for filename in files:
    with open(filename) as source:
        tokenizer = Tokenizer(source.read())
    tokens = tokenizer.tokenize()
    if debug_enabled:
        print('>>> parsed tokens:\n%s\n' % [t.value for t in tokens])

    parser = Parser(tokens)
    tree = parser.parse()
    if debug_enabled:
        print('>>> parse tree:\n%s\n' % tree)

    generator = Generator(tree)
    generated = generated + '\n' + generator.generate(tree)

if debug_enabled:
    print('>>> generated code:\n%s' % generated)
    sys.exit()
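A hedged invocation (the script name is hypothetical, and --debug must be the first argument because only sys.argv[1] is checked): python compile.py --debug program.src. Without --debug, the generated code is accumulated but never written out in the snippet as shown.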
Example No. 7
#!/usr/bin/env python3

from compiler.tokenizer import Tokenizer
from compiler.compileengine import CompilationEngine
import sys
import os

ARG = sys.argv[1]
if os.path.isfile(ARG):
    FILES = [ARG]
else:
    FILES = [
        os.path.join(os.path.abspath(ARG), f) for f in os.listdir(ARG)
        if f.endswith('.jack')
    ]

for f in FILES:
    # splitext handles paths that contain other dots; e.g. Main.jack -> MainE.xml
    output_filename = os.path.splitext(os.path.abspath(f))[0] + 'E.xml'
    tokenizer = Tokenizer(f)
    compiler = CompilationEngine(output_filename, tokenizer)
    compiler.compile_class()
    tokenizer.close_file()
    compiler.close_file()
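As in Example No. 5, each input yields a sibling output file, so Main.jack would produce MainE.xml. The call signatures are taken from the usage above: CompilationEngine receives the output path and a Tokenizer, and compile_class() drives the entire parse.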
Example No. 8
import os

from compiler.tokenizer import Tokenizer
from compiler.parser import Parser
from compiler.interpreter import Interpreter
from compiler.code_generator import CodeGenerator
from compiler.optimizer import Optimizer
#from compiler.symbols import TOKENS

with open('../examples/vars', 'r') as f:
    code = f.read()

tokenizer = Tokenizer()
tokens = tokenizer.tokenize(code)

"""
for line in tokens:
    for token in line:
        sys.stdout.write(TOKENS[token[0]])
        if len(token) == 2:
            sys.stdout.write('(%s)' % token[1])
        sys.stdout.write(' ')
    print()
"""

parser = Parser()
ast = parser.parse(tokens)

"""
for a in ast: