Beispiel #1
0
    def compile(self):
        """Run the full compilation pipeline over ``self.input_code``.

        The source is tokenized, the token stream is parsed, the parse
        result is transformed, and code is generated from the
        transformed representation.

        Returns:
            The output produced by the code generator.
        """
        tokens = Tokenizer(self.input_code).run()
        parse_result = Parser(tokens).run()
        transformed = Transformer(parse_result).run()
        return CodeGenerator(transformed).run()
Beispiel #2
0
    def compile(self, string, *, debug=False):
        """Compile ``string`` and return the assembled output.

        Args:
            string: Source code to compile.
            debug: Keyword-only flag; when True, dump the token stream
                to stdout for inspection.

        Returns:
            The result of loading the generated assembly into the
            Assembler.
        """
        self.debug = debug

        # Read file contents and interpret it.
        # NOTE: the tokenizer is bound to its own name now; the original
        # reused `t` as the loop variable below, shadowing the instance.
        tokenizer = Tokenizer()
        tokenizer.load(string)

        self.tokens = tokenizer.tokenize()

        # Only dump tokens when debugging was requested; the original
        # printed unconditionally, which contradicted the debug flag.
        if self.debug:
            print("\nTokens:")
            for token in self.tokens:
                print("   {0}\t\t{1}".format(str(token.value), str(token.token)))

        (exprs, asm) = self._parse(self.tokens)

        # Assemble the parsed instructions into the final output.
        assembler = Assembler(mem_size=100, testing=self.testing)
        output = assembler.load(asm)

        return output
Beispiel #3
0
from compiler.tokenizer import Tokenizer
from compiler.tokens import TOKEN_TAGS
import sys
import os
import html

# Target from the command line: either a single file, or a directory
# whose .jack files are all processed.
ARG = sys.argv[1]
if os.path.isfile(ARG):
    FILES = [ARG]
else:
    # os.path.join builds the path portably instead of concatenating
    # with a hard-coded "/" separator.
    FILES = [
        os.path.join(os.path.abspath(ARG), f) for f in os.listdir(ARG)
        if f.endswith('.jack')
    ]

for f in FILES:
    # Derive the output path from the input path.  splitext (unlike
    # split('.')) is safe for paths that contain dots in directory
    # names — it only strips the final extension.
    output_filename = os.path.splitext(os.path.abspath(f))[0] + 'Z.xml'
    tokenizer = Tokenizer(f)

    # The with-statement guarantees the output file is closed even if
    # tokenizing raises part-way through (the original leaked the
    # handle on error).
    with open(output_filename, "w") as output_file:
        output_file.write("<tokens>\n")

        while tokenizer.has_more_tokens():
            token = tokenizer.pop_next_token()
            tag = TOKEN_TAGS[token.token_type]
            # html.escape keeps <, >, & inside token values from
            # corrupting the XML output.
            output_file.write(f"<{tag}> {html.escape(token.value)} </{tag}>\n")

        output_file.write("</tokens>\n")

    tokenizer.close_file()
Beispiel #4
0
import sys

from compiler.parser import Parser
from compiler.tokenizer import Tokenizer

# Require at least one argument: an optional --debug flag followed by
# one or more source filenames.
if len(sys.argv) < 2:
    sys.exit()

debug_enabled = False
files = sys.argv[1:]

if sys.argv[1] == '--debug':
    debug_enabled = True
    files = files[1:]

generated = ""
for filename in files:
    # Read the whole source file; the with-statement closes the handle
    # promptly instead of leaking it until garbage collection.
    with open(filename) as source:
        tokenizer = Tokenizer(source.read())
    tokens = tokenizer.tokenize()
    if debug_enabled:
        print('>>> parsed tokens:\n%s\n' % list(map(lambda x: x.value, tokens)))

    parser = Parser(tokens)
    tree = parser.parse()
    if debug_enabled:
        print('>>> parse tree:\n%s\n' % tree)

    # NOTE(review): Generator is not imported in this file's visible
    # import block — confirm it is supplied elsewhere
    # (e.g. compiler.generator).
    generator = Generator(tree)
    generated = generated + '\n' + generator.generate(tree)

if debug_enabled:
    print('>>> generated code:\n%s' % generated)
    exit()