Пример #1
0
 def optimize(self, code, expected):
     """Optimize *code* and assert its AST equals the AST of *expected*."""
     def build_ast(snippet):
         # Fresh Tokenizer/Parser per snippet, exactly as before; the
         # source is wrapped in an assignment so it parses as a statement.
         return Parser().parse(Tokenizer().tokenize('var i = ' + snippet))

     actual = build_ast(code)
     wanted = build_ast(expected)
     Optimizer().optimize(actual)
     self.assertEqual(actual, wanted)
Пример #2
0
def main():
    """Compile example.gg to LLVM IR, save it, and run the result."""
    # Read the whole source program from disk.
    with open('example.gg', 'r') as source_file:
        source = source_file.read()

    token_stream = Lexer().get_lexer().lex(source)

    codegen = CodeGen()
    grammar = Parser(codegen)
    grammar.parse()  # registers the grammar rules on the generator
    # Parse, then emit IR for the resulting program node.
    grammar.get_parser().parse(token_stream, state=ParserState()).generate()

    codegen.create_ir()
    codegen.save_ir('output/output.ll')
    print(codegen.run(False))
Пример #3
0
 def interpret_calculation(self, calculation, expected=0):
     """Interpret *calculation* and assert variable 'i' equals *expected*."""
     # Wrap the expression in an assignment so the parser accepts it.
     source = 'var i = ' + calculation
     syntax_tree = Parser().parse(Tokenizer().tokenize(source))
     interp = Interpreter()
     interp.interpret(syntax_tree)
     self.assertEqual(interp.vars['i'], expected)
Пример #4
0
def compile_il():
	"""Compile test.il into bytecode and write it to test.ix."""
	# with-statement guarantees the source file is closed even if a later
	# stage raises (the original leaked the handle on any exception).
	with open("test.il", "r") as source:
		file_contents = source.read()

	tokens = lexer.lex(file_contents)

	parser = Parser(tokens)
	ast = parser.parse()
	funcs = parser.get_func_list()
	#print_ast(ast)

	cg = CodeGen(ast, funcs)
	# Renamed from 'bytes': the old name shadowed the builtin type.
	bytecode = cg.generate()

	with open("test.ix", "bw") as output:
		output.write(bytecode)
Пример #5
0
    def compile(self, source):
        """Parse *source* and generate linear code from its AST.

        Returns the code produced by LinearGenerator. Also prints a JSON
        dump of the parse tree as a debug aid (pre-existing behavior).
        """
        scanner = Scanner(source)

        parser = Parser(scanner)
        program = parser.parse()

        # Debug: dump the AST as JSON to stdout.
        tree_to_json = TreeToJson()
        obj = program.accept(tree_to_json)
        print(obj)

        logger.ACTIVE = True
        logger.DEBUG = False

        # Scope analysis is currently disabled. The deepcopy of `program`
        # that used to feed it was dead code (its result was never used)
        # and has been removed; restore a copy here if re-enabling.
        # scope_analyzer = ScopeAnalyzer()
        # program_copy.accept(scope_analyzer)

        linear_generator = LinearGenerator()
        code = program.accept(linear_generator)

        return code
Пример #6
0
debug_enabled = False
files = sys.argv[1:]

# Check the already-sliced list: indexing sys.argv[1] directly raised
# IndexError when the script was invoked with no arguments at all.
if files and files[0] == '--debug':
    debug_enabled = True
    files = files[1:]

generated = ""
for filename in files:
    # with-statement closes each input file promptly instead of relying
    # on garbage collection of the anonymous handle.
    with open(filename) as source_file:
        tokenizer = Tokenizer(source_file.read())
    tokens = tokenizer.tokenize()
    if debug_enabled:
        print('>>> parsed tokens:\n%s\n' % list(map(lambda x: x.value, tokens)))

    parser = Parser(tokens)
    tree = parser.parse()
    if debug_enabled:
        print('>>> parse tree:\n%s\n' % tree)

    generator = Generator(tree)
    generated = generated + '\n' + generator.generate(tree)

if debug_enabled:
    print('>>> generated code:\n%s' % generated)
    exit()

RUNTIME = """
function add(x, y) { return x + y; }
function subtract(x, y) { return x - y; }
function multiply(x, y) { return x * y; }
function divide(x, y) { return x / y; }
Пример #7
0
# Tokenize the input program. NOTE(review): `code` is not defined in this
# chunk — presumably read earlier in the file; confirm against full source.
tokenizer = Tokenizer()
tokens = tokenizer.tokenize(code)

# The triple-quoted strings below are commented-out debug/interpreter
# stages kept as no-op string-expression statements (note the Python 2
# style `print` statements inside them).
"""
for line in tokens:
    for token in line:
        sys.stdout.write(TOKENS[token[0]])
        if len(token) == 2:
            sys.stdout.write('(%s)' % token[1])
        sys.stdout.write(' ')
    print
"""

# Parse the token stream into an AST.
parser = Parser()
ast = parser.parse(tokens)

"""
for a in ast:
    print a
"""
"""
interpreter = Interpreter()
interpreter.interpret(ast)
"""

# Optimize the AST in place before code generation.
optimizer = Optimizer()
optimizer.optimize(ast)


# Code generation continues past this chunk.
code_generator = CodeGenerator()