def test_one(self):
        """Lex tests/sample.ps, verify the full token stream, then parse it.

        The parse must complete without raising SyntaxError.
        """
        lexer = Lexer('tests/sample.ps')
        lexer.lex()
        expected = [
            ('KEYWORD', 'program'), ('IDENTIFIER', 'hellowld'), ('SEMICOLON', ';'),
            ('KEYWORD', 'begin'), ('IDENTIFIER', 'writeln'), ('LP', '('),
            ('BASE10_NUM', '2'), ('RP', ')'), ('SEMICOLON', ';'),
            ('IDENTIFIER', 'readln'), ('SEMICOLON', ';'), ('KEYWORD', 'end'),
        ]
        self.assertEqual([(tok.name, tok.value) for tok in lexer.tokens], expected)

        parser = Parser(lexer.tokens)
        try:
            parser.parse()
        except SyntaxError:
            self.fail()
    def test_two(self):
        """Lex tests/sample2.ps, verify the full token stream, then parse it.

        The parse must complete without raising SyntaxError.
        """
        lexer = Lexer('tests/sample2.ps')
        lexer.lex()
        expected = [
            ('KEYWORD', 'program'), ('IDENTIFIER', 'exFunction'), ('SEMICOLON', ';'),
            ('KEYWORD', 'var'), ('IDENTIFIER', 'a'), ('COMMA', ','),
            ('IDENTIFIER', 'b'), ('COMMA', ','), ('IDENTIFIER', 'ret'),
            ('COLON', ':'), ('KEYWORD', 'integer'), ('SEMICOLON', ';'),
            ('KEYWORD', 'function'), ('IDENTIFIER', 'max'), ('LP', '('),
            ('IDENTIFIER', 'num1'), ('COMMA', ','), ('IDENTIFIER', 'num2'),
            ('COLON', ':'), ('KEYWORD', 'integer'), ('RP', ')'),
            ('COLON', ':'), ('KEYWORD', 'integer'), ('SEMICOLON', ';'),
            ('KEYWORD', 'var'), ('IDENTIFIER', 'result'), ('COLON', ':'),
            ('KEYWORD', 'integer'), ('SEMICOLON', ';'), ('KEYWORD', 'begin'),
            ('KEYWORD', 'if'), ('LP', '('), ('IDENTIFIER', 'num1'),
            ('GT', '>'), ('IDENTIFIER', 'num2'), ('RP', ')'),
            ('KEYWORD', 'then'), ('IDENTIFIER', 'result'), ('ATTRIB', ':='),
            ('IDENTIFIER', 'num1'), ('KEYWORD', 'else'), ('IDENTIFIER', 'result'),
            ('ATTRIB', ':='), ('IDENTIFIER', 'num2'), ('SEMICOLON', ';'),
            ('IDENTIFIER', 'max'), ('ATTRIB', ':='), ('IDENTIFIER', 'result'),
            ('SEMICOLON', ';'), ('KEYWORD', 'end'), ('SEMICOLON', ';'),
            ('KEYWORD', 'begin'), ('IDENTIFIER', 'a'), ('ATTRIB', ':='),
            ('BASE10_NUM', '100'), ('SEMICOLON', ';'), ('IDENTIFIER', 'b'),
            ('ATTRIB', ':='), ('BASE10_NUM', '200'), ('SEMICOLON', ';'),
            ('IDENTIFIER', 'ret'), ('ATTRIB', ':='), ('IDENTIFIER', 'max'),
            ('LP', '('), ('IDENTIFIER', 'a'), ('COMMA', ','),
            ('IDENTIFIER', 'b'), ('RP', ')'), ('SEMICOLON', ';'),
            ('IDENTIFIER', 'writeln'), ('LP', '('), ('IDENTIFIER', 'ret'),
            ('RP', ')'), ('SEMICOLON', ';'), ('KEYWORD', 'end'),
        ]
        self.assertEqual([(tok.name, tok.value) for tok in lexer.tokens], expected)

        parser = Parser(lexer.tokens)
        try:
            parser.parse()
        except SyntaxError:
            self.fail()
def main() -> None:
	"""Main interpreter routine.

	Reads an expression from the CLI, tokenises it, and prints either the
	evaluated result (top of the lexer's stack) or an invalid-expression
	message.
	"""
	token_stream = get(cli.init())
	lexer = Lexer()
	# Prime the lexer with the first token; the generator itself is not
	# passed into __init__.
	lexer.lex = next(token_stream)
	if lexer.B(token_stream):
		print("Result:", lexer.stack.pop())
	else:
		print("Invalid expression.")
	def test_results(self) -> None:
		"""For every key in cases, assert that the result of
		the expression `key` is equal to cases[key][0]
		and the Lexer instance returns cases[key][1].

		Notes
		-----
		If the entire expression is valid, but it does not end with a period,
		an ExpressionError is raised. Else, the interpreter will fail fast.
		"""
		# .items() replaces the equivalent zip(keys(), values()) idiom.
		for case, (expected_value, expected_valid) in self.cases.items():
			token = get(case)
			lexer = Lexer()
			# Prime the lexer with the first token (same convention as main()).
			lexer.lex = next(token)

			try:
				valid = lexer.B(token)
			except ExpressionError:
				# Bug fix: `valid` was previously left unbound on the first
				# iteration (NameError) or stale from the previous iteration.
				# An ExpressionError means the expression is not valid.
				valid = False

			if valid:
				self.assertEqual(lexer.stack.pop(), expected_value)
			self.assertEqual(valid, expected_valid)
# --- Example #5 ---
from lexer.lexer import Lexer
from parser.parser import Parser


# Inline source program (Portuguese-keyword toy language): declare two
# integers, read them, then print x for x..y step 1.
text_input = """
    inteiro:x;
    inteiro:y;

    leia(x);

    leia(y);
    para x ate y passo 1 imprima(x); fim_para
"""

lexer = Lexer().get_lexer()

# Build the parser once, then reuse it for every line.
pg = Parser()
pg.parse()
parser = pg.get_parser()

# Lex and evaluate each non-empty line. Perf/idiom fix: iterate the filter
# object directly instead of materializing a throwaway list().
for line in filter(None, text_input.split('\n')):
    tokens = lexer.lex(line)
    parser.parse(tokens).eval()
# --- Example #6 ---
from lexer.lexer import Lexer
from ula_parser.parser import Parser
from code_gen.codegen import CodeGen

fname = "input.ula"
# Read the whole source file up front; explicit encoding avoids depending on
# the platform's locale default.
with open(fname, encoding="utf-8") as f:
    text_input = f.read()

lexer = Lexer().get_lexer()
tokens = lexer.lex(text_input)

codegen = CodeGen()

# Hand the codegen objects to the parser so rule actions can emit IR.
module = codegen.module
builder = codegen.builder
printf = codegen.printf

pg = Parser(module, builder, printf)
pg.parse()
parser = pg.get_parser()

# Evaluating the parse tree emits the IR into the codegen's module.
parser.parse(tokens).eval()

codegen.create_ir()
codegen.save_ir("output.ll")