Example #1
 def test_lexer(self):
     for path in glob.glob("test/grader/*/src.pas"):
         with open(path, 'r') as source:
             try:
                 text = source.read()
                 lexer = Lexer(text)
                 lexer.lex()
             except Exception as e:
                 self.fail("Failed to lex " + path + "\n" + str(e))
     self.assertTrue(True)
Example #2
def main():
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('read_path')
    arg_parser.add_argument('write_path')
    args = arg_parser.parse_args()

    with open(args.read_path, 'r') as source:
        text = source.read()

        lexer = Lexer(text)
        tokens = lexer.lex()

        parser = Parser(tokens)
        ast = parser.parse()

        symbolizer = Symbolizer(ast)
        symbolizer.symbolize()

        optimizer = Optimizer(ast)
        optimizer.optimize()

        grapher = Grapher(ast)
        grapher.graph()

        generator = Generator(ast)
        generator.generate(args.write_path)

        runner = Runner(ast)
        runner.run()
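
Example #2 runs the full toolchain in order: lex, parse, build symbol tables, optimize, render a graph, generate code, then execute the result. When only the compile path is wanted, a trimmed driver can reuse the same classes and call signatures shown above; the sketch below is an illustration under that assumption (the try/except reporting is not part of the original).

import sys

def compile_file(read_path, write_path):
    # Same stage order as Example #2, stopping after code generation.
    try:
        with open(read_path, 'r') as source:
            text = source.read()
        tokens = Lexer(text).lex()            # characters -> token stream
        ast = Parser(tokens).parse()          # tokens -> abstract syntax tree
        Symbolizer(ast).symbolize()           # annotate the tree with symbol tables
        Optimizer(ast).optimize()             # rewrite the tree in place
        Generator(ast).generate(write_path)   # emit the output program
    except Exception as e:
        sys.exit(f"compilation of {read_path} failed: {e}")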
Example #3
def test_test():
    source = """a jest równe 2.
                Zwiększ a o 5.
                Wypisz na ekranie a.
                b jest równe 4+a-(2-a).
                Zmniejsz b o b+1.
                c jest równe "jestem sobie zmienna c".
                Wypisz na ekranie b+b.
                Wypisz na ekranie "ELO :P".
                Wypisz na ekranie "Wpisałem w puste miejsca  _, _, _!", w puste miejsce wpisz "siema",1,(2-5).
                Wypisz na ekranie "Wpisałem w puste miejsce _, _!", w puste miejsce wpisz "elo", "siemano".
                Wypisz na ekranie "zmienna a = _, zmienna b = _, zmienna c = _!", w puste miejsce wpisz a,b,c.
                Wypisz na ekranie b jest wieksze od b.
                Wypisz na ekranie b jest mniejsze od b.
                Wypisz na ekranie b równa się b.
                Wypisz na ekranie b jest różne od b.
                Jeżeli b jest mniejsze od b to wypisz na ekranie "b<b". Tyle.
                Jeżeli a jest mniejsze od b to wypisz na ekranie "a<b". Tyle.
                Jeżeli b jest mniejsze od a to wypisz na ekranie "b<a". Tyle.
                """

    lexer = Lexer().get_lexer()
    tokens = lexer.lex(source)

    pg = Parser()
    pg.parse()
    parser = pg.get_parser()
    context = {}
    parser.parse(tokens).eval(context)
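
Examples #3, #7, #13, #15, and #16 share one driver pattern: Lexer().get_lexer() builds the lexer, lex(source) yields a token stream, pg.parse() registers the grammar productions, and get_parser() builds the parser whose result is evaluated against a context dict. This looks like the common rply wrapper idiom; the minimal, self-contained sketch below assumes that library, and its single NUMBER token and one-rule grammar are placeholders, not the project's actual definitions.

from rply import LexerGenerator, ParserGenerator

class Number:
    # AST leaf; eval(context) mirrors how the examples evaluate the parse result.
    def __init__(self, value):
        self.value = value

    def eval(self, context):
        return self.value

class Lexer:
    def __init__(self):
        self.lg = LexerGenerator()

    def get_lexer(self):
        self.lg.add('NUMBER', r'\d+')   # placeholder token set
        self.lg.ignore(r'\s+')
        return self.lg.build()

class Parser:
    def __init__(self):
        self.pg = ParserGenerator(['NUMBER'])

    def parse(self):
        # parse() only registers productions; get_parser() builds the parser.
        @self.pg.production('program : NUMBER')
        def program(p):
            return Number(int(p[0].getstr()))

    def get_parser(self):
        return self.pg.build()

With these stubs the driver lines above run end to end, e.g. Parser-built parse(lexer.lex("42")).eval({}) returns 42.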
Example #4
 def test_functions(self):
     lexer = Lexer(
         'fun f(x) {ret x}\nfun f2(x, y,z) {\nx - y\n}\nf() + 3 - g(x, y)(3)'
     )
     expected = \
         [MyToken(TokenType.FUN, 'fun', None, 1), MyToken(TokenType.IDENT, 'f', 'f', 1),
          MyToken(TokenType.LPAREN, '(', None, 1), MyToken(TokenType.IDENT, 'x', 'x', 1),
          MyToken(TokenType.RPAREN, ')', None, 1), MyToken(TokenType.LBRACE, '{', None, 1),
          MyToken(TokenType.RET, 'ret', None, 1), MyToken(TokenType.IDENT, 'x', 'x', 1),
          MyToken(TokenType.RBRACE, '}', None, 1), MyToken(TokenType.EOL, None, None, 1),
          MyToken(TokenType.FUN, 'fun', None, 2), MyToken(TokenType.IDENT, 'f2', 'f2', 2),
          MyToken(TokenType.LPAREN, '(', None, 2), MyToken(TokenType.IDENT, 'x', 'x', 2),
          MyToken(TokenType.COMMA, ',', None, 2), MyToken(TokenType.IDENT, 'y', 'y', 2),
          MyToken(TokenType.COMMA, ',', None, 2), MyToken(TokenType.IDENT, 'z', 'z', 2),
          MyToken(TokenType.RPAREN, ')', None, 2), MyToken(TokenType.LBRACE, '{', None, 2),
          MyToken(TokenType.EOL, None, None, 2),
          MyToken(TokenType.IDENT, 'x', 'x', 3), MyToken(TokenType.MINUS, '-', None, 3),
          MyToken(TokenType.IDENT, 'y', 'y', 3), MyToken(TokenType.EOL, None, None, 3),
          MyToken(TokenType.RBRACE, '}', None, 4), MyToken(TokenType.EOL, None, None, 4),
          MyToken(TokenType.IDENT, 'f', 'f', 5), MyToken(TokenType.LPAREN, '(', None, 5),
          MyToken(TokenType.RPAREN, ')', None, 5), MyToken(TokenType.PLUS, '+', None, 5),
          MyToken(TokenType.NUMBER, '3', 3, 5), MyToken(TokenType.MINUS, '-', None, 5),
          MyToken(TokenType.IDENT, 'g', 'g', 5), MyToken(TokenType.LPAREN, '(', None, 5),
          MyToken(TokenType.IDENT, 'x', 'x', 5), MyToken(TokenType.COMMA, ',', None, 5),
          MyToken(TokenType.IDENT, 'y', 'y', 5), MyToken(TokenType.RPAREN, ')', None, 5),
          MyToken(TokenType.LPAREN, '(', None, 5), MyToken(TokenType.NUMBER, '3', 3, 5),
          MyToken(TokenType.RPAREN, ')', None, 5)]
     self.assertEqual(expected, lexer.lex())
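
assertEqual over token lists only passes if MyToken defines value equality. Across these tests its constructor consistently takes a token type, the raw lexeme, the parsed literal (None where there is none, e.g. for operators and EOL), and a 1-based line number. A dataclass sketch with that shape, which gets __eq__ for free, might look like the following (an assumption, not the project's actual definition).

from dataclasses import dataclass
from enum import Enum, auto
from typing import Any, Optional

class TokenType(Enum):
    # abbreviated: the real enum covers every member used in these tests
    FUN = auto()
    IDENT = auto()
    NUMBER = auto()

@dataclass(frozen=True)
class MyToken:
    type: TokenType        # token category
    lexeme: Optional[str]  # raw source text (None for synthesized tokens like EOL)
    literal: Any           # parsed value, e.g. 12.3 for NUMBER '12.3'
    line: int              # 1-based source line, matching the tests' last argument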
Example #5
    def test_generator(self):
        for path in glob.glob("test/grader/*/src.pas"):
            case_dir = os.path.dirname(path)
            should_fail = not case_dir.endswith('16')
            with open(path, 'r') as source:
                print(f"testing {path}")
                text = source.read()
                lexer = Lexer(text)
                tokens = lexer.lex()
                parser = Parser(tokens)
                ast = parser.parse()
                symbolizer = Symbolizer(ast)
                symbolizer.symbolize()
                generator = Generator(ast, symbolizer)
                generator.generate()
                sol = os.path.join(case_dir, 'src.c')
                out = os.path.join(case_dir, 'out')
                if os.path.exists(sol):
                    os.remove(sol)
                if os.path.exists(out):
                    os.remove(out)
                generator.write(sol)
                try:
                    # compile the generated C; a non-zero exit code means gcc rejected it
                    p = sp.Popen(['gcc', sol, '-o', out], stdout=sp.PIPE)
                    ret_code = p.wait()
                    self.assertEqual(ret_code, 0)
                    p.stdout.close()
                except Exception:
                    self.assertFalse(should_fail)
                for i in range(1, 5):
                    in_path = os.path.join(case_dir, str(i) + '.in')
                    out_path = os.path.join(case_dir, str(i) + '.out')
                    with open(in_path, 'r') as f_in:
                        with open(out_path, 'r') as f_out:
                            in_text = f_in.read()
                            expected = f_out.read()
                            try:
                                # run the compiled binary on the fixture input
                                of = sp.Popen([out],
                                              stdin=sp.PIPE,
                                              stdout=sp.PIPE)
                                of.stdin.write(in_text.encode('utf-8'))
                                of.stdin.close()
                                rc = of.wait()
                                self.assertEqual(rc, 0)
                                s = of.stdout.read().decode('utf-8')
                                of.stdout.close()
                                if not should_fail:
                                    self.assertEqual(s, expected)
                            except Exception:
                                self.assertFalse(should_fail)

        self.assertTrue(True)


#Tests().test_grapher()
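
Example #5 shells out to gcc, then runs the produced binary against numbered .in/.out fixture pairs. The same check can be written more compactly with subprocess.run; the helper below is a sketch of that approach (the function name and paths are illustrative, not the repository's API).

import subprocess as sp

def check_case(c_file, binary, in_path, expected_path):
    # Compile the generated C file; check=True raises if gcc fails.
    sp.run(['gcc', c_file, '-o', binary], check=True)
    with open(in_path) as f_in, open(expected_path) as f_exp:
        # Run the binary on the fixture input and diff its stdout.
        result = sp.run([binary], input=f_in.read(),
                        capture_output=True, text=True, check=True)
        return result.stdout == f_exp.read()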
Example #6
 def test_keyword(self):
     lexer = Lexer('if x {}\nelse elses i ifs} while{ print printx _print')
     expected = \
     [MyToken(TokenType.IF, 'if', None, 1), MyToken(TokenType.IDENT, 'x', 'x', 1), MyToken(TokenType.LBRACE, '{', None, 1),
      MyToken(TokenType.RBRACE, '}', None, 1), MyToken(TokenType.EOL, None, None, 1), MyToken(TokenType.ELSE, 'else', None, 2),
      MyToken(TokenType.IDENT, 'elses', 'elses', 2), MyToken(TokenType.IDENT, 'i', 'i', 2), MyToken(TokenType.IDENT, 'ifs', 'ifs', 2),
      MyToken(TokenType.RBRACE, '}', None, 2), MyToken(TokenType.WHILE, 'while', None, 2), MyToken(TokenType.LBRACE, '{', None, 2),
      MyToken(TokenType.IDENT, 'print', 'print', 2), MyToken(TokenType.IDENT, 'printx', 'printx', 2), MyToken(TokenType.IDENT, '_print', '_print', 2)]
     self.assertEqual(expected, lexer.lex())
Example #7
def test_empty():
    source = ""

    lexer = Lexer().get_lexer()
    tokens = lexer.lex(source)

    pg = Parser()
    pg.parse()
    parser = pg.get_parser()
    context = {}
    parser.parse(tokens).eval(context)
Example #8
 def test_floats(self):
     lexer = Lexer('4 + 12.3 - 3. .1')
     expected = [
         MyToken(TokenType.NUMBER, '4', 4, 1),
         MyToken(TokenType.PLUS, '+', None, 1),
         MyToken(TokenType.NUMBER, '12.3', 12.3, 1),
         MyToken(TokenType.MINUS, '-', None, 1),
         MyToken(TokenType.NUMBER, '3.', 3., 1),
         MyToken(TokenType.NUMBER, '.1', .1, 1)
     ]
     self.assertEqual(expected, lexer.lex())
Example #9
 def test_strings(self):
     lexer = Lexer('\'try\' try \'if}\'#\'  asd\' \'\'')
     expected = [
         MyToken(TokenType.STRING, 'try', 'try', 1),
         MyToken(TokenType.IDENT, 'try', 'try', 1),
         MyToken(TokenType.STRING, 'if}', 'if}', 1),
         MyToken(TokenType.HASH, '#', None, 1),
         MyToken(TokenType.STRING, '  asd', '  asd', 1),
         MyToken(TokenType.STRING, '', '', 1)
     ]
     self.assertEqual(expected, lexer.lex())
Example #10
 def test_assignment(self):
     lexer = Lexer('x:= 7\nx = 7')
     expected = [
         MyToken(TokenType.IDENT, 'x', 'x', 1),
         MyToken(TokenType.ASSIGN, ':=', None, 1),
         MyToken(TokenType.NUMBER, '7', 7, 1),
         MyToken(TokenType.EOL, None, None, 1),
         MyToken(TokenType.IDENT, 'x', 'x', 2),
         MyToken(TokenType.EQUAL, '=', None, 2),
         MyToken(TokenType.NUMBER, '7', 7, 2)
     ]
     self.assertEqual(expected, lexer.lex())
Example #11
 def test_booleans(self):
     lexer = Lexer(
         'true false trues = true 7 < 12 <= > y>=x or xor and not_ < not true'
     )
     expected = \
     [MyToken(TokenType.TRUE, 'true', True, 1), MyToken(TokenType.FALSE, 'false', False, 1), MyToken(TokenType.IDENT, 'trues', 'trues', 1),
      MyToken(TokenType.EQUAL, '=', None, 1), MyToken(TokenType.TRUE, 'true', True, 1), MyToken(TokenType.NUMBER, '7', 7, 1),
      MyToken(TokenType.L, '<', None, 1), MyToken(TokenType.NUMBER, '12', 12, 1), MyToken(TokenType.LE, '<=', None, 1),
      MyToken(TokenType.G, '>', None, 1), MyToken(TokenType.IDENT, 'y', 'y', 1), MyToken(TokenType.GE, '>=', None, 1), MyToken(TokenType.IDENT, 'x', 'x', 1),
      MyToken(TokenType.OR, 'or', None, 1), MyToken(TokenType.IDENT, 'xor', 'xor', 1), MyToken(TokenType.AND, 'and', None, 1),
      MyToken(TokenType.IDENT, 'not_', 'not_', 1), MyToken(TokenType.L, '<', None, 1), MyToken(TokenType.NOT, 'not', None, 1),
      MyToken(TokenType.TRUE, 'true', True, 1)]
     self.assertEqual(expected, lexer.lex())
Example #12
    def test_symbolizer(self):
        for path in glob.glob("test/grader/*/src.pas"):
            with open(path, 'r') as source:
                print(f"testing {path}")
                text = source.read()
                lexer = Lexer(text)
                tokens = lexer.lex()
                parser = Parser(tokens)
                ast = parser.parse()
                symbolizer = Symbolizer(ast)
                symbolizer.symbolize()

        self.assertTrue(True)
Example #13
def test_if_else():
    source = """
                Jeżeli 2 jest wieksze od 3 to wypisz na ekranie "jestem w ifie".
                W przeciwnym razie wypisz na ekranie "jestem w  elsie".
                Tyle.
                Wypisz na ekranie "ifelse działa".
                """

    lexer = Lexer().get_lexer()
    tokens = lexer.lex(source)

    pg = Parser()
    pg.parse()
    parser = pg.get_parser()
    context = {}
    parser.parse(tokens).eval(context)
Example #14
 def test_identifiers(self):
     lexer = Lexer('x + 7 - test123( 18 x x18 /_try-')
     expected = [
         MyToken(TokenType.IDENT, 'x', 'x', 1),
         MyToken(TokenType.PLUS, '+', None, 1),
         MyToken(TokenType.NUMBER, '7', 7, 1),
         MyToken(TokenType.MINUS, '-', None, 1),
         MyToken(TokenType.IDENT, 'test123', 'test123', 1),
         MyToken(TokenType.LPAREN, '(', None, 1),
         MyToken(TokenType.NUMBER, '18', 18, 1),
         MyToken(TokenType.IDENT, 'x', 'x', 1),
         MyToken(TokenType.IDENT, 'x18', 'x18', 1),
         MyToken(TokenType.DIV, '/', None, 1),
         MyToken(TokenType.IDENT, '_try', '_try', 1),
         MyToken(TokenType.MINUS, '-', None, 1)
     ]
     self.assertEqual(expected, lexer.lex())
Example #15
def test_if():
    source = """
                b jest równe 5.
                Wypisz na ekranie b.
                Jeżeli b równa się b to wypisz na ekranie "jestem w body" 
                oraz wypisz na ekranie "dalej w body"
                oraz wypisz na ekranie "chyba bangla". 
                Tyle.
                Wypisz na ekranie "już poza body".
                """

    lexer = Lexer().get_lexer()
    tokens = lexer.lex(source)

    pg = Parser()
    pg.parse()
    parser = pg.get_parser()
    context = {}
    parser.parse(tokens).eval(context)
Example #16
def test_advanced_if():
    source = """
                Jeżeli 5 jest wieksze od 3 to 
                    jeżeli 5 jest wieksze od 4 to 
                    wypisz na ekranie "jestem w zagnieżdżonym ifie" 
                    oraz wypisz na ekranie "5 jest większe od 3 i 4". 
                    Tyle. 
                Tyle.
                Wypisz na ekranie "zagnieżdzony if działa".
                """

    lexer = Lexer().get_lexer()
    tokens = lexer.lex(source)

    pg = Parser()
    pg.parse()
    parser = pg.get_parser()
    context = {}
    parser.parse(tokens).eval(context)
Example #17
 def test_arithmetic(self):
     lexer = Lexer('5 *  341 - 4 / 81*(532 + -7)   51  \n  ^   423 12')
     expected = [
         MyToken(TokenType.NUMBER, '5', 5, 1),
         MyToken(TokenType.MUL, '*', None, 1),
         MyToken(TokenType.NUMBER, '341', 341, 1),
         MyToken(TokenType.MINUS, '-', None, 1),
         MyToken(TokenType.NUMBER, '4', 4, 1),
         MyToken(TokenType.DIV, '/', None, 1),
         MyToken(TokenType.NUMBER, '81', 81, 1),
         MyToken(TokenType.MUL, '*', None, 1),
         MyToken(TokenType.LPAREN, '(', None, 1),
         MyToken(TokenType.NUMBER, '532', 532, 1),
         MyToken(TokenType.PLUS, '+', None, 1),
         MyToken(TokenType.MINUS, '-', None, 1),
         MyToken(TokenType.NUMBER, '7', 7, 1),
         MyToken(TokenType.RPAREN, ')', None, 1),
         MyToken(TokenType.NUMBER, '51', 51, 1),
         MyToken(TokenType.EOL, None, None, 1),
         MyToken(TokenType.POW, '^', None, 2),
         MyToken(TokenType.NUMBER, '423', 423, 2),
         MyToken(TokenType.NUMBER, '12', 12, 2)
     ]
     self.assertEqual(expected, lexer.lex())
Example #18
from src.lexer import Lexer
from src.parser import Parser


class ParserState(object):
    def __init__(self, filename):
        self.filename = filename


with open("input.txt", "r") as f:
    text_input = f.read()

lexer = Lexer().get_lexer()
tokens = lexer.lex(text_input)

pg = Parser()
pg.parse()
parser = pg.get_parser()
context = {}
parser.parse(tokens).eval(context)

#print("\nContext: ",context)
Example #19
from src.lexer import Lexer
from src.parser import Parser
from src.executor import Executor

with open("input") as f:
    lexer = Lexer(f.read())
    tokens = lexer.lex()
# for t in tokens: print("value: {}, token: {}".format(t.text, t.type))

parser = Parser(tokens)
ast = parser.statement()
Executor().eval_statement(ast, {})
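
Example #19 drives a tree-walking interpreter: parser.statement() returns a single statement's AST and Executor().eval_statement(ast, {}) evaluates it against an initially empty environment dict. The repository's node types are not shown here, so the sketch below invents a tiny AST (Num, Var, and Assign are hypothetical) purely to illustrate that evaluation style.

from dataclasses import dataclass
from typing import Any

@dataclass
class Num:          # numeric literal
    value: float

@dataclass
class Var:          # variable reference
    name: str

@dataclass
class Assign:       # name := expression
    name: str
    value: Any

class Executor:
    def eval_statement(self, node, env):
        # Dispatch on node type; env plays the role of the {} passed above.
        if isinstance(node, Assign):
            env[node.name] = self.eval_statement(node.value, env)
        elif isinstance(node, Var):
            return env[node.name]
        elif isinstance(node, Num):
            return node.value
        else:
            raise TypeError(f"unknown node {type(node).__name__}")

For instance, Executor().eval_statement(Assign('x', Num(7)), env) leaves env == {'x': 7}.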
Example #20
from src.lexer import Lexer
from src.parser import Parser
import sys

lexer = Lexer().build()
parser = Parser().build()

with open(sys.argv[1], "r", encoding="utf8") as f:
    a = f.read()
    parser.parse(lexer.lex(a)).eval()