Example #1
 def test_set_cardinality(self):
   lexer = Lexer('#{1}')
   self.assertEqual(lexer.get_next_token(), Token(Type.CARD, '#'))
   self.assertEqual(lexer.get_next_token(), Token(Type.OPENC, '{'))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 1))
   self.assertEqual(lexer.get_next_token(), Token(Type.CLOSEC, '}'))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example #2
 def test_functions(self):
     lexer = Lexer(
         'fun f(x) {ret x}\nfun f2(x, y,z) {\nx - y\n}\nf() + 3 - g(x, y)(3)'
     )
     expected = \
         [MyToken(TokenType.FUN, 'fun', None, 1), MyToken(TokenType.IDENT, 'f', 'f', 1),
          MyToken(TokenType.LPAREN, '(', None, 1), MyToken(TokenType.IDENT, 'x', 'x', 1),
          MyToken(TokenType.RPAREN, ')', None, 1), MyToken(TokenType.LBRACE, '{', None, 1),
          MyToken(TokenType.RET, 'ret', None, 1), MyToken(TokenType.IDENT, 'x', 'x', 1),
          MyToken(TokenType.RBRACE, '}', None, 1), MyToken(TokenType.EOL, None, None, 1),
          MyToken(TokenType.FUN, 'fun', None, 2), MyToken(TokenType.IDENT, 'f2', 'f2', 2),
          MyToken(TokenType.LPAREN, '(', None, 2), MyToken(TokenType.IDENT, 'x', 'x', 2),
          MyToken(TokenType.COMMA, ',', None, 2), MyToken(TokenType.IDENT, 'y', 'y', 2),
          MyToken(TokenType.COMMA, ',', None, 2), MyToken(TokenType.IDENT, 'z', 'z', 2),
          MyToken(TokenType.RPAREN, ')', None, 2), MyToken(TokenType.LBRACE, '{', None, 2),
          MyToken(TokenType.EOL, None, None, 2),
          MyToken(TokenType.IDENT, 'x', 'x', 3), MyToken(TokenType.MINUS, '-', None, 3),
          MyToken(TokenType.IDENT, 'y', 'y', 3), MyToken(TokenType.EOL, None, None, 3),
          MyToken(TokenType.RBRACE, '}', None, 4), MyToken(TokenType.EOL, None, None, 4),
          MyToken(TokenType.IDENT, 'f', 'f', 5), MyToken(TokenType.LPAREN, '(', None, 5),
          MyToken(TokenType.RPAREN, ')', None, 5), MyToken(TokenType.PLUS, '+', None, 5),
          MyToken(TokenType.NUMBER, '3', 3, 5), MyToken(TokenType.MINUS, '-', None, 5),
          MyToken(TokenType.IDENT, 'g', 'g', 5), MyToken(TokenType.LPAREN, '(', None, 5),
          MyToken(TokenType.IDENT, 'x', 'x', 5), MyToken(TokenType.COMMA, ',', None, 5),
          MyToken(TokenType.IDENT, 'y', 'y', 5), MyToken(TokenType.RPAREN, ')', None, 5),
          MyToken(TokenType.LPAREN, '(', None, 5), MyToken(TokenType.NUMBER, '3', 3, 5),
          MyToken(TokenType.RPAREN, ')', None, 5)]
     self.assertEqual(expected, lexer.lex())
Example #3
 def test_vector_transposition(self):
   lexer = Lexer('(1)**T')
   self.assertEqual(lexer.get_next_token(), Token(Type.OPENP, '('))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 1))
   self.assertEqual(lexer.get_next_token(), Token(Type.CLOSEP, ')'))
   self.assertEqual(lexer.get_next_token(), Token(Type.TPOSE, '**T'))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example #4
 def test_real_must_have_fraction_again(self):
   lexer = Lexer('1.0 + 1.')
   self.assertEqual(lexer.get_next_token(), Token(Type.REAL, 1.0))
   self.assertEqual(lexer.get_next_token(), Token(Type.UNION, '+'))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 1))
   self.assertEqual(lexer.get_next_token(), Token(Type.PERIOD, '.'))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example #5
 def test_real_no_leading_zeros(self):
   lexer = Lexer('0.00 + 00.0')
   self.assertEqual(lexer.get_next_token(), Token(Type.REAL, 0.00))
   self.assertEqual(lexer.get_next_token(), Token(Type.UNION, '+'))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 0))
   self.assertEqual(lexer.get_next_token(), Token(Type.REAL, 0.0))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example #6
 def test_integer_no_leading_zeros(self):
   lexer = Lexer('0 + 00')
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 0))
   self.assertEqual(lexer.get_next_token(), Token(Type.UNION, '+'))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 0))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 0))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example #7
def main():
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('read_path')
    arg_parser.add_argument('write_path')
    args = arg_parser.parse_args()

    with open(args.read_path, 'r') as source:
        text = source.read()

        lexer = Lexer(text)
        tokens = lexer.lex()

        parser = Parser(tokens)
        ast = parser.parse()

        symbolizer = Symbolizer(ast)
        symbolizer.symbolize()

        optimizer = Optimizer(ast)
        optimizer.optimize()

        grapher = Grapher(ast)
        grapher.graph()

        generator = Generator(ast)
        generator.generate(args.write_path)

        runner = Runner(ast)
        runner.run()
Example #8
File: test_file.py Project: supsub/LOUDXD
def test_test():
    source = """a jest równe 2.
                Zwiększ a o 5.
                Wypisz na ekranie a.
                b jest równe 4+a-(2-a).
                Zmniejsz b o b+1.
                c jest równe "jestem sobie zmienna c".
                Wypisz na ekranie b+b.
                Wypisz na ekranie "ELO :P".
                Wypisz na ekranie "Wpisałem w puste miejsca  _, _, _!", w puste miejsce wpisz "siema",1,(2-5).
                Wypisz na ekranie "Wpisałem w puste miejsce _, _!", w puste miejsce wpisz "elo", "siemano".
                Wypisz na ekranie "zmienna a = _, zmienna b = _, zmienna c = _!", w puste miejsce wpisz a,b,c.
                Wypisz na ekranie b jest wieksze od b.
                Wypisz na ekranie b jest mniejsze od b.
                Wypisz na ekranie b równa się b.
                Wypisz na ekranie b jest różne od b.
                Jeżeli b jest mniejsze od b to wypisz na ekranie "b<b". Tyle.
                Jeżeli a jest mniejsze od b to wypisz na ekranie "a<b". Tyle.
                Jeżeli b jest mniejsze od a to wypisz na ekranie "b<a". Tyle.
                """

    lexer = Lexer().get_lexer()
    tokens = lexer.lex(source)

    pg = Parser()
    pg.parse()
    parser = pg.get_parser()
    context = {}
    parser.parse(tokens).eval(context)
Example #9
    def test_generator(self):
        for path in glob.glob("test/grader/*/src.pas"):
            dir = os.path.dirname(path)
            should_fail = not dir.endswith('16')
            with open(path, 'r') as source:
                print(f"testing {path}")
                text = source.read()
                lexer = Lexer(text)
                tokens = lexer.lex()
                parser = Parser(tokens)
                ast = parser.parse()
                symbolizer = Symbolizer(ast)
                symbolizer.symbolize()
                generator = Generator(ast, symbolizer)
                generator.generate()
                sol = os.path.join(dir, 'src.c')
                out = os.path.join(dir, 'out')
                if os.path.exists(sol):
                    os.remove(sol)
                if os.path.exists(out):
                    os.remove(out)
                generator.write(sol)
                p = None
                try:
                    p = sp.Popen(['gcc', sol, '-o', out], stdout=sp.PIPE)
                    retCode = p.wait()
                    self.assertTrue(retCode == 0)
                    p.stdout.close()
                    #s = str(p.stdout.read())
                    #self.assertTrue(s == '')
                except Exception:
                    self.assertFalse(should_fail)
                for i in range(1, 5):
                    inFile = os.path.join(dir, str(i) + '.in')
                    outFile = os.path.join(dir, str(i) + '.out')
                    with open(inFile, 'r') as inText:
                        with open(outFile, 'r') as outText:
                            inText = inText.read()
                            outText = outText.read()
                            try:
                                of = sp.Popen([out],
                                              stdin=sp.PIPE,
                                              stdout=sp.PIPE)
                                of.stdin.write(inText.encode('utf-8'))
                                of.stdin.close()
                                rc = of.wait()
                                self.assertTrue(rc == 0)
                                b = of.stdout.read()
                                s = b.decode('utf-8')
                                of.stdout.close()
                                if (not should_fail):
                                    self.assertEqual(s, str(outText))
                            except Exception:
                                self.assertFalse(should_fail)

        self.assertTrue(True)


#Tests().test_grapher()
Example #10
    def __init__(self, path):
        # Read the whole source file, closing the handle when done.
        with open(path, 'r') as source:
            archive = source.read()

        lex = Lexer(archive)
        tokens = lex.tokenize()
        parser = Parser(tokens)

        parser.parse()
Example #11
 def test_number(self):
     l = Lexer()
     caso1 = False
     for token in l.tokenize("10"):
         if token.type == "NUMBER":
             caso1 = True
     self.assertTrue(caso1)
Example #12
 def test_vector_norm(self):
   lexer = Lexer('|| (1) ||')
   self.assertEqual(lexer.get_next_token(), Token(Type.NORM, '||'))
   self.assertEqual(lexer.get_next_token(), Token(Type.OPENP, '('))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 1))
   self.assertEqual(lexer.get_next_token(), Token(Type.CLOSEP, ')'))
   self.assertEqual(lexer.get_next_token(), Token(Type.NORM, '||'))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example #13
 def test_reserved_values(self):
   lexer = Lexer('e false null pi true')
   self.assertEqual(lexer.get_next_token(), Token(Type.REAL, math.e))
   self.assertEqual(lexer.get_next_token(), Token(Type.BOOL, False))
   self.assertEqual(lexer.get_next_token(), Token(Type.NULL, None))
   self.assertEqual(lexer.get_next_token(), Token(Type.REAL, math.pi))
   self.assertEqual(lexer.get_next_token(), Token(Type.BOOL, True))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example #14
File: test.py Project: jekozyra/lexer
    def test_should_properly_tokenize_a_full_method_definition(self):
        lexer = Lexer('func add(a: Int, b: Int): Int = {\n' +
            '   a + b\n' +
            '}')

        tokens = lexer.tokenize()

        self.assertEqual(21, len(tokens))

        self.assertEqual(tokens[0].type, TokenType.FUNC)

        self.assertEqual(tokens[1].type, TokenType.IDENTIFIER)
        self.assertEqual(tokens[1].value, 'add')

        self.assertEqual(tokens[2].type, TokenType.LEFT_PAREN)

        self.assertEqual(tokens[3].type, TokenType.IDENTIFIER)
        self.assertEqual(tokens[3].value, 'a')

        self.assertEqual(tokens[4].type, TokenType.COLON)

        self.assertEqual(tokens[5].type, TokenType.IDENTIFIER)
        self.assertEqual(tokens[5].value, 'Int')

        self.assertEqual(tokens[6].type, TokenType.COMMA)

        self.assertEqual(tokens[7].type, TokenType.IDENTIFIER)
        self.assertEqual(tokens[7].value, 'b')

        self.assertEqual(tokens[8].type, TokenType.COLON)

        self.assertEqual(tokens[9].type, TokenType.IDENTIFIER)
        self.assertEqual(tokens[9].value, 'Int')

        self.assertEqual(tokens[10].type, TokenType.RIGHT_PAREN)

        self.assertEqual(tokens[11].type, TokenType.COLON)

        self.assertEqual(tokens[12].type, TokenType.IDENTIFIER)
        self.assertEqual(tokens[12].value, 'Int')

        self.assertEqual(tokens[13].type, TokenType.EQUAL)

        self.assertEqual(tokens[14].type, TokenType.LEFT_BRACE)

        self.assertEqual(tokens[15].type, TokenType.NEWLINE)

        self.assertEqual(tokens[16].type, TokenType.IDENTIFIER)
        self.assertEqual(tokens[16].value, 'a')

        self.assertEqual(tokens[17].type, TokenType.PLUS)

        self.assertEqual(tokens[18].type, TokenType.IDENTIFIER)
        self.assertEqual(tokens[18].value, 'b')

        self.assertEqual(tokens[19].type, TokenType.NEWLINE)

        self.assertEqual(tokens[20].type, TokenType.RIGHT_BRACE)
Example #15
 def test_keyword(self):
     lexer = Lexer('if x {}\nelse elses i ifs} while{ print printx _print')
     expected = \
     [MyToken(TokenType.IF, 'if', None, 1), MyToken(TokenType.IDENT, 'x', 'x', 1), MyToken(TokenType.LBRACE, '{', None, 1),
      MyToken(TokenType.RBRACE, '}', None, 1), MyToken(TokenType.EOL, None, None, 1), MyToken(TokenType.ELSE, 'else', None, 2),
      MyToken(TokenType.IDENT, 'elses', 'elses', 2), MyToken(TokenType.IDENT, 'i', 'i', 2), MyToken(TokenType.IDENT, 'ifs', 'ifs', 2),
      MyToken(TokenType.RBRACE, '}', None, 2), MyToken(TokenType.WHILE, 'while', None, 2), MyToken(TokenType.LBRACE, '{', None, 2),
      MyToken(TokenType.IDENT, 'print', 'print', 2), MyToken(TokenType.IDENT, 'printx', 'printx', 2), MyToken(TokenType.IDENT, '_print', '_print', 2)]
     self.assertEqual(expected, lexer.lex())
Example #16
 def test_with_spaces(self):
     lexer = Lexer("  </   tag  >  ")
     open_token = lexer.get_next_token()
     id = lexer.get_next_token()
     close_token = lexer.get_next_token()
     self.assertIsInstance(open_token, OpenOfTagWithSlashToken)
     self.assertIsInstance(id, IdToken)
     self.assertEqual(id.value, "tag")
     self.assertIsInstance(close_token, CloseOfTagToken)
Example #17
 def __init__(self):
     """
     The class needs:
     an Environment instance for storing variables, and
     Parser and Lexer instances for parsing and lexing the input.
     """
     self.__env = Environment()
     self.__parser = Parser(self.__env)
     self.__lexer = Lexer()
Example #18
File: main.py Project: arnavsirigere/Vega
def run(file_name, text):
    lexer = Lexer(file_name, text)
    tokens, error = lexer.makeTokens()
    if error:
        return None, error

    parser = Parser(tokens)
    ast = parser.parse()  # ast => Abstract Syntax Tree

    return ast.node, ast.error
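
A minimal sketch of how run might be driven interactively. The REPL wrapper, prompt text, and the assumption that node and error print cleanly are mine; only run()'s signature and its (node, error) return shape come from the example above.
# Hypothetical driver around run(); everything except run()'s signature
# and the (node, error) return shape is an assumption.
while True:
    text = input('> ')
    if not text:
        break
    node, error = run('<stdin>', text)
    if error:
        print(error)   # assumed to have a readable string form
    else:
        print(node)    # assumed to be a printable AST node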
Example #19
File: test_file.py Project: supsub/LOUDXD
def test_empty():
    source = ""

    lexer = Lexer().get_lexer()
    tokens = lexer.lex(source)

    pg = Parser()
    pg.parse()
    parser = pg.get_parser()
    context = {}
    parser.parse(tokens).eval(context)
Example #20
 def test_strings(self):
     lexer = Lexer('\'try\' try \'if}\'#\'  asd\' \'\'')
     expected = [
         MyToken(TokenType.STRING, 'try', 'try', 1),
         MyToken(TokenType.IDENT, 'try', 'try', 1),
         MyToken(TokenType.STRING, 'if}', 'if}', 1),
         MyToken(TokenType.HASH, '#', None, 1),
         MyToken(TokenType.STRING, '  asd', '  asd', 1),
         MyToken(TokenType.STRING, '', '', 1)
     ]
     self.assertEqual(expected, lexer.lex())
Example #21
 def test_floats(self):
     lexer = Lexer('4 + 12.3 - 3. .1')
     expected = [
         MyToken(TokenType.NUMBER, '4', 4, 1),
         MyToken(TokenType.PLUS, '+', None, 1),
         MyToken(TokenType.NUMBER, '12.3', 12.3, 1),
         MyToken(TokenType.MINUS, '-', None, 1),
         MyToken(TokenType.NUMBER, '3.', 3., 1),
         MyToken(TokenType.NUMBER, '.1', .1, 1)
     ]
     self.assertEqual(expected, lexer.lex())
Example #22
 def _test_with_params(self, params):
     for i in range(len(params)):
         expr, lex_expected = params[i]
         lex_actual = Lexer(expr).lex()
         # Every lexer result ends with this terminator token. It is simpler
         # and cheaper to pop it off the actual result than to mutate the
         # expected data or build a new lex_expected list (using + on lists).
         self.assertEqual(('TERMINATE_TOKEN', ''), lex_actual.pop())
         self.assertEqual(
             lex_expected, lex_actual,
             f'Error on the {i} param set with the expression "{expr}"')
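
For context, a hypothetical call to this helper; the token type names, expressions, and tuple shape below are assumptions inferred from the ('TERMINATE_TOKEN', '') check above, not taken from the project.
 # Illustrative only: token names and expressions are assumptions.
 def test_sample_expressions(self):
     self._test_with_params([
         ('1 + 2', [('INT', '1'), ('PLUS', '+'), ('INT', '2')]),
         ('foo', [('IDENT', 'foo')]),
     ])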
Example #23
 def test_lexer(self):
     for path in glob.glob("test/grader/*/src.pas"):
         with open(path, 'r') as source:
             try:
                 text = source.read()
                 lexer = Lexer(text)
                 lexer.lex()
             except Exception as ex:
                 self.fail("Failed to lex " + path + "\n" + str(ex))
     self.assertTrue(True)
Example #24
 def test_assignment(self):
     lexer = Lexer('x:= 7\nx = 7')
     expected = [
         MyToken(TokenType.IDENT, 'x', 'x', 1),
         MyToken(TokenType.ASSIGN, ':=', None, 1),
         MyToken(TokenType.NUMBER, '7', 7, 1),
         MyToken(TokenType.EOL, None, None, 1),
         MyToken(TokenType.IDENT, 'x', 'x', 2),
         MyToken(TokenType.EQUAL, '=', None, 2),
         MyToken(TokenType.NUMBER, '7', 7, 2)
     ]
     self.assertEqual(expected, lexer.lex())
Example #25
File: test_lexer.py Project: lcary/tranq
 def test_oneline_computation(self):
     text = "print: 4 * var + 1"
     lexer = Lexer()
     tokens = lexer.get_tokens(text)
     expect = [
         Token(token_type=TokenType.identifier, value='print'),
         Token(token_type=TokenType.operator, value=':'),
         Token(token_type=TokenType.number, value='4'),
         Token(token_type=TokenType.operator, value='*'),
         Token(token_type=TokenType.identifier, value='var'),
         Token(token_type=TokenType.operator, value='+'),
         Token(token_type=TokenType.number, value='1')]
     self.assertEqual(tokens, expect)
Example #26
    def test_symbolizer(self):
        for path in glob.glob("test/grader/*/src.pas"):
            with open(path, 'r') as source:
                print(f"testing {path}")
                text = source.read()
                lexer = Lexer(text)
                tokens = lexer.lex()
                parser = Parser(tokens)
                ast = parser.parse()
                symbolizer = Symbolizer(ast)
                symbolizer.symbolize()

        self.assertTrue(True)
Example #27
 def test_booleans(self):
     lexer = Lexer(
         'true false trues = true 7 < 12 <= > y>=x or xor and not_ < not true'
     )
     expected = \
     [MyToken(TokenType.TRUE, 'true', True, 1), MyToken(TokenType.FALSE, 'false', False, 1), MyToken(TokenType.IDENT, 'trues', 'trues', 1),
      MyToken(TokenType.EQUAL, '=', None, 1), MyToken(TokenType.TRUE, 'true', True, 1), MyToken(TokenType.NUMBER, '7', 7, 1),
      MyToken(TokenType.L, '<', None, 1), MyToken(TokenType.NUMBER, '12', 12, 1), MyToken(TokenType.LE, '<=', None, 1),
      MyToken(TokenType.G, '>', None, 1), MyToken(TokenType.IDENT, 'y', 'y', 1), MyToken(TokenType.GE, '>=', None, 1), MyToken(TokenType.IDENT, 'x', 'x', 1),
      MyToken(TokenType.OR, 'or', None, 1), MyToken(TokenType.IDENT, 'xor', 'xor', 1), MyToken(TokenType.AND, 'and', None, 1),
      MyToken(TokenType.IDENT, 'not_', 'not_', 1), MyToken(TokenType.L, '<', None, 1), MyToken(TokenType.NOT, 'not', None, 1),
      MyToken(TokenType.TRUE, 'true', True, 1)]
     self.assertEqual(expected, lexer.lex())
Example #28
File: test.py Project: jekozyra/lexer
    def test_should_tokenize_a_simple_expression(self):
        lexer = Lexer('42 + 21')

        tokens = lexer.tokenize()

        self.assertEqual(3, len(tokens))

        self.assertEqual(tokens[0].type, TokenType.INTEGER)
        self.assertEqual(tokens[0].value, '42')

        self.assertEqual(tokens[1].type, TokenType.PLUS)
        self.assertEqual(tokens[1].value, '+')

        self.assertEqual(tokens[2].type, TokenType.INTEGER)
        self.assertEqual(tokens[2].value, '21')
Example #29
File: test_file.py Project: supsub/LOUDXD
def test_if_else():
    source = """
                Jeżeli 2 jest wieksze od 3 to wypisz na ekranie "jestem w ifie".
                W przeciwnym razie wypisz na ekranie "jestem w  elsie".
                Tyle.
                Wypisz na ekranie "ifelse działa".
                """

    lexer = Lexer().get_lexer()
    tokens = lexer.lex(source)

    pg = Parser()
    pg.parse()
    parser = pg.get_parser()
    context = {}
    parser.parse(tokens).eval(context)
Example #30
 def test_identifiers(self):
     lexer = Lexer('x + 7 - test123( 18 x x18 /_try-')
     expected = [
         MyToken(TokenType.IDENT, 'x', 'x', 1),
         MyToken(TokenType.PLUS, '+', None, 1),
         MyToken(TokenType.NUMBER, '7', 7, 1),
         MyToken(TokenType.MINUS, '-', None, 1),
         MyToken(TokenType.IDENT, 'test123', 'test123', 1),
         MyToken(TokenType.LPAREN, '(', None, 1),
         MyToken(TokenType.NUMBER, '18', 18, 1),
         MyToken(TokenType.IDENT, 'x', 'x', 1),
         MyToken(TokenType.IDENT, 'x18', 'x18', 1),
         MyToken(TokenType.DIV, '/', None, 1),
         MyToken(TokenType.IDENT, '_try', '_try', 1),
         MyToken(TokenType.MINUS, '-', None, 1)
     ]
     self.assertEqual(expected, lexer.lex())
Example #31
 def __init__(self, text):
   self.lexer = Lexer(text)
   self.token = self.lexer.get_next_token()
   self.ae()
Example #32
class Interpreter:
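  """Recursive-descent syntax checker for the expression grammar: each
  cNe/cNo method handles one precedence level, pulling tokens from the
  Lexer via get_next_token(); a bare `raise` aborts on a syntax error."""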
  
  def __init__(self, text):
    self.lexer = Lexer(text)
    self.token = self.lexer.get_next_token()
    self.ae()

  @staticmethod
  def is_first_of_term(type):
    return type == Type.UNION or type == Type.DIFF or \
        type == Type.NOT or type == Type.CARD or \
        type == Type.ID or type == Type.NULL or \
        type == Type.INT or type == Type.REAL or \
        type == Type.BOOL or type == Type.STRING or \
        type == Type.OPENP or type == Type.OPENB or \
        type == Type.OPENC
  
  def ae(self):
    if Interpreter.is_first_of_term(self.token.type):
      self.c5e()
    else:
      raise
    if self.token.type != Type.EOF:
      raise
  
  def c5e(self):
    if Interpreter.is_first_of_term(self.token.type):
      self.c4e()
      self.c5e_prime()
    else:
      raise
  
  def c5e_prime(self):
    if self.token.type == Type.EQ or self.token.type == Type.NEQ or \
        self.token.type == Type.GT or self.token.type == Type.GTEQ or \
        self.token.type == Type.LT or self.token.type == Type.LTEQ:
      self.c5o()
      self.c5e()
  
  def c5o(self):
    if self.token.type == Type.EQ:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.NEQ:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.GT:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.GTEQ:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.LT:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.LTEQ:
      self.token = self.lexer.get_next_token()
    else:
      raise
  
  def c4e(self):
    if Interpreter.is_first_of_term(self.token.type):
      self.c3e()
      self.c4e_prime()
    else:
      raise
  
  def c4e_prime(self):
    if self.token.type == Type.UNION or self.token.type == Type.DIFF:
      self.c4o()
      self.c4e()
  
  def c4o(self):
    if self.token.type == Type.UNION:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.DIFF:
      self.token = self.lexer.get_next_token()
    else:
      raise
  
  def c3e(self):
    if Interpreter.is_first_of_term(self.token.type):
      self.c2e()
      self.c3e_prime()
    else:
      raise
  
  def c3e_prime(self):
    if self.token.type == Type.AND or self.token.type == Type.XOR:
      self.c3o()
      self.c3e()
  
  def c3o(self):
    if self.token.type == Type.AND:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.XOR:
      self.token = self.lexer.get_next_token()
    else:
      raise
  
  def c2e(self):
    if Interpreter.is_first_of_term(self.token.type):
      self.c1e()
      self.c2e_prime()
    else:
      raise
  
  def c2e_prime(self):
    if self.token.type == Type.COMPOSE or self.token.type == Type.DIV or \
        self.token.type == Type.MOD:
      self.c2o()
      self.c2e()
  
  def c2o(self):
    if self.token.type == Type.COMPOSE:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.DIV:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.MOD:
      self.token = self.lexer.get_next_token()
    else:
      raise
  
  def c1e(self):
    if Interpreter.is_first_of_term(self.token.type):
      self.c0e()
      self.c1e_prime()
    else:
      raise
  
  def c1e_prime(self):
    if self.token.type == Type.TPOSE or self.token.type == Type.POW:
      self.c1o()
      self.c1e()
  
  def c1o(self):
    if self.token.type == Type.TPOSE:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.POW:
      self.token = self.lexer.get_next_token()
    else:
      raise
  
  def c0e(self):
    if Interpreter.is_first_of_term(self.token.type):
      self.term()
    else:
      raise
  
  def term(self):
    if self.token.type == Type.UNION or self.token.type == Type.DIFF:
      self.unary_op()
      self.var_num_term()
    elif self.token.type == Type.NOT:
      self.token = self.lexer.get_next_token()
      if self.token.type == Type.BOOL or self.token.type == Type.NULL:
        self.bool_term()
      elif self.token.type == Type.ID:
        self.token = self.lexer.get_next_token()
      else:
        raise
    elif self.token.type == Type.ID or self.token.type == Type.INT or \
        self.token.type == Type.REAL or self.token.type == Type.CARD:
      self.var_num_term()
    elif self.token.type == Type.BOOL or self.token.type == Type.NULL or \
        self.token.type == Type.OPENP or self.token.type == Type.OPENB or \
        self.token.type == Type.OPENC or self.token.type == Type.STRING:
      self.constant_term()
    else:
      raise
  
  def unary_op(self):
    if self.token.type == Type.UNION:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.DIFF:
      self.token = self.lexer.get_next_token()
    else:
      raise
  
  def var_num_term(self):
    if self.token.type == Type.ID:
      self.token = self.lexer.get_next_token()
      self.fact()
    elif self.token.type == Type.INT or self.token.type == Type.REAL or \
        self.token.type == Type.CARD:
      self.num_term()
    else:
      raise
  
  def num_term(self):
    if self.token.type == Type.INT:
      self.token = self.lexer.get_next_token()
      self.fact()
    elif self.token.type == Type.REAL:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.CARD:
      self.card_term()
      self.fact()
    else:
      raise

  def card_term(self):
    if self.token.type == Type.CARD:
      self.token = self.lexer.get_next_token()
      if self.token.type == Type.ID:
        self.token = self.lexer.get_next_token()
      elif self.token.type == Type.OPENP or self.token.type == Type.OPENB or \
          self.token.type == Type.OPENC:
        self.struct_term()
      else:
        raise
    else:
      raise
  
  def fact(self):
    if self.token.type == Type.FACT:
      self.token = self.lexer.get_next_token()
  
  def struct_term(self):
    if self.token.type == Type.OPENP:
      self.token = self.lexer.get_next_token()
      self.e_list()
      if self.token.type == Type.CLOSEP:
        self.token = self.lexer.get_next_token()
      else:
        raise
    elif self.token.type == Type.OPENB:
      self.token = self.lexer.get_next_token()
      self.e_list()
      if self.token.type == Type.CLOSEB:
        self.token = self.lexer.get_next_token()
      else:
        raise
    elif self.token.type == Type.OPENC:
      self.token = self.lexer.get_next_token()
      self.e_list()
      if self.token.type == Type.CLOSEC:
        self.token = self.lexer.get_next_token()
      else:
        raise
    else:
      raise

  def e_list(self):
    if Interpreter.is_first_of_term(self.token.type):
      self.c5e()
      self.e_list_tail()
    else:
      raise
  
  def e_list_tail(self):
    if self.token.type == Type.COMMA:
      self.token = self.lexer.get_next_token()
      self.e_list()
  
  def bool_term(self):
    if self.token.type == Type.BOOL:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.NULL:
      self.token = self.lexer.get_next_token()
    else:
      raise
  
  def constant_term(self):
    if self.token.type == Type.BOOL or self.token.type == Type.NULL:
      self.bool_term()
    elif self.token.type == Type.OPENP or self.token.type == Type.OPENB or \
        self.token.type == Type.OPENC:
      self.struct_term()
    elif self.token.type == Type.STRING:
      self.token = self.lexer.get_next_token()
    else:
      raise
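
A hedged usage sketch: the constructor lexes and immediately parses, so building an Interpreter either returns silently or raises. The expression strings below are assumptions based on the token syntax shown in the lexer tests above (e.g. '#' for cardinality, '+' as UNION), not on this project's own test suite.
# Usage sketch; the concrete expressions are assumptions inferred from
# the earlier lexer examples.
Interpreter('#{1} + 2')    # well-formed: parses without raising
try:
    Interpreter('1 +')     # truncated expression: the parser raises
except Exception:
    print('syntax error')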