Example no. 1
 def test_vector_transposition(self):
   lexer = Lexer('(1)**T')
   self.assertEqual(lexer.get_next_token(), Token(Type.OPENP, '('))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 1))
   self.assertEqual(lexer.get_next_token(), Token(Type.CLOSEP, ')'))
   self.assertEqual(lexer.get_next_token(), Token(Type.TPOSE, '**T'))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example no. 2
 def test_real_must_have_fraction_again(self):
   lexer = Lexer('1.0 + 1.')
   self.assertEqual(lexer.get_next_token(), Token(Type.REAL, 1.0))
   self.assertEqual(lexer.get_next_token(), Token(Type.UNION, '+'))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 1))
   self.assertEqual(lexer.get_next_token(), Token(Type.PERIOD, '.'))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example no. 3
 def test_set_cardinality(self):
   lexer = Lexer('#{1}')
   self.assertEqual(lexer.get_next_token(), Token(Type.CARD, '#'))
   self.assertEqual(lexer.get_next_token(), Token(Type.OPENC, '{'))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 1))
   self.assertEqual(lexer.get_next_token(), Token(Type.CLOSEC, '}'))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example no. 4
 def test_integer_no_leading_zeros(self):
   lexer = Lexer('0 + 00')
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 0))
   self.assertEqual(lexer.get_next_token(), Token(Type.UNION, '+'))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 0))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 0))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example no. 5
 def test_real_no_leading_zeros(self):
   lexer = Lexer('0.00 + 00.0')
   self.assertEqual(lexer.get_next_token(), Token(Type.REAL, 0.00))
   self.assertEqual(lexer.get_next_token(), Token(Type.UNION, '+'))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 0))
   self.assertEqual(lexer.get_next_token(), Token(Type.REAL, 0.0))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example no. 6
 def test_reserved_values(self):
   lexer = Lexer('e false null pi true')
   self.assertEqual(lexer.get_next_token(), Token(Type.REAL, math.e))
   self.assertEqual(lexer.get_next_token(), Token(Type.BOOL, False))
   self.assertEqual(lexer.get_next_token(), Token(Type.NULL, None))
   self.assertEqual(lexer.get_next_token(), Token(Type.REAL, math.pi))
   self.assertEqual(lexer.get_next_token(), Token(Type.BOOL, True))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example no. 7
 def test_vector_norm(self):
   lexer = Lexer('|| (1) ||')
   self.assertEqual(lexer.get_next_token(), Token(Type.NORM, '||'))
   self.assertEqual(lexer.get_next_token(), Token(Type.OPENP, '('))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 1))
   self.assertEqual(lexer.get_next_token(), Token(Type.CLOSEP, ')'))
   self.assertEqual(lexer.get_next_token(), Token(Type.NORM, '||'))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example no. 8
 def test_set_xor(self):
   lexer = Lexer('{1} ^ {1}')
   self.assertEqual(lexer.get_next_token(), Token(Type.OPENC, '{'))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 1))
   self.assertEqual(lexer.get_next_token(), Token(Type.CLOSEC, '}'))
   self.assertEqual(lexer.get_next_token(), Token(Type.XOR, '^'))
   self.assertEqual(lexer.get_next_token(), Token(Type.OPENC, '{'))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 1))
   self.assertEqual(lexer.get_next_token(), Token(Type.CLOSEC, '}'))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example no. 9
 def test_set_and(self):
   lexer = Lexer('{1} & {1}')
   self.assertEqual(lexer.get_next_token(), Token(Type.OPENC, '{'))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 1))
   self.assertEqual(lexer.get_next_token(), Token(Type.CLOSEC, '}'))
   self.assertEqual(lexer.get_next_token(), Token(Type.AND, '&'))
   self.assertEqual(lexer.get_next_token(), Token(Type.OPENC, '{'))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 1))
   self.assertEqual(lexer.get_next_token(), Token(Type.CLOSEC, '}'))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example no. 10
 def test_set_union(self):
   lexer = Lexer('{1} + {1}')
   self.assertEqual(lexer.get_next_token(), Token(Type.OPENC, '{'))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 1))
   self.assertEqual(lexer.get_next_token(), Token(Type.CLOSEC, '}'))
   self.assertEqual(lexer.get_next_token(), Token(Type.UNION, '+'))
   self.assertEqual(lexer.get_next_token(), Token(Type.OPENC, '{'))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 1))
   self.assertEqual(lexer.get_next_token(), Token(Type.CLOSEC, '}'))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example no. 11
 def test_list_union(self):
   lexer = Lexer('[1] + [1]')
   self.assertEqual(lexer.get_next_token(), Token(Type.OPENB, '['))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 1))
   self.assertEqual(lexer.get_next_token(), Token(Type.CLOSEB, ']'))
   self.assertEqual(lexer.get_next_token(), Token(Type.UNION, '+'))
   self.assertEqual(lexer.get_next_token(), Token(Type.OPENB, '['))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 1))
   self.assertEqual(lexer.get_next_token(), Token(Type.CLOSEB, ']'))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example no. 12
 def test_tuple_union(self):
   lexer = Lexer('(1) + (1)')
   self.assertEqual(lexer.get_next_token(), Token(Type.OPENP, '('))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 1))
   self.assertEqual(lexer.get_next_token(), Token(Type.CLOSEP, ')'))
   self.assertEqual(lexer.get_next_token(), Token(Type.UNION, '+'))
   self.assertEqual(lexer.get_next_token(), Token(Type.OPENP, '('))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 1))
   self.assertEqual(lexer.get_next_token(), Token(Type.CLOSEP, ')'))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example no. 13
 def test_assignment_expression(self):
   lexer = Lexer('let x of Z := 5.')
   self.assertEqual(lexer.get_next_token(), Token(Type.LET, 'let'))
   self.assertEqual(lexer.get_next_token(), Token(Type.ID, 'x'))
   self.assertEqual(lexer.get_next_token(), Token(Type.OF, 'of'))
   self.assertEqual(lexer.get_next_token(), Token(Type.Z, 'Z'))
   self.assertEqual(lexer.get_next_token(), Token(Type.DEFAS, ':='))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 5))
   self.assertEqual(lexer.get_next_token(), Token(Type.PERIOD, '.'))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example no. 14
 def _create_lexer_mock(self, return_values, flag):
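      # Build a Lexer whose collaborators are mocked: get_next_token yields
      # the given return_values (terminated with an EndOfTextToken), the
      # open-of-tag probe is driven by `flag`, and the text/comment helpers
      # return canned values.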
     return_values.append(EndOfTextToken())
     lexer = Lexer(None)
     lexer.get_next_token = Mock()
     lexer.is_next_nonempty_char_an_open_of_tag = Mock()
     lexer.get_text_until_open_of_tag = Mock()
     lexer.get_comment = Mock()
     lexer.get_next_token.side_effect = return_values
     lexer.is_next_nonempty_char_an_open_of_tag.side_effect = [(flag, ""),
                                                               (False, "")]
     lexer.get_text_until_open_of_tag.side_effect = ["text"]
     lexer.get_comment.side_effect = ["comment"]
     return lexer
Example no. 15
 def test_reserved_types(self):
   lexer = Lexer('B Boolean Integer N Natural Q Rational R Real S String U Universal Z')
   self.assertEqual(lexer.get_next_token(), Token(Type.B, 'B'))
   self.assertEqual(lexer.get_next_token(), Token(Type.B, 'B'))
   self.assertEqual(lexer.get_next_token(), Token(Type.Z, 'Z'))
   self.assertEqual(lexer.get_next_token(), Token(Type.N, 'N'))
   self.assertEqual(lexer.get_next_token(), Token(Type.N, 'N'))
   self.assertEqual(lexer.get_next_token(), Token(Type.Q, 'Q'))
   self.assertEqual(lexer.get_next_token(), Token(Type.Q, 'Q'))
   self.assertEqual(lexer.get_next_token(), Token(Type.R, 'R'))
   self.assertEqual(lexer.get_next_token(), Token(Type.R, 'R'))
   self.assertEqual(lexer.get_next_token(), Token(Type.S, 'S'))
   self.assertEqual(lexer.get_next_token(), Token(Type.S, 'S'))
   self.assertEqual(lexer.get_next_token(), Token(Type.U, 'U'))
   self.assertEqual(lexer.get_next_token(), Token(Type.U, 'U'))
   self.assertEqual(lexer.get_next_token(), Token(Type.Z, 'Z'))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example no. 16
 def test_function_declaration(self):
   lexer = Lexer('let f: Z -> Z, f(x) := 5x.')
   self.assertEqual(lexer.get_next_token(), Token(Type.LET, 'let'))
   self.assertEqual(lexer.get_next_token(), Token(Type.ID, 'f'))
   self.assertEqual(lexer.get_next_token(), Token(Type.COLON, ':'))
   self.assertEqual(lexer.get_next_token(), Token(Type.Z, 'Z'))
   self.assertEqual(lexer.get_next_token(), Token(Type.TO, '->'))
   self.assertEqual(lexer.get_next_token(), Token(Type.Z, 'Z'))
   self.assertEqual(lexer.get_next_token(), Token(Type.COMMA, ','))
   self.assertEqual(lexer.get_next_token(), Token(Type.ID, 'f'))
   self.assertEqual(lexer.get_next_token(), Token(Type.OPENP, '('))
   self.assertEqual(lexer.get_next_token(), Token(Type.ID, 'x'))
   self.assertEqual(lexer.get_next_token(), Token(Type.CLOSEP, ')'))
   self.assertEqual(lexer.get_next_token(), Token(Type.DEFAS, ':='))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 5))
   self.assertEqual(lexer.get_next_token(), Token(Type.ID, 'x'))
   self.assertEqual(lexer.get_next_token(), Token(Type.PERIOD, '.'))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example no. 17
 def test_step_interval(self):
   lexer = Lexer('forall x in [1,..1..,#X)')
   self.assertEqual(lexer.get_next_token(), Token(Type.FORALL, 'forall'))
   self.assertEqual(lexer.get_next_token(), Token(Type.ID, 'x'))
   self.assertEqual(lexer.get_next_token(), Token(Type.IN, 'in'))
   self.assertEqual(lexer.get_next_token(), Token(Type.OPENB, '['))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 1))
   self.assertEqual(lexer.get_next_token(), Token(Type.COMMA, ','))
   self.assertEqual(lexer.get_next_token(), Token(Type.RANGE, '..'))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 1))
   self.assertEqual(lexer.get_next_token(), Token(Type.RANGE, '..'))
   self.assertEqual(lexer.get_next_token(), Token(Type.COMMA, ','))
   self.assertEqual(lexer.get_next_token(), Token(Type.CARD, '#'))
   self.assertEqual(lexer.get_next_token(), Token(Type.ID, 'X'))
   self.assertEqual(lexer.get_next_token(), Token(Type.CLOSEP, ')'))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example no. 18
 def test_reserved_words(self):
   lexer = Lexer('else forall if in is let of st then while')
   self.assertEqual(lexer.get_next_token(), Token(Type.ELSE, 'else'))
   self.assertEqual(lexer.get_next_token(), Token(Type.FORALL, 'forall'))
   self.assertEqual(lexer.get_next_token(), Token(Type.IF, 'if'))
   self.assertEqual(lexer.get_next_token(), Token(Type.IN, 'in'))
   self.assertEqual(lexer.get_next_token(), Token(Type.IS, 'is'))
   self.assertEqual(lexer.get_next_token(), Token(Type.LET, 'let'))
   self.assertEqual(lexer.get_next_token(), Token(Type.OF, 'of'))
   self.assertEqual(lexer.get_next_token(), Token(Type.ST, 'st'))
   self.assertEqual(lexer.get_next_token(), Token(Type.THEN, 'then'))
   self.assertEqual(lexer.get_next_token(), Token(Type.WHILE, 'while'))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example no. 19
 def test_reserved_operators(self):
   lexer = Lexer('and diff iff inter mod not onlyif or to xor union')
   self.assertEqual(lexer.get_next_token(), Token(Type.AND, '&'))
   self.assertEqual(lexer.get_next_token(), Token(Type.DIFF, '-'))
   self.assertEqual(lexer.get_next_token(), Token(Type.IFF, '<=>'))
   self.assertEqual(lexer.get_next_token(), Token(Type.AND, '&'))
   self.assertEqual(lexer.get_next_token(), Token(Type.MOD, '%'))
   self.assertEqual(lexer.get_next_token(), Token(Type.NOT, '~'))
   self.assertEqual(lexer.get_next_token(), Token(Type.IMPL, '=>'))
   self.assertEqual(lexer.get_next_token(), Token(Type.UNION, '+'))
   self.assertEqual(lexer.get_next_token(), Token(Type.TO, '->'))
   self.assertEqual(lexer.get_next_token(), Token(Type.XOR, '^'))
   self.assertEqual(lexer.get_next_token(), Token(Type.UNION, '+'))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example no. 20
class Interpreter:
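  # Recursive-descent recognizer for the expression grammar. The cNe methods
  # parse expressions by precedence level (c5e: comparisons, c4e: union and
  # difference, c3e: and/xor, c2e: compose, div and mod, c1e: transpose and
  # power, c0e: a single term); each cNe_prime handles the optional operator
  # tail and cNo consumes the operator token. A bare `raise` (surfacing as a
  # RuntimeError, since no exception is active) signals a syntax error at the
  # current token.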
  
  def __init__(self, text):
    self.lexer = Lexer(text)
    self.token = self.lexer.get_next_token()
    self.ae()

  @staticmethod
  def is_first_of_term(type):
    return type == Type.UNION or type == Type.DIFF or \
        type == Type.NOT or type == Type.CARD or \
        type == Type.ID or type == Type.NULL or \
        type == Type.INT or type == Type.REAL or \
        type == Type.BOOL or type == Type.STRING or \
        type == Type.OPENP or type == Type.OPENB or \
        type == Type.OPENC
  
  def ae(self):
    if Interpreter.is_first_of_term(self.token.type):
      self.c5e()
    else:
      raise
    if self.token.type != Type.EOF:
      raise
  
  def c5e(self):
    if Interpreter.is_first_of_term(self.token.type):
      self.c4e()
      self.c5e_prime()
    else:
      raise
  
  def c5e_prime(self):
    if self.token.type == Type.EQ or self.token.type == Type.NEQ or \
        self.token.type == Type.GT or self.token.type == Type.GTEQ or \
        self.token.type == Type.LT or self.token.type == Type.LTEQ:
      self.c5o()
      self.c5e()
  
  def c5o(self):
    if self.token.type == Type.EQ:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.NEQ:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.GT:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.GTEQ:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.LT:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.LTEQ:
      self.token = self.lexer.get_next_token()
    else:
      raise
  
  def c4e(self):
    if Interpreter.is_first_of_term(self.token.type):
      self.c3e()
      self.c4e_prime()
    else:
      raise
  
  def c4e_prime(self):
    if self.token.type == Type.UNION or self.token.type == Type.DIFF:
      self.c4o()
      self.c4e()
  
  def c4o(self):
    if self.token.type == Type.UNION:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.DIFF:
      self.token = self.lexer.get_next_token()
    else:
      raise
  
  def c3e(self):
    if Interpreter.is_first_of_term(self.token.type):
      self.c2e()
      self.c3e_prime()
    else:
      raise
  
  def c3e_prime(self):
    if self.token.type == Type.AND or self.token.type == Type.XOR:
      self.c3o()
      self.c3e()
  
  def c3o(self):
    if self.token.type == Type.AND:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.XOR:
      self.token = self.lexer.get_next_token()
    else:
      raise
  
  def c2e(self):
    if Interpreter.is_first_of_term(self.token.type):
      self.c1e()
      self.c2e_prime()
    else:
      raise
  
  def c2e_prime(self):
    if self.token.type == Type.COMPOSE or self.token.type == Type.DIV or \
        self.token.type == Type.MOD:
      self.c2o()
      self.c2e()
  
  def c2o(self):
    if self.token.type == Type.COMPOSE:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.DIV:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.MOD:
      self.token = self.lexer.get_next_token()
    else:
      raise
  
  def c1e(self):
    if Interpreter.is_first_of_term(self.token.type):
      self.c0e()
      self.c1e_prime()
    else:
      raise
  
  def c1e_prime(self):
    if self.token.type == Type.TPOSE or self.token.type == Type.POW:
      self.c1o()
      self.c1e()
  
  def c1o(self):
    if self.token.type == Type.TPOSE:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.POW:
      self.token = self.lexer.get_next_token()
    else:
      raise
  
  def c0e(self):
    if Interpreter.is_first_of_term(self.token.type):
      self.term()
    else:
      raise
  
  def term(self):
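    # A term is a signed variable/number ('+'/'-' prefix), 'not' applied to a
    # boolean, null, or identifier, a bare variable/number/cardinality, or a
    # constant (boolean, null, string, or a structured term).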
    if self.token.type == Type.UNION or self.token.type == Type.DIFF:
      self.unary_op()
      self.var_num_term()
    elif self.token.type == Type.NOT:
      self.token = self.lexer.get_next_token()
      if self.token.type == Type.BOOL or self.token.type == Type.NULL:
        self.bool_term()
      elif self.token.type == Type.ID:
        self.token = self.lexer.get_next_token()
      else:
        raise
    elif self.token.type == Type.ID or self.token.type == Type.INT or \
        self.token.type == Type.REAL or self.token.type == Type.CARD:
      self.var_num_term()
    elif self.token.type == Type.BOOL or self.token.type == Type.NULL or \
        self.token.type == Type.OPENP or self.token.type == Type.OPENB or \
        self.token.type == Type.OPENC or self.token.type == Type.STRING:
      self.constant_term()
    else:
      raise
  
  def unary_op(self):
    if self.token.type == Type.UNION:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.DIFF:
      self.token = self.lexer.get_next_token()
    else:
      raise
  
  def var_num_term(self):
    if self.token.type == Type.ID:
      self.token = self.lexer.get_next_token()
      self.fact()
    elif self.token.type == Type.INT or self.token.type == Type.REAL or \
        self.token.type == Type.CARD:
      self.num_term()
    else:
      raise
  
  def num_term(self):
    if self.token.type == Type.INT:
      self.token = self.lexer.get_next_token()
      self.fact()
    elif self.token.type == Type.REAL:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.CARD:
      self.card_term()
      self.fact()
    else:
      raise

  def card_term(self):
    if self.token.type == Type.CARD:
      self.token = self.lexer.get_next_token()
      if self.token.type == Type.ID:
        self.token = self.lexer.get_next_token()
      elif self.token.type == Type.OPENP or self.token.type == Type.OPENB or \
          self.token.type == Type.OPENC:
        self.struct_term()
      else:
        raise
    else:
      raise
  
  def fact(self):
    if self.token.type == Type.FACT:
      self.token = self.lexer.get_next_token()
  
  def struct_term(self):
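    # A structured term is a comma-separated expression list enclosed in
    # parentheses, brackets, or braces; the matching closer is required.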
    if self.token.type == Type.OPENP:
      self.token = self.lexer.get_next_token()
      self.e_list()
      if self.token.type == Type.CLOSEP:
        self.token = self.lexer.get_next_token()
      else:
        raise
    elif self.token.type == Type.OPENB:
      self.token = self.lexer.get_next_token()
      self.e_list()
      if self.token.type == Type.CLOSEB:
        self.token = self.lexer.get_next_token()
      else:
        raise
    elif self.token.type == Type.OPENC:
      self.token = self.lexer.get_next_token()
      self.e_list()
      if self.token.type == Type.CLOSEC:
        self.token = self.lexer.get_next_token()
      else:
        raise
    else:
      raise

  def e_list(self):
    if Interpreter.is_first_of_term(self.token.type):
      self.c5e()
      self.e_list_tail()
    else:
      raise
  
  def e_list_tail(self):
    if self.token.type == Type.COMMA:
      self.token = self.lexer.get_next_token()
      self.e_list()
  
  def bool_term(self):
    if self.token.type == Type.BOOL:
      self.token = self.lexer.get_next_token()
    elif self.token.type == Type.NULL:
      self.token = self.lexer.get_next_token()
    else:
      raise
  
  def constant_term(self):
    if self.token.type == Type.BOOL or self.token.type == Type.NULL:
      self.bool_term()
    elif self.token.type == Type.OPENP or self.token.type == Type.OPENB or \
        self.token.type == Type.OPENC:
      self.struct_term()
    elif self.token.type == Type.STRING:
      self.token = self.lexer.get_next_token()
    else:
      raise
Example no. 21
 def test_get_single_quoted_id(self):
      lexer = Lexer("'id'")
     token = lexer.get_next_token()
     self.assertIsInstance(token, QuotedIdToken)
     self.assertEqual(token.value, "id")
Example no. 22
 def test_get_comment_method(self):
     lexer = Lexer("<!-- comment --> <")
     self.assertIsInstance(lexer.get_next_token(), OpenOfCommentTagToken)
     self.assertEqual(lexer.get_comment(), " comment ")
     self.assertIsInstance(lexer.get_next_token(), CloseOfCommentTagToken)
     self.assertIsInstance(lexer.get_next_token(), OpenOfTagToken)
Example no. 23
 def test_real_union(self):
   lexer = Lexer('3.0 + 2.5')
   self.assertEqual(lexer.get_next_token(), Token(Type.REAL, 3.0))
   self.assertEqual(lexer.get_next_token(), Token(Type.UNION, '+'))
   self.assertEqual(lexer.get_next_token(), Token(Type.REAL, 2.5))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example no. 24
 def test_integer_absolute_value(self):
   lexer = Lexer('| 1 |')
   self.assertEqual(lexer.get_next_token(), Token(Type.PIPE, '|'))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 1))
   self.assertEqual(lexer.get_next_token(), Token(Type.PIPE, '|'))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))
Example no. 25
 def test_integer_difference(self):
   lexer = Lexer('1 - 1')
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 1))
   self.assertEqual(lexer.get_next_token(), Token(Type.DIFF, '-'))
   self.assertEqual(lexer.get_next_token(), Token(Type.INT, 1))
   self.assertEqual(lexer.get_next_token(), Token(Type.EOF, ''))