Example No. 1
 def evaluate(self, expression: str):
     lexer = Lexer(expression)
     tokens = lexer.tokenize()
     parser = Parser(tokens)
     ast = parser.parse()  # bind to a new name; rebinding `expression` shadows the str parameter
     evaluator = Evaluator(self.env)
     return evaluator.evaluate(ast)
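A hedged usage sketch of the method above; the `Interpreter` host class and the sample environment are assumptions, since the original only shows the method body:

 # Hypothetical usage; `Interpreter` stands in for whatever class hosts
 # evaluate() above, with self.env mapping identifier names to values.
 interp = Interpreter(env={"foo": 2})
 result = interp.evaluate("foo < 3")  # expected True if `foo` resolves via env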
Example No. 2
 def test_tokenize_integer(self):
     expression = "1"
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 1)
     self.assertEqual(tokens[0].type, ttypes.INTEGER)
Example No. 3
 def test_tokenize_operator_gt_eq(self):
     expression = ">="
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 1)
     self.assertEqual(tokens[0].type, ttypes.GREATER_EQ_THAN)
Example No. 4
 def test_tokenize_identifier_starts_digit_ko(self):
     expression = "123asdf"
     with self.assertRaises(UnknownTokenException):
         Lexer(expression).tokenize()  # no need to bind the result when asserting a raise
Example No. 5
 def test_tokenize_operator_lt(self):
     expression = "<"
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 1)
     self.assertEqual(tokens[0].type, ttypes.LESS_THAN)
Example No. 6
 def test_tokenize_identifier_phrase_mixed_upper(self):
     expression = "AbCdEFGhIjk"
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 1)
     self.assertEqual(tokens[0].type, ttypes.IDENTIFIER)
Example No. 7
 def test_tokenize_identifier_underscore_also_mixed(self):
     expression = "aB123c_D45efG"
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 1)
     self.assertEqual(tokens[0].type, ttypes.IDENTIFIER)
Example No. 8
 def test_tokenize_identifier_char_upper(self):
     expression = "A"
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 1)
     self.assertEqual(tokens[0].type, ttypes.IDENTIFIER)
Example No. 9
 def test_tokenize_identifier_phrase_mixed_lower(self):
     expression = "aBcDEfGHI"
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 1)
     self.assertEqual(tokens[0].type, ttypes.IDENTIFIER)
Example No. 10
 def test_tokenize_float(self):
     expression = "1.23"
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 1)
     self.assertEqual(tokens[0].type, ttypes.FLOAT)
Example No. 11
 def test_tokenize_several_tokens_several_spaces(self):
     expression = "( foo    )"
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 3)
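A follow-up check one might add here, assuming the lexer emits the parenthesis and identifier token types in source order (inferred from the single-token tests above, not stated in the original):

 def test_tokenize_several_tokens_types(self):
     tokens = Lexer("( foo    )").tokenize()
     self.assertEqual(
         [t.type for t in tokens],
         [ttypes.OPENPAR, ttypes.IDENTIFIER, ttypes.CLOSEPAR],
     )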
Example No. 12
 def test_tokenize_close_par(self):
     expression = ")"
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 1)
     self.assertEqual(tokens[0].type, ttypes.CLOSEPAR)
Example No. 13
 def test_tokenize_open_par(self):
     expression = "("
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 1)
     self.assertEqual(tokens[0].type, ttypes.OPENPAR)
Example No. 14
 def test_tokenize_operator_or(self):
     expression = "or"
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 1)
     self.assertEqual(tokens[0].type, ttypes.OR)
Example No. 15
 def test_tokenize_operator_not_eq(self):
     expression = "<>"
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 1)
     self.assertEqual(tokens[0].type, ttypes.NOT_EQUALS)
Example No. 16
 def parse(expression):
     tokens = Lexer(expression).tokenize()
     return Parser(tokens).parse()
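A hedged sketch of this helper feeding the Evaluator from Example No. 1; the `price` binding and the dict-shaped environment are assumptions, not shown in the original:

 # Hypothetical: build an AST from source text, then evaluate it against
 # an environment, mirroring the pipeline in Example No. 1.
 ast = parse("price >= 10.5")
 value = Evaluator({"price": 12}).evaluate(ast)  # expected True under these assumptions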