Example #1
 def evaluate(self, expression: str):
     # Lex the raw string into tokens, parse the tokens into an
     # expression tree, then evaluate that tree against the environment.
     lexer = Lexer(expression)
     tokens = lexer.tokenize()
     parser = Parser(tokens)
     ast = parser.parse()  # avoid shadowing the `expression` parameter
     evaluator = Evaluator(self.env)
     return evaluator.evaluate(ast)
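For context: the tests that follow assume a `Token` type with a `type` attribute and a `ttypes` namespace of token-type constants. Neither is shown in the source, so the sketch below is only an assumed minimal shape, not the project's actual definitions.
 from dataclasses import dataclass

 class ttypes:
     # Hypothetical constants; the real project may use a module or an Enum,
     # and the string values here are placeholders.
     INTEGER = "INTEGER"
     FLOAT = "FLOAT"
     IDENTIFIER = "IDENTIFIER"
     OPENPAR = "OPENPAR"
     CLOSEPAR = "CLOSEPAR"
     OR = "OR"
     LESS_THAN = "LESS_THAN"
     GREATER_EQ_THAN = "GREATER_EQ_THAN"
     NOT_EQUALS = "NOT_EQUALS"

 @dataclass
 class Token:
     # Only `type` is exercised by the tests; `value` is an assumption.
     type: str
     value: str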
Example #2
 def test_tokenize_integer(self):
     expression = "1"
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 1)
     self.assertEqual(tokens[0].type, ttypes.INTEGER)
Example #3
 def test_tokenize_operator_gt_eq(self):
     expression = ">="
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 1)
     self.assertEqual(tokens[0].type, ttypes.GREATER_EQ_THAN)
Example #4
 def test_tokenize_identifier_starts_digit_ko(self):
     # An identifier may not start with a digit, so lexing must fail.
     expression = "123asdf"
     with self.assertRaises(UnknownTokenException):
         Lexer(expression).tokenize()
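The test above relies on an `UnknownTokenException` raised by the lexer on input it cannot match. Its definition is not shown in the source; a minimal sketch under that assumption:
 class UnknownTokenException(Exception):
     # Hypothetical shape: raised by Lexer.tokenize() when no token rule
     # matches the remaining input (e.g. an identifier starting with a digit).
     pass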
Example #5
 def test_tokenize_operator_lt(self):
     expression = "<"
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 1)
     self.assertEqual(tokens[0].type, ttypes.LESS_THAN)
Example #6
 def test_tokenize_identifier_phrase_mixed_upper(self):
     expression = "AbCdEFGhIjk"
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 1)
     self.assertEqual(tokens[0].type, ttypes.IDENTIFIER)
Example #7
 def test_tokenize_identifier_underscore_also_mixed(self):
     expression = "aB123c_D45efG"
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 1)
     self.assertEqual(tokens[0].type, ttypes.IDENTIFIER)
Example #8
 def test_tokenize_identifier_char_upper(self):
     expression = "A"
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 1)
     self.assertEqual(tokens[0].type, ttypes.IDENTIFIER)
Example #9
 def test_tokenize_identifier_phrase_mixed_lower(self):
     expression = "aBcDEfGHI"
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 1)
     self.assertEqual(tokens[0].type, ttypes.IDENTIFIER)
Example #10
 def test_tokenize_float(self):
     expression = "1.23"
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 1)
     self.assertEqual(tokens[0].type, ttypes.FLOAT)
Example #11
 def test_tokenize_several_tokens_several_spaces(self):
     # Runs of whitespace are skipped: only "(", "foo" and ")" remain.
     expression = "( foo    )"
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 3)
Example #12
 def test_tokenize_close_par(self):
     expression = ")"
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 1)
     self.assertEqual(tokens[0].type, ttypes.CLOSEPAR)
Example #13
 def test_tokenize_open_par(self):
     expression = "("
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 1)
     self.assertEqual(tokens[0].type, ttypes.OPENPAR)
Example #14
 def test_tokenize_operator_or(self):
     expression = "or"
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 1)
     self.assertEqual(tokens[0].type, ttypes.OR)
Example #15
 def test_tokenize_operator_not_eq(self):
     expression = "<>"
     tokens = Lexer(expression).tokenize()
     self.assertEqual(len(tokens), 1)
     self.assertEqual(tokens[0].type, ttypes.NOT_EQUALS)
Example #16
 def parse(expression):
     # Convenience helper: lex and parse an expression in one step.
     tokens = Lexer(expression).tokenize()
     return Parser(tokens).parse()
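Taken together, Examples #1 and #16 suggest a lex -> parse -> evaluate pipeline. A minimal end-to-end sketch, assuming `Lexer`, `Parser`, and `Evaluator` behave as shown above and that `Evaluator` accepts a dict-like environment (the env type is not shown in the source):
 # Hypothetical module path; adjust to wherever the classes actually live.
 from interpreter import Lexer, Parser, Evaluator

 def evaluate_expression(expression, env=None):
     # str -> tokens -> expression tree -> value
     tokens = Lexer(expression).tokenize()
     ast = Parser(tokens).parse()
     return Evaluator(env or {}).evaluate(ast)

 # Usage sketch, assuming identifiers resolve through the environment:
 # evaluate_expression("foo >= 1.23", env={"foo": 2.0})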