def evaluate(self, expression: str):
    """Tokenize, parse, and evaluate `expression` against the current environment."""
    lexer = Lexer(expression)
    tokens = lexer.tokenize()
    parser = Parser(tokens)
    tree = parser.parse()  # named `tree`, not `expression`, to avoid shadowing the parameter
    evaluator = Evaluator(self.env)
    return evaluator.evaluate(tree)
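# A minimal usage sketch (assumptions: `evaluate` lives on an interpreter-style
# class exposing an `env` mapping used for identifier lookup; the class and
# variable names below are illustrative, not taken from the original code):
#
#     interpreter = Interpreter()
#     interpreter.env["foo"] = 2
#     result = interpreter.evaluate("foo >= 1.5 or foo < 0")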
def test_tokenize_integer(self):
    expression = "1"
    tokens = Lexer(expression).tokenize()
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].type, ttypes.INTEGER)

def test_tokenize_operator_gt_eq(self):
    expression = ">="
    tokens = Lexer(expression).tokenize()
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].type, ttypes.GREATER_EQ_THAN)
def test_tokenize_identifier_starts_digit_ko(self):
    expression = "123asdf"
    with self.assertRaises(UnknownTokenException):
        Lexer(expression).tokenize()
def test_tokenize_operator_lt(self):
    expression = "<"
    tokens = Lexer(expression).tokenize()
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].type, ttypes.LESS_THAN)

def test_tokenize_identifier_phrase_mixed_upper(self):
    expression = "AbCdEFGhIjk"
    tokens = Lexer(expression).tokenize()
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].type, ttypes.IDENTIFIER)

def test_tokenize_identifier_underscore_also_mixed(self):
    expression = "aB123c_D45efG"
    tokens = Lexer(expression).tokenize()
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].type, ttypes.IDENTIFIER)

def test_tokenize_identifier_char_upper(self):
    expression = "A"
    tokens = Lexer(expression).tokenize()
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].type, ttypes.IDENTIFIER)

def test_tokenize_identifier_phrase_mixed_lower(self):
    expression = "aBcDEfGHI"
    tokens = Lexer(expression).tokenize()
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].type, ttypes.IDENTIFIER)

def test_tokenize_float(self):
    expression = "1.23"
    tokens = Lexer(expression).tokenize()
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].type, ttypes.FLOAT)

def test_tokenize_several_tokens_several_spaces(self):
    expression = "(   foo   )"
    tokens = Lexer(expression).tokenize()
    self.assertEqual(len(tokens), 3)

def test_tokenize_close_par(self):
    expression = ")"
    tokens = Lexer(expression).tokenize()
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].type, ttypes.CLOSEPAR)

def test_tokenize_open_par(self):
    expression = "("
    tokens = Lexer(expression).tokenize()
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].type, ttypes.OPENPAR)

def test_tokenize_operator_or(self):
    expression = "or"
    tokens = Lexer(expression).tokenize()
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].type, ttypes.OR)

def test_tokenize_operator_not_eq(self):
    expression = "<>"
    tokens = Lexer(expression).tokenize()
    self.assertEqual(len(tokens), 1)
    self.assertEqual(tokens[0].type, ttypes.NOT_EQUALS)
def parse(expression):
    """Helper: tokenize and parse `expression`, returning the parsed tree."""
    tokens = Lexer(expression).tokenize()
    return Parser(tokens).parse()
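# Example (hypothetical input; the exact tree shape depends on the Parser's
# node classes, so only the call pattern is shown):
#
#     tree = parse("(foo >= 1.23) or bar")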