def test_divide(self):
        """'/' tokenises as OPERATOR_DIVIDE regardless of surrounding spaces."""
        for source in ('aaa/bbb', 'aaa /bbb', 'aaa/ bbb', 'aaa / bbb'):
            tkns = Tokeniser(StringReader(source))

            token = tkns.get_next_token()
            self.assertEqual(tokeniser.TokenType.WORD, token.token_type)
            self.assertEqual('aaa', token.value)

            token = tkns.get_next_token()
            self.assertEqual(tokeniser.TokenType.OPERATOR_DIVIDE, token.token_type)

            token = tkns.get_next_token()
            self.assertEqual(tokeniser.TokenType.WORD, token.token_type)
            self.assertEqual('bbb', token.value)

            # The tokeniser signals exhaustion by returning None.
            self.assertEqual(None, tkns.get_next_token())
    def test_minus(self):
        """'-' tokenises as OPERATOR_MINUS regardless of surrounding spaces."""
        for source in ('aaa-bbb', 'aaa -bbb', 'aaa- bbb', 'aaa - bbb'):
            tkns = Tokeniser(StringReader(source))

            token = tkns.get_next_token()
            self.assertEqual(tokeniser.TokenType.WORD, token.token_type)
            self.assertEqual('aaa', token.value)

            token = tkns.get_next_token()
            self.assertEqual(tokeniser.TokenType.OPERATOR_MINUS, token.token_type)

            token = tkns.get_next_token()
            self.assertEqual(tokeniser.TokenType.WORD, token.token_type)
            self.assertEqual('bbb', token.value)

            # The tokeniser signals exhaustion by returning None.
            self.assertEqual(None, tkns.get_next_token())
    def test_multiply(self):
        """'*' tokenises as OPERATOR_MULTIPLY regardless of surrounding spaces."""
        for source in ('aaa*bbb', 'aaa *bbb', 'aaa* bbb', 'aaa * bbb'):
            tkns = Tokeniser(StringReader(source))

            token = tkns.get_next_token()
            self.assertEqual(tokeniser.TokenType.WORD, token.token_type)
            self.assertEqual('aaa', token.value)

            token = tkns.get_next_token()
            self.assertEqual(tokeniser.TokenType.OPERATOR_MULTIPLY, token.token_type)

            token = tkns.get_next_token()
            self.assertEqual(tokeniser.TokenType.WORD, token.token_type)
            self.assertEqual('bbb', token.value)

            # The tokeniser signals exhaustion by returning None.
            self.assertEqual(None, tkns.get_next_token())
    def test_comparator_greater_than(self):
        """'>' tokenises as COMPARATOR_GT regardless of surrounding spaces."""
        for source in ('aaa>bbb', 'aaa >bbb', 'aaa> bbb', 'aaa > bbb'):
            tkns = Tokeniser(StringReader(source))

            token = tkns.get_next_token()
            self.assertEqual(tokeniser.TokenType.WORD, token.token_type)
            self.assertEqual('aaa', token.value)

            token = tkns.get_next_token()
            self.assertEqual(tokeniser.TokenType.COMPARATOR_GT, token.token_type)

            token = tkns.get_next_token()
            self.assertEqual(tokeniser.TokenType.WORD, token.token_type)
            self.assertEqual('bbb', token.value)

            # The tokeniser signals exhaustion by returning None.
            self.assertEqual(None, tkns.get_next_token())
    def test_comparator_less_than_or_equal(self):
        """'<=' tokenises as COMPARATOR_LE regardless of surrounding spaces."""
        for source in ('aaa<=bbb', 'aaa <=bbb', 'aaa<= bbb', 'aaa <= bbb'):
            tkns = Tokeniser(StringReader(source))

            token = tkns.get_next_token()
            self.assertEqual(tokeniser.TokenType.WORD, token.token_type)
            self.assertEqual('aaa', token.value)

            token = tkns.get_next_token()
            self.assertEqual(tokeniser.TokenType.COMPARATOR_LE, token.token_type)

            token = tkns.get_next_token()
            self.assertEqual(tokeniser.TokenType.WORD, token.token_type)
            self.assertEqual('bbb', token.value)

            # The tokeniser signals exhaustion by returning None.
            self.assertEqual(None, tkns.get_next_token())
 def test_not_equals_to_expression(self):
     """'"ABC"!="abc"' parses to NotEquals over two string Literals.

     Renamed from test_path_to_expression: that name was defined many
     times in this class, so all but the last definition were silently
     shadowed and never run by the test runner.
     """
     expr = ExpressionParser(Tokeniser(StringReader(Q + 'ABC' + Q + '!=' + Q + 'abc' + Q))).get_expression()
     self.assertTrue(isinstance(expr, NotEquals))
     self.assertTrue(isinstance(expr.lhs, Literal))
     self.assertEqual('ABC', expr.lhs.literal)
     self.assertTrue(isinstance(expr.rhs, Literal))
     self.assertEqual('abc', expr.rhs.literal)
    def test_path(self):
        """A dotted path tokenises as WORDs alternating with PATH_SEPARATORs."""
        tkns = Tokeniser(StringReader('aaa.bbb.ccc.ddd'))

        token = tkns.get_next_token()
        self.assertNotEqual(None, token)
        self.assertEqual(tokeniser.TokenType.WORD, token.token_type)
        self.assertEqual('aaa', token.value)

        for expected_word in ('bbb', 'ccc', 'ddd'):
            token = tkns.get_next_token()
            self.assertEqual(tokeniser.TokenType.PATH_SEPARATOR, token.token_type)

            token = tkns.get_next_token()
            self.assertEqual(tokeniser.TokenType.WORD, token.token_type)
            self.assertEqual(expected_word, token.value)

        # The tokeniser signals exhaustion by returning None.
        self.assertEqual(None, tkns.get_next_token())
 def test_greater_than_to_expression(self):
     """'101>100' parses to GreaterThan over two integer Literals.

     Renamed from test_path_to_expression: that name was defined many
     times in this class, so all but the last definition were silently
     shadowed and never run by the test runner.
     """
     expr = ExpressionParser(Tokeniser(StringReader('101>100'))).get_expression()
     self.assertTrue(isinstance(expr, GreaterThan))
     self.assertTrue(isinstance(expr.lhs, Literal))
     self.assertEqual(101, expr.lhs.literal)
     self.assertTrue(isinstance(expr.rhs, Literal))
     self.assertEqual(100, expr.rhs.literal)
 def test_less_than_or_equal_to_expression(self):
     """'99<=100' parses to LessThanOrEqual over two integer Literals.

     Renamed from test_path_to_expression: that name was defined many
     times in this class, so all but the last definition were silently
     shadowed and never run by the test runner.
     """
     expr = ExpressionParser(Tokeniser(StringReader('99<=100'))).get_expression()
     self.assertTrue(isinstance(expr, LessThanOrEqual))
     self.assertTrue(isinstance(expr.lhs, Literal))
     self.assertEqual(99, expr.lhs.literal)
     self.assertTrue(isinstance(expr.rhs, Literal))
     self.assertEqual(100, expr.rhs.literal)
 def test_divide_to_expression(self):
     """'100/101' parses to Divide holding two integer Literal operands.

     Renamed from test_path_to_expression: that name was defined many
     times in this class, so all but the last definition were silently
     shadowed and never run by the test runner.
     """
     expr = ExpressionParser(Tokeniser(StringReader('100/101'))).get_expression()
     self.assertTrue(isinstance(expr, Divide))
     self.assertEqual(2, len(expr.items))
     self.assertTrue(isinstance(expr.items[0], Literal))
     self.assertEqual(100, expr.items[0].literal)
     self.assertTrue(isinstance(expr.items[1], Literal))
     self.assertEqual(101, expr.items[1].literal)
 def test_literal_string_2(self):
     """A quoted string built from TokenType.QUOTE tokenises to one literal.

     Fixed: the test previously only printed each token and asserted
     nothing, so it could never fail. It now checks the single
     LITERAL_STRING token and that the stream is then exhausted
     (mirroring the quoted-literal case in the other literal tests).
     """
     tkns = Tokeniser(
         StringReader(TokenType.QUOTE.value + 'literal string' +
                      TokenType.QUOTE.value))
     t = tkns.get_next_token()
     self.assertNotEqual(None, t)
     self.assertEqual(TokenType.LITERAL_STRING, t.token_type)
     self.assertEqual('literal string', t.value)
     self.assertEqual(None, tkns.get_next_token())
 def test_filter_to_expression(self):
     """'[name==$name]' parses to Filter(Equals(Field, Parameter)).

     Renamed from test_path_to_expression: that name was defined many
     times in this class, so all but the last definition were silently
     shadowed and never run by the test runner.
     """
     expr = ExpressionParser(Tokeniser(StringReader('[name==$name]'))).get_expression()
     self.assertTrue(isinstance(expr, Filter))
     self.assertTrue(isinstance(expr.filter, Equals))
     self.assertTrue(isinstance(expr.filter.lhs, Field))
     self.assertEqual('name', expr.filter.lhs.field)
     self.assertTrue(isinstance(expr.filter.rhs, Parameter))
     self.assertEqual('name', expr.filter.rhs.parameter)
 def test_concat_to_expression(self):
     """'Concat("aaa",123)' parses to Concatenate over two Literals.

     Renamed from test_path_to_expression: that name was defined many
     times in this class, so all but the last definition were silently
     shadowed and never run by the test runner.
     """
     expr = ExpressionParser(Tokeniser(StringReader('Concat("aaa",123)'))).get_expression()
     self.assertTrue(isinstance(expr, Concatenate))
     self.assertEqual(2, len(expr.items))
     self.assertTrue(isinstance(expr.items[0], Literal))
     self.assertEqual('aaa', expr.items[0].literal)
     self.assertTrue(isinstance(expr.items[1], Literal))
     self.assertEqual(123, expr.items[1].literal)
 def test_or_to_expression(self):
     """'(True|False)' parses to Or over two boolean Literals.

     Renamed from test_path_to_expression: that name was defined many
     times in this class, so all but the last definition were silently
     shadowed and never run by the test runner.
     """
     expr = ExpressionParser(Tokeniser(StringReader('(True|False)'))).get_expression()
     self.assertTrue(isinstance(expr, Or))
     self.assertEqual(2, len(expr.items))
     self.assertTrue(isinstance(expr.items[0], Literal))
     self.assertEqual(True, expr.items[0].literal)
     self.assertTrue(isinstance(expr.items[1], Literal))
     self.assertEqual(False, expr.items[1].literal)
# Exemple #15
# 0
 def test_parse_equals(self):
     """'aaa == bbb' parses to Equals with a Field on each side."""
     parser = ExpressionParser(Tokeniser(StringReader('aaa == bbb')))
     expression = parser.get_expression()
     self.assertNotEqual(None, expression)
     self.assertTrue(isinstance(expression, Equals))
     self.assertTrue(isinstance(expression.lhs, Field))
     self.assertEqual('aaa', expression.lhs.field)
     self.assertTrue(isinstance(expression.rhs, Field))
     self.assertEqual('bbb', expression.rhs.field)
# Exemple #16
# 0
 def test_parse_or(self):
     """'(True|False)' parses to an Or holding two boolean Literals."""
     parser = ExpressionParser(Tokeniser(StringReader('(True|False)')))
     expression = parser.get_expression()
     self.assertNotEqual(None, expression)
     self.assertTrue(isinstance(expression, Or))
     self.assertEqual(2, len(expression.items))
     for item, expected in zip(expression.items, (True, False)):
         self.assertTrue(isinstance(item, Literal))
         self.assertEqual(expected, item.literal)
# Exemple #17
# 0
 def test_parse_path(self):
     """'aaa.bbb.ccc' parses to a Path chain anchored on the last segment."""
     parser = ExpressionParser(Tokeniser(StringReader('aaa.bbb.ccc')))
     expression = parser.get_expression()
     self.assertNotEqual(None, expression)
     self.assertTrue(isinstance(expression, Path))
     self.assertEqual('ccc', expression.field)
     tail = expression.path
     self.assertTrue(isinstance(tail, Path))
     self.assertEqual('bbb', tail.field)
     self.assertTrue(isinstance(tail.path, Field))
     self.assertEqual('aaa', tail.path.field)
 def test_literal_to_expression(self):
     """Bare literal sources parse to Literal expressions of the right value.

     Renamed from test_path_to_expression: that name was defined many
     times in this class, so all but the last definition were silently
     shadowed and never run by the test runner.
     """
     cases = (('123', 123),
              ('123.456', 123.456),
              ('True', True),
              ('False', False),
              (Q + 'literal string' + Q, 'literal string'))
     for source, expected in cases:
         expr = ExpressionParser(Tokeniser(StringReader(source))).get_expression()
         self.assertTrue(isinstance(expr, Literal))
         self.assertEqual(expected, expr.literal)
 def test_path_with_filter_to_expr(self):
     """'items[id==$id]' parses to a Field carrying a list filter of
     Equals(Field('id'), Parameter('id')) and no list index.

     Fixed: replaced the deprecated unittest alias assertEquals (removed
     in Python 3.12) with assertEqual, and stripped trailing whitespace.
     """
     expr = ExpressionParser(Tokeniser(StringReader('items[id==$id]'))).get_expression()
     self.assertTrue(isinstance(expr, Field))
     self.assertEqual('items', expr.field)
     self.assertEqual(None, expr.list_index)
     self.assertNotEqual(None, expr.list_filter)
     self.assertTrue(isinstance(expr.list_filter, Filter))
     self.assertTrue(isinstance(expr.list_filter.filter, Equals))
     self.assertTrue(isinstance(expr.list_filter.filter.lhs, Field))
     self.assertEqual('id', expr.list_filter.filter.lhs.field)
     self.assertTrue(isinstance(expr.list_filter.filter.rhs, Parameter))
     self.assertEqual('id', expr.list_filter.filter.rhs.parameter)
    def test_not(self):
        """'!true' tokenises to NOT followed by a True boolean literal."""
        tkns = Tokeniser(StringReader('!true'))

        token = tkns.get_next_token()
        self.assertNotEqual(None, token)
        self.assertEqual(tokeniser.TokenType.NOT, token.token_type)

        token = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.LITERAL_BOOLEAN, token.token_type)
        self.assertEqual(True, token.value)

        # The tokeniser signals exhaustion by returning None.
        self.assertEqual(None, tkns.get_next_token())
    def test_literal_string(self):
        """Two quoted strings separated by a comma; escape sequences decoded."""
        tkns = Tokeniser(StringReader('"abc", "abc\\n\\t\\b\\"xyz"'))

        # (token type, expected value or None to skip the value check)
        expected_tokens = [
            (tokeniser.TokenType.LITERAL_STRING, 'abc'),
            (tokeniser.TokenType.GROUP_SEPARATOR, None),
            (tokeniser.TokenType.LITERAL_STRING, 'abc\n\t\b"xyz'),
        ]
        for token_type, value in expected_tokens:
            token = tkns.get_next_token()
            self.assertEqual(token_type, token.token_type)
            if value is not None:
                self.assertEqual(value, token.value)

        self.assertEqual(None, tkns.get_next_token())
    def test_literal_number(self):
        """Integers and decimals tokenise to LITERAL_INT / LITERAL_FLOAT."""
        tkns = Tokeniser(StringReader('123, 123.456'))

        # (token type, expected value or None to skip the value check)
        expected_tokens = [
            (tokeniser.TokenType.LITERAL_INT, 123),
            (tokeniser.TokenType.GROUP_SEPARATOR, None),
            (tokeniser.TokenType.LITERAL_FLOAT, 123.456),
        ]
        for token_type, value in expected_tokens:
            token = tkns.get_next_token()
            self.assertEqual(token_type, token.token_type)
            if value is not None:
                self.assertEqual(value, token.value)

        self.assertEqual(None, tkns.get_next_token())
    def test_literal_boolean(self):
        """'true' and 'false' tokenise to LITERAL_BOOLEAN with Python values."""
        tkns = Tokeniser(StringReader('true, false'))

        token = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.LITERAL_BOOLEAN, token.token_type)
        self.assertEqual(True, token.value)

        token = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.GROUP_SEPARATOR, token.token_type)

        token = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.LITERAL_BOOLEAN, token.token_type)
        self.assertEqual(False, token.value)

        # The tokeniser signals exhaustion by returning None.
        self.assertEqual(None, tkns.get_next_token())
    def test_parameter(self):
        """'$bbb' tokenises to a PARAMETER token carrying the bare name."""
        tkns = Tokeniser(StringReader(' aaa == $bbb '))

        # (token type, expected value or None to skip the value check)
        expected_tokens = [
            (tokeniser.TokenType.WORD, 'aaa'),
            (tokeniser.TokenType.COMPARATOR_EQUAL, None),
            (tokeniser.TokenType.PARAMETER, 'bbb'),
        ]
        for token_type, value in expected_tokens:
            token = tkns.get_next_token()
            self.assertEqual(token_type, token.token_type)
            if value is not None:
                self.assertEqual(value, token.value)

        self.assertEqual(None, tkns.get_next_token())
 def test_path_with_compound_filter_to_expr(self):
     """'items[(id==$id&total>1.234)]' parses to a Field whose list filter
     is an And of (id == $id) and (total > 1.234).

     Fixed: replaced the deprecated unittest alias assertEquals (removed
     in Python 3.12) with assertEqual, and stripped trailing whitespace.
     """
     expr = ExpressionParser(Tokeniser(StringReader('items[(id==$id&total>1.234)]'))).get_expression()
     self.assertTrue(isinstance(expr, Field))
     self.assertEqual('items', expr.field)
     self.assertEqual(None, expr.list_index)
     self.assertNotEqual(None, expr.list_filter)
     self.assertTrue(isinstance(expr.list_filter, Filter))
     self.assertTrue(isinstance(expr.list_filter.filter, And))
     self.assertTrue(isinstance(expr.list_filter.filter.items[0].lhs, Field))
     self.assertEqual('id', expr.list_filter.filter.items[0].lhs.field)
     self.assertTrue(isinstance(expr.list_filter.filter.items[0].rhs, Parameter))
     self.assertEqual('id', expr.list_filter.filter.items[0].rhs.parameter)
     self.assertTrue(isinstance(expr.list_filter.filter.items[1], GreaterThan))
     self.assertTrue(isinstance(expr.list_filter.filter.items[1].lhs, Field))
     self.assertEqual('total', expr.list_filter.filter.items[1].lhs.field)
     self.assertTrue(isinstance(expr.list_filter.filter.items[1].rhs, Literal))
     self.assertEqual(1.234, expr.list_filter.filter.items[1].rhs.literal)
     self.assertEqual(2, len(expr.list_filter.filter.items))
    def test_or(self):
        """'(True|False)' tokenises as group delimiters around OR'd booleans."""
        tkns = Tokeniser(StringReader('(True|False)'))

        # (token type, expected value or None to skip the value check)
        expected_tokens = [
            (tokeniser.TokenType.START_GROUP, None),
            (tokeniser.TokenType.LITERAL_BOOLEAN, True),
            (tokeniser.TokenType.OR_SEPARATOR, None),
            (tokeniser.TokenType.LITERAL_BOOLEAN, False),
            (tokeniser.TokenType.END_GROUP, None),
        ]
        for token_type, value in expected_tokens:
            token = tkns.get_next_token()
            self.assertEqual(token_type, token.token_type)
            if value is not None:
                self.assertEqual(value, token.value)

        self.assertEqual(None, tkns.get_next_token())
 def test_nested_filter_path_to_expression(self):
     """'children.children[id>2].name' parses to a Path chain with the
     filter Filter(GreaterThan(Field('id'), Literal(2))) attached to the
     middle 'children' segment only.

     Renamed from test_path_to_expression: that name was defined many
     times in this class, so all but the last definition were silently
     shadowed and never run by the test runner. Also stripped trailing
     whitespace.
     """
     expr = ExpressionParser(Tokeniser(StringReader('children.children[id>2].name'))).get_expression()

     self.assertTrue(isinstance(expr, Path))
     self.assertEqual('name', expr.field)
     self.assertEqual(None, expr.list_index)
     self.assertEqual(None, expr.list_filter)
     self.assertTrue(isinstance(expr.path, Path))
     self.assertEqual('children', expr.path.field)
     self.assertEqual(None, expr.path.list_index)
     self.assertNotEqual(None, expr.path.list_filter)
     self.assertTrue(isinstance(expr.path.list_filter, Filter))
     self.assertTrue(isinstance(expr.path.list_filter.filter, GreaterThan))
     self.assertTrue(isinstance(expr.path.list_filter.filter.lhs, Field))
     self.assertEqual('id', expr.path.list_filter.filter.lhs.field)
     self.assertTrue(isinstance(expr.path.list_filter.filter.rhs, Literal))
     self.assertEqual(2, expr.path.list_filter.filter.rhs.literal)
     self.assertTrue(isinstance(expr.path.path, Field))
     self.assertEqual('children', expr.path.path.field)
     self.assertEqual(None, expr.path.path.list_index)
     self.assertEqual(None, expr.path.path.list_filter)
# Exemple #28
# 0
    def test_parse_filter_in_path(self):
        """'aaa.bbb[ccc>123].ddd' parses to Path(Path(Field)) with the
        filter GreaterThan(Field('ccc'), Literal(123)) attached to the
        middle 'bbb' segment only.

        Fixed: removed a stray debug print(expr.to_path()) left in the
        test body.
        """
        p = ExpressionParser(Tokeniser(StringReader('aaa.bbb[ccc>123].ddd')))
        expr = p.get_expression()

        self.assertTrue(isinstance(expr, Path))
        self.assertEqual('ddd', expr.field)
        self.assertEqual(None, expr.list_index)
        self.assertEqual(None, expr.list_filter)
        self.assertTrue(isinstance(expr.path, Path))
        self.assertEqual('bbb', expr.path.field)
        self.assertEqual(None, expr.path.list_index)
        self.assertNotEqual(None, expr.path.list_filter)
        self.assertTrue(isinstance(expr.path.list_filter, Filter))
        self.assertTrue(isinstance(expr.path.list_filter.filter, GreaterThan))
        self.assertTrue(isinstance(expr.path.list_filter.filter.lhs, Field))
        self.assertEqual('ccc', expr.path.list_filter.filter.lhs.field)
        self.assertTrue(isinstance(expr.path.list_filter.filter.rhs, Literal))
        self.assertEqual(123, expr.path.list_filter.filter.rhs.literal)
        self.assertTrue(isinstance(expr.path.path, Field))
        self.assertEqual('aaa', expr.path.path.field)
        self.assertEqual(None, expr.path.path.list_index)
        self.assertEqual(None, expr.path.path.list_filter)
    def test_filter_in_path(self):
        """'aaa.bbb[ccc>123].ddd' tokenises with filter delimiters inline.

        Fixed: unlike every other tokeniser test in this file, this one
        never asserted that the token stream is exhausted after the last
        WORD; the final end-of-stream check is now included.
        """
        tkns = Tokeniser(StringReader('aaa.bbb[ccc>123].ddd'))

        # (token type, expected value or None to skip the value check)
        expected_tokens = [
            (TokenType.WORD, 'aaa'),
            (TokenType.PATH_SEPARATOR, None),
            (TokenType.WORD, 'bbb'),
            (TokenType.START_FILTER, None),
            (TokenType.WORD, 'ccc'),
            (TokenType.COMPARATOR_GT, None),
            (TokenType.LITERAL_INT, 123),
            (TokenType.END_FILTER, None),
            (TokenType.PATH_SEPARATOR, None),
            (TokenType.WORD, 'ddd'),
        ]
        for token_type, value in expected_tokens:
            t = tkns.get_next_token()
            self.assertEqual(token_type, t.token_type)
            if value is not None:
                self.assertEqual(value, t.value)

        # End-of-stream check (was missing).
        self.assertEqual(None, tkns.get_next_token())
# Exemple #30
# 0
 def test_parse_field(self):
     """A bare word parses to an expression exposing it as a field name."""
     parser = ExpressionParser(Tokeniser(StringReader('aaa')))
     expression = parser.get_expression()
     self.assertEqual('aaa', expression.field)