Code Example #1
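Walks the tokeniser across the dotted path 'aaa.bbb.ccc.ddd', expecting WORD and PATH_SEPARATOR tokens to alternate and a final None once the input is exhausted.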
    def test_path(self):

        tkns = Tokeniser(StringReader('aaa.bbb.ccc.ddd'))

        t = tkns.get_next_token()
        self.assertNotEqual(None, t)
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.PATH_SEPARATOR, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.PATH_SEPARATOR, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('ccc', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.PATH_SEPARATOR, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('ddd', t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)
Code Example #2
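Feeds the tokeniser a string wrapped in TokenType.QUOTE.value and simply prints every token it produces; unlike the other tests, this one makes no assertions.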
    def test_literal_string_2(self):
        tkns = Tokeniser(
            StringReader(TokenType.QUOTE.value + 'literal string' +
                         TokenType.QUOTE.value))
        t = tkns.get_next_token()
        while t:
            print(t)
            t = tkns.get_next_token()
Code Example #3
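Checks that '!true' yields a NOT token followed by a LITERAL_BOOLEAN token whose value is True.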
    def test_not(self):

        tkns = Tokeniser(StringReader('!true'))

        t = tkns.get_next_token()
        self.assertNotEqual(None, t)
        self.assertEqual(tokeniser.TokenType.NOT, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.LITERAL_BOOLEAN, t.token_type)
        self.assertEqual(True, t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)
Code Example #4
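Parses 'aaa == bbb' and verifies the result is an Equals expression whose lhs and rhs are Field objects named 'aaa' and 'bbb'.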
    def test_parse_equals(self):
        p = ExpressionParser(Tokeniser(StringReader('aaa == bbb')))
        exp = p.get_expression()
        self.assertNotEqual(None, exp)
        self.assertTrue(isinstance(exp, Equals))
        self.assertTrue(isinstance(exp.lhs, Field))
        self.assertEqual('aaa', exp.lhs.field)
        self.assertTrue(isinstance(exp.rhs, Field))
        self.assertEqual('bbb', exp.rhs.field)
Code Example #5
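Parses '(True|False)' into an Or expression holding two Literal items with the values True and False.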
    def test_parse_or(self):
        p = ExpressionParser(Tokeniser(StringReader('(True|False)')))
        exp = p.get_expression()
        self.assertNotEqual(None, exp)
        self.assertTrue(isinstance(exp, Or))
        self.assertEqual(2, len(exp.items))
        self.assertTrue(isinstance(exp.items[0], Literal))
        self.assertEqual(True, exp.items[0].literal)
        self.assertTrue(isinstance(exp.items[1], Literal))
        self.assertEqual(False, exp.items[1].literal)
Code Example #6
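Parses the dotted path 'aaa.bbb.ccc' and confirms it becomes nested Path objects terminating in a plain Field for 'aaa'.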
    def test_parse_path(self):
        p = ExpressionParser(Tokeniser(StringReader('aaa.bbb.ccc')))
        exp = p.get_expression()
        self.assertNotEqual(None, exp)
        self.assertTrue(isinstance(exp, Path))
        self.assertEqual('ccc', exp.field)
        self.assertTrue(isinstance(exp.path, Path))
        self.assertEqual('bbb', exp.path.field)
        self.assertTrue(isinstance(exp.path.path, Field))
        self.assertEqual('aaa', exp.path.path.field)
Code Example #7
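Tokenises '(True|False)' into START_GROUP, two LITERAL_BOOLEAN tokens separated by OR_SEPARATOR, and END_GROUP.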
    def test_or(self):
        tkns = Tokeniser(StringReader('(True|False)'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.START_GROUP, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.LITERAL_BOOLEAN, t.token_type)
        self.assertEqual(True, t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.OR_SEPARATOR, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.LITERAL_BOOLEAN, t.token_type)
        self.assertEqual(False, t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.END_GROUP, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(None, t)
Code Example #8
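Verifies that quoted strings become LITERAL_STRING tokens and that the escape sequences \n, \t, \b and \" are decoded in the token value.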
    def test_literal_string(self):
        tkns = Tokeniser(StringReader('"abc", "abc\\n\\t\\b\\"xyz"'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.LITERAL_STRING, t.token_type)
        self.assertEqual('abc', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.GROUP_SEPARATOR, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.LITERAL_STRING, t.token_type)
        self.assertEqual('abc\n\t\b"xyz', t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)
Code Example #9
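Confirms the tokeniser distinguishes numeric literals: '123' yields LITERAL_INT while '123.456' yields LITERAL_FLOAT.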
    def test_literal_number(self):
        tkns = Tokeniser(StringReader('123, 123.456'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.LITERAL_INT, t.token_type)
        self.assertEqual(123, t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.GROUP_SEPARATOR, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.LITERAL_FLOAT, t.token_type)
        self.assertEqual(123.456, t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)
Code Example #10
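Checks that the words 'true' and 'false' are recognised as LITERAL_BOOLEAN tokens carrying Python booleans.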
    def test_literal_boolean(self):
        tkns = Tokeniser(StringReader('true, false'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.LITERAL_BOOLEAN, t.token_type)
        self.assertEqual(True, t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.GROUP_SEPARATOR, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.LITERAL_BOOLEAN, t.token_type)
        self.assertEqual(False, t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)
Code Example #11
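Shows that '$bbb' produces a PARAMETER token whose value is 'bbb', without the leading '$'.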
    def test_parameter(self):

        tkns = Tokeniser(StringReader(' aaa == $bbb '))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.COMPARATOR_EQUAL, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.PARAMETER, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)
Code Example #12
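Parses 'aaa.bbb[ccc>123].ddd' and walks the resulting tree: a Path chain whose middle segment carries a Filter wrapping a GreaterThan comparison between a Field and a Literal.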
    def test_parse_filter_in_path(self):
        p = ExpressionParser(Tokeniser(StringReader('aaa.bbb[ccc>123].ddd')))
        expr = p.get_expression()

        print(expr.to_path())

        self.assertTrue(isinstance(expr, Path))
        self.assertEqual('ddd', expr.field)
        self.assertEqual(None, expr.list_index)
        self.assertEqual(None, expr.list_filter)
        self.assertTrue(isinstance(expr.path, Path))
        self.assertEqual('bbb', expr.path.field)
        self.assertEqual(None, expr.path.list_index)
        self.assertNotEqual(None, expr.path.list_filter)
        self.assertTrue(isinstance(expr.path.list_filter, Filter))
        self.assertTrue(isinstance(expr.path.list_filter.filter, GreaterThan))
        self.assertTrue(isinstance(expr.path.list_filter.filter.lhs, Field))
        self.assertEqual('ccc', expr.path.list_filter.filter.lhs.field)
        self.assertTrue(isinstance(expr.path.list_filter.filter.rhs, Literal))
        self.assertEqual(123, expr.path.list_filter.filter.rhs.literal)
        self.assertTrue(isinstance(expr.path.path, Field))
        self.assertEqual('aaa', expr.path.path.field)
        self.assertEqual(None, expr.path.path.list_index)
        self.assertEqual(None, expr.path.path.list_filter)
Code Example #13
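The minimal parser case: a single word parses to an expression whose field attribute is 'aaa'.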
    def test_parse_field(self):
        p = ExpressionParser(Tokeniser(StringReader('aaa')))
        exp = p.get_expression()
        self.assertEqual('aaa', exp.field)
Code Example #14
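Exercises OPERATOR_DIVIDE with every combination of whitespace around the '/' to confirm tokenisation is whitespace-insensitive.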
    def test_divide(self):
        tkns = Tokeniser(StringReader('aaa/bbb'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.OPERATOR_DIVIDE, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)

        tkns = Tokeniser(StringReader('aaa /bbb'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.OPERATOR_DIVIDE, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)

        tkns = Tokeniser(StringReader('aaa/ bbb'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.OPERATOR_DIVIDE, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)

        tkns = Tokeniser(StringReader('aaa / bbb'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.OPERATOR_DIVIDE, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)
Code Example #15
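A smoke test that prints the type and value of every token from 'aaa.bbb.ccc.ddd' until the tokeniser returns None, with no assertions.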
    def test_get_next_token(self):
        tkns = Tokeniser(StringReader('aaa.bbb.ccc.ddd'))
        t = tkns.get_next_token()
        while t:
            print('Token type: %s Value: %s' % (t.token_type, t.value))
            t = tkns.get_next_token()
Code Example #16
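Confirms that constructing an ExpressionParser around a Tokeniser succeeds and returns a non-None object.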
    def test_new_parser(self):
        p = ExpressionParser(Tokeniser(StringReader('aaa.bbb.ccc')))
        self.assertNotEqual(None, p)
Code Example #17
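Exercises COMPARATOR_GT with all four whitespace variants around the '>' operator.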
    def test_comparator_greater_than(self):
        tkns = Tokeniser(StringReader('aaa>bbb'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.COMPARATOR_GT, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)

        tkns = Tokeniser(StringReader('aaa >bbb'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.COMPARATOR_GT, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)

        tkns = Tokeniser(StringReader('aaa> bbb'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.COMPARATOR_GT, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)

        tkns = Tokeniser(StringReader('aaa > bbb'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.COMPARATOR_GT, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)
Code Example #18
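Exercises COMPARATOR_LE with all four whitespace variants around '<='.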
    def test_comparator_less_than_or_equal(self):
        tkns = Tokeniser(StringReader('aaa<=bbb'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.COMPARATOR_LE, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)

        tkns = Tokeniser(StringReader('aaa <=bbb'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.COMPARATOR_LE, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)

        tkns = Tokeniser(StringReader('aaa<= bbb'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.COMPARATOR_LE, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)

        tkns = Tokeniser(StringReader('aaa <= bbb'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.COMPARATOR_LE, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)
Code Example #19
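Exercises OPERATOR_MINUS with all four whitespace variants around '-'.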
    def test_minus(self):
        tkns = Tokeniser(StringReader('aaa-bbb'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.OPERATOR_MINUS, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)

        tkns = Tokeniser(StringReader('aaa -bbb'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.OPERATOR_MINUS, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)

        tkns = Tokeniser(StringReader('aaa- bbb'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.OPERATOR_MINUS, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)

        tkns = Tokeniser(StringReader('aaa - bbb'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.OPERATOR_MINUS, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)
Code Example #20
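Exercises OPERATOR_MULTIPLY with all four whitespace variants around '*'.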
    def test_multiply(self):
        tkns = Tokeniser(StringReader('aaa*bbb'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.OPERATOR_MULTIPLY, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)

        tkns = Tokeniser(StringReader('aaa *bbb'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.OPERATOR_MULTIPLY, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)

        tkns = Tokeniser(StringReader('aaa* bbb'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.OPERATOR_MULTIPLY, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)

        tkns = Tokeniser(StringReader('aaa * bbb'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.OPERATOR_MULTIPLY, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)
Code Example #21
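Tokenises the filter expression 'aaa[bbb==123&ccc!="xyz"]' twice, with and without surrounding whitespace, checking START_FILTER, AND_SEPARATOR, the comparators, the literals and END_FILTER.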
    def test_filter(self):
        tkns = Tokeniser(StringReader('aaa[bbb==123&ccc!="xyz"]'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.START_FILTER, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.COMPARATOR_EQUAL, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.LITERAL_INT, t.token_type)
        self.assertEqual(123, t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.AND_SEPARATOR, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('ccc', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.COMPARATOR_NE, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.LITERAL_STRING, t.token_type)
        self.assertEqual('xyz', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.END_FILTER, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(None, t)

        tkns = Tokeniser(StringReader(' aaa [ bbb == 123 & ccc != "xyz" ] '))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.START_FILTER, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.COMPARATOR_EQUAL, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.LITERAL_INT, t.token_type)
        self.assertEqual(123, t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.AND_SEPARATOR, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('ccc', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.COMPARATOR_NE, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.LITERAL_STRING, t.token_type)
        self.assertEqual('xyz', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.END_FILTER, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(None, t)
Code Example #22
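Tokenises 'aaa(bbb,ccc,ddd)' twice, with and without whitespace, checking START_GROUP, GROUP_SEPARATOR and END_GROUP around the WORD tokens.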
    def test_group(self):
        tkns = Tokeniser(StringReader('aaa(bbb,ccc,ddd)'))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.START_GROUP, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.GROUP_SEPARATOR, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('ccc', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.GROUP_SEPARATOR, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('ddd', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.END_GROUP, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(None, t)

        tkns = Tokeniser(StringReader(' aaa ( bbb , ccc , ddd ) '))
        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.START_GROUP, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.GROUP_SEPARATOR, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('ccc', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.GROUP_SEPARATOR, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.WORD, t.token_type)
        self.assertEqual('ddd', t.value)

        t = tkns.get_next_token()
        self.assertEqual(tokeniser.TokenType.END_GROUP, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(None, t)
Code Example #23
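The token-level counterpart of Code Example #12: steps through 'aaa.bbb[ccc>123].ddd' one token at a time; note that this snippet references TokenType directly rather than through the tokeniser module.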
    def test_filter_in_path(self):
        tkns = Tokeniser(StringReader('aaa.bbb[ccc>123].ddd'))
        t = tkns.get_next_token()
        self.assertEqual(TokenType.WORD, t.token_type)
        self.assertEqual('aaa', t.value)

        t = tkns.get_next_token()
        self.assertEqual(TokenType.PATH_SEPARATOR, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(TokenType.WORD, t.token_type)
        self.assertEqual('bbb', t.value)

        t = tkns.get_next_token()
        self.assertEqual(TokenType.START_FILTER, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(TokenType.WORD, t.token_type)
        self.assertEqual('ccc', t.value)

        t = tkns.get_next_token()
        self.assertEqual(TokenType.COMPARATOR_GT, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(TokenType.LITERAL_INT, t.token_type)
        self.assertEqual(123, t.value)

        t = tkns.get_next_token()
        self.assertEqual(TokenType.END_FILTER, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(TokenType.PATH_SEPARATOR, t.token_type)

        t = tkns.get_next_token()
        self.assertEqual(TokenType.WORD, t.token_type)
        self.assertEqual('ddd', t.value)

        t = tkns.get_next_token()
        self.assertEqual(None, t)