Example #1
 def test_literal(self):
     text = 'Queue = composite-literal'
     lexer = Lexer(text)
     token = None
     while lexer.current_char is not None:
         token = lexer.get_next_token()
     self.assertEqual(token, Token(LITERAL, 'composite-literal'))
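
These tests compare freshly built Token objects against the lexer output with assertEqual, so Token is assumed to implement value-based equality over its type and value, and, judging by the error message quoted in example #9 below, a "Token(type, value)" style repr. A minimal sketch under those assumptions; the attribute names are guesses, not taken from the project:

    class Token:
        """Sketch only: a value object compared by token type and value."""

        def __init__(self, type_, value):
            self.type = type_    # e.g. LITERAL, STRING_LITERAL, IS, AND, ...
            self.value = value   # the matched text, e.g. 'composite-literal'

        def __eq__(self, other):
            return (isinstance(other, Token)
                    and self.type == other.type
                    and self.value == other.value)

        def __repr__(self):
            # Mirrors the "Token(>, >)" form quoted in example #9's ParserError.
            return 'Token(%s, %s)' % (self.type, self.value)
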
Example #2
 def test_invalid_format_peek_is_None(self):
     text = '!'
     lexer = Lexer(text)
     with self.assertRaises(LexerError) as err:
         while lexer.current_char is not None:
             lexer.get_next_token()
     self.assertEqual(err.exception.args,
                      ('Parsing char `!` at position 0', ))
Example #3
 def test_invalid_format(self):
     text = "%%%"
     lexer = Lexer(text)
     with self.assertRaises(LexerError) as err:
         while lexer.current_char is not None:
             lexer.get_next_token()
     self.assertEqual(err.exception.args,
                      ("Parsing char `%s` at position %i" % ('%', 0), ))
Example #4
 def test_wrong_not_operation(self):
     text = "Status !% new"
     lexer = Lexer(text)
     with self.assertRaises(LexerError) as err:
         while lexer.current_char is not None:
             lexer.get_next_token()
     self.assertEqual(err.exception.args,
                      ('Parsing char `!` at position 7', ))
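
Examples #2-#4 inspect only err.exception.args, so LexerError can be assumed to be a plain Exception subclass raised with a single formatted message. A hedged sketch of that failure path; the helper name and the pos parameter are illustrative, not taken from the library:

    class LexerError(Exception):
        """Assumed to be a plain Exception; the tests only look at .args."""

    def _lexer_error(char, pos):
        # Reproduces the message format asserted in examples #2-#4.
        raise LexerError('Parsing char `%s` at position %i' % (char, pos))
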
Example #5
    def test_unary_expression(self):
        text = "Age > -10"
        qset = FilterQueryInterpreter(FilterParser(Lexer(text))).interpret()
        self.assertIsInstance(qset, Q)
        self.assertEqual(qset.resolve(), "(Age > -10)")

        text = "Age > --10"
        qset = FilterQueryInterpreter(FilterParser(Lexer(text))).interpret()
        self.assertIsInstance(qset, Q)
        self.assertEqual(qset.resolve(), "(Age > 10)")
Example #6
    def test_parentheses(self):
        text = "(Status=new | Status=open)"
        lexer = Lexer(text)

        token = lexer.get_next_token()
        self.assertEqual(token, Token(LPAREN, '('))

        while lexer.current_char is not None:
            token = lexer.get_next_token()
        self.assertEqual(token, Token(RPAREN, ')'))
Example #7
 def test_complex_with_string_literal(self):
     text = "Queue='complex-queue' & Status != resolved"
     lexer = Lexer(text)
     count = 0
     last_token = None
     while lexer.current_char is not None:
         last_token = lexer.get_next_token()
         count += 1
     self.assertEqual(count, 7)
     self.assertIsInstance(last_token, Token)
     self.assertEqual(last_token, Token(LITERAL, "resolved"))
Example #8
    def test_simple(self):
        text = "Status=new"
        lexer = Lexer(text)
        token = lexer.get_next_token()
        self.assertEqual(token, Token(LITERAL, "Status"))

        token = lexer.get_next_token()
        self.assertEqual(token, Token(IS, "="))

        token = lexer.get_next_token()
        self.assertEqual(token, Token(LITERAL, "new"))
Example #9
    def test_parser_error(self):
        with self.assertRaises(ParserError) as err:
            FilterParser(Lexer(">20=50")).parse()
        self.assertEqual(
            err.exception.args,
            ("Statement should start with Literal or StringLiteral, "
             "'Token(>, >)' given", ))

        with self.assertRaises(ParserError):
            FilterParser(Lexer("10 ~ text")).parse()

        with self.assertRaises(ParserError):
            FilterParser(Lexer("& ~ text")).parse()
Example #10
    def test_simple_binary_operations(self):
        node = FilterParser(lexer=Lexer("Status='new'")).parse()
        self.assertIsInstance(node, BinOp)
        self.assertEqual(node.left.token, Token(LITERAL, "Status"))
        self.assertEqual(node.right.token, Token(STRING_LITERAL, "new"))
        self.assertEqual(node.op, Token(IS, "="))

        node = FilterParser(lexer=Lexer("Status=new")).parse()
        self.assertIsInstance(node, BinOp)
        self.assertEqual(node.right.token, Token(LITERAL, "new"))

        node = FilterParser(lexer=Lexer('Status=""')).parse()
        self.assertIsInstance(node, BinOp)
        self.assertEqual(node.right.token, Token(STRING_LITERAL, ''))
Example #11
 def test_unary_operations(self):
     node = FilterParser(lexer=Lexer("Age=-10")).parse()
     self.assertIsInstance(node, BinOp)
     self.assertIsInstance(node.right, UnaryOp)
     self.assertIsInstance(node.right.expr, Num)
     self.assertEqual(node.right.expr.token, Token(INTEGER, 10))
     self.assertEqual(node.right.token, Token(MINUS, '-'))
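
Examples #10 and #11 read node.left.token, node.op, node.right.token, node.right.expr and node.right.token from the parse result, which pins down the attribute names of the AST nodes even though their definitions are not shown. A sketch of plausible node classes under those assumptions; the constructor argument order is a guess:

    class BinOp:
        """Binary statement: left operand, operator token, right operand."""
        def __init__(self, left, op, right):
            self.left = left      # node whose .token is e.g. Token(LITERAL, 'Status')
            self.op = op          # operator token, e.g. Token(IS, '=')
            self.right = right    # right-hand node, possibly a UnaryOp

    class UnaryOp:
        """Unary operator (MINUS in these tests) applied to a sub-expression."""
        def __init__(self, token, expr):
            self.token = token    # e.g. Token(MINUS, '-')
            self.expr = expr      # e.g. a Num node

    class Num:
        """Leaf node wrapping an INTEGER token."""
        def __init__(self, token):
            self.token = token    # e.g. Token(INTEGER, 10)
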
Example #12
 def test_complex_binary_expression(self):
     text = "(Status = 'new' & Owner = 'user') | Id > 10"
     qset = FilterQueryInterpreter(FilterParser(Lexer(text))).interpret()
     self.assertIsInstance(qset, Q)
     self.assertEqual(
         qset.resolve(),
         "(((Status = 'new') AND (Owner = 'user')) OR (Id > 10))")
Example #13
 def test_unary_expression_literal(self):
     text = "Status > -open"
     with self.assertRaises(TypeError) as err:
         FilterQueryInterpreter(FilterParser(Lexer(text))).interpret()
     self.assertEqual(err.exception.args,
                      ("bad operand type for unary -: 'str'", ))
Example #14
    def test_wrong_unary_operation(self):
        text = "Age > -10"

        with mock.patch.object(operations, 'UNARY_OPERATIONS', {}):
            with self.assertRaises(InterpreterError) as err:
                FilterQueryInterpreter(FilterParser(Lexer(text))).interpret()

        self.assertEqual(err.exception.args,
                         ("Operation: '%s' is not supported" % consts.MINUS, ))
Example #15
    def test_node_visitor(self):
        text = "Status = 'new'"
        ast = FilterParser(Lexer(text)).parse()
        self.assertIsInstance(ast, BinOp)

        base_visitor = NodeVisitor()
        with self.assertRaises(VisitError) as err:
            base_visitor.visit(ast)  # raises VisitError: the base class has no visit_binop
        self.assertEqual(err.exception.args,
                         ("No visit_%s method" % type(ast).__name__.lower(), ))
Example #16
    def test_complex(self):
        text = "CF_Tags  ~  release-on-prod & Status != resolved"
        lexer = Lexer(text)
        token = lexer.get_next_token()
        self.assertEqual(token, Token(LITERAL, 'CF_Tags'))

        token = lexer.get_next_token()
        self.assertEqual(token, Token(MATCHES, '~'))

        token = lexer.get_next_token()
        self.assertEqual(token, Token(LITERAL, 'release-on-prod'))

        token = lexer.get_next_token()
        self.assertEqual(token, Token(AND, '&'))

        token = lexer.get_next_token()
        self.assertEqual(token, Token(LITERAL, 'Status'))

        token = lexer.get_next_token()
        self.assertEqual(token, Token(IS_NOT, '!='))

        token = lexer.get_next_token()
        self.assertEqual(token, Token(LITERAL, 'resolved'))
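
The token-type names exercised across these tests are LITERAL, STRING_LITERAL, INTEGER, IS, IS_NOT, MATCHES, AND, MINUS, LPAREN and RPAREN, and example #14 refers to consts.MINUS, which suggests they live as module-level constants. The values below are placeholders; only the names and the operator characters come from the tests:

    # consts.py (sketch) -- names taken from the examples, values assumed
    LITERAL = 'LITERAL'
    STRING_LITERAL = 'STRING_LITERAL'
    INTEGER = 'INTEGER'
    IS = 'IS'              # '='
    IS_NOT = 'IS_NOT'      # '!='
    MATCHES = 'MATCHES'    # '~'
    AND = 'AND'            # '&'
    MINUS = 'MINUS'        # '-'
    LPAREN = 'LPAREN'      # '('
    RPAREN = 'RPAREN'      # ')'
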
Example #17
 def test_string_escaped_literal(self):
     text = r"'there\'s a text'"
     lexer = Lexer(text)
     token = lexer.get_next_token()
     self.assertEqual(token, Token(STRING_LITERAL, "there's a text"))
Example #18
 def test_string_literal(self):
     text = '"new status for example"'
     lexer = Lexer(text)
     token = lexer.get_next_token()
     self.assertEqual(token, Token(STRING_LITERAL,
                                   "new status for example"))
Example #19
 def test_complex(self):
     text = """
     (Queue = dev | Queue = devops) & 
     "CF.{Sprint}" = "2018-w6" & 
     (Owner ~ ad | Owner ~ dor)"""
     node = FilterParser(lexer=Lexer(text)).parse()
Example #20
 def test_parser_error_in_composite(self):
     with self.assertRaises(ParserError) as err:
         FilterParser(Lexer("Status=&")).parse()
     self.assertEqual(err.exception.args,
                      ("Wrong statement right part: 'Token(&, &)'", ))
Example #21
 def test_iter(self):
     text = 'Status ~ open & (Test ~ message | Owner = user)'
     tokens = [x for x in Lexer(text)]
     self.assertEqual(len(tokens), 13)
     self.assertEqual(tokens[0], Token(LITERAL, 'Status'))
     self.assertEqual(tokens[-1], Token(RPAREN, ')'))
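
Here the Lexer is consumed by iteration, whereas the other tests loop on current_char and call get_next_token() by hand. One plausible way to support both, sketched rather than taken from the library:

    class Lexer:
        # ... tokenisation internals omitted; only the iteration sketch is shown ...

        def __iter__(self):
            # Yield tokens until the input is exhausted, mirroring the manual
            # `while lexer.current_char is not None` loops in the other tests.
            while self.current_char is not None:
                yield self.get_next_token()
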
Example #22
 def test_parser_error_in_statement(self):
     with self.assertRaises(ParserError) as err:
         FilterParser(Lexer("Status&1")).parse()
     self.assertEqual(err.exception.args,
                      ("Wrong operation '&' for statement given", ))
Example #23
 def test_binary_expression(self):
     text = "Status = 'new'"
     qset = FilterQueryInterpreter(FilterParser(Lexer(text))).interpret()
     self.assertIsInstance(qset, Q)
     self.assertEqual(qset.resolve(), "(Status = 'new')")
Example #24
 def test_blank_string_literal(self):
     text = '""'
     lexer = Lexer(text)
     token = lexer.get_next_token()
     self.assertEqual(token, Token(STRING_LITERAL, ''))