p[0] = p[1]
    else:
      p[0] = p[1].push('and', p[3])

  def p_value(self, p):
    '''value : OPERATOR string
             | string'''
    # Three-symbol production (OPERATOR string) carries the operator into
    # the node; the bare-string production wraps the token directly.
    if len(p) == 3:
      p[0] = ValueNode(p[2], comparison_operator=p[1])
    else:
      p[0] = ValueNode(p[1])

  def p_string(self, p):
    '''string : WORD
              | STRING
              | STRING_PREFIX string'''
    # Prefix production (two symbols) concatenates; the single-token
    # productions pass their value through unchanged.
    p[0] = p[1] if len(p) == 2 else p[1] + p[2]

  def __call__(self, input, is_column, *args, **kw):
    # Parse `input`, exposing `is_column` on self.isColumn for the
    # duration of the parse (presumably read by grammar/lexer callbacks
    # that receive this object — TODO confirm). Always reset afterwards,
    # even if parse() raises. Extra args are forwarded to self.parse.
    self.isColumn = is_column
    try:
      return self.parse(input, *args, **kw)
    finally:
      self.isColumn = None

# Post-process the parser class — presumably rewrites/normalizes the p_*
# grammar docstrings consumed by PLY; TODO confirm against its definition.
update_docstrings(AdvancedSearchTextParser)

Example #2
0
    def real_token(self):
        # Fetch the next token straight from the module-level lexer,
        # passing `self` as the lexer state object (so lexer rules can
        # read/set attributes such as self.found — presumably; confirm
        # against the lexer module).
        return lexer.token(self)

    def token(self):
        """Replay previously captured tokens to the parser, FIFO order."""
        queue = self.token_list
        return queue.pop(0)

    def __call__(self, input, is_column):
        # Detect whether `input` is advanced-search text. Returns
        # self.found (truthy when detected).
        self.isColumn = is_column
        try:
            self.found = False
            check_grammar = False
            # Capture every token so token() can replay them to the
            # grammar check below without re-lexing.
            self.token_list = token_list = []
            append = token_list.append  # hoisted bound method for the loop
            self.input(input)
            while not self.found:
                # NOTE(review): real_token passes `self` to the lexer, so
                # lexer rules can flip self.found mid-loop — presumably
                # that is the early-exit path; TODO confirm.
                token = self.real_token()
                append(token)
                if token is None:
                    # End of input (the trailing None stays in token_list
                    # so a later replay also terminates).
                    break
                if token.type == 'OPERATOR':
                    # Operators alone aren't conclusive; require a full
                    # grammar parse over the captured tokens.
                    check_grammar = True
            if not self.found and check_grammar:
                self.found = self.parse()
            return self.found
        finally:
            # Always clear the per-call flag, even on exceptions.
            self.isColumn = None


# Post-process the detector class — presumably rewrites/normalizes the
# PLY rule docstrings; TODO confirm against its definition.
update_docstrings(AdvancedSearchTextDetector)
Example #3
0
      p[0] = p[1].push('and', p[3])

  def p_value(self, p):
    '''value : OPERATOR string
             | OPERATOR NULL
             | string
             | NULL'''
    # Operator-prefixed productions (three symbols) carry the operator
    # into the node; bare productions wrap the single symbol directly.
    if len(p) == 3:
      p[0] = ValueNode(p[2], comparison_operator=p[1])
    else:
      p[0] = ValueNode(p[1])

  def p_string(self, p):
    '''string : WORD
              | STRING
              | STRING_PREFIX string'''
    # Prefix production (two symbols) concatenates; the single-token
    # productions pass their value through unchanged.
    p[0] = p[1] if len(p) == 2 else p[1] + p[2]

  def __call__(self, input, is_column, *args, **kw):
    # Parse `input`, exposing `is_column` on self.isColumn for the
    # duration of the parse (presumably read by grammar/lexer callbacks
    # that receive this object — TODO confirm). Always reset afterwards,
    # even if parse() raises. Extra args are forwarded to self.parse.
    self.isColumn = is_column
    try:
      return self.parse(input, *args, **kw)
    finally:
      self.isColumn = None

# Post-process the parser class — presumably rewrites/normalizes the p_*
# grammar docstrings consumed by PLY; TODO confirm against its definition.
update_docstrings(AdvancedSearchTextParser)

  def real_token(self):
    # Fetch the next token straight from the module-level lexer, passing
    # `self` as the lexer state object (so lexer rules can read/set
    # attributes such as self.found — presumably; confirm against the
    # lexer module).
    return lexer.token(self)

  def token(self):
    """Replay previously captured tokens to the parser, FIFO order."""
    queue = self.token_list
    return queue.pop(0)

  def __call__(self, input, is_column):
    # Detect whether `input` is advanced-search text. Returns self.found
    # (truthy when detected).
    self.isColumn = is_column
    try:
      self.found = False
      check_grammar = False
      # Capture every token so token() can replay them to the grammar
      # check below without re-lexing.
      self.token_list = token_list = []
      append = token_list.append  # hoisted bound method for the loop
      self.input(input)
      while not self.found:
        # NOTE(review): real_token passes `self` to the lexer, so lexer
        # rules can flip self.found mid-loop — presumably that is the
        # early-exit path; TODO confirm.
        token = self.real_token()
        append(token)
        if token is None:
          # End of input (the trailing None stays in token_list so a
          # later replay also terminates).
          break
        if token.type == 'OPERATOR':
          # Operators alone aren't conclusive; require a full grammar
          # parse over the captured tokens.
          check_grammar = True
      if not self.found and check_grammar:
        self.found = self.parse()
      return self.found
    finally:
      # Always clear the per-call flag, even on exceptions.
      self.isColumn = None

# Post-process the detector class — presumably rewrites/normalizes the
# PLY rule docstrings; TODO confirm against its definition.
update_docstrings(AdvancedSearchTextDetector)