def parse(self):
    """Parse the whole token stream and return the root AST node.

    Returns:
        The AST node produced by the top-level expression.

    Raises:
        errors.EfilterParseError: if tokens remain after the top-level
            expression (trailing garbage), or if the query is empty.
    """
    result = self.expression()

    # A well-formed query must consume every token; anything left over
    # means the input had trailing garbage after a valid expression.
    if self.tokens.peek(0):
        token = self.tokens.peek(0)
        raise errors.EfilterParseError(
            message="Unexpected %s '%s' here." % (token.name, token.value),
            query=self.original, token=token)

    if result is None:
        # Pass query= as well, for consistency with the other raises in
        # this method, so the error can render the offending query.
        raise errors.EfilterParseError(
            message="Query %r is empty." % self.original,
            query=self.original)

    return result
def atom(self):
    """Parse a single atom.

    An atom is one of: a prefix (possibly circumfix) operator expression,
    a literal, a symbol (variable) or a parenthesized subexpression.

    Returns:
        An AST node for the atom.

    Raises:
        errors.EfilterParseError: if no atom can be parsed at the current
            position, or at end of input.
    """
    # Unary operator.
    if self.tokens.accept(grammar.prefix, self.operators):
        operator = self.tokens.matched.operator
        start = self.tokens.matched.start
        children = [self.expression(operator.precedence)]

        # Allow infix to be repeated in circumfix operators.
        if operator.infix:
            while self.tokens.accept(grammar.match_tokens(operator.infix)):
                children.append(self.expression())

        # If we have a suffix expect it now.
        if operator.suffix:
            self.tokens.expect(grammar.match_tokens(operator.suffix))

        return operator.handler(*children, start=start,
                                end=self.tokens.matched.end,
                                source=self.original)

    if self.tokens.accept(grammar.literal):
        return ast.Literal(self.tokens.matched.value, source=self.original,
                           start=self.tokens.matched.start,
                           end=self.tokens.matched.end)

    if self.tokens.accept(grammar.symbol):
        return ast.Var(self.tokens.matched.value, source=self.original,
                       start=self.tokens.matched.start,
                       end=self.tokens.matched.end)

    if self.tokens.accept(grammar.lparen):
        expr = self.expression()
        self.tokens.expect(grammar.rparen)
        return expr

    if self.tokens.peek(0):
        raise errors.EfilterParseError(
            message="Was not expecting %r here." % self.tokens.peek(0).name,
            token=self.tokens.peek(0))
    else:
        # Pass message by keyword for consistency with every other raise
        # in this parser — a bare positional argument would bind to the
        # error's first parameter, which may not be 'message'.
        raise errors.EfilterParseError(message="Unexpected end of input.")
def error(self, message=None, start_token=None, end_token=None):
    """Raise a parse error, highlighting the span of the given tokens.

    With no start_token, highlights a 20-character window at the
    tokenizer's current position. end_token, if given, extends the
    highlighted span.
    """
    if start_token:
        start = start_token.start
        end = start_token.end
    else:
        # No token to blame — fall back to a fixed-width window at the
        # tokenizer's current position.
        start = self.tokens.tokenizer.position
        end = start + 20

    if end_token:
        end = end_token.end

    raise errors.EfilterParseError(query=self.original, start=start,
                                   end=end, message=message,
                                   token=start_token)
def reject(self, f, *args):
    """Like 'match', but throw a parse error if 'f' matches.

    This is useful when a parser wants to be strict about specific things
    being prohibited. For example, DottySQL bans the use of SQL keywords
    as variable names.
    """
    if not self.match(f, *args):
        return

    token = self.peek(0)
    raise errors.EfilterParseError(
        query=self.tokenizer.source, token=token,
        message="Was not expecting a %s here." % token.name)
def application(tokens):
    """Matches function call (application).

    A match is a symbol token immediately followed by an lparen token.
    Returns a common.TokenMatch on success, or None (implicitly) when the
    leading tokens don't form an application — including when the stream
    has fewer than two tokens.

    Raises:
        errors.EfilterParseError: if there is whitespace between the
            function name and the lparen.
    """
    tokens = iter(tokens)
    # Use a default so a short (or empty) token stream simply fails to
    # match instead of raising StopIteration.
    func = next(tokens, None)
    paren = next(tokens, None)
    if (func and paren and func.name == "symbol"
            and paren.name == "lparen"):
        # We would be able to unambiguously parse function application with
        # whitespace between the function name and the lparen, but let's not
        # do that because it's unexpected in most languages.
        if func.end != paren.start:
            raise errors.EfilterParseError(
                start=func.start, end=paren.end,
                message="No whitespace allowed between function and paren.")

        return common.TokenMatch(None, func.value, (func, paren))
def expect(self, f, *args):
    """Like 'accept' but throws a parse error if 'f' doesn't match."""
    match = self.accept(f, *args)
    if not match:
        # Grammar functions normally have a __name__; fall back to a
        # placeholder for anonymous callables.
        func_name = getattr(f, "__name__", "<unnamed grammar function>")
        start, end = self.current_position()
        raise errors.EfilterParseError(
            query=self.tokenizer.source, start=start, end=end,
            message="Was expecting %s here." % (func_name))

    return match
def operator(self, lhs, min_precedence):
    """Fold infix operators of at least 'min_precedence' into 'lhs'.

    Precedence-climbing loop: repeatedly consumes an operator and its
    right-hand side, recursing for any following operators that bind
    tighter, and returns the resulting AST node.
    """
    while self.tokens.accept(self._infix_of_min_precedence, min_precedence):
        operator = self.tokens.matched.operator

        if operator.prefix:
            # An operator marked both infix and prefix is not handled by
            # this loop.
            raise ValueError("infix+prefix operators aren't supported.")

        if operator.suffix:
            # Operator with a closing suffix token: the RHS is a full
            # expression terminated by that suffix, and the node's span
            # extends to include it.
            rhs = self.expression()
            self.tokens.expect(grammar.match_tokens(operator.suffix))
            rhs.end = self.tokens.matched.end
        else:
            rhs = self.atom()

        # Left-associative operators only yield to strictly higher
        # precedence; right-associative ones also to equal precedence.
        next_min_precedence = operator.precedence
        if operator.assoc == "left":
            next_min_precedence += 1

        # Climb: fold any following, tighter-binding operators into the
        # RHS before combining it with lhs.
        while self.tokens.match(grammar.infix, self.operators):
            if (self.tokens.matched.operator.precedence < next_min_precedence):
                break

            rhs = self.operator(rhs, self.tokens.matched.operator.precedence)

        if not rhs:
            raise errors.EfilterParseError(
                message="Expecting the operator RHS here.",
                token=self.tokens.peek(0))

        lhs = operator.handler(lhs, rhs, start=lhs.start, end=rhs.end,
                               source=self.original)

    return lhs
def error(self, message, start, end=None):
    """Raise a parse error spanning [start, end) of the buffer."""
    raise errors.EfilterParseError(message=message, start=start, end=end,
                                   query=self.buffer)
def _error(self, message, start, end=None):
    """Raise a nice error, with the token highlighted."""
    raise errors.EfilterParseError(message=message, start=start, end=end,
                                   source=self.source)