def __div__(self, other):
    """Pipe the tokens matched by this parser through *other*.

    A falsy *other* drops the match entirely; a string replaces the
    matched text wholesale via ``filters.repl``; any other callable is
    applied to the token stream as-is.
    """
    if not other:
        wrapped = tools.skip(self.parser)
    elif isinstance(other, basestring):
        wrapped = filters.pipe(self.parser, filters.repl(other))
    else:
        wrapped = filters.pipe(self.parser, other)
    return Parser(wrapped)
def __eq__(self, func):
    """Accept this parser's match only when *func* approves the token."""
    guard = self._checker(func)
    return Parser(filters.pipe(self.parser, guard))
def quoted(char=anychar, quot=charclass('\'"'), esc=lit('\\')):
    """Build a parser for a quoted span of *char*s.

    The opening quote is pushed onto the parser state and popped on
    close, so the body runs until the matching quote (``state.check``).
    When *esc* is truthy, an escape prefix lets a quote or the escape
    character itself appear inside the body.
    """
    body = seq(but(state.check), char)
    if esc:
        escaped = seq(skip(esc), alter(quot, esc))
        body = alter(escaped, body)
    content = pipe(star(body), join)
    return wrap_parser('quoted')(
        surround(content, state.push(quot), state.pop)
    )
def braced(char=anychar, left=lit('('), right=lit(')'), esc=lit('\\')):
    """Build a parser for a *left*/*right*-delimited span of *char*s.

    The body runs until the closing delimiter; when *esc* is truthy,
    an escape prefix lets the closer or the escape character itself
    appear inside the body.
    """
    body = seq(but(right), char)
    if esc:
        escaped = seq(skip(esc), alter(right, esc))
        body = alter(escaped, body)
    content = pipe(star(body), join)
    return wrap_parser('braced')(
        surround(content, left, right)
    )
def __rshift__(self, other):
    """Wrap self into other (or compose self with other).

    Each matched token is splatted into *other* (so the token is
    expected to be iterable); if *other* raises TokenError the match
    is mapped to None, i.e. dropped.
    """
    from greencss.lexer.parsers.tokens import TokenError

    # Renamed from `filter` -- the original inner function shadowed
    # the `filter` builtin. Behavior is unchanged.
    def _apply(token):
        try:
            return other(*token)
        except TokenError:
            return None

    parser = filters.pipe(self.parser, _apply)
    return Parser(parser)
def __getslice__(self, first, last):
    """Keep only tokens ``[first:last]`` of the match (Python 2 slice hook)."""
    def cut(tokens):
        return tokens[first:last]
    return Parser(filters.pipe(self.parser, cut))
def __getitem__(self, index):
    """Reduce the match to the single token at *index* via ``filters.take``."""
    picked = filters.pipe(self.parser, filters.take(index))
    return Parser(picked)
def __ne__(self, func):
    """Accept this parser's match only when *func* rejects the token."""
    accept = self._checker(func)

    def reject(token):
        return not accept(token)

    return Parser(filters.pipe(self.parser, reject))