def lex(): """ return a Lexer """ compiled_tokens = {} regexs = [] import sys all_vars = sys._getframe(1).f_locals if 'tokens' not in all_vars: raise NotImplementedError( 'Lex need variable `tokens` but not defined' ) tokens = all_vars['tokens'] if not hasattr(tokens, '__iter__'): raise TypeError( 'Lex expected variable `tokens` to be iterable' ) for token in tokens: if not token.isupper(): raise SyntaxError( 'token `%s` is not uppercase' % token ) if tokens.count(token) > 1: raise SyntaxWarning( 'declared token `%s` %d times' % \ (token, tokens.count(token)) ) func_name = 't_' + token if func_name not in all_vars: raise NotImplementedError( 'declared token `%s` but not define `%s`' % \ (token, func_name) ) func = all_vars[func_name] if type(func) is str: all_vars[func_name] = lambda t : t all_vars[func_name].__doc__ = func func = all_vars[func_name] import yare try: compiled_tokens[token] = (yare.compile(func.__doc__), func) except SyntaxError, e: raise SyntaxError( 'regular expression `%s` specified' % func.__doc__ + \ 'in function `%s` not valid. Detail: %s' % (func_name, e) ) regexs.append(func.__doc__)
def lex(): """ return a Lexer """ compiled_tokens = {} regexs = [] import sys all_vars = sys._getframe(1).f_locals if 'tokens' not in all_vars: raise NotImplementedError('Lex need variable `tokens` but not defined') tokens = all_vars['tokens'] if not hasattr(tokens, '__iter__'): raise TypeError('Lex expected variable `tokens` to be iterable') for token in tokens: if not token.isupper(): raise SyntaxError('token `%s` is not uppercase' % token) if tokens.count(token) > 1: raise SyntaxWarning( 'declared token `%s` %d times' % \ (token, tokens.count(token)) ) func_name = 't_' + token if func_name not in all_vars: raise NotImplementedError( 'declared token `%s` but not define `%s`' % \ (token, func_name) ) func = all_vars[func_name] if type(func) is str: all_vars[func_name] = lambda t: t all_vars[func_name].__doc__ = func func = all_vars[func_name] import yare try: compiled_tokens[token] = (yare.compile(func.__doc__), func) except SyntaxError, e: raise SyntaxError( 'regular expression `%s` specified' % func.__doc__ + \ 'in function `%s` not valid. Detail: %s' % (func_name, e) ) regexs.append(func.__doc__)
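# Hypothetical usage sketch (not part of the original sources): lex() inspects
# the caller's frame, so a grammar module declares `tokens` plus matching t_*
# definitions before calling it. The token names, patterns, and the call below
# are assumptions for illustration only.
tokens = ['LETTERS', 'DIGITS']

# String form: the string itself is taken as the token's regular expression.
t_LETTERS = '(a|b|c)(a|b|c)*'

# Function form: the docstring is the regular expression and the function can
# post-process the matched token before returning it.
def t_DIGITS(t):
    """(0|1|2)(0|1|2)*"""
    return t

lexer = lex()  # builds a Lexer from the declarations above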
def setUp(self):
    """compile a regex"""
    self.__regex__ = yare.compile('a*')
def setUp(self):
    """compile a regex"""
    self.__regex__ = yare.compile('(a|b)*a(a|b)(a|b)(a|b)')
def setUp(self):
    """compile a regex"""
    self.__regex__ = yare.compile('a(\*b|c)d')
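# Hypothetical companion test for the fixture above (method name and inputs
# are illustrative; it only assumes match() returns True/False as in the demo
# below, and that `\*` matches a literal '*'):
def test_match(self):
    """a(\*b|c)d should accept both alternatives"""
    self.assertTrue(self.__regex__.match('acd'))
    self.assertTrue(self.__regex__.match('a*bd'))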
#!/usr/bin/env python
# coding:utf-8
"""demo for package pyre"""
import yare

regex = yare.compile("(a|b)*a(a|b)")
print regex.match("aa")         # >>> True
print yare.match(regex, "aab")  # >>> True