def param_repr(p):
    """Return the grammar-source representation of a single rule parameter.

    Numbers and purely alphanumeric strings render bare; anything else
    is quoted via urepr().
    """
    is_number = isinstance(p, (int, float))
    is_plain_word = isinstance(p, strtype) and p.isalnum()
    if is_number or is_plain_word:
        return ustr(p)
    return urepr(p)
def _to_str(self, lean=False):
    """Render the whole grammar back to grammar-source text:
    @@directives first, then @@keyword declarations, then the rules.
    """
    # Directives whose value is a regular expression; they are rendered
    # framed in slashes: @@name :: /value/.
    regex_directives = {'comments', 'eol_comments', 'whitespace'}
    # Directives rendered through ustr() (note 'comments' also appears in
    # regex_directives; the regex framing and ustr conversion combine).
    ustr_directives = {'comments', 'grammar'}
    # Directives whose value is rendered as a quoted string literal.
    string_directives = {'namechars'}
    directives = ''
    for directive, value in self.directives.items():
        fmt = dict(
            name=directive,
            frame='/' if directive in regex_directives else '',
            value=(
                urepr(value) if directive in string_directives
                else ustr(value) if directive in ustr_directives
                else value
            ),
        )
        directives += '@@{name} :: {frame}{value}{frame}\n'.format(**fmt)
    if directives:
        directives += '\n'
    # Keywords are emitted sorted, 8 per @@keyword line; None entries are
    # skipped.
    keywords = '\n'.join(
        '@@keyword :: ' + ' '.join(urepr(k) for k in c if k is not None)
        for c in chunks(sorted(self.keywords), 8)
    ).strip()
    keywords = '\n\n' + keywords + '\n' if keywords else ''
    # Rules are separated by blank lines; the block ends with exactly one
    # trailing newline.
    rules = (
        '\n\n'.join(ustr(rule._to_str(lean=lean)) for rule in self.rules)
    ).rstrip() + '\n'
    return directives + keywords + rules
def _invoke_rule(self, ruleinfo, key):
    """Invoke a grammar rule with memoization.

    A memoized Exception is re-raised (negative memoization); a memoized
    truthy result is returned directly. Otherwise the rule implementation
    runs, its AST node goes through the semantic action, and the result
    (or the FailedParse) is memoized for this (rule, position) key.
    """
    memo = self._memo_for(key)
    if isinstance(memo, Exception):
        # Previously failed at this position: fail again without re-parsing.
        raise memo
    elif memo:
        return memo
    # Guards against unbounded left recursion before descending.
    self._set_left_recursion_guard(key)
    self._push_ast()
    try:
        try:
            self._next_token(ruleinfo)
            ruleinfo.impl(self)
            node = self._get_node(key.pos, ruleinfo)
            node = self._invoke_semantic_rule(ruleinfo, node)
            result = self._mkresult(node)
            self._memoize(key, result)
            return result
        except FailedSemantics as e:
            # A semantic-action veto is converted into a parse failure.
            self._error(ustr(e), FailedParse)
        finally:
            # The AST frame is popped whether the rule succeeded or not.
            self._pop_ast()
    except FailedParse as e:
        # Negative memoization plus input rewind before propagating.
        self._memoize(key, e)
        self._goto(key.pos)
        raise
def _to_str(self, lean=False):
    """Render as a join expression: ``sep%{exp}`` (JOINOP between them),
    using the multi-line form when the inner expression spans lines.
    """
    sep_str = self.sep._to_str(lean=lean)
    exp_str = ustr(self.exp._to_str(lean=lean))
    multiline = len(exp_str.splitlines()) > 1
    if multiline:
        return '%s%s{\n%s\n}' % (sep_str, self.JOINOP, exp_str)
    return '%s%s{%s}' % (sep_str, self.JOINOP, exp_str)
def _to_str(self, lean=False):
    """Render an optional expression as ``[exp]``.

    A Choice uses the class's multi-line template; a Group is unwrapped
    so the brackets replace the group's own parentheses.
    """
    rendered = ustr(self.exp._to_str(lean=lean))
    if isinstance(self.exp, Choice):
        return trim(self.str_template) % rendered
    if isinstance(self.exp, Group):
        # Drop the group wrapper; %-formatting stringifies the inner model.
        return '[%s]' % self.exp.exp
    return '[%s]' % rendered
def _to_str(self, lean=False):
    """Render a sequence of expressions: on one line when it fits the
    PEP8 line length, otherwise one element per line.
    """
    comments = self.comments_str()
    parts = [ustr(item._to_str(lean=lean)) for item in self.sequence]
    one_line = ' '.join(parts)
    fits = len(one_line) <= PEP8_LLEN and len(one_line.splitlines()) <= 1
    if fits:
        return comments + one_line
    return comments + '\n'.join(parts)
def _to_str(self, lean=False):
    """Render alternative regex patterns, joined with '+' continuations."""
    rendered = []
    for text in (ustr(p) for p in self.patterns):
        if '/' in text:
            # A literal '/' would close the /.../ form early, so fall back
            # to the ?"..." form, escaping embedded double quotes.
            rendered.append('?"%s"' % text.replace('"', r'\"'))
        else:
            rendered.append('/%s/' % text)
    return '\n+ '.join(rendered)
def parse(self, ctx):
    """Parse a choice by trying each option in order; the first option to
    succeed wins and its result is returned.

    NOTE(review): ctx._option() presumably swallows that option's
    FailedParse so the loop can try the next alternative — confirm in the
    context implementation.
    """
    with ctx._choice():
        for o in self.options:
            with ctx._option():
                ctx.last_node = o.parse(ctx)
                return ctx.last_node
        # All options failed: report the expected lookahead tokens when
        # any are available, otherwise a generic error.
        lookahead = ' '.join(ustr(urepr(f[0])) for f in self.lookahead() if str(f))
        if lookahead:
            ctx._error('expecting one of {%s}' % lookahead)
        ctx._error('no available options')
def _to_str(self, lean=False):
    """Render a choice: ``a | b`` inline when short; one option per line
    (prefixed with '|') when long; fully separated when any option is
    itself multi-line.
    """
    options = [ustr(o._to_str(lean=lean)) for o in self.options]
    has_multiline_option = any(len(o.splitlines()) > 1 for o in options)
    inline = ' | '.join(options)
    if has_multiline_option:
        return '\n|\n'.join(indent(o) for o in options)
    if options and len(inline) > PEP8_LLEN:
        return '| ' + '\n| '.join(options)
    return inline
def render(item, join='', **fields):
    """Render *item* to text, dispatching on its type.

    None renders as '', strings pass through, Renderer instances render
    themselves, iterables render element-wise (skipping None) and are
    joined with *join*, and anything else goes through ustr().
    """
    if item is None:
        return ''
    if isinstance(item, strtype):
        return item
    if isinstance(item, Renderer):
        return item.render(join=join, **fields)
    if isiter(item):
        pieces = (render(e, **fields) for e in iter(item) if e is not None)
        return join.join(pieces)
    if isinstance(item, (int, float)):
        # NOTE: numbers are returned unconverted (historical behavior of
        # this function); callers rely on it.
        return item
    return ustr(item)
def __init__(self, text, filename=None, whitespace=None, comments_re=None,
             eol_comments_re=None, ignorecase=False, nameguard=None,
             comment_recovery=False, namechars='', **kwargs):
    """Initialize a parse buffer over *text*.

    :param text: the input to tokenize; converted to unicode via ustr().
    :param filename: source name for error reporting ('' when None).
    :param whitespace: characters/regex treated as skippable whitespace.
    :param comments_re: regex for block comments (or None).
    :param eol_comments_re: regex for end-of-line comments (or None).
    :param ignorecase: case-insensitive token matching when True.
    :param nameguard: protect keyword-like tokens from partial matches;
        when None it defaults to whether whitespace skipping is active.
    :param comment_recovery: keep parsed comments for later retrieval.
    :param namechars: extra characters considered part of names.
    """
    text = ustr(text)
    self.text = self.original_text = text
    self.filename = filename or ''
    self.whitespace = whitespace
    self.comments_re = comments_re
    self.eol_comments_re = eol_comments_re
    self.ignorecase = ignorecase
    # NOTE(review): self.whitespace_re is presumably a property derived
    # from self.whitespace elsewhere in the class — confirm.
    self.nameguard = (nameguard
                      if nameguard is not None
                      else bool(self.whitespace_re))
    self.comment_recovery = comment_recovery
    self.namechars = namechars
    self._namechar_set = set(namechars)
    if namechars:
        # Extra name characters imply nameguard regardless of the default.
        self.nameguard = True
    self._pos = 0
    self._len = 0
    self._linecount = 0
    self._lines = []
    self._line_index = []
    self._line_cache = []
    self._comment_index = []
    self._re_cache = {}
    self._preprocess()
    self._postprocess()
def _to_str(self, lean=False):
    """Render a rule definition: comments, name, optional base rule,
    positional/keyword parameters, and the indented expression body.
    """
    comments = self.comments_str()
    if lean:
        params = ''
    else:
        params = ', '.join(
            self.param_repr(p) for p in self.params
        ) if self.params else ''
    # NOTE(review): kwparams are rendered even when lean is True, unlike
    # positional params — confirm this asymmetry is intentional.
    kwparams = ''
    if self.kwparams:
        kwparams = ', '.join(
            '%s=%s' % (k, self.param_repr(v))
            for (k, v) in self.kwparams.items()
        )
    if params and kwparams:
        params = '(%s, %s)' % (params, kwparams)
    elif kwparams:
        params = '(%s)' % (kwparams)
    elif params:
        if len(self.params) == 1:
            # A single positional parameter uses the name::param shorthand.
            params = '::%s' % params
        else:
            params = '(%s)' % params
    base = ' < %s' % ustr(self.base.name) if self.base else ''
    return trim(self.str_template).format(
        name=self.name,
        base=base,
        params=params,
        exp=indent(self.exp._to_str(lean=lean)),
        comments=comments,
        is_name='@name\n' if self.is_name else '',
    )
def _to_str(self, lean=False):
    """Render a negative lookahead as '!' followed by the expression."""
    inner = ustr(self.exp._to_str(lean=lean))
    return '!%s' % inner
def _check_name(self):
    """Raise FailedKeywordSemantics if the just-parsed name is a
    reserved keyword (comparison is upper-cased when case-insensitive).
    """
    token = ustr(self.last_node)
    case_insensitive = self.ignorecase or self._buffer.ignorecase
    if case_insensitive:
        token = token.upper()
    if token in self.keywords:
        raise FailedKeywordSemantics('"%s" is a reserved word' % token)
def render_fields(self, fields):
    """Expose the node's comment as '# '-prefixed lines in *fields*."""
    commented = ('# %s' % ustr(line) for line in self.node.comment.splitlines())
    fields.update(lines='\n'.join(commented))
def _trace(self, msg, *params, **kwargs):
    """Write a %-formatted trace message to stderr when tracing is on."""
    if not self.trace:
        return
    formatted = msg % params
    info(ustr(formatted), file=sys.stderr)
def _to_ustr(self, lean=False):
    """Unicode-string variant of _to_str()."""
    rendered = self._to_str(lean=lean)
    return ustr(rendered)
def _to_str(self, lean=False):
    """Render a closure as {exp}; the body is indented on its own lines
    when it spans more than one line.
    """
    body = ustr(self.exp._to_str(lean=lean))
    if len(body.splitlines()) > 1:
        return '{\n%s\n}' % indent(body)
    return '{%s}' % body
def __init__(self, rule):
    """Wrap an existing Rule so its expression can be included inline.

    :param rule: the Rule being included; its expression becomes this
        node's expression.
    """
    # The assertion message surfaces the offending rule's name.
    assert isinstance(rule, Rule), ustr(rule.name)
    super(RuleInclude, self).__init__(rule.exp)
    # Keep a reference to the included rule (e.g. for rendering).
    self.rule = rule
def param_repr(p):
    """Render a parameter; string parameters are truncated at the
    BASE_CLASS_TOKEN separator before quoting.
    """
    if isinstance(p, (int, float)):
        return ustr(p)
    base_name = p.split(BASE_CLASS_TOKEN)[0]
    return urepr(base_name)
def _to_str(self, lean=False):
    """Render as 'name+:exp'; lean mode drops the 'name+:' prefix."""
    if lean:
        return self.exp._to_ustr(lean=True)
    rendered_exp = ustr(self.exp._to_str(lean=lean))
    return '%s+:%s' % (self.name, rendered_exp)
def setUp(self):
    """Load this test module's own source text into a Buffer fixture."""
    source_path = os.path.splitext(__file__)[0] + '.py'
    with open(source_path, encoding='utf-8') as stream:
        self.text = ustr(stream.read())
    self.buf = Buffer(self.text, whitespace='')
def test_36_param_combinations(self):
    """Exercise rule parameters: positional, keyword, and mixed, checking
    that each semantic method receives the expected typed values, that
    the grammar pretty-prints to the expected canonical form, and that
    the pretty form round-trips through compile/parse/codegen.
    """
    def assert_equal(target, value):
        self.assertEqual(target, value)

    class TC36Semantics(object):
        """Check all rule parameters for expected types and values"""

        def rule_positional(self, ast, p1, p2, p3, p4):
            assert_equal("ABC", p1)
            assert_equal(123, p2)
            assert_equal('=', p3)
            assert_equal("+", p4)
            return ast

        def rule_keyword(self, ast, k1, k2, k3, k4):
            assert_equal("ABC", k1)
            assert_equal(123, k2)
            assert_equal('=', k3)
            assert_equal('+', k4)
            return ast

        def rule_all(self, ast, p1, p2, p3, p4, k1, k2, k3, k4):
            assert_equal("DEF", p1)
            assert_equal(456, p2)
            assert_equal('=', p3)
            assert_equal("+", p4)
            assert_equal("HIJ", k1)
            assert_equal(789, k2)
            assert_equal('=', k3)
            assert_equal('+', k4)
            return ast

    grammar = ''' @@ignorecase::False @@nameguard start = {rule_positional | rule_keywords | rule_all} $ ; rule_positional('ABC', 123, '=', '+') = 'a' ; rule_keywords(k1=ABC, k3='=', k4='+', k2=123) = 'b' ; rule_all('DEF', 456, '=', '+', k1=HIJ, k3='=', k4='+', k2=789) = 'c' ; '''

    # Expected canonical (pretty-printed) form of the grammar above.
    pretty = ''' @@ignorecase :: False @@nameguard :: True start = {rule_positional | rule_keywords | rule_all} $ ; rule_positional(ABC, 123, '=', '+') = 'a' ; rule_keywords(k1=ABC, k3='=', k4='+', k2=123) = 'b' ; rule_all(DEF, 456, '=', '+', k1=HIJ, k3='=', k4='+', k2=789) = 'c' ; '''

    # NOTE(review): 'compile' here is the grammar compiler from this
    # package (it shadows the builtin) — confirm against the imports.
    model = compile(grammar, 'RuleArguments')
    self.assertEqual(trim(pretty), ustr(model))
    model = compile(pretty, 'RuleArguments')
    ast = model.parse("a b c")
    self.assertEqual(['a', 'b', 'c'], ast)
    semantics = TC36Semantics()
    ast = model.parse("a b c", semantics=semantics)
    self.assertEqual(['a', 'b', 'c'], ast)
    # Code generation must succeed for the compiled model.
    codegen(model)