def render_fields(self, fields):
    """Fill template fields for a rule in the C++ code generator.

    Renders the rule's positional/keyword parameters into ``params`` and
    the AST-define declarations into ``defines``.
    """
    self.reset_counter()

    params = kwparams = ''
    if self.node.params:
        params = ', '.join(
            repr(ustr(self.rend(p))) for p in self.node.params
        )
    if self.node.kwparams:
        # BUG FIX: iterating the dict directly yields only its keys, so the
        # two-target unpack fails (or silently splits 2-char keys).  Use
        # .items() as the other param renderers in this file do.
        kwparams = ', '.join(
            '%s=%s' % (k, ustr(self.rend(v)))
            for k, v in self.kwparams.items()
        )

    if params and kwparams:
        params = params + ', ' + kwparams
    elif kwparams:
        params = kwparams
    fields.update(params=params)

    # Partition defined names into scalar defines and forced-list defines;
    # a name that appears as both is treated as scalar.
    defines = compress_seq(self.defines())
    sdefs = {d for d, is_list in defines if not is_list}
    ldefs = {d for d, is_list in defines if is_list} - sdefs

    if not (sdefs or ldefs):
        sdefines = 'AstPtr ast = std::make_shared<Ast>();'
    else:
        sdefines = "AstPtr ast = std::make_shared<Ast>\n (AstMap({\n "
        elements = ['{ "%s" , AST_DEFAULT }' % d for d in sdefs]
        elements += ['{ "%s" , AST_FORCELIST }' % d for d in ldefs]
        sdefines += ",\n ".join(elements)
        sdefines += "\n }));"
    fields.update(defines=sdefines)
def _to_str(self, lean=False):
    """Serialize the grammar as source text: directives, keywords, rules."""
    regex_directives = {'comments', 'eol_comments', 'whitespace'}
    ustr_directives = {'comments', 'grammar'}
    string_directives = {'namechars'}

    directive_lines = []
    for name, value in self.directives.items():
        frame = '/' if name in regex_directives else ''
        if name in string_directives:
            rendered = urepr(value)
        elif name in ustr_directives:
            rendered = ustr(value)
        else:
            rendered = value
        directive_lines.append(
            '@@{name} :: {frame}{value}{frame}\n'.format(
                name=name, frame=frame, value=rendered)
        )
    directives = ''.join(directive_lines)
    if directives:
        directives += '\n'

    # Keyword declarations, eight per line.
    keywords = '\n'.join(
        '@@keyword :: ' + ' '.join(urepr(k) for k in chunk if k is not None)
        for chunk in chunks(sorted(self.keywords), 8)
    ).strip()
    if keywords:
        keywords = '\n\n' + keywords + '\n'

    rules = '\n\n'.join(
        ustr(rule._to_str(lean=lean)) for rule in self.rules
    ).rstrip() + '\n'

    return directives + keywords + rules
def param_repr(p):
    """Render a rule parameter as grammar source.

    Numbers and purely alphanumeric strings are written bare; everything
    else is quoted via ``urepr``.
    """
    if isinstance(p, (int, float)):
        return ustr(p)
    if isinstance(p, strtype) and p.isalnum():
        return ustr(p)
    return urepr(p)
def __str__(self):
    """Serialize the grammar: directive lines, keyword lines, then rules."""
    regex_directives = {'comments', 'eol_comments', 'whitespace'}
    ustr_directives = {'comments', 'grammar'}
    string_directives = {'namechars'}

    directives = ''
    for name, value in self.directives.items():
        frame = '/' if name in regex_directives else ''
        if name in string_directives:
            rendered = urepr(value)
        elif name in ustr_directives:
            rendered = ustr(value)
        else:
            rendered = value
        directives += '@@{name} :: {frame}{value}{frame}\n'.format(
            name=name, frame=frame, value=rendered)
    if directives:
        directives += '\n'

    # Eight keywords per @@keyword line.
    keywords = '\n'.join(
        '@@keyword :: ' + ' '.join(urepr(k) for k in chunk if k is not None)
        for chunk in chunks(sorted(self.keywords), 8)
    ).strip()
    if keywords:
        keywords = '\n\n' + keywords + '\n'

    rules = '\n\n'.join(ustr(rule) for rule in self.rules).rstrip() + '\n'
    return directives + keywords + rules
def test_patterns_with_newlines(self):
    """Patterns containing newline escapes parse blank input to an empty AST."""
    grammar = ''' start = blanklines $ ; blanklines = blankline [blanklines] ; blankline = /^[^\n]*\n?$/ ; blankline2 = ?/^[^\n]*\n?$/? ; '''
    model = genmodel("test", grammar)
    # Two consecutive blank lines should be consumed entirely.
    ast = model.parse('\n\n')
    self.assertEqual('', ustr(ast))
def _to_str(self, lean=False):
    """Render a join expression: ``<sep><OP>{<exp>}``, multi-line if needed."""
    sep = self.sep._to_str(lean=lean)
    body = ustr(self.exp._to_str(lean=lean))
    if len(body.splitlines()) > 1:
        return '%s%s{\n%s\n}' % (sep, self.JOINOP, body)
    return '%s%s{%s}' % (sep, self.JOINOP, body)
def test_numbers_and_unicode(self):
    """Rule params may be numbers and unicode identifiers; grammar round-trips."""
    grammar = ''' rúle(1, -23, 4.56, 7.89e-11, 0xABCDEF, Añez) = 'a' ; '''
    rule2 = ''' rulé::Añez = '\\xf1' ; '''
    rule3 = ''' rúlé::Añez = 'ñ' ; '''
    # Python 3 grammars may contain the literal unicode char directly.
    if PY3:
        grammar += rule3
    else:
        grammar += rule2
    model = genmodel("test", grammar)
    # FIX: assertEquals is a deprecated unittest alias; use assertEqual.
    self.assertEqual(trim(grammar), ustr(model))
def __str__(self):
    """Render the rule (name, base, params, body) as grammar source."""
    comments = self.comments_str()

    params = ''
    if self.params:
        params = ', '.join(self.param_repr(p) for p in self.params)
    kwparams = ''
    if self.kwparams:
        kwparams = ', '.join(
            '%s=%s' % (k, self.param_repr(v))
            for (k, v) in self.kwparams.items()
        )

    if params and kwparams:
        params = '(%s, %s)' % (params, kwparams)
    elif kwparams:
        params = '(%s)' % (kwparams)
    elif params:
        # A single positional parameter uses the `::param` shorthand.
        params = '::%s' % params if len(self.params) == 1 else '(%s)' % params

    base = ' < %s' % ustr(self.base.name) if self.base else ''

    return trim(self.str_template).format(
        name=self.name,
        base=base,
        params=params,
        exp=indent(str(self.exp)),
        comments=comments,
        is_name='@name\n' if self.is_name else '',
    )
def __str__(self):
    """Render ``<sep>.{<exp>}``, breaking onto lines when the body does."""
    sep = str(self.sep)
    body = ustr(self.exp)
    if len(body.splitlines()) > 1:
        return '%s.{\n%s\n}' % (sep, body)
    return '%s.{%s}' % (sep, body)
def __str__(self):
    """Render the rule header (name, base, params) followed by its body."""
    comments = self.comments_str()

    params = ''
    if self.params:
        params = ', '.join(self.param_repr(p) for p in self.params)
    kwparams = ''
    if self.kwparams:
        kwparams = ', '.join(
            '%s=%s' % (k, self.param_repr(v))
            for (k, v) in self.kwparams.items()
        )

    if params and kwparams:
        params = '(%s, %s)' % (params, kwparams)
    elif kwparams:
        params = '(%s)' % (kwparams)
    elif params:
        # One positional parameter uses the `::param` shorthand form.
        params = '::%s' % params if len(self.params) == 1 else '(%s)' % params

    base = ' < %s' % ustr(self.base.name) if self.base else ''

    return trim(self.str_template).format(
        name=self.name,
        base=base,
        params=params,
        exp=indent(str(self.exp)),
        comments=comments,
    )
def __str__(self):
    """Render the sequence inline when it fits a single PEP8-length line."""
    comments = self.comments_str()
    parts = [ustr(s) for s in self.sequence]
    inline = ' '.join(parts)
    fits = len(inline) <= PEP8_LLEN and len(inline.splitlines()) <= 1
    return comments + (inline if fits else '\n'.join(parts))
def _to_str(self, lean=False):
    """Render the sequence on one line if it fits, otherwise one item per line."""
    comments = self.comments_str()
    parts = [ustr(s._to_str(lean=lean)) for s in self.sequence]
    inline = ' '.join(parts)
    fits = len(inline) <= PEP8_LLEN and len(inline.splitlines()) <= 1
    return comments + (inline if fits else '\n'.join(parts))
def __str__(self):
    """Render an optional as ``[exp]``, special-casing choices and groups."""
    exp = ustr(self.exp)
    if isinstance(self.exp, Choice):
        # Choices use the full (multi-line) template.
        return trim(self.str_template) % exp
    if isinstance(self.exp, Group):
        # The brackets already delimit; drop the inner group's parentheses.
        exp = self.exp.exp
    return '[%s]' % exp
def _to_str(self, lean=False):
    """Render an optional expression, special-casing choices and groups."""
    exp = ustr(self.exp._to_str(lean=lean))
    if isinstance(self.exp, Choice):
        # Choices get the full (multi-line) template.
        return trim(self.str_template) % exp
    if isinstance(self.exp, Group):
        # Brackets already delimit; use the group's inner expression.
        exp = self.exp.exp
    return '[%s]' % exp
def test_36_params_and_keyword_params(self):
    """A rule may mix positional and keyword parameters; grammar round-trips."""
    grammar = ''' rule(A, kwdB=B) = 'a' ; '''
    model = genmodel("test", grammar)
    # FIX: assertEquals is a deprecated unittest alias; use assertEqual.
    self.assertEqual(trim(grammar), ustr(model))
def _to_str(self, lean=False):
    """Render alternative token patterns joined by ``+``."""
    rendered = []
    for pat in (ustr(p) for p in self.patterns):
        if '/' in pat:
            # A pattern containing '/' must use the ?"..." form,
            # with embedded double quotes escaped.
            rendered.append('?"%s"' % pat.replace('"', r'\"'))
        else:
            rendered.append('/%s/' % pat)
    return '\n+ '.join(rendered)
def test_36_params_and_keyword_params(self):
    """Round-trip a grammar whose rule takes positional and keyword params."""
    grammar = ''' rule(A, kwdB=B) = 'a' ; '''
    compiled = compile(grammar, "test")
    self.assertEqual(trim(grammar), ustr(compiled))
def __str__(self):
    """Render the pattern, picking delimiters that avoid clashes with '/'."""
    pattern = ustr(self.pattern)
    if '/' not in pattern:
        return '/%s/' % pattern
    result = '?/%s/?' % pattern
    if result.count('?') % 2:
        # Keep the '?' count even so the VIM syntax file highlights correctly.
        result += '?'
    return result
def parse(self, ctx):
    """Parse the first matching option; on failure report expected tokens."""
    with ctx._choice():
        for alternative in self.options:
            # _option() swallows the failure of one alternative so the
            # next one can be tried; a success returns immediately.
            with ctx._option():
                ctx.last_node = alternative.parse(ctx)
                return ctx.last_node
        expected = ' '.join(ustr(urepr(f[0])) for f in self.lookahead if f)
        if expected:
            ctx._error('expecting one of {%s}' % expected)
        ctx._error('no available options')
def parse(self, ctx):
    """Try each option in turn; fail with the collected lookahead tokens."""
    with ctx._choice():
        for option in self.options:
            # Each _option() context absorbs that alternative's failure;
            # success short-circuits with a return.
            with ctx._option():
                ctx.last_node = option.parse(ctx)
                return ctx.last_node
        expected = ' '.join(
            ustr(urepr(f[0])) for f in self.lookahead if f
        )
        if expected:
            ctx._error('expecting one of {%s}' % expected)
        ctx._error('no available options')
def __str__(self):
    """Render the choice: inline, wrapped at PEP8 length, or fully multi-line."""
    options = [ustr(o) for o in self.options]
    multiline = any(len(o.splitlines()) > 1 for o in options)
    inline = ' | '.join(options)
    if multiline:
        return '\n|\n'.join(indent(o) for o in options)
    if options and len(inline) > PEP8_LLEN:
        return ' ' + '\n| '.join(options)
    return inline
def __str__(self):
    """Render known grammar directives (in a fixed order), then the rules."""
    # (directive name, output template, value transform) — order matters
    # for the generated text; only 'comments' passes through ustr().
    specs = [
        ('comments', '@@comments :: /%s/\n', ustr),
        ('ignorecase', '@@ignorecase :: %s\n', None),
        ('left_recursion', '@@left_recursion :: %s\n', None),
        ('nameguard', '@@nameguard :: %s\n', None),
        ('eol_comments', '@@eol_comments :: /%s/\n', None),
        ('whitespace', '@@whitespace :: /%s/\n', None),
    ]
    directives = ''
    for name, template, transform in specs:
        if name in self.directives:
            value = self.directives[name]
            if transform is not None:
                value = transform(value)
            directives += template % value
    if directives:
        directives += '\n'

    rules = '\n\n'.join(ustr(rule) for rule in self.rules).rstrip() + '\n'
    return directives + rules
def render_fields(self, fields):
    """Fill the ``params`` and ``defines`` template fields (C++ target).

    Parameters are rendered to a comma-separated argument list; AST-define
    names become an ``AstMap`` initializer.
    """
    self.reset_counter()

    params = kwparams = ''
    if self.node.params:
        params = ', '.join(
            repr(ustr(self.rend(p))) for p in self.node.params
        )
    if self.node.kwparams:
        # BUG FIX: iterating the dict directly yields only keys, so the
        # two-target unpack fails; use .items() like the other renderers.
        kwparams = ', '.join(
            '%s=%s' % (k, ustr(self.rend(v)))
            for k, v in self.kwparams.items()
        )

    if params and kwparams:
        params = params + ', ' + kwparams
    elif kwparams:
        params = kwparams
    fields.update(params=params)

    # Names defined as both scalar and list are treated as scalar.
    defines = compress_seq(self.defines())
    sdefs = {d for d, is_list in defines if not is_list}
    ldefs = {d for d, is_list in defines if is_list} - sdefs

    if not (sdefs or ldefs):
        sdefines = 'AstPtr ast = std::make_shared<Ast>();'
    else:
        sdefines = "AstPtr ast = std::make_shared<Ast>\n (AstMap({\n "
        elements = ['{ "%s" , AST_DEFAULT }' % d for d in sdefs]
        elements += ['{ "%s" , AST_FORCELIST }' % d for d in ldefs]
        sdefines += ",\n ".join(elements)
        # (stray trailing semicolon removed)
        sdefines += "\n }));"
    fields.update(defines=sdefines)
def render(item, join='', **fields):
    """Render *item* to text.

    ``None`` becomes '', strings pass through, Renderers delegate to their
    own ``render``, iterables are rendered element-wise and joined, numbers
    are returned unchanged, and anything else goes through ``ustr``.
    """
    if item is None:
        return ''
    # Strings must be tested before the generic iterable check.
    if isinstance(item, strtype):
        return item
    if isinstance(item, Renderer):
        return item.render(join=join, **fields)
    if isiter(item):
        parts = (render(e, **fields) for e in item if e is not None)
        return join.join(parts)
    if isinstance(item, (int, float)):
        return item
    return ustr(item)
def render(item, join='', **fields):
    """Recursively render *item* into text.

    Dispatch order matters: strings before the iterable test (strings are
    iterable), Renderer delegation before generic handling.
    """
    if item is None:
        return ''
    if isinstance(item, strtype):
        return item
    if isinstance(item, Renderer):
        return item.render(join=join, **fields)
    if isiter(item):
        rendered = [render(e, **fields) for e in item if e is not None]
        return join.join(rendered)
    if isinstance(item, (int, float)):
        # Numbers are returned unconverted.
        return item
    return ustr(item)
def _invoke_rule(self, rule, name, params, kwparams):
    """Invoke a grammar rule with memoization and left-recursion handling.

    Returns a ``(node, pos, state)`` triple; failures are memoized as
    exceptions so repeated attempts at the same position fail fast.
    """
    cache = self._memoization_cache
    if name[0].islower():
        # Lowercase-named rules honor whitespace/nameguard: skip to the
        # next token before recording the position.
        self._next_token()
    pos = self._pos
    key = (pos, rule, self._state)
    if key in cache:
        memo = cache[key]
        memo = self._left_recursion_check(name, key, memo)
        # A memoized failure is re-raised as-is.
        if isinstance(memo, Exception):
            raise memo
        return memo
    self._set_left_recursion_guard(name, key)
    self._push_ast()
    try:
        try:
            rule(self)
            node = self.ast
            if not node:
                # No named captures: fall back to the concrete syntax tree.
                node = self.cst
            elif '@' in node:
                node = node['@']  # override the AST
            elif self.parseinfo:
                node.set_parseinfo(self._get_parseinfo(name, pos))
            node = self._invoke_semantic_rule(name, node, params, kwparams)
            result = (node, self._pos, self._state)
            result = self._left_recurse(rule, name, pos, key, result, params, kwparams)
            # Only cache when memoization is on and we are not inside a
            # recursive loop (partial results would be wrong).
            if self._memoization() and not self._in_recursive_loop():
                cache[key] = result
            return result
        except FailedSemantics as e:
            # Semantic failures are converted into parse failures.
            self._error(ustr(e), FailedParse)
        except FailedParse as e:
            self._set_furthest_exception(e)
            if self._memoization():
                # Memoize the failure too.
                cache[key] = e
            raise
    finally:
        self._pop_ast()
def __init__(self, text, filename=None, whitespace=None, tabwidth=None,
             comments_re=None, eol_comments_re=None, ignorecase=False,
             trace=False, nameguard=None, comment_recovery=False,
             namechars='', **kwargs):
    """Initialize the parse buffer over *text*.

    :param text: the input to parse (converted to unicode).
    :param nameguard: when None, derived from whether whitespace is defined;
        forced on when extra name characters are declared.
    :param trace: enable tracing output.
    """
    self.original_text = text
    self.text = ustr(text)
    self.filename = filename or ''
    self.whitespace = whitespace
    self.tabwidth = tabwidth
    self.comments_re = comments_re
    self.eol_comments_re = eol_comments_re
    self.ignorecase = ignorecase
    # BUG FIX: was hard-coded to `True` (debug leftover), which ignored the
    # `trace` argument and forced tracing on for every buffer.
    self.trace = trace
    self.nameguard = (
        nameguard if nameguard is not None else bool(self.whitespace_re)
    )
    self.comment_recovery = comment_recovery
    self.namechars = namechars
    self._namechar_set = set(namechars)
    if namechars:
        # Extra name characters imply nameguard semantics.
        self.nameguard = True

    self._pos = 0
    self._len = 0
    self._linecount = 0
    self._line_index = []
    self._comment_index = []
    self._preprocess()
    self._linecache = []
    self._build_line_cache()
    # One (initially empty) comment slot per line.
    self._comment_index = [[] for _ in self._line_index]
    self._len = len(self.text)
    self._re_cache = {}
def test_based_rule(self):
    """A based rule (``b < a``) extends its base; grammar must round-trip."""
    grammar = '''\ start = b $ ; a = @:'a' ; b < a = {@:'b'} ; '''
    model = genmodel("test", grammar)
    ast = model.parse("abb", nameguard=False)
    # FIX: assertEquals is a deprecated unittest alias; use assertEqual
    # (consistent with the assertion below).
    self.assertEqual(['a', 'b', 'b'], ast)
    self.assertEqual(trim(grammar), ustr(model))
def test_based_rule(self):
    """A based rule (``b < a``) inherits its base; the grammar round-trips."""
    grammar = '''\ start = b $ ; a = @:'a' ; b < a = {@:'b'} ; '''
    compiled = compile(grammar, "test")
    ast = compiled.parse("abb", nameguard=False)
    self.assertEqual(['a', 'b', 'b'], ast)
    self.assertEqual(trim(grammar), ustr(compiled))
def _check_name(self):
    """Raise FailedKeywordSemantics if the parsed name is a reserved word."""
    name = ustr(self.last_node)
    case_insensitive = self.ignorecase or self._buffer.ignorecase
    if case_insensitive:
        # Keywords are stored uppercased for case-insensitive grammars.
        name = name.upper()
    if name not in self.keywords:
        return
    raise FailedKeywordSemantics('"%s" is a reserved word' % name)
def __str__(self):
    """Render a group, going multi-line when its body spans several lines."""
    body = ustr(self.exp)
    if len(body.splitlines()) > 1:
        return '(\n%s\n)' % indent(body)
    return '(%s)' % trim(body)
def __init__(self, rule):
    """Wrap *rule* so its expression can be included inline in another rule."""
    # Only actual Rule instances may be included.
    assert isinstance(rule, Rule), ustr(rule.name)
    super(RuleInclude, self).__init__(rule.exp)
    self.rule = rule
def param_repr(p):
    """Render a parameter; strings are stripped of any base-class suffix."""
    if isinstance(p, (int, float)):
        return ustr(p)
    # Keep only the portion before the base-class token.
    base_name = p.split(BASE_CLASS_TOKEN)[0]
    return urepr(base_name)
def __str__(self):
    """Render a list-accumulating named capture: ``name+:exp``."""
    return '{0}+:{1}'.format(self.name, ustr(self.exp))
def __str__(self):
    # Delegate rendering entirely to the wrapped expression.
    return ustr(self.exp)
def setUp(self):
    """Load this test module's own source into a whitespace-less Buffer."""
    source_path = os.path.splitext(__file__)[0] + '.py'
    with open(source_path, encoding='utf-8') as stream:
        self.text = ustr(stream.read())
    self.buf = Buffer(self.text, whitespace='')
def __str__(self):
    """Render a closure ``{exp}``, breaking onto lines when the body does."""
    body = ustr(self.exp)
    if len(body.splitlines()) > 1:
        return '{\n%s\n}' % indent(body)
    return '{%s}' % body
def param_repr(p):
    """Numbers render bare; everything else is quoted via urepr."""
    return ustr(p) if isinstance(p, (int, float)) else urepr(p)
def __str__(self):
    """Render a negative lookahead: ``!exp``."""
    return '!%s' % ustr(self.exp)
def render_fields(self, fields):
    """Render the node's comment as '# '-prefixed source lines."""
    commented = (
        '# %s' % ustr(line)
        for line in self.node.comment.splitlines()
    )
    fields.update(lines='\n'.join(commented))
def _trace(self, msg, *params):
    """Write a %-formatted trace message to stderr when tracing is on."""
    if not self.trace:
        return
    info(ustr(msg % params), file=sys.stderr)