def to_fmt(self): """ Return an Fmt representation for pretty-printing """ qual = "evalctx" lseval = [] block = fmt.block(":\n", "", fmt.tab(lseval)) txt = fmt.sep(" ", [qual, block]) lseval.append(self._sig.to_fmt()) if len(self.resolution) > 0: lsb = [] for k in sorted(self.resolution.keys()): s = self.resolution[k] if s is not None: lsb.append(fmt.end("\n", ["'%s': %s (%s)" % (k, s, s().show_name())])) else: lsb.append(fmt.end("\n", ["'%s': Unresolved" % (k)])) if self._translate_to is not None: lsb.append("use translator:") lsb.append(self._translate_to.to_fmt()) if self._variadic_types is not None: lsb.append("variadic types:\n") arity = self._sig.arity for t in self._variadic_types: lsb.append("[%d] : %s\n" % (arity, t)) arity += 1 lseval.append(fmt.block("\nresolution :\n", "", fmt.tab(lsb))) return txt
def to_fmt(self, with_from=False) -> fmt.indentable:
    txt = fmt.block("{\n", "\n}", [])
    items = fmt.sep("\n---\n", [])
    for k in sorted(self._internal.keys()):
        items.lsdata.append(
            fmt.sep(": ", [k, fmt.tab([self._internal[k].to_fmt(with_from)])])
        )
    txt.lsdata.append(fmt.tab([items]))
    return txt
def to_fmt(self): """ Return an Fmt representation for pretty-printing """ qual = "evalctx" lseval = [] block = fmt.block(":\n", "", fmt.tab(lseval)) txt = fmt.sep(" ", [qual, block]) lseval.append(self._sig.to_fmt()) if len(self.resolution) > 0: lsb = [] for k in sorted(self.resolution.keys()): s = self.resolution[k] if s is not None: lsb.append( fmt.end("\n", ["'%s': %s (%s)" % (k, s, s().show_name())])) else: lsb.append(fmt.end("\n", ["'%s': Unresolved" % (k)])) if self._translate_to is not None: lsb.append("use translator:") lsb.append(self._translate_to.to_fmt()) if self._variadic_types is not None: lsb.append("variadic types:\n") arity = self._sig.arity for t in self._variadic_types: lsb.append("[%d] : %s\n" % (arity, t)) arity += 1 lseval.append(fmt.block("\nresolution :\n", "", fmt.tab(lsb))) return txt
def to_c(self):
    thenbody = ';\n'
    if self.thencond is not None and hasattr(self.thencond, 'to_c'):
        thenbody = fmt.tab(self.thencond.to_c())
    lsif = [
        fmt.sep(" ", ["if", fmt.block('(', ')\n', [self.condition.to_c()])]),
        thenbody
    ]
    if self.elsecond is not None and hasattr(self.elsecond, 'to_c'):
        lsif.append("else\n")
        lsif.append(fmt.tab(self.elsecond.to_c()))
    return fmt.end("", lsif)
def to_c(self):
    lsbody = []
    i = 0
    for it in self.body:
        subbody = it.to_c()
        if hasattr(it, 'designation'):
            subbody = fmt.sep("", [it.designation, subbody])
        if isinstance(subbody, fmt.block):
            lsbody.append(fmt.tab(["\n", subbody]))
        else:
            lsbody.append(subbody)
        i += 1
        if i > 8:
            lsbody[-1] = "\n" + str(lsbody[-1])
            i = 0
    return fmt.block("{ ", " }", fmt.tab(fmt.sep(', ', lsbody)))
def to_cython(self, genstate) -> fmt.indentable:
    alts = fmt.tab([])
    genstate.newScopeError()
    alts.lsdata.append(tpl_althead.substitute(
        lvlid=genstate.lvlid,
        outid=genstate.outid
    ))
    for s in self.ptlist:
        if not isinstance(s, functors.Seq):
            s = functors.Seq(s)
        genstate.newScopeAlt()
        content = tpl_alt.substitute(
            code=s.to_cython(genstate),
            lvlid=genstate.lvlid,
            outid=genstate.outid,
            errid=genstate.errid,
            outerrid=genstate.outerrid
        )
        alts.lsdata.append(content)
        genstate.popScopeAlt()
    alts.lsdata.append(tpl_altfoot.substitute(
        lvlid=genstate.lvlid,
        outid=genstate.outid,
        errid=genstate.errid,
        outerrid=genstate.outerrid
    ))
    genstate.popScopeError()
    return alts
def to_c(self):
    if self.already_imported:
        return fmt.sep("", [])
    lsbody = []
    lsbody.append(self.ifndef.to_c())
    lsbody.append(self.define.to_c())
    lsbody.append(self.body.to_c())
    lsbody.append(self.endif.to_c())
    return fmt.end("\n\n", fmt.tab(fmt.sep("\n", lsbody)))
def to_tl4t(self) -> fmt.indentable:
    lssub = []
    for s in self.body:
        lssub.append(s.to_tl4t())
    lsblock = None
    if self.root:
        lsblock = fmt.sep('', lssub)
    else:
        lsblock = fmt.block('{\n', '}', [fmt.tab(lssub)])
    return lsblock
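# A small sketch (not from the original sources) of what the two branches
# above render to once str() is applied.  It relies only on behaviour
# demonstrated by the fmt unit test in this section; the statement strings
# are illustrative stand-ins for what each sub-node's to_tl4t() would return.
from pyrser import fmt

lssub = ["a = 1;\n", "b = 2;\n"]
assert str(fmt.sep('', lssub)) == "a = 1;\nb = 2;\n"        # root: flat sequence
assert str(fmt.block('{\n', '}', [fmt.tab(lssub)])) == (
    "{\n    a = 1;\n    b = 2;\n}")                         # nested: braced, indented block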
def to_c(self):
    body = ';\n'
    if self.body is not None and hasattr(self.body, 'to_c'):
        body = fmt.tab(self.body.to_c())
    lswh = [
        fmt.sep(" ", ["switch", fmt.block('(', ')\n', [self.condition.to_c()])]),
        body
    ]
    return fmt.sep("", lswh)
def to_c(self):
    body = ';\n'
    if self.body is not None and hasattr(self.body, 'to_c'):
        body = fmt.tab(self.body.to_c())
    lsdo = [
        fmt.sep("\n", ["do", body]),
        fmt.sep(" ", ["while", fmt.block('(', ');\n', [self.condition.to_c()])])
    ]
    return fmt.sep("", lsdo)
def to_yml(self):
    """
    Return the YML string representation of a Node::

        from pyrser.passes import to_yml

        t = Node()
        ...
        print(str(t.to_yml()))
    """
    pp = fmt.tab([])
    to_yml_item(self, pp.lsdata, "")
    return str(pp)
def to_cython(self, genstate) -> fmt.indentable:
    repn = self.pt
    if not isinstance(repn, functors.Seq):
        repn = functors.Seq(repn)
    genstate.newScopeError()
    # generate the sub-expression once, through the Seq wrapper
    source = fmt.tab([repn.to_cython(genstate)])
    res = tpl_repopt.substitute(
        code=source,
        lvlid=genstate.lvlid,
        outid=genstate.outid,
        errid=genstate.errid,
    )
    genstate.popScopeError()
    return res
def to_c(self):
    body = ';\n'
    if self.body is not None and hasattr(self.body, 'to_c'):
        body = fmt.tab(self.body.to_c())
    lswh = [
        fmt.sep(" ", ["while", fmt.block('(', ')\n', [self.condition.to_c()])]),
        body
    ]
    return fmt.sep("", lswh)
def to_fmt(self) -> fmt.indentable: """ Return an Fmt representation for pretty-printing """ qual = "type" txt = fmt.sep(" ", [qual]) txt.lsdata.append(self.show_name()) if hasattr(self, '_hsig') and len(self._hsig) > 0: lsb = [] for k in sorted(self._hsig.keys()): s = self._hsig[k] lsb.append(fmt.end("\n", [s.to_fmt()])) block = fmt.block(":\n", "", fmt.tab(lsb)) txt.lsdata.append(block) return txt
def to_cython(self, genstate) -> fmt.indentable:
    rule = self.pt
    if not isinstance(rule, functors.Seq):
        rule = functors.Seq(rule)
    genstate.newScopeError()
    # generate the sub-expression once, through the Seq wrapper
    source = fmt.tab([rule.to_cython(genstate)])
    res = tpl_lookahead.substitute(
        code=source,
        lvlid=genstate.lvlid,
        outid=genstate.outid,
        errid=genstate.errid,
        outerrid=genstate.outerrid
    )
    genstate.popScopeError()
    return res
def to_fmt(self) -> fmt.indentable: """ Return an Fmt representation for pretty-printing """ qual = "type" txt = fmt.sep(" ", [qual]) txt.lsdata.append(self.show_name()) if hasattr(self, "_hsig") and len(self._hsig) > 0: lsb = [] for k in sorted(self._hsig.keys()): s = self._hsig[k] lsb.append(fmt.end("\n", [s.to_fmt()])) block = fmt.block(":\n", "", fmt.tab(lsb)) txt.lsdata.append(block) return txt
def to_cython(self, genstate) -> fmt.indentable:
    seqs = fmt.tab([])
    genstate.newScopeError()
    seqs.lsdata.append(tpl_seqhead.substitute(
        lvlid=genstate.lvlid,
        outid=genstate.outid
    ))
    for s in self.ptlist:
        content = tpl_seq.substitute(code=s.to_cython(genstate))
        seqs.lsdata.append(content)
    seqs.lsdata.append(tpl_seqfoot.substitute(
        lvlid=genstate.lvlid,
        outid=genstate.outid,
        errid=genstate.errid,
        outerrid=genstate.outerrid
    ))
    genstate.popScopeError()
    return seqs
def to_fmt(self) -> fmt.indentable: """ Return an Fmt representation for pretty-printing """ qual = "scope" txt = fmt.sep(" ", [qual]) name = self.show_name() if name != "": txt.lsdata.append(name) if len(self._hsig) > 0 or len(self.mapTypeTranslate) > 0: lsb = [] if len(self.mapTypeTranslate) > 0: lsb.append("translate:\n") lsb.append(fmt.end("\n", self.mapTypeTranslate.to_fmt())) for k in sorted(self._hsig.keys()): s = self._hsig[k] lsb.append(fmt.end("\n", [s.to_fmt()])) block = fmt.block(":\n", "", fmt.tab(lsb)) txt.lsdata.append(block) return txt
def to_fmt(self) -> fmt.indentable:
    res = None
    showlist = {
        'attr': {
            "res": fmt.sep('', ['.']),
            "inblock": fmt.sep('=', [self.value])
        },
        'indice': {
            "res": fmt.block('[', ']', []),
            "inblock": fmt.sep(': ', [repr(self.value)])
        },
        'key': {
            "res": fmt.block('{', '}', []),
            "inblock": fmt.sep(': ', [repr(self.value)])
        }
    }
    for k in sorted(showlist.keys()):
        v = showlist[k]
        if self.kind == CaptureContext.kind_of_node[k]:
            res = v["res"]
            inblock = v["inblock"]
            if self.node is not None:
                if hasattr(self.node, 'to_fmt'):
                    inblock.lsdata.append(self.node.to_fmt())
                elif type(self.node) is weakref.ReferenceType:
                    inblock.lsdata.append(repr(self.get()))
            res.lsdata.append(inblock)
    ######
    if self.kind == CaptureContext.kind_of_node['node']:
        res = fmt.sep('', [self.value])
        elmts = fmt.sep(',\n', [])
        for sk in sorted(CaptureContext.map_intern.values()):
            subelmt = None
            if hasattr(self, sk):
                subelmt = getattr(self, sk)
            if subelmt is not None:
                for sube in subelmt:
                    elmts.lsdata.append(sube.to_fmt())
        se = fmt.tab(fmt.block('(\n', '\n)', elmts))
        res.lsdata.append(se)
    return res
def to_c(self):
    init_body = None
    if type(self.init) is nodes.Decl:
        init_body = decl_to_c(self.init)
    elif self.init is not None and hasattr(self.init, 'expr'):
        init_body = self.init.expr.to_c()
    cond_body = None
    if self.condition is not None and hasattr(self.condition, 'expr'):
        cond_body = self.condition.expr.to_c()
    inc_body = None
    if self.increment is not None and hasattr(self.increment, 'to_c'):
        inc_body = self.increment.to_c()
    lsfor = [
        fmt.sep(" ", [
            "for",
            fmt.block('(', ')\n',
                      [fmt.sep("; ", [init_body, cond_body, inc_body])])
        ]),
        fmt.tab(self.body.to_c())
    ]
    return fmt.end("", lsfor)
def ctype_to_c(self, func_var_name=""): # our global declarator declarator = fmt.sep("", []) # typename or full decl if func_var_name != "": declarator.lsdata.append(func_var_name) # intern prototype if hasattr(self, 'params'): # param list pf = fmt.sep(", ", []) for p in self.params: if p.ctype is not None: if isinstance(p.ctype, nodes.CType): pf.lsdata.append(p.ctype.ctype_to_c(p._name)) if hasattr(self, '_ellipsis'): pf.lsdata.append('...') if len(pf.lsdata) > 0: declarator.lsdata.append(fmt.block('(', ')', pf)) else: declarator.lsdata.append('()') # for externalize the last qualifier qualextern = None # final output decl_ls = fmt.sep(" ", []) if self.link() is not None: # add all qualifiers if len(declarator.lsdata) > 0: qual_list = declarator else: qual_list = fmt.sep(" ", []) unqual_list = self.link() # qualification of declaration while unqual_list is not None: if isinstance(unqual_list, nodes.ParenType): # surround previous defs by () qual_list = fmt.sep("", [fmt.block("(", ")", [qual_list])]) # () provide param for function pointers if len(unqual_list.params) > 0: pf = fmt.sep(", ", []) for p in unqual_list.params: pf.lsdata.append(p.ctype.ctype_to_c(p._name)) if hasattr(unqual_list, '_ellipsis'): pf.lsdata.append('...') qual_list.lsdata.append(fmt.block('(', ')', pf)) if isinstance(unqual_list, nodes.PointerType): qual_list.lsdata.insert(0, "*") if isinstance(unqual_list, nodes.AttrType): qual_list.lsdata.insert(0, unqual_list._attr + " ") if isinstance(unqual_list, nodes.QualType): if unqual_list._qualifier != nodes.Qualifiers.AUTO: if unqual_list.link() is None: qualextern = unqual_list else: qual_list.lsdata.insert( 0, nodes.Qualifiers.rmap[ unqual_list._qualifier].lower() + " ") if isinstance(unqual_list, nodes.ArrayType): # collect all consecutive array consec_ary = [] consec_ary.append(unqual_list) unqual_list = unqual_list.link() while ((unqual_list is not None and isinstance(unqual_list, nodes.ArrayType))): consec_ary.append(unqual_list) unqual_list = unqual_list.link() reordered = [] for ary in consec_ary: if ary.expr is not None: ary_expr = None if hasattr(ary.expr, 'to_c'): ary_expr = ary.expr.to_c() reordered.insert(0, fmt.block("[", "]", [ary_expr])) else: reordered.insert(0, "[]") qual_list.lsdata.extend(reordered) # rewind one for last sentence unqual_list = consec_ary[-1] unqual_list = unqual_list.link() # add qualified declarator decl_ls.lsdata.append(qual_list) elif len(declarator.lsdata) > 0: # no qualifiers just the name decl_ls.lsdata.append(declarator) # for enum if hasattr(self, 'enums'): enums = fmt.sep(",\n", []) for enum in self.enums: if enum.expr is not None and hasattr(enum.expr, 'to_c'): enums.lsdata.append( fmt.sep(" = ", [enum.ident, enum.expr.to_c()])) else: enums.lsdata.append(enum.ident) decl_ls.lsdata.insert(0, fmt.tab(fmt.block("{\n", "}", enums))) # for struct if hasattr(self, 'fields'): fields = [] for field in self.fields: fields.append(field.to_c()) decl_ls.lsdata.insert(0, fmt.tab(fmt.block("{\n", "}", fields))) # just the type name if hasattr(self, 'identifier'): decl_ls.lsdata.insert(0, self.identifier) # attributes composed if hasattr(self, '_attr_composed'): decl_ls.lsdata.insert(0, fmt.sep(" ", self._attr_composed)) # specifier if self._specifier != nodes.Specifiers.AUTO: if self._specifier == nodes.Specifiers.LONGLONG: decl_ls.lsdata.insert(0, "long long") else: decl_ls.lsdata.insert( 0, nodes.Specifiers.rmap[self._specifier].lower()) # sign if hasattr(self, '_sign') and self._sign != nodes.Signs.AUTO: decl_ls.lsdata.insert(0, 
nodes.Signs.rmap[self._sign].lower()) # qualifier externalized if qualextern is not None: decl_ls.lsdata.insert( 0, nodes.Qualifiers.rmap[qualextern._qualifier].lower()) # End by storage if self._storage != nodes.Storages.AUTO: decl_ls.lsdata.insert(0, nodes.Storages.rmap[self._storage].lower()) return decl_ls
def test_00(self):
    """Test pprint functions"""
    data = fmt.end(";", ["tot"])
    self.assertEqual(str(data), "tot;", "Failed to format end")
    data = fmt.tab(fmt.end(";\n", ["tot"]))
    self.assertEqual(str(data), "    tot;\n", "Failed to format end")
    data = fmt.tab([fmt.end(";\n", ["tot"])])
    self.assertEqual(str(data), "    tot;\n", "Failed to format end")
    data = fmt.end(";", ["", fmt.tab(["\ntot", "\nplop"])])
    self.assertEqual(str(data),
                     ";\n{tab}tot\n{tab}plop;".format(tab=" " * 4),
                     "Failed to format end")
    data = fmt.end(";", ["", fmt.tab(["\ntot", "\nplop"])])
    self.assertEqual(str(data),
                     ";\n{tab}tot\n{tab}plop;".format(tab=" " * 4),
                     "Failed to format end")
    data = fmt.sep(",", ["a", "b", "c"])
    self.assertEqual(str(data), "a,b,c", "Failed to format sep")
    data = fmt.tab(fmt.sep(",\n", ["tot", "tit", "tut"]))
    self.assertEqual(str(data),
                     "{tab}tot,\n{tab}tit,\n{tab}tut".format(tab=" " * 4),
                     "Failed to format end")
    data = fmt.sep(",\n", [fmt.tab(["tot", "tit"]), "tut"])
    self.assertEqual(str(data), "tottit,\ntut", "Failed to format end")
    data = fmt.block("{", "}", ["a", "b", "c"])
    self.assertEqual(str(data), "{abc}", "Failed to format block")
    data = fmt.block("{", "}", [fmt.sep(",", ["a", "b", "c"])])
    self.assertEqual(str(data), "{a,b,c}", "Failed to format block/sep")
    data = fmt.sep(",", [fmt.block("{", "}", ["a", "b"]),
                         fmt.block("{", "}", ["c", "d"])])
    self.assertEqual(str(data), "{ab},{cd}", "Failed to format sep/block")
    data = fmt.end(";\n", ["a", "b", "c"])
    self.assertEqual(str(data), "a;\nb;\nc;\n",
                     "Failed to format a list end by ';\n'")
    data = fmt.tab(fmt.block("{\n", "}\n", ["a\n", "b\n", "c\n"]))
    self.assertEqual(
        str(data),
        ("{tab}{{\n{tab}a\n" +
         "{tab}b\n{tab}c\n{tab}}}\n").format(tab=(" " * 4)),
        "Failed to indent")
    data = fmt.block("{\n", "}\n", [fmt.tab(["a\n", "b\n", "c\n"])])
    self.assertEqual(
        str(data),
        ("{{\n{tab}a\n{tab}b\n" +
         "{tab}c\n}}\n").format(tab=(" " * 4)),
        "Failed to indent")
    data = fmt.block(
        "{\n", "}\n",
        [fmt.tab(fmt.end("\n", [
            "a",
            "b",
            fmt.tab(fmt.block("[\n", "]", ["b\n", "g\n", "o\n", "e\n"])),
            "c"
        ]))])
    self.assertEqual(
        str(data),
        ("{{\n{tab}a\n{tab}b\n" +
         "{tab2}[\n{tab2}b\n{tab2}g\n" +
         "{tab2}o\n{tab2}e\n{tab2}]\n" +
         "{tab}c\n}}\n").format(tab=(" " * 4), tab2=(" " * 8)),
        "Failed to indent")
    data = fmt.block(
        "{\n", "}\n",
        [fmt.tab(fmt.end("\n", [
            "a",
            "b",
            fmt.block("[\n", "]",
                      [fmt.tab(fmt.tab(["b\n", "g\n", "o\n", "e\n"]))]),
            "c"
        ]))])
    self.assertEqual(
        str(data),
        ("{{\n{tab}a\n{tab}b\n" +
         "{tab}[\n{tab2}b\n{tab2}g\n" +
         "{tab2}o\n{tab2}e\n{tab}]\n" +
         "{tab}c\n}}\n").format(tab=(" " * 4), tab2=(" " * 12)),
        "Failed to indent")
    data = fmt.block(
        "{\n", "}\n",
        fmt.tab([
            "a\n",
            fmt.block("{\n", "}\n", fmt.tab(["d\n", "e\n", "f\n"])),
            "c\n"
        ]))
    self.assertEqual(
        str(data),
        ("{{\n{tab}a\n" +
         "{tab}{{\n{tab2}d\n{tab2}e\n{tab2}f\n{tab}}}\n" +
         "{tab}c\n}}\n").format(tab=(" " * 4), tab2=(" " * 8)),
        "Failed to indent")
    data = fmt.block(
        "{\n", "}\n",
        fmt.tab(fmt.block("{\n", "}\n",
                          fmt.tab(fmt.end(";\n", ["a", "b", "c"])))))
    self.assertEqual(
        str(data),
        ("{{\n{tab}{{\n" +
         "{tab2}a;\n{tab2}b;\n{tab2}c;\n" +
         "{tab}}}\n}}\n").format(tab=(" " * 4), tab2=(" " * 8)),
        "Failed to indent")
    data = fmt.tab(["1", "2", [fmt.sep(",\n", ["tot", "tit", "tut"])], "4"])
    self.assertEqual(
        str(data),
        ("{tab}12tot,\n{tab}tit,\n" +
         "{tab}tut4").format(tab=" " * 4),
        "Failed to format end")
def to_fmt(self) -> fmt.indentable:
    res = fmt.block('{\n', '}', [fmt.tab([])])
    lines = res.lsdata[0].lsdata
    for stmt in self.stmts:
        lines.append(fmt.end('\n', stmt.to_fmt()))
    return res
def to_c(self):
    lsbody = []
    for e in self.body:
        if e is not None and hasattr(e, 'to_c'):
            lsbody.append(e.to_c())
    return fmt.block("({\n", "})", fmt.tab(lsbody))
def to_cython(self, ctype_name: str) -> CStub:
    cstub = CStub()
    # SETUP GENERATION
    cstub.setup = fmt.end('', [])
    cstub.setup.lsdata.append(tpl_setup.substitute(
        ctn=ctype_name,
    ))
    # CSOURCE GENERATION
    pyattr = fmt.end('', [])
    pyxprotos = fmt.end('', [])
    pprotos = fmt.end('', [])
    cprotos = fmt.end('', [])
    gram = fmt.end('//---\n', [])
    gram.lsdata.append(tpl_file.substitute(ctn=ctype_name))
    for k, v in self.__class__._rules.items():
        try:
            if isinstance(v, parsing.Functor):
                genstate = GenState()
                fun_name = k.replace('.', '_')
                # if not isinstance(v, functors.Seq):
                v = functors.Seq(v)
                rule_code = v.to_cython(genstate)
                content = tpl_function.substitute(
                    ctn=ctype_name,
                    rule=fun_name,
                    code=fmt.tab([rule_code]),
                    lvlid=genstate.lvlid,
                    errid=genstate.errid
                )
                rule = fmt.sep('\n', ["//--- %s" % k, content])
                pyattr.lsdata.append(tpl_python_attr.substitute(
                    rule=fun_name
                ))
                pprotos.lsdata.append(tpl_pproto.substitute(
                    ctn=ctype_name,
                    rule=fun_name
                ))
                cprotos.lsdata.append(tpl_cproto.substitute(
                    ctn=ctype_name,
                    rule=fun_name
                ))
                pyxprotos.lsdata.append(tpl_pyx_rules.substitute(
                    ctn=ctype_name,
                    rule=fun_name
                ))
                gram.lsdata.append(rule)
        except Exception:
            # print("Can't Transform %s = %s" % (k, to_yml.to_yml(v)))
            pass
    cstub.csource = gram
    # PYTHON GENERATION
    cstub.psource = fmt.end('', [])
    cstub.psource.lsdata.append(tpl_python.substitute(
        ctn=ctype_name,
        rules_attr=str(pyattr)
    ))
    # CHEADER GENERATION
    cstub.cheader = fmt.end('', [])
    cstub.cheader.lsdata.append(tpl_header.substitute(
        ctn=ctype_name,
        cfunctions_proto=str(cprotos)
    ))
    # PXD GENERATION
    cstub.pxd = fmt.end('', [])
    cstub.pxd.lsdata.append(tpl_pxd.substitute(
        ctn=ctype_name,
        pfunctions_proto=str(pprotos)
    ))
    # PYX GENERATION
    cstub.pyx = fmt.end('', [])
    cstub.pyx.lsdata.append(tpl_pyx.substitute(
        ctn=ctype_name,
        rfunctions_proto=str(pyxprotos)
    ))
    return cstub
def ctype_to_c(self, func_var_name=""): # our global declarator declarator = fmt.sep("", []) # typename or full decl if func_var_name != "": declarator.lsdata.append(func_var_name) # intern prototype if hasattr(self, 'params'): # param list pf = fmt.sep(", ", []) for p in self.params: if p.ctype is not None: if isinstance(p.ctype, nodes.CType): pf.lsdata.append(p.ctype.ctype_to_c(p._name)) if hasattr(self, '_ellipsis'): pf.lsdata.append('...') if len(pf.lsdata) > 0: declarator.lsdata.append(fmt.block('(', ')', pf)) else: declarator.lsdata.append('()') # for externalize the last qualifier qualextern = None # final output decl_ls = fmt.sep(" ", []) if self.link() is not None: # add all qualifiers if len(declarator.lsdata) > 0: qual_list = declarator else: qual_list = fmt.sep(" ", []) unqual_list = self.link() # qualification of declaration while unqual_list is not None: if isinstance(unqual_list, nodes.ParenType): # surround previous defs by () qual_list = fmt.sep("", [fmt.block("(", ")", [qual_list])]) # () provide param for function pointers if len(unqual_list.params) > 0: pf = fmt.sep(", ", []) for p in unqual_list.params: pf.lsdata.append(p.ctype.ctype_to_c(p._name)) if hasattr(unqual_list, '_ellipsis'): pf.lsdata.append('...') qual_list.lsdata.append(fmt.block('(', ')', pf)) if isinstance(unqual_list, nodes.PointerType): qual_list.lsdata.insert(0, "*") if isinstance(unqual_list, nodes.AttrType): qual_list.lsdata.insert(0, unqual_list._attr + " ") if isinstance(unqual_list, nodes.QualType): if unqual_list._qualifier != nodes.Qualifiers.AUTO: if unqual_list.link() is None: qualextern = unqual_list else: qual_list.lsdata.insert( 0, nodes.Qualifiers.rmap[ unqual_list._qualifier ].lower() + " " ) if isinstance(unqual_list, nodes.ArrayType): # collect all consecutive array consec_ary = [] consec_ary.append(unqual_list) unqual_list = unqual_list.link() while ((unqual_list is not None and isinstance(unqual_list, nodes.ArrayType))): consec_ary.append(unqual_list) unqual_list = unqual_list.link() reordered = [] for ary in consec_ary: if ary.expr is not None: ary_expr = None if hasattr(ary.expr, 'to_c'): ary_expr = ary.expr.to_c() reordered.insert(0, fmt.block("[", "]", [ary_expr])) else: reordered.insert(0, "[]") qual_list.lsdata.extend(reordered) # rewind one for last sentence unqual_list = consec_ary[-1] unqual_list = unqual_list.link() # add qualified declarator decl_ls.lsdata.append(qual_list) elif len(declarator.lsdata) > 0: # no qualifiers just the name decl_ls.lsdata.append(declarator) # for enum if hasattr(self, 'enums'): enums = fmt.sep(",\n", []) for enum in self.enums: if enum.expr is not None and hasattr(enum.expr, 'to_c'): enums.lsdata.append( fmt.sep( " = ", [enum.ident, enum.expr.to_c()] )) else: enums.lsdata.append(enum.ident) decl_ls.lsdata.insert(0, fmt.tab(fmt.block("{\n", "}", enums))) # for struct if hasattr(self, 'fields'): fields = [] for field in self.fields: fields.append(field.to_c()) decl_ls.lsdata.insert(0, fmt.tab(fmt.block("{\n", "}", fields))) # just the type name if hasattr(self, 'identifier'): decl_ls.lsdata.insert(0, self.identifier) # attributes composed if hasattr(self, '_attr_composed'): decl_ls.lsdata.insert(0, fmt.sep(" ", self._attr_composed)) # specifier if self._specifier != nodes.Specifiers.AUTO: if self._specifier == nodes.Specifiers.LONGLONG: decl_ls.lsdata.insert(0, "long long") else: decl_ls.lsdata.insert( 0, nodes.Specifiers.rmap[ self._specifier ].lower() ) # sign if hasattr(self, '_sign') and self._sign != nodes.Signs.AUTO: 
decl_ls.lsdata.insert(0, nodes.Signs.rmap[self._sign].lower()) # qualifier externalized if qualextern is not None: decl_ls.lsdata.insert( 0, nodes.Qualifiers.rmap[ qualextern._qualifier ].lower() ) # End by storage if self._storage != nodes.Storages.AUTO: decl_ls.lsdata.insert(0, nodes.Storages.rmap[self._storage].lower()) return decl_ls
def to_yml_item(item, pp, name):
    global scalar
    refcount = weakref.getweakrefcount(item)
    if refcount > 0:
        name += " &" + str(id(item))
    if type(item).__name__ in scalar:
        tag = fmt.end(
            '\n',
            fmt.sep("", [name, " ", yml_attr(type(item).__name__, repr(item))]))
        pp.append(tag)
        return
    if isinstance(item, weakref.ref):
        name += " *" + str(id(item()))
        tag = fmt.end('\n', fmt.sep("", [name, " ", repr(item)]))
        pp.append(tag)
        return
    if isinstance(item, bytes) or isinstance(item, bytearray):
        inner = fmt.tab([])
        tag = fmt.block(name + " " + str(yml_attr('type', 'bytes')) + ':\n',
                        '----' + name + '----\n', inner)
        inner.lsdata.append(fmt.sep(" ", []))
        bindata = inner.lsdata[-1].lsdata
        i = 0
        for b in item:
            bindata.append("%02X" % b)
            i += 1
            if i > 16:
                bindata.append("\n")
                i = 0
        bindata.append("\n")
        pp.append(tag)
        return
    if isinstance(item, object) and hasattr(item, '__dict__'):
        inner = fmt.tab([])
        tag = fmt.block(
            name + " " + str(yml_attr('type', item.__class__.__name__)) + ':\n',
            '', inner)
        for attr in sorted(vars(item)):
            to_yml_item(getattr(item, attr), inner.lsdata, attr)
        if len(vars(item)) == 0:
            inner.lsdata.append("\n")
        pp.append(tag)
        return
    if isinstance(item, list):
        inner = fmt.tab([])
        tag = fmt.block(name + " " + str(yml_attr('type', 'list')) + ':\n',
                        '', inner)
        i = 0
        for subitem in item:
            idxname = str(fmt.sep(" ", ['[', i, ']']))
            to_yml_item(subitem, inner.lsdata, idxname)
            i += 1
        if len(item) == 0:
            inner.lsdata.append("\n")
        pp.append(tag)
        return
    if isinstance(item, tuple):
        inner = fmt.tab([])
        tag = fmt.block(name + " " + str(yml_attr('type', 'tuple')) + ':\n',
                        '', inner)
        i = 0
        for subitem in item:
            idxname = str(fmt.sep(" ", ["[", i, "]"]))
            to_yml_item(subitem, inner.lsdata, idxname)
            i += 1
        if len(item) == 0:
            inner.lsdata.append("\n")
        pp.append(tag)
        return
    if isinstance(item, dict):
        inner = fmt.tab([])
        tag = fmt.block(name + " " + str(yml_attr('type', 'dict')) + ':\n',
                        '', inner)
        for k in sorted(item.keys()):
            idxname = str(fmt.sep(" ", ["[", repr(k), "]"]))
            to_yml_item(item[k], inner.lsdata, idxname)
        if len(item.keys()) == 0:
            inner.lsdata.append("\n")
        pp.append(tag)
        return
    if isinstance(item, set):
        inner = fmt.tab([])
        tag = fmt.block(name + " " + str(yml_attr('type', 'set')) + ':\n',
                        '', inner)
        for subitem in sorted(item):
            inner.lsdata.append(fmt.sep(", ", [repr(subitem)]))
        if len(item) == 0:
            inner.lsdata.append("\n")
        pp.append(tag)
        return
    if item is None:
        tag = fmt.end('\n', [name])
        pp.append(tag)
        return