def process_pending(self):
    """Run deferred processing of complex types and schema elements.

    Entries end up in ``self.pending_types`` / ``self.pending_elements``
    when they could not be resolved on first pass (e.g. forward or
    circular references); this retries them.
    """
    # process pending
    self.debug0("6 %s processing pending complex_types", B(self.tns))
    # Snapshot with list(): process_complex_type may add/remove pending
    # entries while we iterate, which would raise RuntimeError on a live
    # dict view.
    for (c_name, e_name), _v in list(self.pending_types.items()):
        self.process_complex_type(_v)

    self.debug0("7 %s processing pending elements", YEL(self.tns))
    # BUGFIX: this loop previously iterated the live dict; snapshot it the
    # same way as the loop above so process_schema_element may safely
    # mutate pending_elements.
    for _k, _v in list(self.pending_elements.items()):
        self.process_schema_element(_v)
def process_pending(ctx):
    """Run deferred processing of complex types and schema elements.

    Functional counterpart of the method variant: ``ctx`` carries the
    parser state instead of ``self``.
    """
    # process pending
    ctx.debug0("6 %s processing pending complex_types", B(ctx.tns))
    # BUGFIX: snapshot with list() — process_complex_type may mutate
    # pending_types while we iterate, which would raise RuntimeError on a
    # live dict view. (The method variant already guards this loop.)
    for (c_name, e_name), _v in list(ctx.pending_types.items()):
        process_complex_type(ctx, _v)

    ctx.debug0("7 %s processing pending elements", Y(ctx.tns))
    # Same snapshot guard for pending_elements.
    for _k, _v in list(ctx.pending_elements.items()):
        process_schema_element(ctx, _v)
def subserialize(self, ctx, cls, inst, parent, name='', **kwargs):
    """Bridge between multiple XmlCloth-based protocols.

    Not supposed to be overridden.
    """
    # Record this protocol on the protocol stack for the duration of the
    # serialization so nested code can see which protocol is active.
    pstack = ctx.protocol.prot_stack
    pstack.append(self)
    logger.debug("%s push prot %r. newlen: %d", R("%"), self, len(pstack))

    have_cloth = False

    # Dispatch order: class cloth > root cloth > protocol cloth > plain
    # parent serialization.
    cls_cloth = self.get_class_cloth(cls)
    if cls_cloth is not None:
        logger.debug("to object cloth for %s", cls.get_type_name())
        ret = self.to_parent_cloth(ctx, cls, inst, cls_cloth, parent, name)
        # NOTE(review): have_cloth deliberately stays False on this branch,
        # so _finalize_protocol below is told no cloth was used — confirm
        # this asymmetry with the cloth-closing logic.

    elif self._root_cloth is not None:
        logger.debug("to root cloth for %s", cls.get_type_name())
        ret = self.to_root_cloth(ctx, cls, inst, self._root_cloth,
                                                                  parent, name)
        have_cloth = True

    elif self._cloth is not None:
        logger.debug("to parent protocol cloth for %s", cls.get_type_name())
        ret = self.to_parent_cloth(ctx, cls, inst, self._cloth, parent, name)
        have_cloth = True

    else:
        logger.debug("to parent for %s", cls.get_type_name())
        ret = self.start_to_parent(ctx, cls, inst, parent, name, **kwargs)

    if isgenerator(ret):  # Poor man's yield from
        # Forward every value sent into this generator to the inner
        # coroutine until a Break is thrown in from the outside.
        try:
            while True:
                sv2 = (yield)
                ret.send(sv2)
        except Break as b:
            try:
                ret.throw(b)
            except StopIteration:
                pass
        finally:
            # Always finalize, even if the inner coroutine blew up.
            self._finalize_protocol(ctx, parent, have_cloth)
    else:
        self._finalize_protocol(ctx, parent, have_cloth)

    pstack.pop()
    logger.debug("%s pop prot %r. newlen: %d", B("%"), self, len(pstack))
def parse_schema(self, elt):
    """Parse an ``<xs:schema>`` element into ``self.retval``.

    Processes, in order: includes, imports, simple types, attributes,
    complex types and elements, then retries anything left pending.
    Returns the accumulated ``{tns: _Schema}`` mapping (for the top-most
    schema), or None early if this tns was already parsed.
    """
    # Take a private copy of the element's nsmap and build the reverse
    # (namespace -> prefix) mapping.
    self.nsmap = dict(elt.nsmap)
    self.prefmap = {v: k for k, v in self.nsmap.items()}

    self.schema = schema = _prot.from_element(self, XmlSchema10, elt)

    self.pending_types = {}
    self.pending_elements = {}

    self.tns = tns = schema.target_namespace
    if self.tns is None:
        # schemas without a target namespace get a sentinel key
        self.tns = tns = '__no_ns__'
    if tns in self.retval:
        return  # already parsed, avoid infinite recursion on circular refs
    self.retval[tns] = _Schema()

    self.debug0("1 %s processing includes", MAG(tns))
    if schema.includes:
        for include in schema.includes:
            self.process_includes(include)

    # Re-key the child collections by name for O(1) lookup, preserving
    # document order.
    if schema.elements:
        schema.elements = odict([(e.name, e) for e in schema.elements])
    if schema.complex_types:
        schema.complex_types = odict([(c.name, c)
                                             for c in schema.complex_types])
    if schema.simple_types:
        schema.simple_types = odict([(s.name, s)
                                              for s in schema.simple_types])
    if schema.attributes:
        schema.attributes = odict([(a.name, a) for a in schema.attributes])

    self.debug0("2 %s processing imports", R(tns))
    if schema.imports:
        for imp in schema.imports:
            if imp.namespace not in self.retval:
                self.debug1("%s importing %s", tns, imp.namespace)
                fname = self.files[imp.namespace]
                # parse the imported document with a child clone rooted at
                # the imported file's directory
                self.clone(2, dirname(fname)).parse_schema_file(fname)
                self.retval[tns].imports.add(imp.namespace)

    self.debug0("3 %s processing simple_types", G(tns))
    if schema.simple_types:
        for s in schema.simple_types.values():
            self.process_simple_type(s)

    # no simple types should have been left behind.
    assert sum(len(v) for v in self.pending_simple_types.values()) == 0, \
                                          self.pending_simple_types.values()

    self.debug0("4 %s processing attributes", G(tns))
    if schema.attributes:
        for s in schema.attributes.values():
            n, t = self.process_attribute(s)
            self.retval[self.tns].types[n] = t

    self.debug0("5 %s processing complex_types", B(tns))
    if schema.complex_types:
        for c in schema.complex_types.values():
            self.process_complex_type(c)

    self.debug0("6 %s processing elements", YEL(tns))
    if schema.elements:
        for e in schema.elements.values():
            self.process_schema_element(e)

    self.process_pending()

    if self.parent is None:  # for the top-most schema
        if self.children is not None:  # if it uses <include> or <import>
            # This is needed for schemas with circular imports
            for c in chain([self], self.children):
                c.print_pending()
            self.debug0('')

            # FIXME: should put this in a while loop that loops until no
            # changes occur
            for c in chain([self], self.children):
                c.process_pending()
            for c in chain([self], self.children):
                c.process_pending()
            self.debug0('')

            for c in chain([self], self.children):
                c.print_pending(fail=(not self.skip_errors))

    return self.retval
def to_parent(self, ctx, cls, inst, parent, name, nosubprot=False, **kwargs):
    """Serialize ``inst`` of type ``cls`` into ``parent``.

    Resolution order: subprotocol > class cloth > null handling >
    array handling > per-class serialization handler. May run as a
    coroutine (contains ``yield``) when the chosen handler returns a
    generator; in that case values sent into this generator are forwarded
    to the inner one until a ``Break`` is thrown in.
    """
    pushed = False      # did we push (cls, inst) onto the instance stack?
    has_cloth = False   # did check_class_cloths open a cloth we must close?

    prot_name = self.__class__.__name__

    # resolve the concrete class for polymorphic serialization
    cls, switched = self.get_polymorphic_target(cls, inst)
    cls_attrs = self.get_cls_attrs(cls)

    inst = self._cast(cls_attrs, inst)

    # if there is a subprotocol, switch to it
    subprot = self.get_subprot(ctx, cls_attrs, nosubprot)
    if subprot is not None:
        logger.debug("Subprot from %r to %r", self, subprot)
        ret = self.to_subprot(ctx, cls, inst, parent, name, subprot, **kwargs)
    else:
        # if there is a class cloth, switch to it
        has_cloth, cor_handle = self.check_class_cloths(
                                      ctx, cls, inst, parent, name, **kwargs)
        if has_cloth:
            ret = cor_handle
        else:
            # if instance is None, use the default factory to generate one
            _df = cls_attrs.default_factory
            if inst is None and callable(_df):
                inst = _df()

            # if instance is still None, use the default value
            if inst is None:
                inst = cls_attrs.default

            # if instance is still None, use the global null handler to
            # serialize it
            if inst is None and self.use_global_null_handler:
                identifier = prot_name + '.null_to_parent'
                logger.debug("Writing %s using %s for %s.", name,
                                           identifier, cls.get_type_name())
                self.null_to_parent(ctx, cls, inst, parent, name, **kwargs)
                return

            # if requested, ignore wrappers
            if self.ignore_wrappers and issubclass(cls, ComplexModelBase):
                cls, inst = self.strip_wrappers(cls, inst)

            # if cls is an iterable of values and it's not being iterated
            # on, do it
            from_arr = kwargs.get('from_arr', False)
            # we need cls.Attributes here because we need the ACTUAL attrs
            # that were set by the Array.__new__
            if not from_arr and cls.Attributes.max_occurs > 1:
                ret = self.array_to_parent(ctx, cls, inst, parent, name,
                                                                    **kwargs)
            else:
                # fetch the serializer for the class at hand
                try:
                    handler = self.serialization_handlers[cls]
                except KeyError:
                    # if this protocol uncapable of serializing this class
                    if self.ignore_uncap:
                        logger.debug("Ignore uncap %r", name)
                        return  # ignore it if requested

                    # raise the error otherwise
                    logger.error(
                        "%r is missing handler for "
                        "%r for field %r", self, cls, name)
                    raise

                # push the instance at hand to instance stack. this makes it
                # easier for protocols to make decisions based on parents
                # of instances at hand.
                ctx.outprot_ctx.inst_stack.append((cls, inst, from_arr))
                pushed = True
                logger.debug("%s %r pushed %r using %r", R("$"), self,
                                                                 cls, handler)

                # disabled for performance reasons
                # from spyne.util.web import log_repr
                # identifier = "%s.%s" % (prot_name, handler.__name__)
                # log_str = log_repr(inst, cls,
                #                       from_array=kwargs.get('from_arr', None))
                # logger.debug("Writing %s using %s for %s. Inst: %r", name,
                #                  identifier, cls.get_type_name(), log_str)

                # finally, serialize the value. ret is the coroutine handle
                ret = handler(ctx, cls, inst, parent, name, **kwargs)

    if isgenerator(ret):
        # forward sent values to the inner coroutine (poor man's yield from)
        try:
            while True:
                sv2 = (yield)
                ret.send(sv2)
        except Break as e:
            try:
                ret.throw(e)
            except (Break, StopIteration, GeneratorExit):
                pass
        finally:
            # cleanup must happen whether or not the coroutine finished
            if has_cloth:
                self._close_cloth(ctx, parent)
            if pushed:
                logger.debug("%s %r popped %r %r", B("$"), self, cls, inst)
                ctx.outprot_ctx.inst_stack.pop()
    else:
        if has_cloth:
            self._close_cloth(ctx, parent)
        if pushed:
            logger.debug("%s %r popped %r %r", B("$"), self, cls, inst)
            ctx.outprot_ctx.inst_stack.pop()
def parse_schema(self, elt):
    """Parse an ``<xs:schema>`` element into ``self.retval``.

    Variant that processes attributes before simple types and stores the
    processed simple types in the retval type map. Returns the accumulated
    ``{tns: _Schema}`` mapping (for the top-most schema), or None early if
    this tns was already parsed.
    """
    # NOTE: this aliases the element's nsmap rather than copying it.
    self.nsmap = nsmap = elt.nsmap
    # reverse (namespace -> prefix) mapping
    self.prefmap = prefmap = {v: k for k, v in self.nsmap.items()}

    self.schema = schema = _prot.from_element(self, XmlSchema10, elt)

    self.pending_types = {}
    self.pending_elements = {}

    self.tns = tns = schema.target_namespace
    if self.tns is None:
        # schemas without a target namespace get a sentinel key
        self.tns = tns = '__no_ns__'
    if tns in self.retval:
        return  # already parsed, avoid infinite recursion on circular refs
    self.retval[tns] = _Schema()

    self.debug0("1 %s processing includes", M(tns))
    if schema.includes:
        for include in schema.includes:
            self.process_includes(include)

    # Re-key the child collections by name for O(1) lookup, preserving
    # document order.
    if schema.elements:
        schema.elements = odict([(e.name, e) for e in schema.elements])
    if schema.complex_types:
        schema.complex_types = odict([(c.name, c)
                                             for c in schema.complex_types])
    if schema.simple_types:
        schema.simple_types = odict([(s.name, s)
                                              for s in schema.simple_types])
    if schema.attributes:
        schema.attributes = odict([(a.name, a) for a in schema.attributes])

    self.debug0("2 %s processing imports", R(tns))
    if schema.imports:
        for imp in schema.imports:
            if imp.namespace not in self.retval:
                self.debug1("%s importing %s", tns, imp.namespace)
                file_name = self.files[imp.namespace]
                # parse the imported document with a child clone rooted at
                # the imported file's directory
                self.clone(2, dirname(file_name)).parse_schema_file(file_name)
                self.retval[tns].imports.add(imp.namespace)

    self.debug0("3 %s processing attributes", G(tns))
    if schema.attributes:
        for s in schema.attributes.values():
            n, t = self.process_attribute(s)
            self.retval[self.tns].types[n] = t

    self.debug0("4 %s processing simple_types", G(tns))
    if schema.simple_types:
        for s in schema.simple_types.values():
            st = self.process_simple_type(s)
            self.retval[self.tns].types[s.name] = st

    self.debug0("5 %s processing complex_types", B(tns))
    if schema.complex_types:
        for c in schema.complex_types.values():
            self.process_complex_type(c)

    self.debug0("6 %s processing elements", Y(tns))
    if schema.elements:
        for e in schema.elements.values():
            self.process_schema_element(e)

    self.process_pending()

    if self.parent is None:  # for the top-most schema
        if self.children is not None:  # if it uses <include> or <import>
            # This is needed for schemas with circular imports
            for c in chain([self], self.children):
                c.print_pending()
            self.debug0('')

            # FIXME: This has no guarantee of working yet covers all the
            # schema files found in the wild so far.
            for c in chain([self], self.children):
                c.process_pending()
            for c in chain([self], self.children):
                c.process_pending()
            self.debug0('')

            for c in chain([self], self.children):
                c.print_pending(fail=True)

    return self.retval
def parse_schema(ctx, elt):
    """Parse an ``<xs:schema>`` element into ``ctx.retval``.

    Functional counterpart of the method variant: ``ctx`` carries the
    parser state instead of ``self``. Returns the accumulated
    ``{tns: _Schema}`` mapping (for the top-most schema), or None early if
    this tns was already parsed.
    """
    # NOTE: this aliases the element's nsmap rather than copying it.
    ctx.nsmap = nsmap = elt.nsmap
    # reverse (namespace -> prefix) mapping
    ctx.prefmap = prefmap = {v: k for k, v in ctx.nsmap.items()}

    ctx.schema = schema = _prot.from_element(XmlSchema10, elt)

    ctx.pending_types = {}
    ctx.pending_elements = {}

    ctx.tns = tns = schema.target_namespace
    # NOTE(review): unlike the method variants, there is no '__no_ns__'
    # fallback here when target_namespace is None — confirm intentional.
    if tns in ctx.retval:
        return  # already parsed, avoid infinite recursion on circular refs
    ctx.retval[tns] = _Schema()

    ctx.debug0("1 %s processing includes", M(tns))
    if schema.includes:
        for include in schema.includes:
            process_includes(ctx, include)

    # Re-key the child collections by name for O(1) lookup, preserving
    # document order.
    if schema.elements:
        schema.elements = odict([(e.name, e) for e in schema.elements])
    if schema.complex_types:
        schema.complex_types = odict([(c.name, c)
                                             for c in schema.complex_types])
    if schema.simple_types:
        schema.simple_types = odict([(s.name, s)
                                              for s in schema.simple_types])
    if schema.attributes:
        schema.attributes = odict([(a.name, a) for a in schema.attributes])

    ctx.debug0("2 %s processing imports", R(tns))
    if schema.imports:
        for imp in schema.imports:
            if imp.namespace not in ctx.retval:
                ctx.debug1("%s importing %s", tns, imp.namespace)
                file_name = ctx.files[imp.namespace]
                # parse the imported document with a child clone rooted at
                # the imported file's directory
                parse_schema_file(ctx.clone(2, dirname(file_name)), file_name)
                ctx.retval[tns].imports.add(imp.namespace)

    ctx.debug0("3 %s processing attributes", G(tns))
    if schema.attributes:
        for s in schema.attributes.values():
            n, t = process_attribute(ctx, s)
            ctx.retval[ctx.tns].types[n] = t

    ctx.debug0("4 %s processing simple_types", G(tns))
    if schema.simple_types:
        for s in schema.simple_types.values():
            st = process_simple_type(ctx, s)
            ctx.retval[ctx.tns].types[s.name] = st

    ctx.debug0("5 %s processing complex_types", B(tns))
    if schema.complex_types:
        for c in schema.complex_types.values():
            process_complex_type(ctx, c)

    ctx.debug0("6 %s processing elements", Y(tns))
    if schema.elements:
        for e in schema.elements.values():
            process_schema_element(ctx, e)

    process_pending(ctx)

    if ctx.parent is None:  # for the top-most schema
        if ctx.children is not None:  # if it uses <include> or <import>
            # This is needed for schemas with circular imports
            for c in chain([ctx], ctx.children):
                print_pending(c)
            ctx.debug0('')

            for c in chain([ctx], ctx.children):
                process_pending(c)
            for c in chain([ctx], ctx.children):
                process_pending(c)
            ctx.debug0('')

            for c in chain([ctx], ctx.children):
                print_pending(c, fail=True)

    return ctx.retval
def to_cloth(self, ctx, cls, inst, cloth, parent, name=None, from_arr=False,
                                    as_attr=False, as_data=False, **kwargs):
    """Serialize ``inst`` into ``parent`` guided by the given ``cloth``.

    Falls back to ``to_parent`` when there is no cloth. Handles
    XmlAttribute/XmlData unwrapping, default-value substitution,
    subprotocol delegation, null/attr/data short-circuits, arrays and the
    per-class rendering handlers. May run as a coroutine (contains
    ``yield``) when the chosen handler returns a generator.
    """
    prot_name = self.__class__.__name__

    # unwrap XmlAttribute / XmlData markers to their inner type
    if issubclass(cls, XmlAttribute):
        cls = cls.type
        as_attr = True

    elif issubclass(cls, XmlData):
        cls = cls.type
        as_data = True

    pushed = False
    if cloth is None:
        # BUGFIX: log message typo ("fround" -> "found")
        logger_c.debug("No cloth found, switching to to_parent...")
        ret = self.to_parent(ctx, cls, inst, parent, name, **kwargs)

    else:
        # resolve the concrete class for polymorphic serialization
        cls, _ = self.get_polymorphic_target(cls, inst)
        cls_attrs = self.get_cls_attrs(cls)

        # if instance is None, use the default factory to generate one
        _df = cls_attrs.default_factory
        if inst is None and callable(_df):
            inst = _df()

        # if instance is still None, use the default value
        if inst is None:
            inst = cls_attrs.default

        # if there's a subprotocol, switch to it
        subprot = cls_attrs.prot
        if subprot is not None and not (subprot is self):
            # we can't do this because subprotocols don't accept cloths.
            # so we need to enter the cloth, which make it too late to
            # set attributes.
            assert not as_attr, "No subprot supported for fields " \
                "to be serialized as attributes, use type casting with " \
                "customized serializers in the current protocol instead."

            self._enter_cloth(ctx, cloth, parent)

            ret = subprot.subserialize(ctx, cls, inst, parent, name,
                                                     as_attr=as_attr, **kwargs)

        # if there is no subprotocol, try rendering the value
        else:
            # try rendering the null value
            if inst is None:
                if cls.Attributes.min_occurs > 0:
                    attrs = {}
                    if as_attr:
                        # FIXME: test needed
                        attrs[name] = ''

                    self._enter_cloth(ctx, cloth, parent, attrs=attrs)
                    identifier = "%s.%s" % (prot_name, "null_to_cloth")
                    logger_s.debug("Writing '%s' using %s type: %s.", name,
                                            identifier, cls.get_type_name())
                    parent.write(cloth)

                else:
                    logger_s.debug("Skipping '%s' type: %s because empty.",
                                                  name, cls.get_type_name())
                    self._enter_cloth(ctx, cloth, parent, skip=True)

                return

            if as_data:
                # we only support XmlData of a primitive.,. is this a
                # problem?
                parent.write(self.to_unicode(cls, inst))

                return

            if as_attr:
                attrs = {name: self.to_unicode(cls, inst)}

                self._enter_cloth(ctx, cloth, parent, attrs=attrs)

                return

            # push the instance at hand to instance stack. this makes it
            # easier for protocols to make decisions based on parents of
            # instances at hand.
            pushed = True
            logger_c.debug("%s %r pushed %r %r", R("#"), self, cls, inst)
            ctx.outprot_ctx.inst_stack.append((cls, inst, from_arr))

            # try rendering the array value
            if not from_arr and cls.Attributes.max_occurs > 1:
                ret = self.array_to_cloth(ctx, cls, inst, cloth, parent,
                                                   as_attr=as_attr, name=name)
            else:
                # try rendering anything else
                handler = self.rendering_handlers[cls]

                # disabled for performance reasons
                #identifier = "%s.%s" % (prot_name, handler.__name__)
                #logger_s.debug("Writing %s using %s for %s. Inst: %r",
                #                    name, identifier, cls.get_type_name(),
                #                    log_repr(inst, cls, from_array=from_arr))

                ret = handler(ctx, cls, inst, cloth, parent, name=name,
                                                             as_attr=as_attr)

    if isgenerator(ret):
        # forward sent values to the inner coroutine until Break
        try:
            while True:
                sv2 = (yield)
                ret.send(sv2)
        except Break as e:
            try:
                ret.throw(e)
            except (Break, StopIteration, GeneratorExit):
                pass
        finally:
            # pop the instance stack whether or not the coroutine finished
            if pushed:
                logger_c.debug("%s %r popped %r %r", B("#"), self, cls, inst)
                ctx.outprot_ctx.inst_stack.pop()
    else:
        if pushed:
            logger_c.debug("%s %r popped %r %r", B("#"), self, cls, inst)
            ctx.outprot_ctx.inst_stack.pop()
def to_cloth(self, ctx, cls, inst, cloth, parent, name=None, from_arr=False,
                                                                    **kwargs):
    """Serialize ``inst`` into ``parent`` guided by the given ``cloth``.

    Falls back to ``to_parent`` when there is no cloth. Handles
    default-value substitution, subprotocol delegation, null values,
    arrays and the per-class rendering handlers. May run as a coroutine
    (contains ``yield``) when the chosen handler returns a generator.
    """
    prot_name = self.__class__.__name__

    pushed = False
    if cloth is None:
        # BUGFIX: log message typo ("fround" -> "found")
        logger_c.debug("No cloth found, switching to to_parent...")
        ret = self.to_parent(ctx, cls, inst, parent, name, **kwargs)

    else:
        # resolve the concrete class for polymorphic serialization
        cls, _ = self.get_polymorphic_target(cls, inst)
        cls_attrs = self.get_cls_attrs(cls)

        # if instance is None, use the default factory to generate one
        _df = cls_attrs.default_factory
        if inst is None and callable(_df):
            inst = _df()

        # if instance is still None, use the default value
        if inst is None:
            inst = cls_attrs.default

        # if there's a subprotocol, switch to it
        subprot = cls_attrs.prot
        if subprot is not None and not (subprot is self):
            self._enter_cloth(ctx, cloth, parent)
            ret = subprot.subserialize(ctx, cls, inst, parent, name, **kwargs)

        # if there is no subprotocol, try rendering the value
        else:
            # try rendering the null value
            if inst is None:
                if cls.Attributes.min_occurs > 0:
                    self._enter_cloth(ctx, cloth, parent)
                    identifier = "%s.%s" % (prot_name, "null_to_cloth")
                    logger_s.debug("Writing '%s' using %s type: %s.", name,
                                            identifier, cls.get_type_name())
                    parent.write(cloth)

                else:
                    logger_s.debug("Skipping '%s' type: %s because empty.",
                                                  name, cls.get_type_name())
                    self._enter_cloth(ctx, cloth, parent, skip=True)

                return

            # push the instance at hand to instance stack. this makes it
            # easier for protocols to make decisions based on parents of
            # instances at hand.
            pushed = True
            logger_c.debug("%s %r pushed %r %r", R("#"), self, cls, inst)
            ctx.outprot_ctx.inst_stack.append((cls, inst, from_arr))

            # try rendering the array value
            if not from_arr and cls.Attributes.max_occurs > 1:
                ret = self.array_to_cloth(ctx, cls, inst, cloth, parent,
                                                                    name=name)
            else:
                # try rendering anything else
                handler = self.rendering_handlers[cls]

                # disabled for performance reasons
                #identifier = "%s.%s" % (prot_name, handler.__name__)
                #logger_s.debug("Writing %s using %s for %s. Inst: %r",
                #                    name, identifier, cls.get_type_name(),
                #                    log_repr(inst, cls, from_array=from_arr))

                ret = handler(ctx, cls, inst, cloth, parent, name=name)

    if isgenerator(ret):
        # forward sent values to the inner coroutine until Break
        try:
            while True:
                sv2 = (yield)
                ret.send(sv2)
        except Break as e:
            try:
                ret.throw(e)
            except (Break, StopIteration, GeneratorExit):
                pass
        finally:
            # pop the instance stack whether or not the coroutine finished
            if pushed:
                logger_c.debug("%s %r popped %r %r", B("#"), self, cls, inst)
                ctx.outprot_ctx.inst_stack.pop()
    else:
        if pushed:
            logger_c.debug("%s %r popped %r %r", B("#"), self, cls, inst)
            ctx.outprot_ctx.inst_stack.pop()