Example #1
File: base.py Project: f3at/feat
def __init__(self, converter_caps=None, pre_converter=None,
             registry=None, externalizer=None,
             source_ver=None, target_ver=None):
    global _global_registry
    assert ((source_ver is None) and (target_ver is None)) \
           or ((source_ver is not None) and (target_ver is not None))
    self.converter_capabilities = converter_caps or DEFAULT_CONVERTER_CAPS
    self._pre_converter = pre_converter and IConverter(pre_converter)
    self._registry = IRegistry(registry) if registry else _global_registry
    self._externalizer = externalizer and IExternalizer(externalizer)
    self._source_ver = source_ver
    self._target_ver = target_ver
    self.reset()
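
The constructor relies on two short-circuit idioms: converter_caps or DEFAULT_CONVERTER_CAPS falls back to a default when no capabilities are given, and pre_converter and IConverter(pre_converter) adapts an optional argument only when one is passed, leaving None otherwise. The assert enforces that source_ver and target_ver are supplied together. The following standalone sketch illustrates these idioms outside of feat; DEFAULT_CAPS, adapt() and make_options() are invented names, not feat APIs.

# Standalone illustration of the constructor idioms above.
# DEFAULT_CAPS, adapt() and make_options() are made-up stand-ins, not feat APIs.
DEFAULT_CAPS = frozenset(["caps"])


def adapt(obj):
    # Stand-in for interface adaptation such as IConverter(obj)
    return obj


def make_options(caps=None, pre_converter=None, source_ver=None, target_ver=None):
    # Both-or-neither check, equivalent to the assert in __init__ above
    assert (source_ver is None) == (target_ver is None)
    return {
        "caps": caps or DEFAULT_CAPS,  # default when no capabilities are given
        "pre_converter": pre_converter and adapt(pre_converter),  # adapt only if given
        "versions": (source_ver, target_ver),
    }


print(make_options())
print(make_options(source_ver=1, target_ver=2))
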
Example #2
class Unserializer(object):
    """Base class for unserializers. It handle delayed unpacking
    and instance restoration to resolve circular references.
    If no registry instance is specified when created, the default
    global registry will be used.

    A pre-converter can be specified at creation time, if so the
    data will be first converted by the given converter and then
    the result will be unserialized. Used to parse data before
    unserializing.
    """

    implements(IConverter)

    pass_through_types = ()

    def __init__(
        self,
        converter_caps=None,
        pre_converter=None,
        registry=None,
        externalizer=None,
        source_ver=None,
        target_ver=None,
    ):
        global _global_registry
        # Versions must be supplied together: either both are set
        # (version adaptation is wanted) or both are None
        assert ((source_ver is None) and (target_ver is None)) or (
            (source_ver is not None) and (target_ver is not None)
        )
        self.converter_capabilities = converter_caps or DEFAULT_CONVERTER_CAPS
        # Optional collaborators are interface-adapted only when given
        self._pre_converter = pre_converter and IConverter(pre_converter)
        self._registry = IRegistry(registry) if registry else _global_registry
        self._externalizer = externalizer and IExternalizer(externalizer)
        self._source_ver = source_ver
        self._target_ver = target_ver
        self.reset()

    ### IConverter ###

    def convert(self, data):
        try:
            # Pre-convert the data if a pre-converter was specified
            converted = self.pre_convertion(data)
            # Unpack the first level of values
            unpacked = self.unpack_data(converted)
            # Continue unpacking level by level
            self.finish_unpacking()
            # Should be finished by now
            return unpacked
        finally:
            # Reset the state to cleanup all references
            self.reset()

    ### protected ###

    def pre_convertion(self, data):
        if self._pre_converter is not None:
            return self._pre_converter.convert(data)
        return data

    def reset(self):
        self._references = {}  # {REFERENCE_ID: (DATA_ID, OBJECT)}
        self._pending = []  # Pending unpacking continuations
        self._instances = []  # [(RESTORATOR, INSTANCE, SNAPSHOT, REFID)]
        self._delayed = 0  # Nesting level of delayable unpacking sections

    def unpack_data(self, data):
        return self._unpack_data(data, None, None)

    def delayed_unpacking(self, container, fun, *args, **kwargs):
        """Should be used when unpacking mutable values.
        This allows circular references resolution by pausing serialization."""
        try:
            self._delayed += 1
            blob = self._begin()
            try:
                fun(*args, **kwargs)
                self._commit(blob)
                return container
            except DelayPacking:
                self._rollback(blob)
                continuation = (fun, args, kwargs)
                self._pending.append(continuation)
                return container
        finally:
            self._delayed -= 1

    def finish_unpacking(self):
        while self._pending:
            fun, args, kwargs = self._pending.pop(0)
            fun(*args, **kwargs)

        # Initialize the instances in creation order
        for restorator, instance, snapshot, _refid in self._instances:
            snapshot = self._adapt_snapshot(restorator, snapshot)
            instance.recover(snapshot)

        # Call the instances' post-restoration callbacks in reverse order,
        # in an attempt to reduce the chance of instances relying on
        # their references being fully restored when called.
        # This should not be relied upon anyway.
        for _, instance, _, _ in reversed(self._instances):
            restored_fun = getattr(instance, "restored", None)
            if restored_fun is not None:
                restored_fun()

    def restore_type(self, type_name):
        value = reflect.named_object(type_name)
        if not isinstance(value, type):
            raise ValueError("type %r unserialized to something that isn't a type: %r" % (type_name, value))
        return value

    def restore_external(self, data):
        if self._externalizer is None:
            raise ValueError("Got external reference %r but unserializer " "do not have any IExternalizer")
        identifier = self.unpack_data(data)
        instance = self._externalizer.lookup(identifier)
        if instance is None:
            raise ValueError("No external reference found with identifier %r" % (identifier,))
        return instance

    def prepare_instance(self, type_name):
        restorator = self._lookup_restorator(type_name)
        # Prepare the instance for recovery
        instance = restorator.prepare()
        if instance is not None:
            return restorator, instance

    def restore_instance(self, type_name, data, refid=None, restorator=None, instance=None):
        if restorator is None:
            restorator = self._lookup_restorator(type_name)

        if instance is None:
            # Prepare the instance for recovery
            instance = restorator.prepare()

        if instance is None:
            # Immutable type, we can't delay restoration
            snapshot = self.unpack_data(data)
            snapshot = self._adapt_snapshot(restorator, snapshot)
            return restorator.restore(snapshot)

        # Delay the instance restoration for later to handle circular refs
        return self.delayed_unpacking(instance, self._continue_restoring_instance, restorator, instance, data, refid)

    def restore_reference(self, refid, data):
        if refid in self._references:
            # Because of the DelayPacking exception, a reference
            # can be registered multiple times
            data_id, value = self._references[refid]
            if data_id == id(data):
                return value
            raise ValueError("Multiple references found with " "the same identifier: %s" % refid)
        value = self._unpack_data(data, refid, data)
        if refid not in self._references:
            # If not yet referenced
            self._references[refid] = (id(data), value)
        return value

    def restore_dereference(self, refid):
        if refid not in self._references:
            # Dereference to an unknown reference
            if self._delayed > 0:
                # If unpacking can be delayed because we are unpacking
                # a mutable object, just postpone the unpacking for later
                raise DelayPacking()
            raise ValueError("Dereferencing of yet unknown reference: %s" % refid)
        _data_id, value = self._references[refid]
        return value

    def unpack_unordered_values(self, values):
        """Unpack an unordered list of values taking DelayPacking
        exceptions into account to resolve circular references .
        Used to unpack set values when order is not guaranteed by
        the serializer. See unpack_unordered_pairs()."""

        values = list(values)  # To support iterators
        result = []

        # Try to unpack values more than one time to resolve cross references
        max_loop = 2
        while values and max_loop:
            next_values = []
            for value_data in values:
                blob = self._begin()
                try:
                    # try unpacking the value
                    value = self.unpack_data(value_data)
                    self._commit(blob)
                except DelayPacking:
                    self._rollback(blob)
                    # If it is delayed keep it for later
                    next_values.append(value_data)
                    continue
                result.append(value)
            values = next_values
            max_loop -= 1

        if values:
            # Not all items were resolved
            raise DelayPacking()

        return result

    def unpack_unordered_pairs(self, pairs):
        """Unpack an unordered list of value pairs taking DelayPacking
        exceptions into account to resolve circular references .
        Used to unpack dictionary items when the order is not guarennteed
        by the serializer. When item order change between packing
        and unpacking, references are not guaranteed to appear before
        dereferences anymore. So if unpacking an item fail because
        of unknown dereference, we must keep it aside, continue unpacking
        the other items and continue later."""

        items = [(False, k, v) for k, v in pairs]
        result = []

        # Try to unpack items more than one time to resolve cross references
        max_loop = 2
        while items and max_loop:
            next_items = []
            for key_unpacked, key_data, value_data in items:
                if key_unpacked:
                    key = key_data
                else:
                    blob = self._begin()
                    try:
                        # Try unpacking the key
                        key = self.unpack_data(key_data)
                        self._commit(blob)
                    except DelayPacking:
                        self._rollback(blob)
                        # If it is delayed keep it for later
                        next_items.append((False, key_data, value_data))
                        continue

                blob = self._begin()
                try:
                    # try unpacking the value
                    value = self.unpack_data(value_data)
                    self._commit(blob)
                except DelayPacking:
                    self._rollback(blob)
                    # If it is delayed keep it for later
                    next_items.append((True, key, value_data))
                    continue

                # Update the container with the unpacked value and key
                result.append((key, value))
            items = next_items
            max_loop -= 1

        if items:
            # Not all items were resolved
            raise DelayPacking()

        return result

    ### virtual ###

    def analyse_data(self, data):
        """Analyses the data provided and return a tuple containing
        the data type and a function to unpack it.
        The type can be None for immutable types, instances,
        reference and dereferences."""

    ### private ###

    def _begin(self):
        # Start a DelayPacking protected section
        blob = self._instances
        self._instances = []
        return blob

    def _rollback(self, blob):
        # We need to rollback after a DelayPacking has been raised;
        # we only keep instances that have been referenced
        for instance in self._instances:
            refid = instance[3]
            if refid is not None:
                blob.append(instance)
        self._instances = blob

    def _commit(self, blob):
        # Commit after a DelayPacking protected section
        # Joining the instance lists
        blob.extend(self._instances)
        self._instances = blob

    def _lookup_restorator(self, type_name):
        # Look up an IRestorator in the registry
        restorator = self._registry.lookup(type_name)
        if restorator is None:
            raise TypeError("Type %s not supported by unserializer %s" % (type_name, reflect.canonical_name(self)))
        return restorator

    def _unpack_data(self, data, refid, refdata):
        # Pass-through types are returned as-is; checking the MRO also
        # covers subclasses of base types and metaclasses
        if set(type(data).__mro__) & self.pass_through_types:
            return data

        analysis = self.analyse_data(data)

        if analysis is not None:

            constructor, unpacker = analysis

            if constructor is None:
                # Immutable types
                return unpacker(self, data)

            if callable(constructor):
                # Unpack mutable containers that provide a constructor
                container = constructor()
                if container is not None:
                    if refid is not None:
                        self._references[refid] = (id(refdata), container)
                    return self.delayed_unpacking(container, unpacker, self, container, data)

            else:
                # Instance type name
                prepared = self.prepare_instance(constructor)
                if prepared is None:
                    # Immutable instance
                    return unpacker(self, data, None, None, None)

                restorator, instance = prepared

                if refid is not None:
                    self._references[refid] = (id(refdata), instance)
                return self.delayed_unpacking(instance, unpacker, self, data, refid, restorator, instance)

        raise TypeError(
            "Type %s not supported by unserializer %s" % (type(data).__name__, reflect.canonical_name(self))
        )

    def _continue_restoring_instance(self, restorator, instance, data, refid):
        snapshot = self.unpack_data(data)
        # Delay instance initialization until the end to be sure
        # all circular references in the snapshots have been resolved
        self._instances.append((restorator, instance, snapshot, refid))
        return instance

    def _adapt_snapshot(self, restorator, snapshot):
        if self._source_ver is not None:
            # TODO: If an external adapter is needed, change this to a cast
            if IVersionAdapter.providedBy(restorator):
                adapter = IVersionAdapter(restorator)
                snapshot = adapter.adapt_version(snapshot, self._source_ver, self._target_ver)
        return snapshot
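
To make the delayed-unpacking machinery above more concrete, here is a small self-contained toy that mirrors the same pattern: each container is registered in a reference table before it is filled, a Delay exception queues the unfinished work (playing the role of DelayPacking and the _pending list), and a finish pass retries the queued continuations once the referenced containers exist. ToyUnpacker and every name inside it are invented for illustration; this is not the feat API.

# Minimal sketch of the delayed-unpacking pattern; invented names, not feat API.


class Delay(Exception):
    """Raised when a referenced container is not known yet."""


class ToyUnpacker(object):

    def __init__(self):
        self._references = {}  # {REFID: container}
        self._pending = []  # queued (fun, args) continuations

    def unpack(self, packed):
        # packed maps refid -> list of items; an item may be ("deref", refid)
        for refid, items in packed.items():
            container = []
            # Register the container before filling it, so other containers
            # can already reference it (same trick as _references above)
            self._references[refid] = container
            self._delayed_fill(container, items)
        self._finish()
        return self._references

    def _delayed_fill(self, container, items):
        try:
            self._fill(container, items)
        except Delay:
            self._pending.append((self._fill, (container, items)))

    def _fill(self, container, items):
        del container[:]  # drop any partial fill from a delayed attempt
        for item in items:
            if isinstance(item, tuple) and item[0] == "deref":
                if item[1] not in self._references:
                    raise Delay()
                container.append(self._references[item[1]])
            else:
                container.append(item)

    def _finish(self):
        # Second pass: replay the continuations, like finish_unpacking()
        while self._pending:
            fun, args = self._pending.pop(0)
            fun(*args)


# Two containers referencing each other are resolved by the second pass
packed = {1: ["a", ("deref", 2)], 2: ["b", ("deref", 1)]}
refs = ToyUnpacker().unpack(packed)
assert refs[1][1] is refs[2] and refs[2][1] is refs[1]

The real Unserializer layers more on top of this basic scheme: partially restored instances are handled through _begin()/_rollback()/_commit(), instance snapshots are only recovered at the end of finish_unpacking(), and snapshots can be version-adapted through IVersionAdapter.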