def save(self, collection: T.bpy_prop_collection, parent: T.bpy_struct, key: str, context: Context):
        """
        Save this proxy into collection

        Args:
            collection: the collection into which this proxy is saved
            parent: the attribute that contains collection (e.g. a Scene instance)
            key: the name of the collection in parent (e.g "background_images")
            context: the proxy and visit state
        """
        sequence = self._sequence

        # Using clear_from ensures that sequence data is compatible with remaining elements after
        # truncate_collection. This addresses an issue with Nodes, for which the order of default nodes (material
        # output and principled in collection) may not match the order of incoming nodes. Saving node data into a
        # node of the wrong type can lead to a crash.
        clear_from = specifics.clear_from(collection, sequence, context)
        specifics.truncate_collection(collection, clear_from)

        # For collections like `IDMaterials`, the creation API (`.new(datablock_ref)`) also writes the value.
        # For collections like `Nodes`, the creation API (`.new(name)`) does not write the item value.
        # So the value must always be written for all collection types.
        collection_length = len(collection)
        # Overwrite in place the items that survived truncation ...
        for i, item_proxy in enumerate(sequence[:collection_length]):
            write_attribute(collection, i, item_proxy, context)
        # ... then create and write the items beyond the current collection length.
        for i, item_proxy in enumerate(sequence[collection_length:], collection_length):
            try:
                specifics.add_element(collection, item_proxy, i, context)
                if self._resolver:
                    # Notify pending references that item i now exists — TODO confirm resolver contract
                    self._resolver.resolve(i)
            except AddElementFailed:
                # Creation failed: stop rather than write the remaining items into wrong slots.
                break
            # Must write at once, otherwise the default item name might conflict with a later item name
            write_attribute(collection, i, item_proxy, context)
Exemplo n.º 2
0
    def save(
        self,
        attribute: T.bpy_struct,
        parent: Union[T.bpy_struct, T.bpy_prop_collection],
        key: Union[int, str],
        context: Context,
    ):
        """
        Save this proxy into attribute

        Args:
            attribute: the bpy_struct to store this proxy into
            parent: (e.g an Object instance)
            key: (e.g. "display)
            context: the proxy and visit state
        """
        # animation_data needs special handling: (re)create a clean AnimData to write into.
        needs_anim_data = attribute is None or isinstance(attribute, T.AnimData)
        if key == "animation_data" and needs_anim_data:
            attribute = _create_clear_animation_data(self, parent)

        if attribute is None:
            logger.info(f"save: attribute is None for {context.visit_state.display_path()}.{key}")
            return

        # Track the visit path for diagnostics; always popped, even on failure.
        visit_path = context.visit_state.path
        visit_path.append(key)
        try:
            for member_name, member_proxy in self._data.items():
                write_attribute(attribute, member_name, member_proxy, context)
        finally:
            visit_path.pop()
Exemplo n.º 3
0
    def save(self, attribute: T.ID, unused_parent: T.bpy_struct,
             unused_key: Union[int, str], context: Context) -> T.ID:
        """
        Save this proxy into an embedded datablock

        Args:
            attribute: the datablock into which this proxy is saved
            unused_parent: the struct that contains the embedded datablock (e.g. a Scene)
            unused_key: the member name of the datablock in parent (e.g. node_tree)
            context: proxy and visit state

        Returns:
            The saved datablock, or None if _pre_save could not provide a target
        """

        # _pre_save may prepare or replace the target datablock before members are written.
        datablock = self._pre_save(attribute, context)
        if datablock is None:
            logger.error(
                f"DatablockProxy.save() get None after _pre_save({attribute})")
            return None

        # enter_datablock scopes the visit state to this datablock while its members are written.
        with context.visit_state.enter_datablock(self, datablock):
            for k, v in self._data.items():
                write_attribute(datablock, k, v, context)

        return datablock
Exemplo n.º 4
0
    def save(self, bl_instance: Any, key: Union[int, str], context: Context):
        """
        Save this proxy into a Blender attribute

        Args:
            bl_instance: the struct or collection that contains the target attribute
            key: index (for collections) or attribute name identifying the target
            context: the proxy and visit state
        """
        assert isinstance(key, (int, str))

        # Resolve the write target according to the container kind.
        if isinstance(key, int):
            target = bl_instance[key]
        elif isinstance(bl_instance, T.bpy_prop_collection):
            target = bl_instance.get(key)
        else:
            target = getattr(bl_instance, key, None)
            if target is not None:
                # _pre_save is only applied to plain struct members — TODO confirm intent
                self._pre_save(target, context)

        if target is None:
            if isinstance(bl_instance, T.bpy_prop_collection):
                logger.warning(
                    f"Cannot write to '{bl_instance}', attribute '{key}' because it does not exist."
                )
            else:
                # Don't log this because it produces too many log messages when participants have plugins
                # f"Note: May be due to a plugin used by the sender and not on this Blender"
                # f"Note: May be due to unimplemented 'use_{key}' implementation for type {type(bl_instance)}"
                # f"Note: May be {bl_instance}.{key} should not have been saved"
                pass

            return

        # Track the visit path for diagnostics; always popped, even on failure.
        context.visit_state.path.append(key)
        try:
            for k, v in self._data.items():
                write_attribute(target, k, v, context)
        finally:
            context.visit_state.path.pop()
Exemplo n.º 5
0
    def save(self, bl_instance: T.bpy_struct, attr_name: str,
             context: Context):
        """
        Save this proxy into the Blender property bl_instance.attr_name (an array-of-structs collection).
        """

        # A zero length with non-empty member data indicates an inconsistent proxy state.
        if self.length == 0 and len(self._data) != 0:
            logger.error(
                f"save(): length is {self.length} but _data is {self._data.keys()}"
            )
            # return

        target = getattr(bl_instance, attr_name, None)
        if target is None:
            return

        # Resize the Blender collection to fit this proxy.
        specifics.fit_aos(target, self, context)
        # Nothing else to save here. The buffers that contain vertices and co are serialized apart from the json
        # that contains the Mesh members. The children of this are SoaElement and have no child.
        # They are updated directly by SoaElement.save_array()

        context.visit_state.path.append(attr_name)
        try:
            for k, v in self._data.items():
                write_attribute(target, k, v, context)
        finally:
            context.visit_state.path.pop()
Exemplo n.º 6
0
    def save(self, collection: T.bpy_prop_collection, parent: T.bpy_struct,
             key: str, context: Context):
        """
        Save this proxy into collection

        Args:
            collection: the collection into which this proxy is saved
            parent: the attribute that contains collection (e.g. a Scene instance)
            key: the name of the collection in parent (e.g "background_images")
            context: the proxy and visit state
        """
        context.visit_state.path.append(key)
        try:
            sequence = self._sequence

            # Using clear_from ensures that sequence data is compatible with remaining elements after
            # truncate_collection. This addresses an issue with Nodes, for which the order of default nodes (material
            # output and principled in collection) may not match the order of incoming nodes. Saving node data into a
            # node of the wrong type can lead to a crash.
            clear_from = specifics.clear_from(collection, sequence)
            specifics.truncate_collection(collection, clear_from)

            # For collections like `IDMaterials`, the creation API (`.new(datablock_ref)`) also writes the value.
            # For collections like `Nodes`, the creation API (`.new(name)`) does not write the item value.
            # So the value must always be written for all collection types.
            # First create the missing elements ...
            for i in range(len(collection), len(sequence)):
                item_proxy = sequence[i]
                specifics.add_element(collection, item_proxy, context)
            # ... then write every element, pre-existing or newly created.
            for i, v in enumerate(sequence):
                write_attribute(collection, i, v, context)
        finally:
            context.visit_state.path.pop()
Exemplo n.º 7
0
    def save(self, unused_attribute, parent: bpy.types.bpy_prop_collection, key: str, context: Context):
        """Saves this proxy into all parent[i].key

        Args:
            unused_attribute:
            parent: collection of structure (e.g. a SplineBezierPoints instance)
            key: the name of the structure member (e.g "handle_left_type")
            context: proxy and visit state
        """
        # JSON serialization turned the integer indices into string keys: convert them back.
        for index_string, item_proxy in self._data.items():
            target_struct = parent[int(index_string)]
            write_attribute(target_struct, key, item_proxy, context)
    def save(self, attribute: bpy.types.Collection, parent: Any, key: str, context: Context):
        """
        OBSOLETE Save this Proxy into a Blender collection that may be a collection of standalone datablocks
        in bpy.data or a collection of referenced datablocks like bpy.types.Collection.children

        Args:
            attribute: the collection to write into
            parent: the attribute that contains attribute
            key: the name of the collection in parent
            context: proxy and visit state
        """
        # Bug fix: the annotation was `bpy.type.Collection`; `bpy.type` does not exist and
        # annotations are evaluated at function definition time, raising AttributeError on import.
        if not self._data:
            return

        # collection of standalone datablocks
        for k, v in self._data.items():
            write_attribute(attribute, k, v, context)
Exemplo n.º 9
0
    def save(self,
             bl_instance: Any = None,
             attr_name: str = None,
             context: Context = None) -> T.ID:
        """
        Save this proxy into an existing datablock that may be a bpy.data member item or an embedded datablock

        Args:
            bl_instance: the container that holds the datablock (defaults to self.collection)
            attr_name: the datablock name within bl_instance (defaults to this proxy's "name" data)
            context: proxy and visit state

        Returns:
            The datablock that was written, or None on failure.
        """
        collection_name = self.collection_name
        if collection_name is not None:
            logger.info(f"IDproxy save standalone {self}")
            # a standalone datablock in a bpy.data collection

            if bl_instance is None:
                bl_instance = self.collection
            if attr_name is None:
                attr_name = self.data("name")
            id_ = bl_instance.get(attr_name)

            if id_ is None:
                # The datablock does not exist yet: create it via the type-specific constructor.
                logger.warning(
                    f"IDproxy save standalone {self}, not found. Creating")
                id_ = specifics.bpy_data_ctor(collection_name, self, context)
                if id_ is None:
                    logger.warning(
                        f"Cannot create bpy.data.{collection_name}[{attr_name}]"
                    )
                    return None
                if DEBUG:
                    # Blender may have renamed on collision (e.g. "name.001"): detect the mismatch.
                    if bl_instance.get(attr_name) != id_:
                        logger.error(
                            f"Name mismatch after creation of bpy.data.{collection_name}[{attr_name}] "
                        )
                id_.mixer_uuid = self.mixer_uuid()
        else:
            logger.info(f"IDproxy save embedded {self}")
            # an is_embedded_data datablock. pre_save will retrieve it by calling target
            id_ = getattr(bl_instance, attr_name)
            pass

        target = self._pre_save(id_, context)
        if target is None:
            logger.warning(
                f"DatablockProxy.save() {bl_instance}.{attr_name} is None")
            return None

        # Scope the visit state to this datablock while its members are written.
        try:
            context.visit_state.datablock_proxy = self
            for k, v in self._data.items():
                write_attribute(target, k, v, context)
        finally:
            context.visit_state.datablock_proxy = None

        return target
Exemplo n.º 10
0
    def _save(self, datablock: T.ID, context: Context) -> T.ID:
        """
        Write this proxy into datablock, then save its custom properties.

        Args:
            datablock: the datablock to write into
            context: proxy and visit state

        Returns:
            The saved datablock, or None if _pre_save could not provide a target.
        """
        datablock = self._pre_save(datablock, context)
        if datablock is None:
            logger.warning(
                f"DatablockProxy.update_standalone_datablock() {self} pre_save returns None"
            )
            # Bug fix: was `return None, None` (a 2-tuple), inconsistent with the declared
            # `-> T.ID` return type and with the single-value success path below.
            return None

        # enter_datablock scopes the visit state to this datablock while its members are written.
        with context.visit_state.enter_datablock(self, datablock):
            for k, v in self._data.items():
                write_attribute(datablock, k, v, context)

        self._custom_properties.save(datablock)
        return datablock
Exemplo n.º 11
0
def write_metaballelements(target, src_sequence, context: Context):
    """Resize target (a MetaBallElements collection) to match src_sequence, then write every element."""
    wanted = len(src_sequence)

    # Shrink the destination from the tail until the lengths match.
    while len(target) > wanted:
        target.remove(target[-1])

    # Grow the destination. new() creates a BALL, but write_attribute overwrites the type below.
    while len(target) < wanted:
        target.new()

    assert len(target) == wanted
    for index, element_proxy in enumerate(src_sequence):
        write_attribute(target, index, element_proxy, context)
Exemplo n.º 12
0
    def save(self, parent: Any, key: str, context: Context):
        """
        Save this Proxy a Blender collection that may be a collection of standalone datablocks in bpy.data
        or a collection of referenced datablocks like bpy.type.Collection.children
        """
        # Guard clauses: nothing to write, or the attribute does not exist on parent.
        if not self._data:
            return
        destination = getattr(parent, key, None)
        if destination is None:
            # Intentionally not logged: it would produce far too many messages.
            return

        # collection of standalone datablocks
        for name, datablock_proxy in self._data.items():
            write_attribute(destination, name, datablock_proxy, context)
Exemplo n.º 13
0
    def save(
        self,
        attribute: T.bpy_struct,
        parent: Union[T.bpy_struct, T.bpy_prop_collection],
        key: Union[int, str],
        context: Context,
    ):
        """Save this proxy into attribute, which is contained in parent[key] or parent.key

        Args:
            attribute: the attribute into which the proxy is saved.
            parent: the attribute that contains attribute
            key: the string or index that identifies attribute in parent
            context: proxy and visit state
        """
        # Resolve the referenced item from the datablock that owns attribute, then assign it.
        owning_id = attribute.id_data
        pointee = self._collection(owning_id)[self._index]
        write_attribute(parent, key, pointee, context)
Exemplo n.º 14
0
    def save(self, attribute: T.bpy_prop_collection, parent: T.bpy_struct, key: Union[int, str], context: Context):
        """
        Save this proxy into attribute.

        Args:
            attribute: a collection of bpy_struct (e.g. a_Mesh_instance.vertices)
            parent: the attribute that contains attribute (e.g. a Mesh instance)
            key: the name of the bpy_collection in parent (e.g "vertices")
            context: proxy and visit state
        """

        # Resize the Blender collection to fit this proxy.
        specifics.fit_aos(attribute, self, context)

        # Nothing else to save here. The buffers that contain vertices and co are serialized apart from the json
        # that contains the Mesh members. The children of this are SoaElement and have no child.
        # They are updated directly by SoaElement.save_array()

        for k, v in self._data.items():
            write_attribute(attribute, k, v, context)
Exemplo n.º 15
0
 def save(self, bl_instance: Any, attr_name: str, context: Context):
     """
     Save this proxy into the Blender property bl_instance.attr_name (a sequence collection).
     """
     target = getattr(bl_instance, attr_name, None)
     if target is None:
         # # Don't log this, too many messages
         # f"Saving {self} into non existent attribute {bl_instance}.{attr_name} : ignored"
         return
     context.visit_state.path.append(attr_name)
     try:
         sequence = self._sequence
         # Drop destination items beyond the incoming sequence length.
         specifics.truncate_collection(target, len(self._sequence))
         # Create the missing items ...
         for i in range(len(target), len(sequence)):
             item_proxy = sequence[i]
             specifics.add_element(target, item_proxy, context)
         # ... then write every item, pre-existing or newly created.
         for i, v in enumerate(sequence):
             write_attribute(target, i, v, context)
     finally:
         context.visit_state.path.pop()
Exemplo n.º 16
0
    def save(
        self,
        attribute: T.bpy_struct,
        parent: Union[T.bpy_struct, T.bpy_prop_collection],
        key: Union[int, str],
        context: Context,
    ):
        """Save this proxy into attribute, which is contained in parent[key] or parent.key

        Args:
            attribute: the attribute into which the proxy is saved.
            parent: the attribute that contains attribute
            key: the string or index that identifies attribute in parent
            context: proxy and visit state
        """

        if self._index == -1:
            # A null reference: nothing to assign.
            pointee = None
        else:
            collection = self._collection(parent.id_data)
            try:
                pointee = collection[self._index]
            except IndexError:
                # The referenced item does not exist yet. Register a closure that performs the
                # assignment once the collection proxy creates the item at self._index.
                collection_proxy = self._collection_proxy(
                    parent.id_data, context)
                collection_proxy.register_unresolved(
                    self._index, lambda: write_attribute(
                        parent, key, collection[self._index], context))

                # TODO Fails if an array member references an element not yet created, like bones with parenting reversed
                # Could be solved with a delayed reference resolution:
                # - keep a reference to the collection proxy
                # - store the assignment closure in the collection proxy
                # - when the collection proxy creates the item, call the closure
                logger.error(
                    "save(): Unimplemented: reference an item not yet created ..."
                )
                logger.error(f"... {parent!r}.{key}")
                logger.error(f"... references {collection!r}[{self._index}]")
            else:
                write_attribute(parent, key, pointee, context)
Exemplo n.º 17
0
 def test_write_simple_types(self):
     """Check that write_attribute round-trips scalar, bool, enum, vector and matrix values."""
     scene = D.scenes[0]
     object_ = D.objects[0]
     # matrix = [10.0, 20.0, 30.0, 40.0, 11.0, 21.0, 31.0, 41.0, 12.0, 22.0, 32.0, 42.0, 14.0, 24.0, 34.0, 44]
     matrix2 = [[10.0, 20.0, 30.0, 40], [11.0, 21.0, 31.0, 41],
                [12.0, 22.0, 32.0, 42], [14.0, 24.0, 34.0, 44]]
     values = [
         # (scene, "name", "Plop"),
         (scene, "frame_current", 99),
         (scene, "use_gravity", False),
         (scene, "gravity", [-1, -2, -3]),
         (scene, "gravity", Vector([-10, -20, -30])),
         (scene, "sync_mode", "FRAME_DROP"),
         # (object_, "matrix_world", matrix),
         (object_, "matrix_world", Matrix(matrix2)),
     ]
     for bl_instance, name, value in values:
         write_attribute(bl_instance, name, value, self.proxy.context())
         # Compare after converting to the stored type (e.g. list -> Vector).
         stored_value = getattr(bl_instance, name)
         stored_type = type(stored_value)
         self.assertEqual(stored_type(value), stored_value)
Exemplo n.º 18
0
    def apply(
        self,
        attribute: Union[T.bpy_struct, T.bpy_prop_collection],
        parent: Union[T.bpy_struct, T.bpy_prop_collection],
        key: Union[int, str],
        delta: Delta,
        context: Context,
        to_blender: bool = True,
    ) -> Union[DatablockRefProxy, NonePtrProxy]:
        """
        Apply delta to an attribute with None value.

        This is used for instance Scene.camera is None and update to hold a valid Camera reference

        Args:
            attribute: the Blender attribute to update (e.g a_scene.camera)
            parent: the attribute that contains attribute (e.g. a Scene instance)
            key: the key that identifies attribute in parent (e.g; "camera").
            delta: the delta to apply
            context: proxy and visit state
            to_blender: update attribute in addition to this Proxy

        Returns:
            The replacement proxy carried by delta.
        """
        replace = delta.value

        if to_blender:
            if isinstance(replace, DatablockRefProxy):
                # Resolve the datablock by uuid and assign the reference directly.
                datablock = context.proxy_state.datablock(
                    replace._datablock_uuid)
                if isinstance(key, int):
                    parent[key] = datablock
                else:
                    setattr(parent, key, datablock)

            else:
                # This branch is taken when animation_data or node_tree instance animation_data are set from None to a
                # valid value, after animation_data_create() has been called or use_nodes is set to True
                write_attribute(parent, key, replace, context)

        return replace
Exemplo n.º 19
0
    def save(
        self,
        attribute: T.bpy_struct,
        parent: Union[T.bpy_struct, T.bpy_prop_collection],
        key: Union[int, str],
        context: Context,
    ):
        """
        Save this proxy into attribute

        Args:
            attribute: the bpy_struct to store this proxy into
            parent: (e.g an Object instance)
            key: (e.g. "display)
            context: the proxy and visit state
        """
        if attribute is None:
            # Only a missing collection member is worth a warning; a missing struct member is
            # silently skipped because sender-side plugins routinely produce attributes
            # unknown on this Blender.
            if isinstance(parent, T.bpy_prop_collection):
                logger.warning(
                    f"Cannot write to '{parent}', attribute '{key}' because it does not exist."
                )
            return

        # Track the visit path for diagnostics; always popped, even on failure.
        visit_path = context.visit_state.path
        visit_path.append(key)
        try:
            for member_name, member_proxy in self._data.items():
                write_attribute(attribute, member_name, member_proxy, context)
        finally:
            visit_path.pop()
Exemplo n.º 20
0
def write_curvemappoints(target, src_sequence, context: Context):
    """Resize target (a CurveMapPoints collection) to match src_sequence, then write every point."""
    src_length = len(src_sequence)

    # CurveMapPoints specific (alas ...): a curve map needs at least two points.
    if src_length < 2:
        logger.error(f"Invalid length for curvemap: {src_length}. Expected at least 2")
        return

    # Shrink the destination from the tail until the lengths match.
    while len(target) > src_length:
        target.remove(target[-1])

    # Grow the destination. The new() parameters are CurveMapPoints specific; placeholder
    # coordinates are fine since every slot is overwritten below. Note that new() inserts
    # at the head. Not optimal for big arrays, but much simpler given that the new()
    # parameters depend on the collection in a way that cannot be determined automatically.
    while len(target) < src_length:
        target.new(0.0, 0.0)

    assert len(target) == src_length
    for index, point_proxy in enumerate(src_sequence):
        write_attribute(target, index, point_proxy, context)
Exemplo n.º 21
0
    def apply(
        self,
        collection: T.bpy_prop_collection,
        parent: T.bpy_struct,
        key: Union[int, str],
        delta: Delta,
        context: Context,
        to_blender=True,
    ) -> StructCollectionProxy:
        """
        Apply delta to this proxy and optionally to the Blender attribute it manages.

        Args:
            collection: the collection to update (e.g. a_mesh.material)
            parent: the attribute that contains collection (e.g. a Mesh instance)
            key: the key that identifies collection in parent (e.g "materials")
            delta: the delta to apply
            context: proxy and visit state
            to_blender: update the managed Blender attribute in addition to this Proxy

        Returns:
            This proxy, updated.
        """
        assert isinstance(key, str)

        update = delta.value
        assert type(update) == type(self)

        if isinstance(delta, DeltaReplace):
            # Full replacement: wipe the Blender collection and rebuild it with save().
            self._sequence = update._sequence
            if to_blender:
                specifics.truncate_collection(collection, 0)
                self.save(collection, parent, key, context)
        else:
            # a sparse update

            context.visit_state.path.append(key)
            try:
                sequence = self._sequence

                # Delete before update and process updates in reverse order to avoid spurious renames.
                # Starting with sequence A, B, C, D and delete B causes :
                # - an update for items 1 and 2 to be renamed into C and D
                # - one delete
                # If the update is processed first, Blender renames item 3 into D.001
                # If the deletes are processed first but the updates are processed in order, Blender renames item 1
                # into C.001

                delete_count = update._diff_deletions
                if delete_count > 0:
                    if to_blender:
                        specifics.truncate_collection(
                            collection,
                            len(collection) - delete_count)
                    del sequence[-delete_count:]

                # Updates in reverse order (see comment above).
                for i, delta_update in reversed(update._diff_updates):
                    sequence[i] = apply_attribute(collection, i, sequence[i],
                                                  delta_update, context,
                                                  to_blender)

                # Additions are appended after the existing items.
                for i, delta_addition in enumerate(update._diff_additions,
                                                   len(sequence)):
                    if to_blender:
                        item_proxy = delta_addition.value
                        specifics.add_element(collection, item_proxy, context)
                        write_attribute(collection, i, item_proxy, context)
                    sequence.append(delta_addition.value)

            except Exception as e:
                # Best effort: log and keep going rather than abort the whole visit.
                logger.warning(
                    "apply: Exception while processing attribute ...")
                logger.warning(
                    f"... {context.visit_state.display_path()}.{key}")
                logger.warning(f"... {e!r}")
            finally:
                context.visit_state.path.pop()

        return self
Exemplo n.º 22
0
    def apply(self,
              parent: Any,
              key: Union[int, str],
              delta: Optional[DeltaUpdate],
              context: Context,
              to_blender=True) -> StructCollectionProxy:
        """
        Apply delta to this proxy and optionally to the Blender collection found at parent[key].

        Args:
            parent: the struct or collection that contains the target collection
            key: index or attribute name identifying the collection in parent
            delta: the delta to apply
            context: proxy and visit state
            to_blender: update the managed Blender attribute in addition to this Proxy

        Returns:
            This proxy, updated.
        """
        assert isinstance(key, (int, str))

        update = delta.value
        assert type(update) == type(self)

        # Resolve the target collection according to the container kind.
        if isinstance(key, int):
            collection = parent[key]
        elif isinstance(parent, T.bpy_prop_collection):
            collection = parent.get(key)
        else:
            collection = getattr(parent, key, None)

        if isinstance(delta, DeltaReplace):
            # Full replacement: wipe the Blender collection and rebuild it with save().
            self._sequence = update._sequence
            if to_blender:
                specifics.truncate_collection(collection, 0)
                self.save(parent, key, context)
        else:
            # a sparse update

            context.visit_state.path.append(key)
            try:
                sequence = self._sequence

                # Delete before update and process updates in reverse order to avoid spurious renames.
                # Starting with sequence A, B, C, D and delete B causes :
                # - an update for items 1 and 2 to be renamed into C and D
                # - one delete
                # If the update is processed first, Blender renames item 3 into D.001
                # If the deletes are processed first but the updates are processed in order, Blender renames item 1
                # into C.001

                for _ in range(update._diff_deletions):
                    if to_blender:
                        item = collection[-1]
                        collection.remove(item)
                    del sequence[-1]

                # Updates in reverse order (see comment above).
                for i, delta_update in reversed(update._diff_updates):
                    sequence[i] = apply_attribute(collection, i, sequence[i],
                                                  delta_update, context,
                                                  to_blender)

                # Additions are appended after the existing items.
                for i, delta_addition in enumerate(update._diff_additions,
                                                   len(sequence)):
                    if to_blender:
                        item_proxy = delta_addition.value
                        specifics.add_element(collection, item_proxy, context)
                        write_attribute(collection, i, item_proxy, context)
                    sequence.append(delta_addition.value)

            except Exception as e:
                # Best effort: log and drop the update rather than abort the whole visit.
                logger.warning(
                    f"StructCollectionProxy.apply(). Processing {delta}")
                logger.warning(f"... for {collection}")
                logger.warning(f"... Exception: {e!r}")
                logger.warning("... Update ignored")

            finally:
                context.visit_state.path.pop()

        return self
Exemplo n.º 23
0
    def save(self, bl_instance: Any, attr_name: str, context: Context):
        """
        Save this proxy into the Blender property bl_instance.attr_name.

        Dispatches on the collection's srna type because each bpy_prop_collection has
        its own type-specific creation API (see comments below).
        """
        target = getattr(bl_instance, attr_name, None)
        if target is None:
            # # Don't log this, too many messages
            # f"Saving {self} into non existent attribute {bl_instance}.{attr_name} : ignored"
            return
        try:
            context.visit_state.path.append(attr_name)
            sequence = self._data.get(MIXER_SEQUENCE)
            if sequence:
                srna = bl_instance.bl_rna.properties[attr_name].srna
                if srna:
                    # TODO move to specifics
                    # Dispatch on the struct RNA type of the collection items.
                    if srna.bl_rna is bpy.types.CurveMapPoints.bl_rna:
                        write_curvemappoints(target, sequence, context)
                    elif srna.bl_rna is bpy.types.MetaBallElements.bl_rna:
                        write_metaballelements(target, sequence, context)
                    elif srna.bl_rna is bpy.types.MeshPolygons.bl_rna:
                        # see soable_collection_properties
                        target.add(len(sequence))
                        for i, proxy in enumerate(sequence):
                            write_attribute(target, i, proxy, context)
                    elif srna.bl_rna is bpy.types.GPencilFrames.bl_rna:
                        # GPencilFrames.new() requires the frame number.
                        for i, proxy in enumerate(sequence):
                            frame_number = proxy.data("frame_number")
                            target.new(frame_number)
                            write_attribute(target, i, proxy, context)
                    elif srna.bl_rna is bpy.types.GPencilStrokes.bl_rna:
                        for i, proxy in enumerate(sequence):
                            target.new()
                            write_attribute(target, i, proxy, context)
                    elif srna.bl_rna is bpy.types.GPencilStrokePoints.bl_rna:
                        # GPencilStrokePoints.new() takes a count, not one call per item.
                        target.new(len(sequence))
                        for i, proxy in enumerate(sequence):
                            write_attribute(target, i, proxy, context)
                    else:
                        logger.error(f"unsupported sequence type {srna}")
                        pass

                elif len(target) == len(sequence):
                    for i, v in enumerate(sequence):
                        # TODO this way can only save items at pre-existing slots. The bpy_prop_collection API
                        # uses struct specific API and ctors:
                        # - CurveMapPoints uses: .new(x, y) and .remove(point), no .clear(). new() inserts in head !
                        #   Must have at least 2 points left !
                        # - NodeTreeOutputs uses: .new(type, name), .remove(socket), has .clear()
                        # - ActionFCurves uses: .new(data_path, index=0, action_group=""), .remove(fcurve)
                        # - GPencilStrokePoints: .add(count), .pop()
                        write_attribute(target, i, v, context)
                else:
                    logger.warning(
                        f"Not implemented: write sequence of different length (incoming: {len(sequence)}, existing: {len(target)})for {bl_instance}.{attr_name}"
                    )
            else:
                # dictionary
                specifics.truncate_collection(target, self, context)
                for k, v in self._data.items():
                    write_attribute(target, k, v, context)
        finally:
            context.visit_state.path.pop()
Exemplo n.º 24
0
    def apply(
        self, parent: Any, key: Union[int, str], delta: Optional[DeltaUpdate], context: Context, to_blender=True
    ) -> StructProxy:
        """Apply a collection delta to this proxy, and optionally to Blender.

        Args:
            parent: the attribute that contains the collection (e.g. a Scene instance
                or a bpy_prop_collection indexed by key)
            key: the index or name of the collection in parent
            delta: the incoming delta; delta.value must be a proxy of the same type as self.
                NOTE(review): annotated Optional, but delta.value is read unconditionally
                below - a None delta would raise AttributeError. Confirm callers never pass None.
            context: the proxy and visit state
            to_blender: if True, also write the changes into the Blender data; if False,
                only update this proxy's internal state

        Returns:
            self, updated in place
        """

        assert isinstance(key, (int, str))

        # TODO factorize with save

        # Resolve the target Blender collection from parent/key, creating it when it is
        # a missing member of a bpy_prop_collection.
        if isinstance(key, int):
            collection = parent[key]
        elif isinstance(parent, T.bpy_prop_collection):
            # TODO append an element :
            # https://blenderartists.org/t/how-delete-a-bpy-prop-collection-element/642185/4
            collection = parent.get(key)
            if collection is None:
                collection = specifics.add_element(self, parent, key, context)
        else:
            collection = getattr(parent, key, None)

        update = delta.value
        assert type(update) == type(self)

        try:
            # Track the attribute path for error reporting / visit state; popped in finally.
            context.visit_state.path.append(key)
            # When the proxy stores an array-like collection, items live under MIXER_SEQUENCE
            # as a list; otherwise the proxy is keyed by item name (dict-like branch below).
            sequence = self._data.get(MIXER_SEQUENCE)
            if sequence:

                # input validity assertions
                add_indices = [i for i, delta in enumerate(update._data.values()) if isinstance(delta, DeltaAddition)]
                del_indices = [i for i, delta in enumerate(update._data.values()) if isinstance(delta, DeltaDeletion)]
                if add_indices or del_indices:
                    # Cannot have deletions and additions
                    assert not add_indices or not del_indices, "not add_indices or not del_indices"
                    indices = add_indices if add_indices else del_indices
                    # Check that adds and deleted are at the end
                    assert (
                        not indices or indices[-1] == len(update._data) - 1
                    ), "not indices or indices[-1] == len(update._data) - 1"
                    # check that adds and deletes are contiguous
                    assert all(
                        a + 1 == b for a, b in zip(indices, iter(indices[1:]))
                    ), "all(a + 1 == b for a, b in zip(indices, iter(indices[1:])))"

                # NOTE(review): the loop variable shadows the `delta` parameter; the parameter
                # is not needed past `update = delta.value`, so this is harmless but confusing.
                for k, delta in update._data.items():
                    i = int(k)
                    try:
                        if isinstance(delta, DeltaUpdate):
                            sequence[i] = apply_attribute(collection, i, sequence[i], delta, context, to_blender)
                        elif isinstance(delta, DeltaDeletion):
                            # NOTE(review): del sequence[i] shifts later indices down; with more
                            # than one trailing deletion the next iteration's index may be out of
                            # range and will be swallowed by the except below ("Update ignored").
                            # Verify multi-item deletions are exercised.
                            if to_blender:
                                item = collection[i]
                                collection.remove(item)
                            del sequence[i]
                        else:  # DeltaAddition
                            # TODO pre save for use_curves
                            # since ordering does not include this requirement
                            if to_blender:
                                raise NotImplementedError("Not implemented: DeltaAddition for array")
                                # NOTE(review): unreachable - kept as the intended implementation
                                # once DeltaAddition for arrays is supported.
                                write_attribute(collection, i, delta.value, context)
                            sequence.append(delta.value)

                    except Exception as e:
                        # Best-effort: log and skip the failing item rather than abort the batch.
                        logger.warning(f"StructCollectionProxy.apply(). Processing {delta}")
                        logger.warning(f"... for {collection}[{i}]")
                        logger.warning(f"... Exception: {e!r}")
                        logger.warning("... Update ignored")
                        continue
            else:
                # dictionary
                for k, delta in update._data.items():
                    try:
                        if isinstance(delta, DeltaDeletion):
                            # TODO do all collections have remove ?
                            # see "name collision" in diff()
                            # Keys of additions/deletions carry a one-character disambiguation
                            # prefix; strip it to recover the item name.
                            k = k[1:]
                            if to_blender:
                                item = collection[k]
                                collection.remove(item)
                            del self._data[k]
                        elif isinstance(delta, DeltaAddition):
                            # TODO pre save for use_curves
                            # since ordering does not include this requirement

                            # see "name collision" in diff()
                            k = k[1:]
                            if to_blender:
                                write_attribute(collection, k, delta.value, context)
                            self._data[k] = delta.value
                        else:
                            self._data[k] = apply_attribute(collection, k, self._data[k], delta, context, to_blender)
                    except Exception as e:
                        # Best-effort: log and skip the failing item rather than abort the batch.
                        logger.warning(f"StructCollectionProxy.apply(). Processing {delta}")
                        logger.warning(f"... for {collection}[{k}]")
                        logger.warning(f"... Exception: {e!r}")
                        logger.warning("... Update ignored")
                        continue
        finally:
            context.visit_state.path.pop()

        return self
Exemplo n.º 25
0
 def _write_attribute():
     """Write the context-dependent armature data attributes.

     Iterates the attribute names listed in armature_data_proxy._require_context_state
     (free variables from the enclosing scope) and writes each corresponding proxy
     value onto armature_object.data.
     """
     proxy_values = armature_data_proxy._data
     for attr_name in armature_data_proxy._require_context_state:
         write_attribute(armature_object.data, attr_name, proxy_values[attr_name], context)
Exemplo n.º 26
0
 def save(self, bl_collection: bpy.types.bpy_prop_collection, attr_name: str, context: Context):
     """Save this proxy's per-item values into bl_collection.

     Each key of self._data is the string form of an item index in bl_collection;
     the associated proxy value is written as attribute attr_name of that item.
     """
     for index_str, item_proxy in self._data.items():
         target_item = bl_collection[int(index_str)]
         write_attribute(target_item, attr_name, item_proxy, context)
Exemplo n.º 27
0
    def create_standalone_datablock(
            self, context: Context
    ) -> Tuple[Optional[T.ID], Optional[RenameChangeset]]:
        """
        Save this proxy into its target standalone datablock.

        Creates (or adopts) a bpy.data datablock for this proxy, tags it with the
        proxy's mixer uuid, and writes all proxy attributes into it.

        Args:
            context: the proxy and visit state

        Returns:
            A (datablock, renames) tuple. datablock is None when creation failed or
            was skipped (already registered, duplicate uuid); renames lists the
            rename commands issued to resolve a simultaneous-creation name conflict.
        """
        # Already registered for this uuid: ignore the duplicate creation request.
        if self.target(context):
            logger.warning(
                f"create_standalone_datablock: datablock already registered : {self}"
            )
            logger.warning("... update ignored")
            return None, None
        renames: RenameChangeset = []
        incoming_name = self.data("name")
        existing_datablock = self.collection.get(incoming_name)
        if existing_datablock:
            if not existing_datablock.mixer_uuid:
                # A datablock created by VRtist command in the same command batch
                # Not an error, we will make it ours by adding the uuid and registering it
                logger.info(
                    f"create_standalone_datablock for {self} found existing datablock from VRtist"
                )
                datablock = existing_datablock
            else:
                if existing_datablock.mixer_uuid != self.mixer_uuid():
                    # local has a datablock with the same name as remote wants to create, but a different uuid.
                    # It is a simultaneous creation : rename local's datablock. Remote will do the same thing on its side
                    # and we will end up will all renamed datablocks
                    # The uuid suffix makes the rename deterministic, so both sides
                    # converge on the same names without further negotiation.
                    unique_name = f"{existing_datablock.name}_{existing_datablock.mixer_uuid}"
                    logger.warning(
                        f"create_standalone_datablock: Creation name conflict. Renamed existing bpy.data.{self.collection_name}[{existing_datablock.name}] into {unique_name}"
                    )

                    # Rename local's and issue a rename command
                    renames.append((
                        existing_datablock.mixer_uuid,
                        existing_datablock.name,
                        unique_name,
                        f"Conflict bpy.data.{self.collection_name}[{self.data('name')}] into {unique_name}",
                    ))
                    existing_datablock.name = unique_name

                    datablock = specifics.bpy_data_ctor(
                        self.collection_name, self, context)
                else:
                    # a creation for a datablock that we already have. This should not happen
                    logger.error(
                        f"create_standalone_datablock: unregistered uuid for {self}"
                    )
                    logger.error("... update ignored")
                    return None, None
        else:
            # No name conflict: create the datablock via the collection-specific constructor.
            datablock = specifics.bpy_data_ctor(self.collection_name, self,
                                                context)

        if datablock is None:
            logger.warning(
                f"Cannot create bpy.data.{self.collection_name}[{self.data('name')}]"
            )
            return None, None

        if DEBUG:
            # Sanity check: Blender may have uniquified the name (e.g. ".001" suffix).
            name = self.data("name")
            if self.collection.get(name).name != datablock.name:
                logger.error(
                    f"Name mismatch after creation of bpy.data.{self.collection_name}[{name}] "
                )

        datablock.mixer_uuid = self.mixer_uuid()

        # _pre_save may substitute or reject the datablock before attributes are written.
        datablock = self._pre_save(datablock, context)
        if datablock is None:
            logger.warning(
                f"DatablockProxy.update_standalone_datablock() {self} pre_save returns None"
            )
            return None, None
        try:
            # Mark the current datablock proxy for nested write_attribute calls; cleared in finally.
            context.visit_state.datablock_proxy = self
            for k, v in self._data.items():
                write_attribute(datablock, k, v, context)
        finally:
            context.visit_state.datablock_proxy = None

        return datablock, renames