def apply_to_proxy(
    self,
    datablock: T.ID,
    delta: Optional[DeltaUpdate],
    context: Context,
):
    """
    Apply delta to this proxy, but do not update Blender state.

    Every member delta is forwarded with to_blender=False, so only the proxy
    contents change; datablock is used for attribute resolution and logging.
    """
    # A missing delta means there is nothing to apply.
    if delta is None:
        return

    update = delta.value
    assert type(update) == type(self)

    try:
        # Expose this proxy to nested visitors for the duration of the walk.
        context.visit_state.datablock_proxy = self
        for name, member_delta in update._data.items():
            try:
                previous = self._data.get(name)
                self._data[name] = apply_attribute(
                    datablock, name, previous, member_delta, context, to_blender=False
                )
            except Exception as e:
                # Log and skip: one failing member must not abort the whole update.
                logger.warning(f"apply_to_proxy(). Processing {member_delta}")
                logger.warning(f"... for {datablock}.{name}")
                logger.warning(f"... Exception: {e!r}")
                logger.warning("... Update ignored")
                continue
    finally:
        context.visit_state.datablock_proxy = None
def apply( self, attribute: T.bpy_struct, parent: Union[T.bpy_struct, T.bpy_prop_collection], key: Union[int, str], delta: Delta, context: Context, to_blender: bool = True, ) -> Union[StructProxy, NonePtrProxy]: """ Apply delta to this proxy and optionally to the Blender attribute its manages. Args: attribute: the struct to update (e.g. a Material instance) parent: the attribute that contains attribute (e.g. bpy.data.materials) key: the key that identifies attribute in parent (e.g "Material") delta: the delta to apply context: proxy and visit state to_blender: update the managed Blender attribute in addition to this Proxy """ # WARNING parent must not be searched for key as it will fail in case of duplicate keys, with libraries update = delta.value if isinstance(delta, DeltaReplace): # The structure is replaced as a whole. # TODO explain when this occurs self.copy_data(update) if to_blender: self.save(attribute, parent, key, context) else: # the structure is updated if key == "animation_data" and (attribute is None or isinstance( attribute, T.AnimData)): # if animation_data is updated to None (cleared), the parent structure is updated to store # a NonePtrProxy if to_blender: attribute = _create_clear_animation_data(update, parent) if attribute is None: return NonePtrProxy() else: if isinstance(update, NonePtrProxy): return NonePtrProxy() if attribute: for k, member_delta in update._data.items(): current_value = self._data.get(k) try: self._data[k] = apply_attribute( attribute, k, current_value, member_delta, context, to_blender) except Exception as e: logger.warning( f"Struct.apply(). Processing {member_delta}") logger.warning(f"... for {attribute}.{k}") logger.warning(f"... Exception: {e!r}") logger.warning("... Update ignored") continue return self
def apply(
    self,
    parent: Any,
    key: Union[int, str],
    struct_delta: Optional[DeltaUpdate],
    context: Context,
    to_blender: bool = True,
) -> StructProxy:
    """
    Apply diff to the Blender attribute at parent[key] or parent.key and update
    accordingly this proxy entry at key.

    Args:
        parent: the container (struct or bpy_prop_collection) holding the attribute
        key: index (int) or name (str) of the attribute in parent
        struct_delta: the delta to apply, or None for a no-op
        context: proxy and visit state
        to_blender: update the managed Blender attribute in addition to this Proxy

    Returns:
        self (NOTE(review): returns None implicitly when struct_delta is None,
        despite the StructProxy annotation — confirm callers tolerate this).
    """
    if struct_delta is None:
        return

    assert isinstance(key, (int, str))

    struct_update = struct_delta.value

    # Resolve the Blender struct being updated; keys may be collection indices,
    # collection names, or plain attribute names.
    if isinstance(key, int):
        struct = parent[key]
    elif isinstance(parent, T.bpy_prop_collection):
        struct = parent.get(key)
    else:
        struct = getattr(parent, key, None)

    if to_blender:
        # Give the proxy a chance to (re)create or adjust the target before writing.
        struct = struct_update._pre_save(struct, context)

    assert type(struct_update) == type(self)

    # Track the visit path for diagnostics; always popped in finally.
    context.visit_state.path.append(key)
    try:
        for k, member_delta in struct_update._data.items():
            current_value = self._data.get(k)
            try:
                self._data[k] = apply_attribute(struct, k, current_value, member_delta, context, to_blender)
            except Exception as e:
                # Log and skip the failing member so the rest of the update proceeds.
                logger.warning(f"Struct.apply(). Processing {member_delta}")
                logger.warning(f"... for {struct}.{k}")
                logger.warning(f"... Exception: {e!r}")
                logger.warning("... Update ignored")
                continue
    finally:
        context.visit_state.path.pop()

    return self
def apply(
    self,
    attribute: T.Mesh,
    parent: T.BlendDataMeshes,
    key: Union[int, str],
    delta: Delta,
    context: Context,
    to_blender: bool = True,
) -> MeshProxy:
    """
    Apply delta to this proxy and optionally to the Blender attribute it manages.

    Args:
        attribute: the Mesh datablock to update
        parent: the attribute that contains attribute (e.g. a bpy.data.meshes)
        key: the key that identifies attribute in parent.
        delta: the delta to apply
        context: proxy and visit state
        to_blender: update the managed Blender attribute in addition to this Proxy
    """
    struct_update = delta.value

    if isinstance(delta, DeltaReplace):
        # Full replacement: reset the proxy, then rebuild the mesh from scratch.
        self.copy_data(struct_update)
        if to_blender:
            # Geometry must be cleared before a full rewrite of the mesh arrays.
            attribute.clear_geometry()
            # WARNING ensure that parent is not queried for key, which would fail with libraries and duplicate names
            self.save(attribute, parent, key, context)
    else:
        # Sparse update.
        # vertex groups are always replaced as a whole
        vertex_groups_arrays = struct_update._arrays.get("vertex_groups", None)
        if vertex_groups_arrays is not None:
            self._arrays["vertex_groups"] = vertex_groups_arrays

        # collection resizing will be done in AosProxy.apply()
        for k, member_delta in struct_update._data.items():
            current_value = self._data.get(k)
            try:
                self._data[k] = apply_attribute(attribute, k, current_value, member_delta, context, to_blender)
            except Exception as e:
                # Log and skip the failing member so the rest of the update proceeds.
                logger.warning(f"Struct.apply(). Processing {member_delta}")
                logger.warning(f"... for {attribute}.{k}")
                logger.warning(f"... Exception: {e!r}")
                logger.warning("... Update ignored")
                continue

        # If a face is removed from a cube, the vertices array is unchanged but the polygon array is changed.
        # We expect to receive soa updates for arrays that have been modified, but not for unmodified arrays.
        # however unmodified arrays must be reloaded if clear_geometry was called
    return self
def apply( self, attribute: T.bpy_struct, parent: Union[T.bpy_struct, T.bpy_prop_collection], key: Union[int, str], delta: Delta, context: Context, to_blender: bool = True, ) -> StructProxy: """ Apply delta to this proxy and optionally to the Blender attribute its manages. Args: attribute: the struct to update (e.g. a Material instance) parent: the attribute that contains attribute (e.g. bpy.data.materials) key: the key that identifies attribute in parent (e.g "Material") delta: the delta to apply context: proxy and visit state to_blender: update the managed Blender attribute in addition to this Proxy """ # WARNING parent must not be searched for key as it will fail in case of duplicate keys, with libraries update = delta.value if isinstance(delta, DeltaReplace): self.copy_data(update) if to_blender: self.save(attribute, parent, key, context) else: assert type(update) == type(self) context.visit_state.path.append(key) try: for k, member_delta in update._data.items(): current_value = self._data.get(k) try: self._data[k] = apply_attribute( attribute, k, current_value, member_delta, context, to_blender) except Exception as e: logger.warning( f"Struct.apply(). Processing {member_delta}") logger.warning(f"... for {attribute}.{k}") logger.warning(f"... Exception: {e!r}") logger.warning("... Update ignored") continue finally: context.visit_state.path.pop() return self
def apply(
    self,
    parent: T.BlendDataMeshes,
    key: str,
    struct_delta: DeltaUpdate,
    context: Context,
    to_blender: bool = True,
) -> MeshProxy:
    """
    Apply a mesh delta to this proxy and optionally to the Blender mesh at parent[key].
    """
    mesh = parent.get(key)
    mesh_update = struct_delta.value

    # When the incoming update changes array sizes, the geometry must be cleared
    # before per-array writes (see update_requires_clear_geometry).
    if to_blender and update_requires_clear_geometry(mesh_update, self):
        logger.debug(f"clear_geometry for {mesh}")
        mesh.clear_geometry()

    # collection resizing will be done in AosProxy.apply()
    context.visit_state.path.append(key)
    try:
        for name, item_delta in mesh_update._data.items():
            previous = self._data.get(name)
            try:
                self._data[name] = apply_attribute(mesh, name, previous, item_delta, context, to_blender)
            except Exception as e:
                # Log and keep going: a single failing member must not abort the update.
                logger.warning(f"Struct.apply(). Processing {item_delta}")
                logger.warning(f"... for {mesh}.{name}")
                logger.warning(f"... Exception: {e!r}")
                logger.warning("... Update ignored")
                continue
    finally:
        context.visit_state.path.pop()

    # If a face is removed from a cube, the vertices array is unchanged but the polygon array is changed.
    # We expect to receive soa updates for arrays that have been modified, but not for unmodified arrays.
    # however unmodified arrays must be reloaded if clear_geometry was called
    return self
def apply(
    self,
    collection: T.bpy_prop_collection,
    parent: T.bpy_struct,
    key: Union[int, str],
    delta: Delta,
    context: Context,
    to_blender=True,
) -> StructCollectionProxy:
    """
    Apply delta to this proxy and optionally to the Blender attribute it manages.

    Args:
        collection: the collection to update (e.g. a_mesh.materials)
        parent: the attribute that contains attribute (e.g. a Mesh instance)
        key: the key that identifies attribute in parent (e.g "materials")
        delta: the delta to apply
        context: proxy and visit state
        to_blender: update the managed Blender attribute in addition to this Proxy
    """
    assert isinstance(key, str)

    update = delta.value
    assert type(update) == type(self)

    if isinstance(delta, DeltaReplace):
        # Full replacement: drop everything, then rebuild from the incoming sequence.
        self._sequence = update._sequence
        if to_blender:
            specifics.truncate_collection(collection, 0)
            self.save(collection, parent, key, context)
    else:
        # a sparse update
        context.visit_state.path.append(key)
        try:
            sequence = self._sequence

            # Delete before update and process updates in reverse order to avoid spurious renames.
            # Starting with sequence A, B, C, D and delete B causes :
            # - an update for items 1 and 2 to be renamed into C and D
            # - one delete
            # If the update is processed first, Blender renames item 3 into D.001
            # If the deletes are processed first but the updates are processed in order, Blender renames item 1
            # into C.001
            delete_count = update._diff_deletions
            if delete_count > 0:
                if to_blender:
                    specifics.truncate_collection(collection, len(collection) - delete_count)
                del sequence[-delete_count:]

            # Updates, in reverse index order (see the rename note above).
            for i, delta_update in reversed(update._diff_updates):
                sequence[i] = apply_attribute(collection, i, sequence[i], delta_update, context, to_blender)

            # Additions are appended after the surviving items.
            for i, delta_addition in enumerate(update._diff_additions, len(sequence)):
                if to_blender:
                    item_proxy = delta_addition.value
                    specifics.add_element(collection, item_proxy, context)
                    write_attribute(collection, i, item_proxy, context)
                sequence.append(delta_addition.value)
        except Exception as e:
            # Best-effort: log and fall through to return self with whatever was applied.
            logger.warning("apply: Exception while processing attribute ...")
            logger.warning(f"... {context.visit_state.display_path()}.{key}")
            logger.warning(f"... {e!r}")
        finally:
            context.visit_state.path.pop()

    return self
def _apply_attribute():
    # Closure over the enclosing scope (self, armature_data, update, context,
    # to_blender): re-apply the proxies that need a specific armature state.
    for name in proxies_needing_state:
        previous = self._data[name]
        member_delta = update._data[name]
        self._data[name] = apply_attribute(armature_data, name, previous, member_delta, context, to_blender)
def apply(
    self, parent: Any, key: Union[int, str], delta: Optional[DeltaUpdate], context: Context, to_blender=True
) -> StructProxy:
    """
    Apply delta to this collection proxy and optionally to the Blender collection
    at parent[key] / parent.key.

    NOTE(review): delta is annotated Optional but dereferenced unconditionally below
    (delta.value) — confirm callers never pass None.
    """
    assert isinstance(key, (int, str))

    # TODO factorize with save
    if isinstance(key, int):
        collection = parent[key]
    elif isinstance(parent, T.bpy_prop_collection):
        # TODO append an element :
        # https://blenderartists.org/t/how-delete-a-bpy-prop-collection-element/642185/4
        collection = parent.get(key)
        if collection is None:
            collection = specifics.add_element(self, parent, key, context)
    else:
        collection = getattr(parent, key, None)

    update = delta.value
    assert type(update) == type(self)

    try:
        context.visit_state.path.append(key)
        sequence = self._data.get(MIXER_SEQUENCE)
        if sequence:
            # Array-like collection: items are addressed by integer index.
            # input validity assertions
            add_indices = [i for i, delta in enumerate(update._data.values()) if isinstance(delta, DeltaAddition)]
            del_indices = [i for i, delta in enumerate(update._data.values()) if isinstance(delta, DeltaDeletion)]
            if add_indices or del_indices:
                # Cannot have deletions and additions
                assert not add_indices or not del_indices, "not add_indices or not del_indices"
                indices = add_indices if add_indices else del_indices
                # Check that adds and deleted are at the end
                assert (
                    not indices or indices[-1] == len(update._data) - 1
                ), "not indices or indices[-1] == len(update._data) - 1"
                # check that adds and deletes are contiguous
                assert all(
                    a + 1 == b for a, b in zip(indices, iter(indices[1:]))
                ), "all(a + 1 == b for a, b in zip(indices, iter(indices[1:])))"

            for k, delta in update._data.items():
                # Keys are stringified indices in this branch.
                i = int(k)
                try:
                    if isinstance(delta, DeltaUpdate):
                        sequence[i] = apply_attribute(collection, i, sequence[i], delta, context, to_blender)
                    elif isinstance(delta, DeltaDeletion):
                        if to_blender:
                            item = collection[i]
                            collection.remove(item)
                        del sequence[i]
                    else:  # DeltaAddition
                        # TODO pre save for use_curves
                        # since ordering does not include this requirement
                        if to_blender:
                            raise NotImplementedError("Not implemented: DeltaAddition for array")
                            # NOTE(review): unreachable — the raise above always fires first.
                            write_attribute(collection, i, delta.value, context)
                        sequence.append(delta.value)
                except Exception as e:
                    # Log and skip the failing item so the rest of the update proceeds.
                    logger.warning(f"StructCollectionProxy.apply(). Processing {delta}")
                    logger.warning(f"... for {collection}[{i}]")
                    logger.warning(f"... Exception: {e!r}")
                    logger.warning("... Update ignored")
                    continue
        else:
            # Keyed collection: items are addressed by name.
            for k, delta in update._data.items():
                try:
                    if isinstance(delta, DeltaDeletion):
                        # TODO do all collections have remove ?
                        # see "name collision" in diff()
                        # Deletion keys carry a one-character prefix; strip it.
                        k = k[1:]
                        if to_blender:
                            item = collection[k]
                            collection.remove(item)
                        del self._data[k]
                    elif isinstance(delta, DeltaAddition):
                        # TODO pre save for use_curves
                        # since ordering does not include this requirement
                        # see "name collision" in diff()
                        # Addition keys carry a one-character prefix; strip it.
                        k = k[1:]
                        if to_blender:
                            write_attribute(collection, k, delta.value, context)
                        self._data[k] = delta.value
                    else:
                        self._data[k] = apply_attribute(collection, k, self._data[k], delta, context, to_blender)
                except Exception as e:
                    # Log and skip the failing item so the rest of the update proceeds.
                    logger.warning(f"StructCollectionProxy.apply(). Processing {delta}")
                    logger.warning(f"... for {collection}[{k}]")
                    logger.warning(f"... Exception: {e!r}")
                    logger.warning("... Update ignored")
                    continue
    finally:
        context.visit_state.path.pop()
    return self
def apply(
    self, parent: Any, key: Union[int, str], delta: Optional[DeltaUpdate], context: Context, to_blender=True
) -> StructCollectionProxy:
    """
    Apply delta to this collection proxy and optionally to the Blender collection
    at parent[key] / parent.key.

    NOTE(review): delta is annotated Optional but dereferenced unconditionally below
    (delta.value) — confirm callers never pass None.
    """
    assert isinstance(key, (int, str))

    update = delta.value
    assert type(update) == type(self)

    # Resolve the Blender collection being updated.
    if isinstance(key, int):
        collection = parent[key]
    elif isinstance(parent, T.bpy_prop_collection):
        collection = parent.get(key)
    else:
        collection = getattr(parent, key, None)

    if isinstance(delta, DeltaReplace):
        # Full replacement: drop everything, then rebuild from the incoming sequence.
        self._sequence = update._sequence
        if to_blender:
            specifics.truncate_collection(collection, 0)
            self.save(parent, key, context)
    else:
        # a sparse update
        context.visit_state.path.append(key)
        try:
            sequence = self._sequence

            # Delete before update and process updates in reverse order to avoid spurious renames.
            # Starting with sequence A, B, C, D and delete B causes :
            # - an update for items 1 and 2 to be renamed into C and D
            # - one delete
            # If the update is processed first, Blender renames item 3 into D.001
            # If the deletes are processed first but the updates are processed in order, Blender renames item 1
            # into C.001
            for _ in range(update._diff_deletions):
                if to_blender:
                    item = collection[-1]
                    collection.remove(item)
                del sequence[-1]

            # Updates, in reverse index order (see the rename note above).
            for i, delta_update in reversed(update._diff_updates):
                sequence[i] = apply_attribute(collection, i, sequence[i], delta_update, context, to_blender)

            # Additions are appended after the surviving items.
            for i, delta_addition in enumerate(update._diff_additions, len(sequence)):
                if to_blender:
                    item_proxy = delta_addition.value
                    specifics.add_element(collection, item_proxy, context)
                    write_attribute(collection, i, item_proxy, context)
                sequence.append(delta_addition.value)
        except Exception as e:
            # Best-effort: log and fall through to return self with whatever was applied.
            logger.warning(f"StructCollectionProxy.apply(). Processing {delta}")
            logger.warning(f"... for {collection}")
            logger.warning(f"... Exception: {e!r}")
            logger.warning("... Update ignored")
        finally:
            context.visit_state.path.pop()

    return self