def _diff(self, struct: T.Object, key: str, prop: T.Property, context: Context, diff: Proxy) -> Optional[Delta]:
    """Diff an Object datablock, replacing it wholesale when a partial update would be unsafe.

    Args:
        struct: the Object to diff against this proxy
        key: the key that identifies struct in its parent
        prop: the Property of struct as found in its enclosing object
        context: proxy and visit state
        diff: the proxy that holds the difference and will be transmitted in a Delta

    Returns:
        a DeltaReplace when the whole Object must be resent, otherwise the result of the
        default struct diff (a DeltaUpdate or None)
    """
    from mixer.blender_data.attributes import diff_attribute

    must_replace = False
    data_datablock = struct.data
    if data_datablock is not None:
        dirty_vertex_groups = data_datablock.mixer_uuid in context.visit_state.dirty_vertex_groups
        # Replace the whole Object. Otherwise we would have to merge a DeltaReplace for vertex_groups
        # and a DeltaUpdate for the remaining items
        if dirty_vertex_groups:
            # Log only when the replacement actually applies: the previous unconditional log
            # claimed "replace" for every Object that merely had data
            logger.debug(f"_diff: {struct} dirty vertex group: replace")
        must_replace |= dirty_vertex_groups

    if not must_replace:
        # Parenting with ctrl-P generates a Delta with parent, local_matrix and matrix_parent_inverse.
        # Applying this delta causes a position shift in the parented object. A full replace fixes the
        # problem. Note that parenting by just updating the parent property in the property panel does
        # not cause the same problem
        parent_property = struct.bl_rna.properties["parent"]
        parent_delta = diff_attribute(struct.parent, "parent", parent_property, self._data["parent"], context)
        must_replace |= parent_delta is not None

    if must_replace:
        diff.load(struct, context)
        return DeltaReplace(diff)
    else:
        return super()._diff(struct, key, prop, context, diff)
def diff(
    self, aos: T.bpy_prop_collection, key: Union[int, str], prop: T.Property, context: Context
) -> Optional[DeltaUpdate]:
    """Diff this array-of-structs proxy against its current Blender state.

    Builds a hollow proxy that only holds the members found different, as opposed
    to the dense self.
    """
    changes = self.__class__()
    changes.init(aos)
    changes._aos_length = len(aos)

    item_bl_rna = prop.fixed_type.bl_rna
    if item_bl_rna is T.UnknownType.bl_rna:
        # UnknownType is used in ShapeKey: the contents depend on the item that owns the Key
        # (Curve, Mesh, Lattice), so discover the members by introspecting the first item
        members: Iterable[str] = set(dir(aos[0])) - _unknown_type_attributes if len(self) else []
    else:
        members = [name for name, _ in context.synchronized_properties.properties(item_bl_rna)]

    for name in members:
        # co, normals, ...
        element = self._data.get(name, SoaElement(name))
        member_delta = diff_attribute(aos, name, item_bl_rna, element, context)
        if member_delta is not None:
            changes._data[name] = member_delta

    # Wrap the hollow proxy in a DeltaUpdate only if anything changed. This may be superfluous
    # but it is homogenous with additions and deletions
    return DeltaUpdate(changes) if changes._data else None
def _diff(
    self, attribute: T.bpy_struct, key: Union[int, str], prop: T.Property, context: Context, diff: StructProxy
) -> Optional[Delta]:
    """
    Computes the difference between the state of an item tracked by this proxy and its Blender state
    and attaches the difference to diff.

    See diff()

    Args:
        attribute: the struct to update (e.g. a Material instance)
        key: the key that identifies attribute in parent (e.g "Material")
        prop: the Property of struct as found in its enclosing object
        context: proxy and visit state
        diff: the proxy that holds the difference and will be transmitted in a Delta

    Returns:
        a delta if any difference is found, None otherwise
    """
    # PERF accessing the properties from the synchronized_properties is **far** cheaper that iterating over
    # _data and the getting the properties with
    #   member_property = struct.bl_rna.properties[k]
    # line to which py-spy attributes 20% of the total diff !
    track_path = prop is not None
    if track_path:
        context.visit_state.path.append(key)
    try:
        candidates = specifics.conditional_properties(
            attribute, context.synchronized_properties.properties(attribute)
        )
        for name, member_property in candidates:
            # TODO in test_differential.StructDatablockRef.test_remove
            # target et a scene, k is world and v (current world value) is None
            # so diff fails. v should be a BpyIDRefNoneProxy

            # make a difference between None value and no member
            try:
                value = getattr(attribute, name)
            except AttributeError:
                logger.info(f"diff: unknown attribute {name} in {attribute}")
                continue

            member_delta = diff_attribute(value, name, member_property, self._data.get(name), context)
            if member_delta is not None:
                diff._data[name] = member_delta
    finally:
        if track_path:
            context.visit_state.path.pop()

    # TODO detect media updates (reload(), and attach a media descriptor to diff)
    # difficult ?

    # if anything has changed, wrap the hollow proxy in a DeltaUpdate. This may be superfluous but
    # it is homogenous with additions and deletions
    return DeltaUpdate(diff) if diff._data else None
def diff(self, aos: T.bpy_prop_collection, key: str, prop: T.Property, context: Context) -> Optional[DeltaUpdate]:
    """Diff this array-of-structs proxy against its current Blender state.

    Builds a hollow proxy that only holds the members found different, as opposed
    to the dense self.
    """
    changes = self.__class__()
    changes.init(aos)
    changes._aos_length = len(aos)

    context.visit_state.path.append(key)
    try:
        item_bl_rna = prop.fixed_type.bl_rna
        for name, _ in context.synchronized_properties.properties(item_bl_rna):
            # co, normals, ...
            element = self._data.get(name, SoaElement())
            # NOTE(review): this passes the collection property `prop` to diff_attribute,
            # not the item's bl_rna — confirm this is intended (a sibling implementation
            # passes item_bl_rna here)
            member_delta = diff_attribute(aos, name, prop, element, context)
            if member_delta is not None:
                changes._data[name] = member_delta
    finally:
        context.visit_state.path.pop()

    # Wrap the hollow proxy in a DeltaUpdate only if anything changed. This may be superfluous
    # but it is homogenous with additions and deletions
    return DeltaUpdate(changes) if changes._data else None
def _diff(
    self, struct: T.Mesh, key: str, prop: T.Property, context: Context, diff: MeshProxy
) -> Optional[Union[DeltaUpdate, DeltaReplace]]:
    """Diff a Mesh datablock, resending everything when geometry must be cleared on the receiver."""
    if self.requires_clear_geometry(struct):
        # If any mesh buffer changes requires a clear geometry on the receiver, the receiver will clear all
        # buffers, including uv_layers and vertex_colors, so resend everything
        diff.load(struct, context)
        # force ObjectProxy._diff to resend the Vertex groups
        context.visit_state.dirty_vertex_groups.add(struct.mixer_uuid)
        return DeltaReplace(diff)

    track_path = prop is not None
    if track_path:
        context.visit_state.path.append(key)
    try:
        # vertex groups are always replaced as a whole
        current_groups = VertexGroups.from_mesh(struct).to_array_sequence()
        stored_groups: ArrayGroup = self._arrays.get("vertex_groups", [])
        if current_groups != stored_groups:
            diff._arrays["vertex_groups"] = current_groups
            # force Object update. This requires that Object updates are processed later, which seems to be
            # the order they are listed in Depsgraph.updates
            context.visit_state.dirty_vertex_groups.add(struct.mixer_uuid)

        candidates = specifics.conditional_properties(struct, context.synchronized_properties.properties(struct))
        for name, member_property in candidates:
            try:
                value = getattr(struct, name)
            except AttributeError:
                logger.warning("diff: unknown attribute ...")
                logger.warning(f"... {context.visit_state.display_path()}.{name}")
                continue
            member_delta = diff_attribute(value, name, member_property, self._data.get(name), context)
            if member_delta is not None:
                diff._data[name] = member_delta
    finally:
        if track_path:
            context.visit_state.path.pop()

    if diff._data or diff._arrays:
        return DeltaUpdate(diff)
    return None
def diff(
    self, collection: T.bpy_prop_collection, key: str, collection_property: T.Property, context: Context
) -> Optional[DeltaUpdate]:
    """
    Computes the difference between the state of an item tracked by this proxy and its Blender state.

    As this proxy tracks a collection, the result will be a DeltaUpdate that contains a
    DatablockCollectionProxy with a Delta item per added, deleted or updated item

    Args:
        collection: the collection to diff against this proxy
        collection_property: the property of collection in its enclosing object
    """
    # This method is called from the depsgraph handler. The proxy holds a representation of the Blender state
    # before the modification being processed. So the changeset is (Blender state - proxy state)

    # TODO how can this replace BpyBlendDiff ?
    changes = self.__class__()
    item_property = collection_property.fixed_type

    # keys are uuids
    # BpyDataCollectionDiff.diff() for why proxies without datablocks are ignores
    tracked_uuids = {uuid for uuid, proxy in self._data.items() if proxy.target(context)}
    current = {datablock.mixer_uuid: datablock for datablock in collection.values()}
    current_uuids = current.keys()

    for uuid in current_uuids - tracked_uuids:
        value = read_attribute(current[uuid], uuid, item_property, collection, context)
        assert isinstance(value, (DatablockProxy, DatablockRefProxy))
        changes._data[uuid] = DeltaAddition(value)

    for uuid in tracked_uuids - current_uuids:
        changes._data[uuid] = DeltaDeletion(self._data[uuid])

    for uuid in tracked_uuids & current_uuids:
        delta = diff_attribute(current[uuid], uuid, item_property, self.data(uuid), context)
        if delta is not None:
            assert isinstance(delta, DeltaUpdate)
            changes._data[uuid] = delta

    return DeltaUpdate(changes) if changes._data else None
def diff(
    self, collection: T.bpy_prop_collection, key: Union[int, str], collection_property: T.Property, context: Context
) -> Optional[Union[DeltaUpdate, DeltaReplace]]:
    """
    Computes the difference between the state of an item tracked by this proxy and its Blender state.

    This proxy tracks a collection of items indexed by string (e.g Scene.render.views) or int.
    The result will be a ProxyDiff that contains a Delta item per added, deleted or updated item

    Args:
        collection: the collection that must be diffed against this proxy
        key: the name of the collection, to record in the visit path
        collection_property: the property of collection as found in its enclosing object
    """
    sequence = self._sequence
    if not sequence and not collection:
        return None

    if specifics.diff_must_replace(collection, sequence, collection_property):
        # A collection cannot be updated because either:
        # - some of its members cannot be updated:
        #   SplineBezierPoints has no API to remove points, so Curve.splines cannot be updated and must
        #   be replaced
        # - updating the name of members will cause unsolicited renames.
        #   When swapping layers A and B in a GreasePencilLayers, renaming layer 0 into B causes an
        #   unsolicited rename of layer 0 into B.001
        # Send a replacement for the whole collection
        self.load(collection, context)
        return DeltaReplace(self)

    item_property = collection_property.fixed_type
    changes = self.__class__()

    # items from clear_from index cannot be updated, most often because their type has changed
    # (e.g ObjectModifier)
    clear_from = specifics.clear_from(collection, sequence, context)

    # run a diff for the head, that can be updated in-place
    for index in range(clear_from):
        delta = diff_attribute(collection[index], index, item_property, sequence[index], context)
        if delta is not None:
            changes._diff_updates.append((index, delta))

    if specifics.can_resize(collection, context):
        # delete the existing tail that cannot be modified
        changes._diff_deletions = len(sequence) - clear_from
        # add the new tail
        for index, item in enumerate(collection[clear_from:], clear_from):
            value = read_attribute(item, index, item_property, collection, context)
            changes._diff_additions.append(DeltaAddition(value))

    if changes._diff_updates or changes._diff_deletions or changes._diff_additions:
        return DeltaUpdate(changes)
    return None
def _diff(self, struct: T.Struct, key: str, prop: T.Property, context: Context, diff: MeshProxy) -> Optional[DeltaUpdate]:
    """Diff a Mesh struct, forcing a resend of geometry buffers when a clear_geometry is required."""
    try:
        # If any mesh buffer change requires a clear geometry on the receiver, send all buffers.
        # This is the case if a face is separated from a cube. The number of vertices is unchanged
        # but the number of faces changes, which requires the receiver to call Mesh.clear_geometry(),
        # hence to reload all the geometry, including parts that were unchanged.
        # As an optimized alternative, it should be possible not to send the unchanged arrays
        # but have MeshProxy.apply() to reload unchanged buffers from in-memory copies.
        force_send_all = proxy_requires_clear_geometry(self, struct)
        if force_send_all:
            logger.debug("requires_clear for %s", struct)
        if prop is not None:
            context.visit_state.path.append(key)

        candidates = specifics.conditional_properties(struct, context.synchronized_properties.properties(struct))
        for name, member_property in candidates:
            try:
                value = getattr(struct, name)
            except AttributeError:
                logger.warning(f"diff: unknown attribute {name} in {struct}")
                continue

            stored = self._data.get(name)
            force_diff = force_send_all and name in mesh_resend_on_clear
            try:
                if force_diff:
                    context.visit_state.scratchpad["force_soa_diff"] = True
                member_delta = diff_attribute(value, name, member_property, stored, context)
                if member_delta is not None:
                    diff._data[name] = member_delta
                elif force_diff:
                    # no difference found, but the receiver will clear this buffer: resend it anyway
                    diff._data[name] = DeltaUpdate.deep_wrap(stored)
            finally:
                if force_diff:
                    del context.visit_state.scratchpad["force_soa_diff"]
    finally:
        if prop is not None:
            context.visit_state.path.pop()

    return DeltaUpdate(diff) if diff._data else None
def _diff(self, struct: T.Struct, key: str, prop: T.Property, context: Context, diff: StructProxy) -> Optional[DeltaUpdate]:
    """Diff a struct against this proxy, attaching per-member deltas to diff."""
    # PERF accessing the properties from the synchronized_properties is **far** cheaper that iterating over
    # _data and the getting the properties with
    #   member_property = struct.bl_rna.properties[k]
    # line to which py-spy attributes 20% of the total diff !
    try:
        if prop is not None:
            context.visit_state.path.append(key)
        candidates = specifics.conditional_properties(struct, context.synchronized_properties.properties(struct))
        for name, member_property in candidates:
            # TODO in test_differential.StructDatablockRef.test_remove
            # target et a scene, k is world and v (current world value) is None
            # so diff fails. v should be a BpyIDRefNoneProxy

            # make a difference between None value and no member
            try:
                value = getattr(struct, name)
            except AttributeError:
                logger.warning(f"diff: unknown attribute {name} in {struct}")
                continue

            member_delta = diff_attribute(value, name, member_property, self._data.get(name), context)
            if member_delta is not None:
                diff._data[name] = member_delta
    finally:
        if prop is not None:
            context.visit_state.path.pop()

    # TODO detect media updates (reload(), and attach a media descriptor to diff)
    # difficult ?

    # if anything has changed, wrap the hollow proxy in a DeltaUpdate. This may be superfluous but
    # it is homogenous with additions and deletions
    return DeltaUpdate(diff) if diff._data else None
def _diff_attribute():
    # Closure helper: diff the armature's edit_bones collection against the state stored in this
    # proxy under "edit_bones".
    # Relies on armature_data, self and context being bound in the enclosing scope — this fragment
    # is only meaningful inside its enclosing function.
    return diff_attribute(armature_data.edit_bones, "edit_bones", self._edit_bones_property, self.data("edit_bones"), context)
def diff(
    self, collection: T.bpy_prop_collection, key: Union[int, str], collection_property: T.Property, context: Context
) -> Optional[DeltaUpdate]:
    """
    Computes the difference between the state of an item tracked by this proxy and its Blender state.

    This proxy tracks a collection of items indexed by string (e.g Scene.render.views) or int.
    The result will be a ProxyDiff that contains a Delta item per added, deleted or updated item

    Args:
        collection: the collection that must be diffed against this proxy
        key: the name of the collection, to record in the visit path
        collection_property: the property of collection as found in its enclosing object
        context: proxy and visit state
    """
    diff = self.__class__()
    item_property = collection_property.fixed_type
    try:
        context.visit_state.path.append(key)
        sequence = self._data.get(MIXER_SEQUENCE)
        if sequence:
            # indexed by int
            # TODO This produces one DeltaDeletion by removed item. Produce a range in case many items are
            # deleted

            # since the diff sequence is hollow, we cannot store it in a list. Use a dict with int keys instead
            for i, (proxy_value, blender_value) in enumerate(itertools.zip_longest(sequence, collection)):
                if proxy_value is None:
                    value = read_attribute(collection[i], i, item_property, context)
                    diff._data[i] = DeltaAddition(value)
                elif blender_value is None:
                    diff._data[i] = DeltaDeletion(self.data(i))
                else:
                    delta = diff_attribute(collection[i], i, item_property, proxy_value, context)
                    if delta is not None:
                        diff._data[i] = delta
        else:
            # index by string. This is similar to DatablockCollectionProxy.diff
            # Renames are detected as Deletion + Addition

            # This assumes that keys ordering is the same in the proxy and in blender, which is
            # guaranteed by the fact that proxy load uses SynchronizedProperties.properties()
            bl_rna = getattr(collection, "bl_rna", None)
            # BUGFIX: the second element was type(T.ObjectGpencilModifiers) (the bpy_struct
            # metaclass), not type(T.ObjectGpencilModifiers.bl_rna), so grease-pencil modifier
            # collections never matched this order-dependent branch
            if bl_rna is not None and isinstance(
                bl_rna, (type(T.ObjectModifiers.bl_rna), type(T.ObjectGpencilModifiers.bl_rna))
            ):
                # TODO move this into specifics.py
                # order-dependant collections with different types like Modifiers
                proxy_names = list(self._data.keys())
                blender_names = collection.keys()
                proxy_types = [self.data(name).data("type") for name in proxy_names]
                blender_types = [collection[name].type for name in blender_names]
                if proxy_types == blender_types and proxy_names == blender_names:
                    # Same types and names : do sparse modification
                    for name in proxy_names:
                        delta = diff_attribute(collection[name], name, item_property, self.data(name), context)
                        if delta is not None:
                            diff._data[name] = delta
                else:
                    # names or types do not match, rebuild all
                    # There are name collisions during Modifier order change for instance, so prefix
                    # the names to avoid them (using a tuple fails in the json encoder)
                    for name in proxy_names:
                        diff._data["D" + name] = DeltaDeletion(self.data(name))
                    for name in blender_names:
                        value = read_attribute(collection[name], name, item_property, context)
                        diff._data["A" + name] = DeltaAddition(value)
            else:
                # non order dependant, uniform types
                proxy_keys = self._data.keys()
                blender_keys = collection.keys()
                added_keys = blender_keys - proxy_keys
                for k in added_keys:
                    value = read_attribute(collection[k], k, item_property, context)
                    diff._data["A" + k] = DeltaAddition(value)

                deleted_keys = proxy_keys - blender_keys
                for k in deleted_keys:
                    diff._data["D" + k] = DeltaDeletion(self.data(k))

                maybe_updated_keys = proxy_keys & blender_keys
                for k in maybe_updated_keys:
                    delta = diff_attribute(collection[k], k, item_property, self.data(k), context)
                    if delta is not None:
                        diff._data[k] = delta
    finally:
        context.visit_state.path.pop()

    if len(diff._data):
        return DeltaUpdate(diff)
    return None