def truncate_collection(target: T.bpy_prop_collection, incoming_keys: List[str]):
    """Remove from target every element whose key is absent from incoming_keys.

    Collections whose rna type is listed in always_clear are emptied wholesale.
    KeyingSets need a dedicated operator for removal; other collections are
    trimmed with remove(), logging a warning if that is not supported.
    """
    if not hasattr(target, "bl_rna"):
        return

    target_rna = target.bl_rna
    if any(isinstance(target_rna, t) for t in always_clear):
        target.clear()
        return

    stale_keys = set(target.keys()) - set(incoming_keys)
    if not stale_keys:
        return

    if isinstance(target_rna, type(T.KeyingSets.bl_rna)):
        # KeyingSets cannot be removed through the collection API: make each
        # stale entry active, then invoke the removal operator.
        for name in stale_keys:
            target.active_index = target.find(name)
            bpy.ops.anim.keying_set_remove()
    else:
        try:
            for name in stale_keys:
                target.remove(target[name])
        except Exception:
            logger.warning(
                f"Not implemented truncate_collection for type {target.bl_rna} for {target} ..."
            )
            for s in traceback.format_exc().splitlines():
                logger.warning(f"...{s}")
def load(
    self,
    bl_collection: T.bpy_prop_collection,
    key: Union[int, str],
    bl_collection_property: T.Property,
    context: Context,
):
    """Load the contents of bl_collection into this proxy.

    An empty collection simply clears the proxy. A collection with no string
    keys is a sequence and is stored as a list under MIXER_SEQUENCE (so the
    encoder always sees a dict); otherwise items are stored per string key.
    Returns self.
    """
    if not len(bl_collection):
        self._data.clear()
        return self

    try:
        context.visit_state.path.append(key)
        # An empty keys() means the collection is a sequence, even though
        # bl_collection.items() would still return [(index, item), ...]
        if bl_collection.keys():
            self._data = {
                name: StructProxy().load(item, name, context)
                for name, item in bl_collection.items()
            }
        else:
            # easier for the encoder to always have a dict
            self._data = {
                MIXER_SEQUENCE: [
                    StructProxy.make(item).load(item, index, context)
                    for index, item in enumerate(bl_collection.values())
                ]
            }
    finally:
        context.visit_state.path.pop()
    return self
def truncate_collection(target: T.bpy_prop_collection, proxy: Union[StructCollectionProxy, AosProxy], context: Context):
    """Shrink or resize target so that the contents of proxy can be written into it.

    Handles several collection families:
    - rna types in always_clear are emptied wholesale,
    - fixed-size geometry collections are grown from empty via add()
      (a non-empty size mismatch is an error and is only logged),
    - grease-pencil stroke points are grown or shrunk in place,
    - generic keyed collections have their stale keys removed
      (KeyingSets through the dedicated operator).
    """
    if not hasattr(target, "bl_rna"):
        return

    target_rna = target.bl_rna
    if any(isinstance(target_rna, t) for t in always_clear):
        target.clear()
        return

    if isinstance(target_rna, _resize_geometry_types):
        existing_length = len(target)
        incoming_length = proxy.length
        if existing_length != incoming_length:
            # geometry collections may only be grown when currently empty
            if existing_length != 0:
                logger.error(f"resize_geometry(): size mismatch for {target}")
                logger.error(f"... existing: {existing_length} incoming {incoming_length}")
                return
            logger.debug(f"resizing geometry: add({incoming_length}) for {target}")
            target.add(incoming_length)
        return

    if isinstance(target_rna, type(T.GPencilStrokePoints.bl_rna)):
        # stroke points support in-place grow/shrink
        delta = proxy.length - len(target)
        if delta > 0:
            target.add(delta)
        else:
            for _ in range(-delta):
                target.pop()
        return

    stale_keys = set(target.keys()) - set(proxy._data.keys())
    if not stale_keys:
        return

    if isinstance(target_rna, type(T.KeyingSets.bl_rna)):
        # KeyingSets cannot be removed through the collection API: make each
        # stale entry active, then invoke the removal operator.
        for name in stale_keys:
            target.active_index = target.find(name)
            bpy.ops.anim.keying_set_remove()
    else:
        try:
            for name in stale_keys:
                target.remove(target[name])
        except Exception:
            logger.warning(
                f"Not implemented truncate_collection for type {target.bl_rna} for {target} ..."
            )
            for s in traceback.format_exc().splitlines():
                logger.warning(f"...{s}")
def diff(
    self, collection: T.bpy_prop_collection, key: Union[int, str], collection_property: T.Property, context: Context
) -> Optional[DeltaUpdate]:
    """
    Computes the difference between the state of an item tracked by this proxy and its Blender state.

    This proxy tracks a collection of items indexed by string (e.g Scene.render.views) or int.
    The result will be a ProxyDiff that contains a Delta item per added, deleted or updated item

    Args:
        collection: the collection that must be diffed against this proxy
        collection_property: the property of collection as found in its enclosing object

    Returns:
        A DeltaUpdate wrapping the per-item deltas, or None if nothing changed.
    """
    diff = self.__class__()
    item_property = collection_property.fixed_type
    try:
        context.visit_state.path.append(key)
        sequence = self._data.get(MIXER_SEQUENCE)
        if sequence:
            # indexed by int
            # TODO This produces one DeltaDeletion by removed item. Produce a range in case many items are
            # deleted

            # since the diff sequence is hollow, we cannot store it in a list. Use a dict with int keys instead
            for i, (proxy_value, blender_value) in enumerate(itertools.zip_longest(sequence, collection)):
                if proxy_value is None:
                    value = read_attribute(collection[i], i, item_property, context)
                    diff._data[i] = DeltaAddition(value)
                elif blender_value is None:
                    diff._data[i] = DeltaDeletion(self.data(i))
                else:
                    delta = diff_attribute(collection[i], i, item_property, proxy_value, context)
                    if delta is not None:
                        diff._data[i] = delta
        else:
            # index by string. This is similar to DatablockCollectionProxy.diff
            # Renames are detected as Deletion + Addition

            # This assumes that keys ordering is the same in the proxy and in blender, which is
            # guaranteed by the fact that proxy load uses SynchronizedProperties.properties()

            bl_rna = getattr(collection, "bl_rna", None)
            # fix: compare against type(...bl_rna) for gpencil modifiers as well;
            # type(T.ObjectGpencilModifiers) is the struct metaclass, not the
            # bl_rna type, so gpencil modifiers never took this branch before
            if bl_rna is not None and isinstance(
                bl_rna, (type(T.ObjectModifiers.bl_rna), type(T.ObjectGpencilModifiers.bl_rna))
            ):
                # TODO move this into specifics.py
                # order-dependent collections with different types like Modifiers
                proxy_names = list(self._data.keys())
                blender_names = collection.keys()
                proxy_types = [self.data(name).data("type") for name in proxy_names]
                blender_types = [collection[name].type for name in blender_names]
                if proxy_types == blender_types and proxy_names == blender_names:
                    # Same types and names : do sparse modification
                    for name in proxy_names:
                        delta = diff_attribute(collection[name], name, item_property, self.data(name), context)
                        if delta is not None:
                            diff._data[name] = delta
                else:
                    # names or types do not match, rebuild all
                    # There are name collisions during Modifier order change for instance, so prefix
                    # the names to avoid them (using a tuple fails in the json encoder)
                    for name in proxy_names:
                        diff._data["D" + name] = DeltaDeletion(self.data(name))
                    for name in blender_names:
                        value = read_attribute(collection[name], name, item_property, context)
                        diff._data["A" + name] = DeltaAddition(value)
            else:
                # non order-dependent, uniform types
                proxy_keys = self._data.keys()
                blender_keys = collection.keys()
                added_keys = blender_keys - proxy_keys
                for k in added_keys:
                    value = read_attribute(collection[k], k, item_property, context)
                    diff._data["A" + k] = DeltaAddition(value)

                deleted_keys = proxy_keys - blender_keys
                for k in deleted_keys:
                    diff._data["D" + k] = DeltaDeletion(self.data(k))

                maybe_updated_keys = proxy_keys & blender_keys
                for k in maybe_updated_keys:
                    delta = diff_attribute(collection[k], k, item_property, self.data(k), context)
                    if delta is not None:
                        diff._data[k] = delta
    finally:
        context.visit_state.path.pop()

    if len(diff._data):
        return DeltaUpdate(diff)

    return None