Example #1
def read_attribute(attr: Any, key: Union[int, str], attr_property: T.Property,
                   context: Context):
    """
    Load a property into a Python object of the appropriate type, either a Proxy or a native Python value
    """
    attr_type = type(attr)

    if is_builtin(attr_type):
        return attr
    if is_vector(attr_type):
        return list(attr)
    if is_matrix(attr_type):
        return [list(col) for col in attr.col]

    # We have handled the types that the Python binding reports usefully; now the harder work.
    # These cases were implemented first and might be better handled via the bl_rna property of the parent struct
    # TODO flatten
    if attr_type == T.bpy_prop_array:
        return list(attr)

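    # Bracket the nested visit with the recursion guard so that runaway nesting is detected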
    try:
        context.visit_state.recursion_guard.push(attr_property.identifier)
        if attr_type == T.bpy_prop_collection:
            if isinstance(attr_property.fixed_type, bpy.types.ID):
                from mixer.blender_data.datablock_collection_proxy import DatablockRefCollectionProxy

                return DatablockRefCollectionProxy().load(attr, key, context)
            elif is_soable_collection(attr_property):
                from mixer.blender_data.aos_proxy import AosProxy

                return AosProxy().load(attr, key, attr_property, context)
            else:
                from mixer.blender_data.struct_collection_proxy import StructCollectionProxy

                return StructCollectionProxy.make(attr_property).load(
                    attr, key, attr_property, context)

        # TODO merge with previous case
        if isinstance(attr_property, T.CollectionProperty):
            from mixer.blender_data.struct_collection_proxy import StructCollectionProxy

            return StructCollectionProxy().load(attr, key, attr_property,
                                                context)

        bl_rna = attr_property.bl_rna
        if bl_rna is None:
            logger.warning("Not implemented: attribute %s", attr)
            return None

        if issubclass(attr_type, T.PropertyGroup):
            from mixer.blender_data.struct_proxy import StructProxy

            return StructProxy().load(attr, key, context)

        if issubclass(attr_type, T.ID):
            if attr.is_embedded_data:
                from mixer.blender_data.datablock_proxy import DatablockProxy

                return DatablockProxy.make(attr_property).load(
                    attr, key, context)
            else:
                from mixer.blender_data.datablock_ref_proxy import DatablockRefProxy

                return DatablockRefProxy().load(attr, key, context)

        if issubclass(attr_type, T.bpy_struct):
            from mixer.blender_data.struct_proxy import StructProxy

            return StructProxy().load(attr, key, context)

        if attr is None and isinstance(attr_property, T.PointerProperty):
            from mixer.blender_data.misc_proxies import NonePtrProxy

            return NonePtrProxy()

        logger.error(
            f"Unsupported attribute {attr_type} {attr_property} {attr_property.fixed_type} at {context.visit_state.datablock_proxy._class_name}.{context.visit_state.path}.{attr_property.identifier}"
        )
    finally:
        context.visit_state.recursion_guard.pop()
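The try/finally above brackets every nested visit with a recursion guard, and the update() methods in the next two examples catch a MaxDepthExceeded error raised while loading. A minimal sketch of such a guard, assuming (the depth limit and the exact raising behavior are not taken from the mixer sources) that pushing past a fixed depth raises MaxDepthExceeded:

class MaxDepthExceeded(Exception):
    """Raised when attribute nesting exceeds the guard's limit."""


class RecursionGuard:
    """Hypothetical stand-in for context.visit_state.recursion_guard."""

    MAX_DEPTH = 30  # assumed limit, not taken from the mixer sources

    def __init__(self):
        self._stack = []

    def push(self, name: str):
        # Record the property being visited; fail fast on runaway nesting
        self._stack.append(name)
        if len(self._stack) > self.MAX_DEPTH:
            raise MaxDepthExceeded(".".join(self._stack))

    def pop(self):
        self._stack.pop()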
Example #2
    def update(self, diff: BpyDataCollectionDiff,
               context: Context) -> Changeset:
        """
        Update the proxy according to local datablock creations, removals or renames (sender side)
        """
        changeset = Changeset()

        # Sort so that the tests receive the messages in deterministic order, for two reasons:
        # - The tests compare the creation message streams received from the participating Blenders,
        #   and there is no reason why they would otherwise emit creation messages in the same order.
        # - TestObjectRenameGeneric.test_update_object exhibits a random failure without the sort:
        #   the order of Scene creation messages is then random, and an update is missed when the scene
        #   that links the updated object is not current. Setting PYTHONHASHSEED is not enough to get
        #   a deterministic test outcome.
        added_names = sorted(diff.items_added, key=lambda x: x[0].name_full)

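        # Register each added datablock under its uuid, build its proxy and record a creation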
        for datablock, collection_name in added_names:
            name_full = datablock.name_full
            logger.info("Perform update/creation for %s[%s]", collection_name,
                        name_full)
            try:
                uuid = ensure_uuid(datablock)
                context.proxy_state.add_datablock(uuid, datablock)
                proxy = DatablockProxy.make(datablock).load(datablock, context)
                context.proxy_state.proxies[uuid] = proxy
                self._data[uuid] = proxy
                changeset.creations.append(proxy)
            except MaxDepthExceeded as e:
                logger.error(
                    f"MaxDepthExceeded while loading {collection_name}[{name_full}]:"
                )
                logger.error("... Nested attribute depth is too large: ")
                logger.error(f"... {e!r}")
            except Exception:
                logger.error(
                    f"Exception while loading {collection_name}[{name_full}]:")
                for line in traceback.format_exc().splitlines():
                    logger.error(line)

        for proxy in diff.items_removed:
            try:
                logger.warning("Perform removal for %s", proxy)
                uuid = proxy.mixer_uuid
                changeset.removals.append(
                    (uuid, proxy.collection_name, str(proxy)))
                del self._data[uuid]
                del context.proxy_state.proxies[uuid]
                try:
                    context.proxy_state.remove_datablock(uuid)
                except KeyError:
                    logger.warning(
                        f"remove_datablock: n,o entry for {uuid}. Assuming removed by undo"
                    )
            except Exception:
                logger.error(
                    f"Exception during update/removed for proxy {proxy})  :")
                for line in traceback.format_exc().splitlines():
                    logger.error(line)

        #
        # Handle spontaneous renames
        #
        # - local and remote are initially synced with 2 objects with uuid/name D7/A FC/B
        # - local renames D7/A into B
        #   - D7 is actually renamed into B.001!
        #   - we detect (D7 -> B.001)
        #   - remote processes normally
        # - local renames D7/B.001 into B
        #   - D7 is renamed into B
        #   - FC is renamed into B.001
        #   - we detect (D7->B, FC->B.001)
        #   - local result is (D7/B, FC/B.001)
        # - local repeatedly renames the item named B.001 into B
        # - at some point on remote, the execution of a rename command will provoke a spontaneous rename,
        #   resulting in a situation where remote has FC/B.001 and D7/B.002 linked to the
        #   Master collection and also a FC/B unlinked
        #
        for proxy, new_name in diff.items_renamed:
            old_name = proxy.data("name")
            changeset.renames.append(
                (proxy.mixer_uuid, old_name, new_name, str(proxy)))
            proxy._data["name"] = new_name

        return changeset
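A hedged sketch of how a sender loop might consume the Changeset returned above; the tuple shapes for removals and renames are taken from the appends in update(), while collection_proxy, diff, context and the send_* helpers are hypothetical placeholders for mixer's surrounding machinery:

# Sketch only: send_* are hypothetical transport helpers, not mixer API.
changeset = collection_proxy.update(diff, context)
for proxy in changeset.creations:
    send_creation(proxy)                            # hypothetical
for uuid, collection_name, display in changeset.removals:
    send_removal(uuid, collection_name, display)    # hypothetical
for uuid, old_name, new_name, display in changeset.renames:
    send_rename(uuid, old_name, new_name, display)  # hypothetical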
Example #3
    def update(self, diff: BpyPropCollectionDiff,
               context: Context) -> Changeset:
        """
        Update the proxy according to local datablock creations, removals or renames
        """
        changeset = Changeset()
        # Sort so that the tests receive the messages in deterministic order. Sad, but not very harmful.
        added_names = sorted(diff.items_added.keys())
        for name in added_names:
            collection_name = diff.items_added[name]
            logger.info("Perform update/creation for %s[%s]", collection_name,
                        name)
            try:
                # TODO could have a datablock directly
                collection = getattr(bpy.data, collection_name)
                id_ = collection.get(name)
                if id_ is None:
                    logger.error(
                        "update/ request addition for %s[%s] : not found",
                        collection_name, name)
                    continue
                uuid = ensure_uuid(id_)
                context.proxy_state.datablocks[uuid] = id_
                proxy = DatablockProxy.make(id_).load(
                    id_,
                    name,
                    context,
                    bpy_data_collection_name=collection_name)
                context.proxy_state.proxies[uuid] = proxy
                self._data[uuid] = proxy
                changeset.creations.append(proxy)
            except MaxDepthExceeded as e:
                logger.error(
                    f"MaxDepthExceeded while loading {collection_name}[{name}]:"
                )
                logger.error("... Nested attribute depth is too large: ")
                logger.error(f"... {e!r}")
            except Exception:
                logger.error(
                    f"Exception while loading {collection_name}[{name}]:")
                for line in traceback.format_exc().splitlines():
                    logger.error(line)

        for proxy in diff.items_removed:
            try:
                logger.info("Perform removal for %s", proxy)
                uuid = proxy.mixer_uuid()
                changeset.removals.append(
                    (uuid, proxy.collection_name, str(proxy)))
                del self._data[uuid]
                del context.proxy_state.proxies[uuid]
                del context.proxy_state.datablocks[uuid]
            except Exception:
                logger.error(
                    f"Exception during update/removed for proxy {proxy})  :")
                for line in traceback.format_exc().splitlines():
                    logger.error(line)

        #
        # Handle spontaneous renames
        #
        # - local and remote are initially synced with 2 objects with uuid/name D7/A FC/B
        # - local renames D7/A into B
        #   - D7 is actually renamed into B.001!
        #   - we detect (D7 -> B.001)
        #   - remote processes normally
        # - local renames D7/B.001 into B
        #   - D7 is renamed into B
        #   - FC is renamed into B.001
        #   - we detect (D7->B, FC->B.001)
        #   - local result is (D7/B, FC/B.001)
        # - local repeatedly renames the item named B.001 into B
        # - at some point on remote, the execution of a rename command will provoke a spontaneous rename,
        #   resulting in a situation where remote has FC/B.001 and D7/B.002 linked to the
        #   Master collection and also a FC/B unlinked
        #
        for proxy, new_name in diff.items_renamed:
            uuid = proxy.mixer_uuid()
            datablock = context.proxy_state.datablocks[uuid]
            if proxy.collection[new_name] is not datablock:
                logger.error(
                    f"update rename: {proxy.collection}[{new_name}] is not {datablock} for {proxy}, {uuid}"
                )

            old_name = proxy.data("name")
            changeset.renames.append(
                (proxy.mixer_uuid(), old_name, new_name, str(proxy)))
            proxy.rename(new_name)

        return changeset
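Both update() variants key the proxy and datablock maps by the value returned from ensure_uuid. A plausible sketch, assuming mixer registers a mixer_uuid string property on bpy.types.ID (the real implementation lives elsewhere in the codebase):

import uuid


def ensure_uuid(datablock) -> str:
    # Sketch: assumes a ``mixer_uuid`` StringProperty registered on
    # bpy.types.ID, which is what lets the code above key the
    # proxy_state dictionaries by uuid.
    if not datablock.mixer_uuid:
        datablock.mixer_uuid = str(uuid.uuid4())
    return datablock.mixer_uuid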