Code example #1
File: diff.py Project: ElonGame/mixer
    def diff(self, proxy: BpyPropDataCollectionProxy, collection_name: str, context: Context):
        self.items_added.clear()
        self.items_removed.clear()
        self.items_renamed.clear()
        proxy_items = {id_proxy.mixer_uuid(): id_proxy for id_proxy in proxy._data.values()}
        bl_collection = getattr(bpy.data, collection_name)
        blender_items = {}
        for name, item in bl_collection.items():
            if skip_bpy_data_item(collection_name, item):
                continue

            uuid = item.mixer_uuid
            if uuid in blender_items.keys():
                # duplicate uuid, from an object duplication
                original_item = blender_items[uuid]
                logger.info(f"Duplicate uuid {uuid} for {original_item[1]} and {item.name}...")
                logger.info("... assuming object was duplicated. Resetting (not an error)")
                # reset the uuid; ensure_uuid() below will regenerate it
                item.mixer_uuid = ""

            ensure_uuid(item)
            if item.mixer_uuid in blender_items.keys():
                logger.error(f"Duplicate uuid found for {item}")
                continue

            blender_items[item.mixer_uuid] = (name, collection_name)
        self.items_added, self.items_removed, self.items_renamed = find_renamed(proxy_items, blender_items)
        if not self.empty():
            BlendData.instance().collection(collection_name).set_dirty()
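Examples #1 and #2 delegate the set arithmetic to find_renamed, which is not shown in these excerpts. Below is a minimal sketch of what it must compute, inferred from the inline version in examples #7 and #10 and from how example #9 consumes the result; the body is an assumption, not the project's actual code:

def find_renamed(proxy_items, blender_items):
    """Partition uuids into added, removed and renamed items (sketch).

    proxy_items: uuid -> proxy, blender_items: uuid -> (name, collection_name)
    """
    proxy_uuids = set(proxy_items.keys())
    blender_uuids = set(blender_items.keys())
    # A uuid known on both sides but under a different name was renamed locally
    renamed_uuids = {
        uuid
        for uuid in blender_uuids & proxy_uuids
        if proxy_items[uuid].data("name") != blender_items[uuid][0]
    }
    added_uuids = blender_uuids - proxy_uuids - renamed_uuids
    removed_uuids = proxy_uuids - blender_uuids - renamed_uuids
    items_added = {blender_items[uuid][0]: blender_items[uuid][1] for uuid in added_uuids}
    items_removed = [proxy_items[uuid] for uuid in removed_uuids]
    items_renamed = [(proxy_items[uuid], blender_items[uuid][0]) for uuid in renamed_uuids]
    return items_added, items_removed, items_renamed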
Code example #2
File: diff.py Project: nondejus/mixer
    def diff(
        self, proxy: DatablockCollectionProxy, collection_name: str, synchronized_properties: SynchronizedProperties
    ):
        self.items_added.clear()
        self.items_removed.clear()
        self.items_renamed.clear()
        proxy_items = {id_proxy.mixer_uuid(): id_proxy for id_proxy in proxy._data.values()}
        bl_collection = getattr(bpy.data, collection_name)
        blender_items = {}
        for name, item in bl_collection.items():
            if skip_bpy_data_item(collection_name, item):
                continue

            uuid = item.mixer_uuid
            if uuid in blender_items.keys():
                # duplicate uuid, from an object duplication
                duplicate_name, duplicate_collection_name = blender_items[uuid]
                logger.info(
                    f"Duplicate uuid {uuid} in bpy.data.{duplicate_collection_name} for {duplicate_name} and bpy.data.{collection_name} for {name}..."
                )
                logger.info("... assuming object was duplicated. Resetting (not an error)")
                # reset the uuid; ensure_uuid() below will regenerate it
                item.mixer_uuid = ""

            ensure_uuid(item)
            if item.mixer_uuid in blender_items.keys():
                logger.error(f"Duplicate uuid found for {item}")
                continue

            blender_items[item.mixer_uuid] = (name, collection_name)
        self.items_added, self.items_removed, self.items_renamed = find_renamed(proxy_items, blender_items)
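All of these examples rely on ensure_uuid to tag a datablock with a stable identifier. A minimal sketch of the behavior the callers imply, since they reset mixer_uuid to "" to force regeneration and example #5 uses the return value (an assumption, not the project's actual code):

from uuid import uuid4

def ensure_uuid(item) -> str:
    # Assign a fresh uuid to the datablock if it has none yet, and return it (sketch)
    if item.mixer_uuid == "":
        item.mixer_uuid = str(uuid4())
    return item.mixer_uuid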
Code example #3
File: diff.py Project: ElonGame/mixer
    def diff(self, blend_proxy: BpyBlendProxy, context: Context):
        self.collection_deltas.clear()
        self.id_deltas.clear()

        for collection_name, _ in context.properties(bpy_type=T.BlendData):
            delta = BpyPropCollectionDiff()
            delta.diff(blend_proxy._data[collection_name], collection_name, context)
            if not delta.empty():
                self.collection_deltas.append((collection_name, delta))

        # Before this change:
        # Only datablocks handled by the generic synchronization system get a uuid, either from
        # BpyBlendProxy.initialize_ref_targets() during room creation, or later during diff processing.
        # Datablocks of unhandled types get no uuid and BpyIDRefProxy references to them are incorrect.
        # What is more, this means trouble for tests since datablocks of unhandled types are assigned
        # a uuid during the message grabbing, which means that they get different uuids on both ends.
        for collection_name in context.unhandled_bpy_data_collection_names:
            collection = getattr(bpy.data, collection_name)
            for datablock in collection.values():
                ensure_uuid(datablock)
Code example #4
File: diff.py Project: UPBGE/blender-addons
    def diff(self, blend_proxy: BpyDataProxy,
             synchronized_properties: SynchronizedProperties):
        self._collection_deltas.clear()

        for collection_name, _ in synchronized_properties.properties(
                bpy_type=T.BlendData):
            if collection_name not in blend_proxy._data:
                continue
            delta = BpyDataCollectionDiff()
            delta.diff(blend_proxy._data[collection_name], collection_name,
                       synchronized_properties)
            if not delta.empty():
                self._collection_deltas.append((collection_name, delta))

        # Before this change:
        # Only datablocks handled by the generic synchronization system get a uuid.
        # Datablocks of unhandled types get no uuid and DatablockRefProxy references to them are incorrect.
        # What is more, this means trouble for tests since datablocks of unhandled types are assigned
        # a uuid during the message grabbing, which means that they get different uuids on both ends.
        for collection_name in synchronized_properties.unhandled_bpy_data_collection_names:
            collection = getattr(bpy.data, collection_name)
            for datablock in collection.values():
                ensure_uuid(datablock)
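Examples #3 and #4 aggregate one per-collection delta for the whole of bpy.data. A hedged sketch of a sender-side driver tying them to the update() methods shown later in examples #8 and #9; send_local_changes and all of its parameters are hypothetical names, as the excerpts above do not show the real driver:

def send_local_changes(blend_diff, blend_proxy, synchronized_properties, context, send_changeset):
    # Hypothetical sender loop (names are assumptions, not the project's API)
    blend_diff.diff(blend_proxy, synchronized_properties)
    for collection_name, delta in blend_diff._collection_deltas:
        # Turn each non-empty per-collection delta into creation/removal/rename messages
        changeset = blend_proxy._data[collection_name].update(delta, context)
        send_changeset(changeset)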
Code example #5
    def load(self, bl_collection: bpy.types.bpy_prop_collection, key: str,
             context: Context):  # noqa N802
        """
        Load bl_collection elements as standalone datablocks.
        """
        for name, item in bl_collection.items():
            collection_name = BlendData.instance().bl_collection_name_from_ID(
                item)
            if skip_bpy_data_item(collection_name, item):
                continue
            uuid = ensure_uuid(item)
            self._data[uuid] = DatablockProxy().load(
                item, name, context, bpy_data_collection_name=collection_name)

        return self
Code example #6
File: bpy_data_proxy.py Project: nondejus/mixer
    def initialize_ref_targets(
            self, synchronized_properties: SynchronizedProperties):
        """Keep track of all bpy.data items so that loading recognizes references to them

        Call this before updating the proxy from send_scene_content. It is not needed on the
        receiver side.

        TODO check if this is actually required or if we can rely upon is_embedded_data being False
        """
        # Normal operation no longer involves BpyDataProxy.load() as initial synchronization behaves
        # like a creation. The current load_as_what() implementation relies on root_ids to determine if
        # a T.ID must be loaded as an IDRef (pointer to bpy.data) or an IDDef (pointer to an "owned" ID),
        # so we need to load all the root_ids before loading anything into the proxy.
        # However, root_ids may no longer be required if we can load all the proxies inside out (deepest first, i.e.
        # (Mesh, Metaball, ...), then Object, then Scene). This should be possible as we sort
        # the updates inside out in update() so that the receiver gets them in order
        for name, _ in synchronized_properties.properties(
                bpy_type=T.BlendData):
            if name in collection_name_to_type:
                # TODO use BlendData
                bl_collection = getattr(bpy.data, name)
                for _id_name, item in bl_collection.items():
                    uuid = ensure_uuid(item)
                    self.state.datablocks[uuid] = item
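The uuid-to-datablock map filled here (state.datablocks) is what later lets the loader recognize references to bpy.data items. A minimal sketch of the lookup it enables; resolve_ref and the mixer_uuid attribute on the reference proxy are illustrative assumptions:

def resolve_ref(ref_proxy, state):
    # Hypothetical: map a reference proxy back to the live bpy.data datablock
    # registered by initialize_ref_targets()
    return state.datablocks[ref_proxy.mixer_uuid]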
Code example #7
File: diff.py Project: UPBGE/blender-addons
    def diff(self, proxy: DatablockCollectionProxy, collection_name: str,
             synchronized_properties: SynchronizedProperties):
        self._items_added.clear()
        self._items_removed.clear()
        self._items_renamed.clear()

        # Proxies for received image datablocks that failed to load because of locally misconfigured shared folders do
        # not have a datablock (they do have one when loading the .blend file). Do not consider the proxies without a
        # datablock, otherwise they would be detected as deleted and removals would be sent to peers that may have
        # them.
        proxies = {
            datablock_proxy.mixer_uuid: datablock_proxy
            for datablock_proxy in proxy._data.values()
            if datablock_proxy.has_datablock
        }
        bl_collection = getattr(bpy.data, collection_name)

        # uuid -> (datablock, collection name)
        blender_items: Dict[Uuid, Tuple[T.ID, str]] = {}
        conflicts: List[T.ID] = []
        for datablock in bl_collection.values():
            if skip_bpy_data_item(collection_name, datablock):
                continue

            uuid = datablock.mixer_uuid
            if uuid in blender_items.keys():
                conflicts.append(datablock)
            else:
                ensure_uuid(datablock)
                if datablock.mixer_uuid in blender_items.keys():
                    logger.error(f"Duplicate uuid found for {datablock}")
                    continue

                blender_items[datablock.mixer_uuid] = (datablock,
                                                       collection_name)

        for second_datablock in conflicts:
            first_datablock = blender_items[second_datablock.mixer_uuid][0]
            if first_datablock.library is None:
                if second_datablock.library is None:
                    # local/local: assume the second is the new conflicting datablock, from a copy-paste
                    second_datablock.mixer_uuid = ""
                    ensure_uuid(second_datablock)
                    blender_items[second_datablock.mixer_uuid] = (
                        second_datablock, collection_name)
                else:
                    # local/linked: first is made_local from linked second
                    first_datablock.mixer_uuid = ""
                    ensure_uuid(first_datablock)
                    blender_items[first_datablock.mixer_uuid] = (
                        first_datablock, collection_name)
            else:
                if second_datablock.library is not None:
                    # linked/local: breaks the assumption that local datablocks are listed before linked ones. Strange.
                    # could handle it like local/linked if we were sure that it does not have another weird cause
                    logger.error(
                        f"Unexpected link datablock {first_datablock} listed before local {second_datablock} ..."
                    )
                    logger.error(f"... {second_datablock} ignored")
                else:
                    # linked/linked: conflict between two linked datablocks. One of:
                    # - a library contains uuids and is indirectly linked more than once
                    # - a self link
                    # Probably need to locally reset both uuids, keeping the link target uuid for the directly linked datablock
                    logger.error(
                        f"Linked datablock with duplicate uuids {first_datablock} {second_datablock}..."
                    )
                    logger.error("... unsupported")

        proxy_uuids = set(proxies.keys())
        blender_uuids = set(blender_items.keys())

        # Ignore linked datablocks when looking for renamed datablocks, as they cannot be renamed locally
        renamed_uuids = {
            uuid
            for uuid in blender_uuids & proxy_uuids
            if not isinstance(proxies[uuid], DatablockLinkProxy)
            and proxies[uuid].data("name") != blender_items[uuid][0].name
        }
        added_uuids = blender_uuids - proxy_uuids - renamed_uuids
        removed_uuids = proxy_uuids - blender_uuids - renamed_uuids

        # this finds standalone datablocks, link datablocks and override datablocks
        self._items_added = [(blender_items[uuid][0], blender_items[uuid][1])
                             for uuid in added_uuids]
        self._items_removed = [proxies[uuid] for uuid in removed_uuids]

        # TODO LIB
        self._items_renamed = [(proxies[uuid], blender_items[uuid][0].name)
                               for uuid in renamed_uuids]
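For reference, the four library-conflict cases handled above, where "first" is the datablock already registered for the uuid and "second" is the newly encountered one:
- local/local: second is assumed to be a duplicate from a copy-paste; its uuid is reset and regenerated.
- local/linked: first is assumed to have been made local from the linked second; first gets a new uuid.
- linked/local: unexpected, since local datablocks are assumed to be listed before linked ones; second is ignored with an error.
- linked/linked: duplicate uuids between two linked datablocks (a library linked indirectly more than once, or a self link); unsupported.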
Code example #8
    def update(self, diff: BpyDataCollectionDiff,
               context: Context) -> Changeset:
        """
        Update the proxy according to local datablock creations, removals or renames (sender side)
        """
        changeset = Changeset()

        # Sort so that the tests receive the messages in deterministic order, for two reasons:
        # - The tests compare the creation message streams received from the participating Blenders and there
        #   is no reason why they would emit creation messages in the same order
        # - TestObjectRenameGeneric.test_update_object exhibits a random failure without the sort.
        #   Scene creation messages order is then random and an update is missed when the scene that links
        #   the updated object is not current. Setting PYTHONHASHSEED is not enough to get a deterministic test outcome.
        added_names = sorted(diff.items_added, key=lambda x: x[0].name_full)

        for datablock, collection_name in added_names:
            name_full = datablock.name_full
            logger.info("Perform update/creation for %s[%s]", collection_name,
                        name_full)
            try:
                uuid = ensure_uuid(datablock)
                context.proxy_state.add_datablock(uuid, datablock)
                proxy = DatablockProxy.make(datablock).load(datablock, context)
                context.proxy_state.proxies[uuid] = proxy
                self._data[uuid] = proxy
                changeset.creations.append(proxy)
            except MaxDepthExceeded as e:
                logger.error(
                    f"MaxDepthExceeded while loading {collection_name}[{name_full}]:"
                )
                logger.error("... Nested attribute depth is too large: ")
                logger.error(f"... {e!r}")
            except Exception:
                logger.error(
                    f"Exception while loading {collection_name}[{name_full}]:")
                for line in traceback.format_exc().splitlines():
                    logger.error(line)

        for proxy in diff.items_removed:
            try:
                logger.warning("Perform removal for %s", proxy)
                uuid = proxy.mixer_uuid
                changeset.removals.append(
                    (uuid, proxy.collection_name, str(proxy)))
                del self._data[uuid]
                del context.proxy_state.proxies[uuid]
                try:
                    context.proxy_state.remove_datablock(uuid)
                except KeyError:
                    logger.warning(
                        f"remove_datablock: no entry for {uuid}. Assuming removed by undo"
                    )
            except Exception:
                logger.error(
                    f"Exception during update/removal for proxy {proxy}:")
                for line in traceback.format_exc().splitlines():
                    logger.error(line)

        #
        # Handle spontaneous renames
        #
        # - local and remote are initially synced with 2 objects with uuid/name D7/A FC/B
        # - local renames D7/A into B
        #   - D7 is actually renamed into B.001 !
        #   - we detect (D7 -> B.001)
        #   - remote processes normally
        # - local renames D7/B.001 into B
        #   - D7 is renamed into B
        #   - FC is renamed into B.001
        #   - we detect (D7->B, FC->B.001)
        #   - local result is (D7/B, FC/B.001)
        # - local repeatedly renames the item named B.001 into B
        # - at some point on remote, the execution of a rename command will provoke a spontaneous rename,
        #   resulting in a situation where remote has FC/B.001 and D7/B.002 linked to the
        #   Master collection and also a FC/B unlinked
        #
        for proxy, new_name in diff.items_renamed:
            old_name = proxy.data("name")
            changeset.renames.append(
                (proxy.mixer_uuid, old_name, new_name, str(proxy)))
            proxy._data["name"] = new_name

        return changeset
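Changeset is consumed but never defined in these excerpts. A plausible minimal shape, inferred from the appends in examples #8 and #9; this is a sketch and the real class may carry more fields:

from dataclasses import dataclass, field
from typing import Any, List, Tuple

@dataclass
class Changeset:
    # Sketch of the container filled by update(): one list per kind of local change
    creations: List[Any] = field(default_factory=list)  # DatablockProxy instances
    removals: List[Tuple[str, str, str]] = field(default_factory=list)  # (uuid, collection_name, proxy repr)
    renames: List[Tuple[str, str, str, str]] = field(default_factory=list)  # (uuid, old_name, new_name, proxy repr)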
Code example #9
    def update(self, diff: BpyPropCollectionDiff,
               context: Context) -> Changeset:
        """
        Update the proxy according to local datablock creations, removals or renames
        """
        changeset = Changeset()
        # Sort so that the tests receive the messages in deterministic order. Sad but not very harmful
        added_names = sorted(diff.items_added.keys())
        for name in added_names:
            collection_name = diff.items_added[name]
            logger.info("Perform update/creation for %s[%s]", collection_name,
                        name)
            try:
                # TODO could have a datablock directly
                collection = getattr(bpy.data, collection_name)
                id_ = collection.get(name)
                if id_ is None:
                    logger.error(
                        "update: request addition for %s[%s]: not found",
                        collection_name, name)
                    continue
                uuid = ensure_uuid(id_)
                context.proxy_state.datablocks[uuid] = id_
                proxy = DatablockProxy.make(id_).load(
                    id_,
                    name,
                    context,
                    bpy_data_collection_name=collection_name)
                context.proxy_state.proxies[uuid] = proxy
                self._data[uuid] = proxy
                changeset.creations.append(proxy)
            except MaxDepthExceeded as e:
                logger.error(
                    f"MaxDepthExceeded while loading {collection_name}[{name}]:"
                )
                logger.error("... Nested attribute depth is too large: ")
                logger.error(f"... {e!r}")
            except Exception:
                logger.error(
                    f"Exception while loading {collection_name}[{name}]:")
                for line in traceback.format_exc().splitlines():
                    logger.error(line)

        for proxy in diff.items_removed:
            try:
                logger.info("Perform removal for %s", proxy)
                uuid = proxy.mixer_uuid()
                changeset.removals.append(
                    (uuid, proxy.collection_name, str(proxy)))
                del self._data[uuid]
                id_ = context.proxy_state.datablocks[uuid]
                del context.proxy_state.proxies[uuid]
                del context.proxy_state.datablocks[uuid]
            except Exception:
                logger.error(
                    f"Exception during update/removal for proxy {proxy}:")
                for line in traceback.format_exc().splitlines():
                    logger.error(line)

        #
        # Handle spontaneous renames
        #
        # - local and remote are initially synced with 2 objects with uuid/name D7/A FC/B
        # - local renames D7/A into B
        #   - D7 is actually renamed into B.001 !
        #   - we detect (D7 -> B.001)
        #   - remote processes normally
        # - local renames D7/B.001 into B
        #   - D7 is renamed into B
        #   - FC is renamed into B.001
        #   - we detect (D7->B, FC->B.001)
        #   - local result is (D7/B, FC/B.001)
        # - local repeatedly renames the item named B.001 into B
        # - at some point on remote, the execution of a rename command will provoke a spontaneous rename,
        #   resulting in a situation where remote has FC/B.001 and D7/B.002 linked to the
        #   Master collection and also a FC/B unlinked
        #
        for proxy, new_name in diff.items_renamed:
            uuid = proxy.mixer_uuid()
            if proxy.collection[
                    new_name] is not context.proxy_state.datablocks[uuid]:
                logger.error(
                    f"update rename : {proxy.collection}[{new_name}] is not {context.proxy_state.datablocks[uuid]} for {proxy}, {uuid}"
                )

            old_name = proxy.data("name")
            changeset.renames.append(
                (proxy.mixer_uuid(), old_name, new_name, str(proxy)))
            proxy.rename(new_name)

        return changeset
Code example #10
    def diff(self, proxy: DatablockCollectionProxy, collection_name: str,
             synchronized_properties: SynchronizedProperties):
        self._items_added.clear()
        self._items_removed.clear()
        self._items_renamed.clear()

        # Proxies for received image datablocks that failed to load because of locally misconfigured shared folders do
        # not have a datablock (they do have one when loading the .blend file). Do not consider the proxies without a
        # datablock, otherwise they would be detected as deleted and removals would be sent to peers that may have
        # them.
        proxies = {
            datablock_proxy.mixer_uuid: datablock_proxy
            for datablock_proxy in proxy._data.values()
            if datablock_proxy.has_datablock
        }
        bl_collection = getattr(bpy.data, collection_name)

        # uuid -> (datablock, collection name)
        blender_items: Dict[Uuid, Tuple[T.ID, str]] = {}

        for datablock in bl_collection.values():
            if skip_bpy_data_item(collection_name, datablock):
                continue

            uuid = datablock.mixer_uuid
            if uuid in blender_items.keys():
                # duplicate uuid, from an object duplication
                duplicate_name, duplicate_collection_name = blender_items[uuid]
                logger.info(
                    f"Duplicate uuid {uuid} in bpy.data.{duplicate_collection_name} for {duplicate_name} and bpy.data.{collection_name} for {datablock.name_full!r}..."
                )
                logger.info(
                    f"... assuming object was duplicated. Resetting {datablock.name_full!r} (not an error)"
                )
                # reset the uuid; ensure_uuid() below will regenerate it
                datablock.mixer_uuid = ""

            ensure_uuid(datablock)
            if datablock.mixer_uuid in blender_items.keys():
                logger.error(f"Duplicate uuid found for {datablock}")
                continue

            blender_items[datablock.mixer_uuid] = (datablock, collection_name)

        proxy_uuids = set(proxies.keys())
        blender_uuids = set(blender_items.keys())

        # Ignore linked datablocks when looking for renamed datablocks, as they cannot be renamed locally
        renamed_uuids = {
            uuid
            for uuid in blender_uuids & proxy_uuids
            if not isinstance(proxies[uuid], DatablockLinkProxy)
            and proxies[uuid].data("name") != blender_items[uuid][0].name
        }
        added_uuids = blender_uuids - proxy_uuids - renamed_uuids
        removed_uuids = proxy_uuids - blender_uuids - renamed_uuids

        # this finds standalone datablocks, link datablocks and override datablocks
        self._items_added = [(blender_items[uuid][0], blender_items[uuid][1])
                             for uuid in added_uuids]
        self._items_removed = [proxies[uuid] for uuid in removed_uuids]

        # TODO LIB
        self._items_renamed = [(proxies[uuid], blender_items[uuid][0].name)
                               for uuid in renamed_uuids]