Example #1
    def test_rename(self):
        # test_diff.TestDiff.test_rename
        new_worlds = ["W0", "W1", "W2"]
        new_worlds.sort()
        for w in new_worlds:
            D.worlds.new(w)

        self.proxy.load(test_context)

        renamed = [("W0", "W00"), ("W2", "W22")]
        renamed.sort(key=sort_renamed_item)
        for old_name, new_name in renamed:
            D.worlds[old_name].name = new_name

        diff = BpyBlendDiff()
        diff.diff(self.proxy, test_context)
        for name, delta in diff.collection_deltas:
            self.assertEqual(0, len(delta.items_added),
                             f"added count mismatch for {name}")
            self.assertEqual(0, len(delta.items_removed),
                             f"removed count mismatch for {name}")
            if name == "worlds":
                self.assertEqual(len(renamed), len(delta.items_renamed),
                                 f"renamed count mismatch for {name}")
                items_renamed = list(delta.items_renamed)
                items_renamed.sort(key=sort_renamed_item)
                items_renamed = [(proxy.data("name"), new_name)
                                 for proxy, new_name in items_renamed]
                self.assertEqual(renamed, items_renamed,
                                 f"removed count mismatch for {name}")
            else:
                self.assertEqual(0, len(delta.items_renamed),
                                 f"renamed count mismatch for {name}")
Example #2
    def test_create(self):
        # test_diff.TestDiff.test_create
        self.proxy.load(test_properties)
        new_worlds = ["W0", "W1"]
        new_worlds.sort()
        for w in new_worlds:
            D.worlds.new(w)
        diff = BpyBlendDiff()
        diff.diff(self.proxy, test_properties)
        for collection_name, delta in diff.collection_deltas:
            self.assertEqual(0, len(delta.items_removed),
                             f"removed count mismatch for {collection_name}")
            self.assertEqual(0, len(delta.items_renamed),
                             f"renamed count mismatch for {collection_name}")
            if collection_name == "worlds":
                self.assertEqual(
                    len(new_worlds), len(delta.items_added),
                    f"added count mismatch for {collection_name}")
                found = [datablock.name for datablock, _ in delta.items_added]
                found.sort()
                self.assertEqual(
                    new_worlds, found,
                    f"added count mismatch for {collection_name}")
            else:
                self.assertEqual(
                    0, len(delta.items_added),
                    f"added count mismatch for {collection_name}")
Example #3
    def test_remove(self):
        # test_diff.TestDiff.test_remove
        new_worlds = ["W0", "W1", "W2"]
        new_worlds.sort()
        for w in new_worlds:
            D.worlds.new(w)

        self.proxy.load(test_context)

        removed = ["W0", "W1"]
        removed.sort()
        for w in removed:
            D.worlds.remove(D.worlds[w])

        diff = BpyBlendDiff()
        diff.diff(self.proxy, test_context)
        for name, delta in diff.collection_deltas:
            self.assertEqual(0, len(delta.items_added),
                             f"added count mismatch for {name}")
            self.assertEqual(0, len(delta.items_renamed),
                             f"renamed count mismatch for {name}")
            if name == "worlds":
                self.assertEqual(len(removed), len(delta.items_removed),
                                 f"removed count mismatch for {name}")
                items_removed = [
                    proxy.data("name") for proxy in delta.items_removed
                ]
                items_removed.sort()
                self.assertEqual(removed, items_removed,
                                 f"removed count mismatch for {name}")
            else:
                self.assertEqual(0, len(delta.items_removed),
                                 f"removed count mismatch for {name}")
Example #4
    def load(self, synchronized_properties: SynchronizedProperties):
        """FOR TESTS ONLY Load the current scene into this proxy

        Only used for test. The initial load is performed by update()
        """
        diff = BpyBlendDiff()
        diff.diff(self, synchronized_properties)
        self.update(diff, set(), False, synchronized_properties)
        return self
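In the test examples, this method is typically called on a freshly created proxy to take a baseline snapshot before the test mutates bpy.data, so that a later BpyBlendDiff only reports the changes made by the test itself. A minimal sketch of such a fixture, assuming the BpyDataProxy and test_properties names used in the other examples:

class TestDiff(unittest.TestCase):
    def setUp(self):
        # hypothetical fixture: snapshot the current Blender data as a baseline
        self.proxy = BpyDataProxy().load(test_properties)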
Example #5
def send_scene_data_to_server(scene, dummy):

    logger.debug(
        "send_scene_data_to_server(): skip_next_depsgraph_update %s, pending_test_update %s",
        share_data.client.skip_next_depsgraph_update,
        share_data.pending_test_update,
    )

    depsgraph = bpy.context.evaluated_depsgraph_get()
    if depsgraph.updates:
        logger.debug(
            f"DG updates for {depsgraph.scene} {depsgraph.view_layer}")
        for update in depsgraph.updates:
            logger.debug(" ......%r", update.id.original)
    else:
        # FIXME Possible missed update:
        # If an updated datablock is not linked in the current scene/view_layer, the update triggers
        # an empty DG update batch. This can happen when the update is from a script.
        logger.info(
            f"DG updates empty for {depsgraph.scene} {depsgraph.view_layer}")

    # prevent processing self events, but always process test updates
    if not share_data.pending_test_update and share_data.client.skip_next_depsgraph_update:
        share_data.client.skip_next_depsgraph_update = False
        logger.debug(
            "send_scene_data_to_server canceled (skip_next_depsgraph_update = True) ..."
        )
        return

    share_data.pending_test_update = False
    bpy_data_proxy = share_data.bpy_data_proxy
    depsgraph = bpy.context.evaluated_depsgraph_get()

    updates, delayed_updates = updates_to_check(depsgraph)

    # processing of delayed updates is deferred until the selected objects return to OBJECT mode
    process_delayed_updates = not delayed_updates

    if delayed_updates:
        bpy_data_proxy.append_delayed_updates(delayed_updates)

    # Compute the difference between the proxy state and the Blender state
    # It is a coarse difference at the ID level (created, removed, renamed)
    diff = BpyBlendDiff()
    diff.diff(bpy_data_proxy, safe_properties)

    # Ask the proxy to compute the list of elements to synchronize and update itself
    changeset = bpy_data_proxy.update(diff, updates, process_delayed_updates,
                                      safe_properties)

    # Send creations before updates so that collection updates for new objects have a valid target
    data_api.send_data_creations(changeset.creations)
    data_api.send_data_removals(changeset.removals)
    data_api.send_data_renames(changeset.renames)
    data_api.send_data_updates(changeset.updates)

    logger.debug("send_scene_data_to_server: end")
Example #6
    def test_create_delete_rename(self):
        # test_diff.TestDiff.test_create_delete_rename
        new_worlds = ["W0", "W1", "W2", "W4"]
        new_worlds.sort()
        for w in new_worlds:
            D.worlds.new(w)

        self.proxy.load(test_properties)

        renamed = [("W0", "W00"), ("W2", "W22"), ("W4", "W44")]
        renamed.sort(key=sort_renamed_item)
        for old_name, new_name in renamed:
            D.worlds[old_name].name = new_name

        added = ["W0", "W5"]
        added.sort()
        for w in added:
            D.worlds.new(w)

        removed = ["W1", "W00"]
        removed.sort()
        for w in removed:
            D.worlds.remove(D.worlds[w])

        diff = BpyBlendDiff()
        diff.diff(self.proxy, test_properties)
        for name, delta in diff.collection_deltas:
            if name == "worlds":
                items_added = [
                    datablock.name for datablock, _ in delta.items_added
                ]
                items_added.sort()
                self.assertEqual(items_added, ["W0", "W5"],
                                 f"added count mismatch for {name}")

                items_renamed = list(delta.items_renamed)
                items_renamed.sort(key=sort_renamed_item)
                items_renamed = [(proxy.data("name"), new_name)
                                 for proxy, new_name in items_renamed]
                self.assertEqual(items_renamed, [("W2", "W22"), ("W4", "W44")],
                                 f"renamed count mismatch for {name}")

                items_removed = [
                    proxy.data("name") for proxy in delta.items_removed
                ]
                items_removed.sort()
                self.assertEqual(items_removed, ["W0", "W1"],
                                 f"removed count mismatch for {name}")
            else:
                self.assertEqual(0, len(delta.items_renamed),
                                 f"renamed count mismatch for {name}")
                self.assertEqual(0, len(delta.items_removed),
                                 f"removed count mismatch for {name}")
                self.assertEqual(0, len(delta.items_added),
                                 f"added count mismatch for {name}")
Example #7
    def test_create(self):
        # test_diff.TestDiff.test_create
        self.proxy.load(test_context)
        new_worlds = ["W0", "W1"]
        new_worlds.sort()
        for w in new_worlds:
            D.worlds.new(w)
        diff = BpyBlendDiff()
        diff.diff(self.proxy, test_context)
        for name, delta in diff.collection_deltas:
            self.assertEqual(0, len(delta.items_removed),
                             f"removed count mismatch for {name}")
            self.assertEqual(0, len(delta.items_renamed),
                             f"renamed count mismatch for {name}")
            if name == "worlds":
                self.assertEqual(len(new_worlds), len(delta.items_added),
                                 f"added count mismatch for {name}")
                found = list(delta.items_added.keys())
                found.sort()
                self.assertEqual(new_worlds, found,
                                 f"added count mismatch for {name}")
            else:
                self.assertEqual(0, len(delta.items_added),
                                 f"added count mismatch for {name}")
Example #8
class TestWorld(unittest.TestCase):
    def setUp(self):
        bpy.ops.wm.open_mainfile(filepath=test_blend_file)
        self.bpy_data_proxy = BpyDataProxy()
        self.diff = BpyBlendDiff()
        bpy.data.worlds[0].name = "World"
        register_bl_equals(self, safe_properties)

    def test_world(self):
        # test_end_to_end.TestWorld.test_world
        world = bpy.data.worlds[0]
        world.use_nodes = True
        self.assertGreaterEqual(len(world.node_tree.nodes), 2)

        self.diff.diff(self.bpy_data_proxy, safe_properties)
        sent_ids = {}
        sent_ids.update({("worlds", world.name): world})

        changeset = self.bpy_data_proxy.update(self.diff, {}, False,
                                               safe_properties)
        updates = changeset.creations
        # avoid clash on restore
        world.name = world.name + "_bak"

        codec = Codec()
        for update in updates:
            key = (update.collection_name, update.data("name"))
            sent_id = sent_ids.get(key)
            if sent_id is None:
                continue

            # pretend it is a new one
            update._datablock_uuid += "_new"

            encoded = codec.encode(update)
            # sender side
            #######################
            # receiver side
            decoded = codec.decode(encoded)
            created, _ = self.bpy_data_proxy.create_datablock(decoded)
            self.assertEqual(created, sent_id)

    def test_non_existing(self):
        # test_end_to_end.TestWorld.test_non_existing
        world = bpy.data.worlds[0]

        self.diff.diff(self.bpy_data_proxy, safe_properties)
        sent_ids = {}
        sent_ids.update({("worlds", world.name): world})

        changeset = self.bpy_data_proxy.update(self.diff, {}, False,
                                               safe_properties)
        creations = changeset.creations
        # avoid clash on restore
        world.name = world.name + "_bak"

        codec = Codec()
        for update in creations:
            key = (update.collection_name, update.data("name"))
            sent_id = sent_ids.get(key)
            if sent_id is None:
                continue

            # pretend it is a new one
            update._datablock_uuid += "_new"

            # create a property on the send proxy and test that it does not fail on the receiver
            # property on ID
            update._data["does_not_exist_property"] = ""
            update._data["does_not_exist_struct"] = StructProxy()
            update._data["does_not_exist_ID"] = DatablockProxy()

            encoded = codec.encode(update)
            # sender side
            #######################
            # receiver side
            decoded = codec.decode(encoded)
            created, _ = self.bpy_data_proxy.create_datablock(decoded)
            self.assertEqual(created, sent_id)
Example #9
    def generate_all_uuids(self):
        # as a side effect, BpyBlendDiff generates the uuids
        _ = BpyBlendDiff()
        _.diff(self.proxy, test_properties)
Example #10
def send_scene_data_to_server(scene, dummy):
    logger.debug(
        "send_scene_data_to_server(): skip_next_depsgraph_update %s, pending_test_update %s",
        share_data.client.skip_next_depsgraph_update,
        share_data.pending_test_update,
    )

    timer = share_data.current_stats_timer

    if not share_data.client:
        logger.info("send_scene_data_to_server canceled (no client instance)")
        return

    share_data.set_dirty()
    with timer.child("clear_lists"):
        share_data.clear_lists()

    depsgraph = bpy.context.evaluated_depsgraph_get()
    if depsgraph.updates:
        logger.debug("Current dg updates ...")
        for update in depsgraph.updates:
            logger.debug(" ......%s", update.id.original)

    # prevent processing self events, but always process test updates
    if not share_data.pending_test_update and share_data.client.skip_next_depsgraph_update:
        share_data.client.skip_next_depsgraph_update = False
        logger.debug("send_scene_data_to_server canceled (skip_next_depsgraph_update = True) ...")
        return

    share_data.pending_test_update = False

    if not is_in_object_mode():
        logger.info("send_scene_data_to_server canceled (not is_in_object_mode)")
        return

    update_object_state(share_data.old_objects, share_data.blender_objects)

    with timer.child("update_scenes_state"):
        update_scenes_state()

    with timer.child("update_collections_state"):
        update_collections_state()

    changed = False
    with timer.child("checkForChangeAndSendUpdates"):
        changed |= remove_objects_from_collections()
        changed |= remove_objects_from_scenes()
        changed |= remove_collections_from_collections()
        changed |= remove_collections_from_scenes()
        changed |= remove_collections()
        changed |= remove_scenes()
        changed |= add_scenes()
        changed |= add_collections()
        changed |= add_objects()

        # Updates from the VRtist protocol and from the full Blender protocol must be carefully intermixed.
        # This is an unfortunate requirement of the current coexistence of both protocols.

        # After creation of meshes: meshes are not yet supported by the full Blender protocol,
        # but are needed to properly create objects.
        # Before creation of objects: the VRtist protocol will implicitly create objects with
        # inappropriate default values (e.g. transform creates an object with no data).
        if share_data.use_experimental_sync():
            # Compute the difference between the proxy state and the Blender state
            # It is a coarse difference at the ID level (created, removed, renamed)
            diff = BpyBlendDiff()
            diff.diff(share_data.proxy, safe_context)

            # Ask the proxy to compute the list of elements to synchronize and update itself
            depsgraph = bpy.context.evaluated_depsgraph_get()
            updates, removals = share_data.proxy.update(diff, safe_context, depsgraph.updates)

            # Send the data update messages (includes serialization)
            data_api.send_data_removals(removals)
            data_api.send_data_updates(updates)
            share_data.proxy.debug_check_id_proxies()

        # Send the VRtist transforms after the full Blender protocol has had the opportunity to create the object
        # data that VRtist does not handle; otherwise the receiver creates an empty when it receives a transform.
        changed |= update_transforms()
        changed |= add_collections_to_scenes()
        changed |= add_collections_to_collections()
        changed |= add_objects_to_collections()
        changed |= add_objects_to_scenes()
        changed |= update_collections_parameters()
        changed |= create_vrtist_objects()
        changed |= delete_scene_objects()
        changed |= rename_objects()
        changed |= update_objects_visibility()
        changed |= update_objects_transforms()
        changed |= reparent_objects()
        changed |= shot_manager.check_montage_mode()

    if not changed:
        with timer.child("update_objects_data"):
            update_objects_data()

    # update for next change
    with timer.child("update_current_data"):
        share_data.update_current_data()

    logger.debug("send_scene_data_to_server: end")
Example #11
def send_scene_data_to_server(scene, dummy):

    logger.debug(
        "send_scene_data_to_server(): skip_next_depsgraph_update %s, pending_test_update %s",
        share_data.client.skip_next_depsgraph_update,
        share_data.pending_test_update,
    )

    depsgraph = bpy.context.evaluated_depsgraph_get()
    if depsgraph.updates:
        logger.debug("Current dg updates ...")
        for update in depsgraph.updates:
            logger.debug(" ......%s", update.id.original)

    # prevent processing self events, but always process test updates
    if not share_data.pending_test_update and share_data.client.skip_next_depsgraph_update:
        share_data.client.skip_next_depsgraph_update = False
        logger.debug(
            "send_scene_data_to_server canceled (skip_next_depsgraph_update = True) ..."
        )
        return

    share_data.pending_test_update = False
    bpy_data_proxy = share_data.bpy_data_proxy
    depsgraph = bpy.context.evaluated_depsgraph_get()

    # Delay the update of Object data to avoid Mesh updates in edit mode, but keep other updates.
    # Mesh separate delivers Collection as well as created Object and Mesh updates while the edited
    # object is in edit mode, and these updates are not delivered when leaving edit mode, so
    # make sure to process them anyway. It is also possible to edit multiple objects at once
    selected_objects = getattr(bpy.context, "selected_objects", {})
    updates = {update.id.original for update in depsgraph.updates}
    delayed_updates = set()
    for datablock in updates:
        if datablock in selected_objects and datablock.mode != "OBJECT" and datablock.data is not None:
            delayed_updates.add(datablock)
            delayed_updates.add(datablock.data)
    updates -= delayed_updates

    # processing of delayed updates is deferred until the selected objects return to OBJECT mode
    process_delayed_updates = not delayed_updates

    if delayed_updates:
        bpy_data_proxy.append_delayed_updates(delayed_updates)
        logger.info("send_scene_data_to_server. Delaying updates ")
        for update in delayed_updates:
            logger.info("... %s", update)

    # Compute the difference between the proxy state and the Blender state
    # It is a coarse difference at the ID level (created, removed, renamed)
    diff = BpyBlendDiff()
    diff.diff(bpy_data_proxy, safe_properties)

    # Ask the proxy to compute the list of elements to synchronize and update itself
    changeset = bpy_data_proxy.update(diff, updates, process_delayed_updates,
                                      safe_properties)

    # Send creations before updates so that collection updates for new objects have a valid target
    data_api.send_data_creations(changeset.creations)
    data_api.send_data_removals(changeset.removals)
    data_api.send_data_renames(changeset.renames)
    data_api.send_data_updates(changeset.updates)

    logger.debug("send_scene_data_to_server: end")
Example #12
def send_scene_data_to_server(scene, dummy):

    logger.debug(
        "send_scene_data_to_server(): skip_next_depsgraph_update %s, pending_test_update %s",
        share_data.client.skip_next_depsgraph_update,
        share_data.pending_test_update,
    )

    depsgraph = bpy.context.evaluated_depsgraph_get()
    if depsgraph.updates:
        logger.debug("DG updates for {depsgraph.scene} {depsgraph.view_layer}")
        for update in depsgraph.updates:
            logger.debug(" ......%s", update.id.original)
    else:
        # FIXME Possible missed update:
        # If an updated datablock is not linked in the current scene/view_layer, the update triggers
        # an empty DG update batch. This can happen when the update is from a script.
        logger.info(
            f"DG updates empty for {depsgraph.scene} {depsgraph.view_layer}")

    # prevent processing self events, but always process test updates
    if not share_data.pending_test_update and share_data.client.skip_next_depsgraph_update:
        share_data.client.skip_next_depsgraph_update = False
        logger.debug(
            "send_scene_data_to_server canceled (skip_next_depsgraph_update = True) ..."
        )
        return

    share_data.pending_test_update = False
    bpy_data_proxy = share_data.bpy_data_proxy
    depsgraph = bpy.context.evaluated_depsgraph_get()

    updates = {update.id.original for update in depsgraph.updates}

    # in some cases (TestShapeKey.test_rename_key), the Key update is missing. Always check for shape_keys
    shape_key_updates = {
        datablock.shape_keys
        for datablock in updates if hasattr(datablock, "shape_keys")
        and isinstance(datablock.shape_keys, T.Key)
    }
    updates.update(shape_key_updates)

    # Delay the update of Object data to avoid Mesh updates in edit or paint mode, but keep other updates.
    # Mesh separate delivers Collection as well as created Object and Mesh updates while the edited
    # object is in edit mode, and these updates are not delivered when leaving edit mode, so
    # make sure to process them anyway. It is also possible to edit multiple objects at once

    # When no Object is selected and a Mesh is selected the Object with the selected Mesh is the
    # active_object, but not in selected_objects
    current_objects = set(getattr(bpy.context, "selected_objects", []))
    active_object = getattr(bpy.context, "active_object", None)
    if active_object:
        current_objects.add(active_object)

    delayed_updates = set()
    for datablock in updates:
        if datablock in current_objects and datablock.mode != "OBJECT" and datablock.data is not None:
            delayed_updates.add(datablock)
            delayed_updates.add(datablock.data)
    updates -= delayed_updates

    # processing of delayed updates is deferred until the selected objects return to OBJECT mode
    process_delayed_updates = not delayed_updates

    if delayed_updates:
        bpy_data_proxy.append_delayed_updates(delayed_updates)
        logger.info("send_scene_data_to_server. Delaying updates ")
        for update in delayed_updates:
            logger.info("... %s", update)

    # Compute the difference between the proxy state and the Blender state
    # It is a coarse difference at the ID level (created, removed, renamed)
    diff = BpyBlendDiff()
    diff.diff(bpy_data_proxy, safe_properties)

    # Ask the proxy to compute the list of elements to synchronize and update itself
    changeset = bpy_data_proxy.update(diff, updates, process_delayed_updates,
                                      safe_properties)

    # Send creations before updates so that collection updates for new objects have a valid target
    data_api.send_data_creations(changeset.creations)
    data_api.send_data_removals(changeset.removals)
    data_api.send_data_renames(changeset.renames)
    data_api.send_data_updates(changeset.updates)

    logger.debug("send_scene_data_to_server: end")
Example #13
class TestWorld(unittest.TestCase):
    def setUp(self):
        self.bpy_data_proxy = BpyBlendProxy()
        self.diff = BpyBlendDiff()
        bpy.data.worlds[0].name = "World"
        register_bl_equals(self, safe_context)

    def test_world(self):
        world = bpy.data.worlds[0]
        world.use_nodes = True
        self.assertGreaterEqual(len(world.node_tree.nodes), 2)

        self.diff.diff(self.bpy_data_proxy, safe_context)
        sent_ids = {}
        sent_ids.update({("worlds", world.name): world})

        changeset = self.bpy_data_proxy.update(self.diff, safe_context)
        updates = changeset.creations
        # avoid clash on restore
        world.name = world.name + "_bak"

        codec = Codec()
        for update in updates:
            key = (update.collection_name, update.data("name"))
            sent_id = sent_ids.get(key)
            if sent_id is None:
                continue

            encoded = codec.encode(update)
            # sender side
            #######################
            # receiver side
            decoded = codec.decode(encoded)
            created = self.bpy_data_proxy.update_datablock(decoded)
            self.assertEqual(created, sent_id)

    def test_non_existing(self):
        world = bpy.data.worlds[0]

        self.diff.diff(self.bpy_data_proxy, safe_context)
        sent_ids = {}
        sent_ids.update({("worlds", world.name): world})

        changeset = self.bpy_data_proxy.update(self.diff, safe_context)
        creations = changeset.creations
        # avoid clash on restore
        world.name = world.name + "_bak"

        codec = Codec()
        for update in creations:
            key = (update.collection_name, update.data("name"))
            sent_id = sent_ids.get(key)
            if sent_id is None:
                continue

            # create a property on the send proxy and test that it does not fail on the receiver
            # property on ID
            update._data["does_not_exist_property"] = ""
            update._data["does_not_exist_struct"] = BpyStructProxy()
            update._data["does_not_exist_ID"] = BpyIDProxy()

            encoded = codec.encode(update)
            # sender side
            #######################
            # receiver side
            decoded = codec.decode(encoded)
            created = self.bpy_data_proxy.update_datablock(decoded)
            self.assertEqual(created, sent_id)