def build_data_create(buffer):
    """Decode a datablock-creation buffer, create the datablock and its
    structure-of-array elements, then send any rename changeset the creation
    produced.

    Only active when experimental sync is enabled. Any exception is logged
    (with the head and tail of the offending buffer) and the creation is
    ignored rather than propagated.
    """
    if not share_data.use_experimental_sync():
        return

    proxy_string, index = decode_string(buffer, 0)
    codec = Codec()
    rename_changeset = None
    try:
        datablock_proxy: DatablockProxy = codec.decode(proxy_string)
        logger.info("%s: %s", "build_data_create", datablock_proxy)

        # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
        share_data.set_dirty()

        _, rename_changeset = share_data.bpy_data_proxy.create_datablock(datablock_proxy)
        _decode_and_build_soas(datablock_proxy.mixer_uuid(), buffer, index)
    except Exception:
        logger.error("Exception during build_data_create")
        for line in traceback.format_exc().splitlines():
            logger.error(line)
        logger.error(buffer[0:200])
        logger.error("...")
        # fix: buffer[-200:0] is always an empty slice; log the last 200 bytes
        logger.error(buffer[-200:])
        logger.error("ignored")

    if rename_changeset:
        send_data_renames(rename_changeset)
def build_data_update(buffer: bytes):
    """Decode a datablock-update buffer and apply the resulting Delta to
    bpy_data_proxy, then build the structure-of-array elements.

    Only active when experimental sync is enabled. Any exception is logged
    (with the head and tail of the offending buffer) and the update is
    ignored rather than propagated.
    """
    if not share_data.use_experimental_sync():
        return

    proxy_string, index = decode_string(buffer, 0)
    codec = Codec()
    # fix: pre-bind delta so the error path cannot raise NameError when
    # codec.decode itself is what failed
    delta = None
    try:
        delta: DeltaUpdate = codec.decode(proxy_string)
        logger.info("%s: %s", "build_data_update", delta)

        # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
        share_data.set_dirty()

        share_data.bpy_data_proxy.update_datablock(delta)
        datablock_proxy = delta.value
        if datablock_proxy is not None:
            _decode_and_build_soas(datablock_proxy.mixer_uuid(), buffer, index)
    except Exception:
        logger.error("Exception during build_data_update")
        for line in traceback.format_exc().splitlines():
            logger.error(line)
        logger.error(f"During processing of buffer for {delta}")
        logger.error(buffer[0:200])
        logger.error("...")
        # fix: buffer[-200:0] is always an empty slice; log the last 200 bytes
        logger.error(buffer[-200:])
        logger.error("ignored")
def test_non_existing(self):
    """Inject unknown properties into a sent proxy; the receiver must not fail."""
    world = bpy.data.worlds[0]
    self.diff.diff(self.bpy_data_proxy, safe_context)

    expected = {("worlds", world.name): world}

    changeset = self.bpy_data_proxy.update(self.diff, safe_context)
    creations = changeset.creations

    # avoid clash on restore
    world.name = world.name + "_bak"

    codec = Codec()
    for creation in creations:
        lookup = (creation.collection_name, creation.data("name"))
        datablock = expected.get(lookup)
        if datablock is None:
            continue

        # create a property on the send proxy and test that is does not fail on the receiver
        # property on ID
        creation._data["does_not_exist_property"] = ""
        creation._data["does_not_exist_struct"] = BpyStructProxy()
        creation._data["does_not_exist_ID"] = BpyIDProxy()

        # sender side
        wire = codec.encode(creation)

        #######################

        # receiver side
        received = codec.decode(wire)
        created = self.bpy_data_proxy.update_datablock(received)
        self.assertEqual(created, datablock)
def test_world(self):
    # test_end_to_end.TestWorld.test_world
    """Round-trip a world creation through the codec and recreate it."""
    world = bpy.data.worlds[0]
    world.use_nodes = True
    self.assertGreaterEqual(len(world.node_tree.nodes), 2)

    self.diff.diff(self.bpy_data_proxy, safe_properties)

    expected = {("worlds", world.name): world}

    changeset = self.bpy_data_proxy.update(self.diff, {}, False, safe_properties)
    creations = changeset.creations

    # avoid clash on restore
    world.name = world.name + "_bak"

    codec = Codec()
    for creation in creations:
        lookup = (creation.collection_name, creation.data("name"))
        datablock = expected.get(lookup)
        if datablock is None:
            continue

        # pretend it is a new one
        creation._datablock_uuid += "_new"

        # sender side
        wire = codec.encode(creation)

        #######################

        # receiver side
        received = codec.decode(wire)
        created, _ = self.bpy_data_proxy.create_datablock(received)
        self.assertEqual(created, datablock)
def test_world(self):
    """Round-trip a world update through the codec and apply it."""
    world = bpy.data.worlds[0]
    world.use_nodes = True
    self.assertGreaterEqual(len(world.node_tree.nodes), 2)

    self.diff.diff(self.bpy_data_proxy, safe_context)

    expected = {("worlds", world.name): world}

    changeset = self.bpy_data_proxy.update(self.diff, safe_context)
    creations = changeset.creations

    # avoid clash on restore
    world.name = world.name + "_bak"

    codec = Codec()
    for creation in creations:
        lookup = (creation.collection_name, creation.data("name"))
        datablock = expected.get(lookup)
        if datablock is None:
            continue

        # sender side
        wire = codec.encode(creation)

        #######################

        # receiver side
        received = codec.decode(wire)
        created = self.bpy_data_proxy.update_datablock(received)
        self.assertEqual(created, datablock)
def build_data_update(buffer):
    """Decode a datablock update buffer and apply it to the shared proxy.

    Only active when experimental sync is enabled. Invalid blenddata paths
    and any other exception are logged and the update is ignored rather
    than propagated.
    """
    if not share_data.use_experimental_sync():
        return

    buffer, _ = common.decode_string(buffer, 0)
    codec = Codec()
    # fix: pre-bind names used by the error path, so that a failure in
    # codec.decode cannot raise NameError while reporting the original error
    id_proxy = None
    collection_name = key = None
    try:
        id_proxy = codec.decode(buffer)
        try:
            collection_name, key = blenddata_path(id_proxy)
        except InvalidPath:
            logger.error("... update ignored")
            return

        uuid = id_proxy.mixer_uuid()
        logger.info("build_data_update: %s[%s] %s", collection_name, key, uuid)
        share_data.proxy.update_one(id_proxy)

        # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
        share_data.set_dirty()
    except Exception:
        logger.error("Exception during build_data_update")
        log_traceback(logger.error)
        path = id_proxy._blenddata_path if id_proxy is not None else "<decode failed>"
        logger.error(f"During processing of buffer with blenddata_path {path}")
        logger.error(buffer[0:200])
        logger.error("...")
        # fix: buffer[-200:0] is always an empty slice; log the last 200 bytes
        logger.error(buffer[-200:])
        logger.error(f"Creation or update of bpy.data.{collection_name}[{key}] was ignored")
def _build_data_update_or_create(buffer, display_name: str, func: Callable[[BpyBlendProxy], BpyIDProxy]):
    """
    Process a datablock update or creation request.

    Args:
        buffer: the encoded proxy, prefixed by an encoded string
        display_name: human-readable command name, used in log messages
        func: the operation to apply with the decoded proxy

    Returns:
        the result of func, or None when sync is disabled or an error occurred
        (the request is then logged and ignored)
    """

    def log_exception(when: str):
        # fix: the message previously hard-coded ", decode"; `when` already
        # reports the failing phase on the next line
        logger.error(f"Exception during {display_name}")
        for line in traceback.format_exc().splitlines():
            logger.error(line)
        logger.error(f"During {when}")
        logger.error(buffer[0:200])
        logger.error("...")
        # fix: buffer[-200:0] is always an empty slice; log the last 200 bytes
        logger.error(buffer[-200:])
        logger.error("ignored")

    if not share_data.use_experimental_sync():
        return

    buffer, _ = common.decode_string(buffer, 0)
    codec = Codec()
    try:
        id_proxy = codec.decode(buffer)
    except Exception:
        log_exception("decode")
        # fix: without this return, the code below raised NameError on id_proxy
        return

    logger.info("%s: %s", display_name, id_proxy)
    try:
        # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
        share_data.set_dirty()
        return func(share_data.bpy_data_proxy, id_proxy)
    except Exception:
        log_exception(f"processing of buffer for {id_proxy}")
def build_data_update(buffer: bytes):
    """Decode a BlenderDataMessage carrying a datablock Delta and apply it
    to bpy_data_proxy, then build the structure-of-array elements.

    No-op when the VRtist protocol is in use. Decode errors and any other
    exception are logged and the update is ignored rather than propagated.
    """
    if share_data.use_vrtist_protocol():
        return

    share_data.set_dirty()
    codec = Codec()
    # fix: pre-bind delta so the generic error path cannot raise NameError
    # when decoding itself is what failed
    delta = None
    try:
        message = BlenderDataMessage()
        message.decode(buffer)
        delta: Delta = codec.decode(message.proxy_string)
        logger.debug("%s: %s", "build_data_update", delta)

        delta.value.arrays = message.arrays
        share_data.bpy_data_proxy.update_datablock(delta)

        datablock_proxy = delta.value
        if datablock_proxy is not None:
            _build_soas(datablock_proxy.mixer_uuid, message.soas)
    except DecodeError as e:
        logger.error(f"Decode error for {str(e.args[1])[:100]} . Possible causes...")
        logger.error("... user error: version mismatch")
        logger.error("... internal error: Proxy class not registered. Import it in blender_data.__init__.py")
    except Exception:
        logger.error("Exception during build_data_update")
        for line in traceback.format_exc().splitlines():
            logger.error(line)
        logger.error(f"During processing of buffer for {delta}")
        logger.error(buffer[0:200])
        logger.error("...")
        # fix: buffer[-200:0] is always an empty slice; log the last 200 bytes
        logger.error(buffer[-200:])
        logger.error("ignored")
def build_data_create(buffer):
    """Decode a BlenderDataMessage carrying a datablock proxy, create the
    datablock and its structure-of-array elements, then send any rename
    changeset the creation produced.

    No-op when the VRtist protocol is in use. Decode errors and any other
    exception are logged and the creation is ignored rather than propagated.
    """
    if share_data.use_vrtist_protocol():
        return

    share_data.set_dirty()
    rename_changeset = None
    codec = Codec()
    try:
        message = BlenderDataMessage()
        message.decode(buffer)
        datablock_proxy = codec.decode(message.proxy_string)
        logger.info("%s %s", "build_data_create", datablock_proxy)

        datablock_proxy.arrays = message.arrays
        _, rename_changeset = share_data.bpy_data_proxy.create_datablock(datablock_proxy)
        _build_soas(datablock_proxy.mixer_uuid, message.soas)
    except DecodeError as e:
        logger.error(f"Decode error for {str(e.args[1])[:100]} ...")
        logger.error("... possible version mismatch")
        return
    except Exception:
        logger.error("Exception during build_data_create")
        for line in traceback.format_exc().splitlines():
            logger.error(line)
        logger.error(buffer[0:200])
        logger.error("...")
        # fix: buffer[-200:0] is always an empty slice; log the last 200 bytes
        logger.error(buffer[-200:])
        logger.error("ignored")
        return

    if rename_changeset:
        send_data_renames(rename_changeset)
def test_camera(self):
    # test_codec.TestCodec.test_camera
    """Round-trip a camera proxy through the codec and save it into Blender."""
    transmit_name = "transmit_camera"

    # prepare camera
    sent_camera = D.cameras["Camera_0"]
    sent_camera.dof.focus_object = D.objects["Cube"]

    # load into proxy
    self.proxy.load(test_context)

    # patch the name so that it does not get mixed up as we restore later in the same scene
    sent_proxy = self.proxy.data("cameras").data("Camera_0")
    sent_proxy._data["name"] = transmit_name
    self.assertIsInstance(sent_proxy, BpyIDProxy)

    # encode
    codec = Codec()
    wire = codec.encode(sent_proxy)

    #
    # transmit
    #

    # create the receiving datablock
    received_camera = D.cameras.new(transmit_name)

    # decode into proxy
    received_proxy = codec.decode(wire)
    focus_ref = received_proxy.data("dof").data("focus_object")
    self.assertIsInstance(focus_ref, BpyIDRefProxy)
    self.assertEqual(focus_ref._datablock_uuid, sent_camera.dof.focus_object.mixer_uuid)

    # save into blender
    received_proxy.save(D.cameras, transmit_name, self.proxy.visit_state())

    self.assertEqual(sent_camera, received_camera)
def test_camera(self):
    # test_codec.TestCodec.test_camera
    """Round-trip a camera proxy through the codec and recreate the datablock."""
    transmit_name = "transmit_camera"

    # prepare camera
    sent_camera = D.cameras["Camera_0"]
    sent_camera.dof.focus_object = D.objects["Cube"]

    # load into proxy
    self.proxy.load(test_properties)

    # patch the name so that it does not get mixed up as we restore later in the same scene
    sent_proxy = self.proxy.data("cameras").search_one("Camera_0")
    sent_proxy._data["name"] = transmit_name
    self.assertIsInstance(sent_proxy, DatablockProxy)

    # encode
    codec = Codec()
    wire = codec.encode(sent_proxy)

    #
    # transmit
    #

    # decode into proxy
    received_proxy = codec.decode(wire)
    focus_ref = received_proxy.data("dof").data("focus_object")
    self.assertIsInstance(focus_ref, DatablockRefProxy)
    self.assertEqual(focus_ref._datablock_uuid, sent_camera.dof.focus_object.mixer_uuid)

    # save into blender under a fresh uuid
    received_proxy._datablock_uuid = "__" + received_proxy._datablock_uuid
    received_camera, _ = received_proxy.create_standalone_datablock(self.proxy.context())

    self.assertEqual(sent_camera, received_camera)