def build_collection_to_collection(data):
    """Link a child collection under a parent collection (VRtist protocol).

    Dropped in experimental (generic) mode, where it conflicts with the
    generic Blender protocol messages.
    """
    parent_name, cursor = common.decode_string(data, 0)
    child_name, _ = common.decode_string(data, cursor)

    # Only Blender emits this message (Blender/Blender sync); in generic
    # mode it conflicts with generic messages, so it is ignored.
    if share_data.use_experimental_sync():
        logger.warning(
            "build_collection_to_collection %s <- %s, ignore in experimental mode",
            parent_name,
            child_name,
        )
        return

    logger.info("build_collection_to_collection %s <- %s", parent_name, child_name)

    collections = share_data.blender_collections
    parent_collection = collections[parent_name]
    child_collection = collections[child_name]
    try:
        parent_collection.children.link(child_collection)
    except RuntimeError as e:
        if share_data.use_experimental_sync():
            # Added by the Blender Protocol
            logger.info(
                f"build_collection_to_collection(): parent {parent_name}, child {child_name}..."
            )
            logger.info("... Exception during parent.children.link() ...")
            logger.info("... Safe in experimental_sync ...")
            logger.info(f"... {e!r}")
        else:
            logger.warning(
                f"build_collection_to_collection(): parent {parent_name}, child {child_name}..."
            )
            logger.warning("... Exception during parent.children.link() ...")
            logger.warning(f"... {e!r}")
def build_collection_to_scene(data):
    """Link a collection into a scene's master collection (VRtist protocol)."""
    scene_name, cursor = common.decode_string(data, 0)
    collection_name, _ = common.decode_string(data, cursor)
    logger.info("build_collection_to_scene %s <- %s", scene_name, collection_name)

    try:
        scene = share_data.blender_scenes[scene_name]
    except KeyError:
        if not share_data.use_experimental_sync():
            raise
        # Removed by the Blender Protocol
        logger.info(
            f"build_collection_to_scene(): scene not found {scene_name}. Safe in experimental_sync ..."
        )
        return

    collection = share_data.blender_collections[collection_name]
    try:
        scene.collection.children.link(collection)
    except RuntimeError as e:
        if not share_data.use_experimental_sync():
            raise
        # Added by the Blender Protocol
        logger.info(
            f"build_collection_to_scene(): scene {scene_name}, collection {collection_name}..."
        )
        logger.info(f"... Exception during scene.collection.children.link() ...")
        logger.info(f"... Safe in experimental_sync ...")
        logger.info(f"... {e}")
    share_data.update_collection_temporary_visibility(collection_name)
def handler_send_scene_data_to_server(scene, dummy):
    """Depsgraph update handler: forward scene changes to the server.

    Guards against re-entrant invocation via the module-level
    processing_depsgraph_handler flag, which is always reset on exit.
    """
    global processing_depsgraph_handler
    if processing_depsgraph_handler:
        logger.error("Depsgraph handler recursion attempt")
        return

    processing_depsgraph_handler = True
    try:
        logger.debug("handler_send_scene_data_to_server")

        # Ensure we will rebuild accessors when a depsgraph update happens
        # todo investigate why we need this...
        share_data.set_dirty()

        if share_data.client.block_signals:
            logger.debug(
                "handler_send_scene_data_to_server canceled (block_signals = True)"
            )
            return

        # Dispatch to the generic (experimental) or VRtist sender
        sender = (
            generic.send_scene_data_to_server
            if share_data.use_experimental_sync()
            else send_scene_data_to_server
        )
        sender(scene, dummy)
    finally:
        processing_depsgraph_handler = False
def _build_data_update_or_create(buffer, display_name: str, func: Callable[[BpyBlendProxy], BpyIDProxy]):
    """
    Process a datablock update request.

    Decodes a proxy from the incoming buffer and hands it to func, which
    performs the actual create-or-update against the proxy state.

    Args:
        buffer: raw message payload, a string-encoded proxy at offset 0
        display_name: label used in log messages
        func: callback invoked as func(share_data.bpy_data_proxy, id_proxy)
    """

    def log_exception(when: str):
        # Log the traceback plus the head and tail of the offending buffer
        logger.error(f"Exception during {display_name}, decode")
        for line in traceback.format_exc().splitlines():
            logger.error(line)
        logger.error(f"During {when}")
        logger.error(buffer[0:200])
        logger.error("...")
        # bug fix: buffer[-200:0] was always an empty slice; log the last 200 bytes
        logger.error(buffer[-200:])
        logger.error("ignored")

    if not share_data.use_experimental_sync():
        return
    buffer, _ = common.decode_string(buffer, 0)
    codec = Codec()
    try:
        id_proxy = codec.decode(buffer)
    except Exception:
        log_exception("decode")
        # bug fix: without this return, id_proxy would be unbound below
        return

    logger.info("%s: %s", display_name, id_proxy)
    try:
        # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
        share_data.set_dirty()
        return func(share_data.bpy_data_proxy, id_proxy)
    except Exception:
        log_exception(f"processing of buffer for {id_proxy}")
def build_data_update(buffer):
    """Apply a datablock create-or-update received from the server (experimental sync)."""
    if not share_data.use_experimental_sync():
        return
    buffer, _ = common.decode_string(buffer, 0)
    codec = Codec()
    # Pre-bind names referenced by the error path so a decode failure
    # cannot raise a secondary NameError while logging.
    id_proxy = None
    collection_name = key = None
    try:
        id_proxy = codec.decode(buffer)
        try:
            collection_name, key = blenddata_path(id_proxy)
        except InvalidPath:
            logger.error("... update ignored")
            return
        uuid = id_proxy.mixer_uuid()
        logger.info("build_data_update: %s[%s] %s", collection_name, key, uuid)
        share_data.proxy.update_one(id_proxy)
        # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
        share_data.set_dirty()
    except Exception:
        logger.error("Exception during build_data_update")
        log_traceback(logger.error)
        # bug fix: id_proxy may be None if codec.decode() itself raised
        if id_proxy is not None:
            logger.error(
                f"During processing of buffer with blenddata_path {id_proxy._blenddata_path}"
            )
        logger.error(buffer[0:200])
        logger.error("...")
        # bug fix: buffer[-200:0] was always an empty slice; log the last 200 bytes
        logger.error(buffer[-200:])
        logger.error(
            f"Creation or update of bpy.data.{collection_name}[{key}] was ignored"
        )
def send_data_updates(updates: List[BpyIDProxy]):
    """Encode each proxy and send it as a BLENDER_DATA_UPDATE command."""
    if not share_data.use_experimental_sync():
        return
    if not updates:
        return
    codec = Codec()
    for proxy in updates:
        # We send an ID, so we need to make sure that it includes a bp.data collection name
        # and the associated key
        try:
            collection_name, key = blenddata_path(proxy)
        except InvalidPath:
            logger.error("... update ignored")
            continue

        logger.info("send_data_update %s[%s]", collection_name, key)

        try:
            encoded_proxy = codec.encode(proxy)
        except InvalidPath:
            logger.error("send_update: Exception :")
            log_traceback(logger.error)
            logger.error(
                f"while processing bpy.data.{collection_name}[{key}]:")
            # bug fix: skip this proxy; previously execution fell through and
            # used an unbound (or stale, from a prior iteration) encoded_proxy
            continue

        # For BpyIdProxy, the target is encoded in the proxy._blenddata_path
        buffer = common.encode_string(encoded_proxy)
        command = common.Command(common.MessageType.BLENDER_DATA_UPDATE, buffer, 0)
        share_data.client.add_command(command)
def build_collection_to_collection(data):
    """Link one collection as a child of another (VRtist protocol)."""
    parent_name, cursor = common.decode_string(data, 0)
    child_name, _ = common.decode_string(data, cursor)
    logger.info("build_collection_to_collection %s <- %s", parent_name, child_name)

    collections = share_data.blender_collections
    parent = collections[parent_name]
    child = collections[child_name]
    try:
        parent.children.link(child)
    except RuntimeError as e:
        # In experimental sync the link may already have been performed by
        # the Blender protocol, so the failure is only informational.
        if share_data.use_experimental_sync():
            logger.info(
                f"build_collection_to_collection(): parent {parent_name}, child {child_name}..."
            )
            logger.info(f"... Exception during parent.children.link() ...")
            logger.info(f"... Safe in experimental_sync ...")
            logger.info(f"... {e}")
        else:
            logger.warning(
                f"build_collection_to_collection(): parent {parent_name}, child {child_name}..."
            )
            logger.warning(f"... Exception during parent.children.link() ...")
            logger.warning(f"... {e}")
def build_remove_object_from_collection(data):
    """Unlink an object from a collection (VRtist protocol; dropped in generic mode)."""
    collection_name, cursor = common.decode_string(data, 0)
    object_name, _ = common.decode_string(data, cursor)

    # Only Blender emits this message (Blender/Blender sync); in generic
    # mode it conflicts with generic messages, so it is ignored.
    if share_data.use_experimental_sync():
        logger.warning(
            "build_remove_object_from_collection %s <- %s, ignore in experimental mode",
            collection_name,
            object_name,
        )
        return

    logger.info(
        "build_remove_object_from_collection %s <- %s", collection_name, object_name
    )
    collection = share_data.blender_collections[collection_name]
    object_ = share_data.blender_objects.get(object_name)
    if object_ is None:
        # already removed by the Blender protocol
        return
    try:
        collection.objects.unlink(object_)
    except Exception as e:
        logger.info(
            "build_remove_object_from_collection: exception during unlink... "
        )
        logger.info(f"... {e!r} ")
def build_data_update(buffer: bytes):
    """Decode a DeltaUpdate from buffer and apply it to the datablock proxy state."""
    if not share_data.use_experimental_sync():
        return

    proxy_string, index = decode_string(buffer, 0)
    codec = Codec()
    # Pre-bind so the error path cannot raise a NameError if decode fails
    delta = None
    try:
        delta: DeltaUpdate = codec.decode(proxy_string)
        logger.info("%s: %s", "build_data_update", delta)
        # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
        share_data.set_dirty()
        share_data.bpy_data_proxy.update_datablock(delta)
        datablock_proxy = delta.value
        if datablock_proxy is not None:
            # structure-of-arrays payloads follow the proxy string in the buffer
            _decode_and_build_soas(datablock_proxy.mixer_uuid(), buffer, index)
    except Exception:
        logger.error("Exception during build_data_update")
        for line in traceback.format_exc().splitlines():
            logger.error(line)
        logger.error(f"During processing of buffer for {delta}")
        logger.error(buffer[0:200])
        logger.error("...")
        # bug fix: buffer[-200:0] was always an empty slice; log the last 200 bytes
        logger.error(buffer[-200:])
        logger.error("ignored")
def build_collection(data):
    """Create or update a collection from a VRtist protocol message."""
    name_full, index = common.decode_string(data, 0)

    # Only Blender emits this message (Blender/Blender sync); in generic
    # mode it conflicts with generic messages, so it is ignored.
    if share_data.use_experimental_sync():
        logger.warning("build_collection %s, ignored in experimental mode", name_full)
        return

    # Blender/Blender in VRtist (non generic) mode
    visible, index = common.decode_bool(data, index)
    offset, index = common.decode_vector3(data, index)
    temporary_visibility, index = common.decode_bool(data, index)
    logger.info("build_collection %s", name_full)

    collection = share_data.blender_collections.get(name_full)
    if collection is None:
        collection = bpy.data.collections.new(name_full)
        share_data.blender_collections[name_full] = collection

    collection.hide_viewport = not visible
    collection.instance_offset = offset

    layer_collection = share_data.blender_layer_collections.get(name_full)
    if layer_collection is not None:
        layer_collection.hide_viewport = not temporary_visibility
    else:
        # the layer collection does not exist yet: remember the state for later
        share_data.blender_collection_temporary_visibility[name_full] = temporary_visibility
def build_data_create(buffer):
    """Decode a DatablockProxy from buffer and create the corresponding datablock."""
    if not share_data.use_experimental_sync():
        return

    proxy_string, index = decode_string(buffer, 0)
    codec = Codec()
    rename_changeset = None
    try:
        datablock_proxy: DatablockProxy = codec.decode(proxy_string)
        logger.info("%s: %s", "build_data_create", datablock_proxy)
        # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
        share_data.set_dirty()
        _, rename_changeset = share_data.bpy_data_proxy.create_datablock(
            datablock_proxy)
        # structure-of-arrays payloads follow the proxy string in the buffer
        _decode_and_build_soas(datablock_proxy.mixer_uuid(), buffer, index)
    except Exception:
        logger.error("Exception during build_data_create")
        for line in traceback.format_exc().splitlines():
            logger.error(line)
        logger.error(buffer[0:200])
        logger.error("...")
        # bug fix: buffer[-200:0] was always an empty slice; log the last 200 bytes
        logger.error(buffer[-200:])
        logger.error("ignored")

    # The creation may have triggered local renames that peers must learn about
    if rename_changeset:
        send_data_renames(rename_changeset)
def handler_on_undo_redo_pre(scene):
    """Pre undo/redo handler: notify the server before local state changes."""
    logger.error(f"Undo/redo pre on {scene}")
    share_data.client.send_error(
        f"Undo/redo pre from {get_mixer_prefs().user}")
    if not share_data.use_experimental_sync():
        send_scene_data_to_server(scene, None)
    # else: generic.send_scene_data_to_server(scene, None) is intentionally
    # disabled in experimental mode
def send_data_removals(removals: RemovalChangeset):
    """Send a BLENDER_DATA_REMOVE command for each removed datablock."""
    if not share_data.use_experimental_sync():
        return

    for uuid, _, debug_info in removals:
        logger.info("send_removal: %s (%s)", uuid, debug_info)
        payload = encode_string(uuid) + encode_string(debug_info)
        share_data.client.add_command(
            Command(MessageType.BLENDER_DATA_REMOVE, payload, 0)
        )
def build_collection_to_scene(data):
    """Link a collection under a scene's master collection (VRtist protocol)."""
    scene_name, index = common.decode_string(data, 0)
    collection_name, _ = common.decode_string(data, index)

    # This message is not emitted by VRtist, only by Blender, so it is used only for Blender/Blender sync.
    # In generic mode, it conflicts with generic messages, so drop it
    if share_data.use_experimental_sync():
        # bug fix: the log previously named the wrong handler (build_scene_renamed)
        logger.warning(
            "build_collection_to_scene %s <- %s, ignore in experimental mode",
            scene_name,
            collection_name,
        )
        return

    logger.info("build_collection_to_scene %s <- %s", scene_name, collection_name)
    try:
        scene = share_data.blender_scenes[scene_name]
    except KeyError:
        if share_data.use_experimental_sync():
            # Removed by the Blender Protocol
            logger.info(
                f"build_collection_to_scene(): scene not found {scene_name}. Safe in experimental_sync ..."
            )
            return
        else:
            raise

    collection = share_data.blender_collections[collection_name]
    try:
        scene.collection.children.link(collection)
    except RuntimeError as e:
        if share_data.use_experimental_sync():
            # Added by the Blender Protocol
            logger.info(
                f"build_collection_to_scene(): scene {scene_name}, collection {collection_name}..."
            )
            logger.info(
                "... Exception during scene.collection.children.link() ...")
            logger.info("... Safe in experimental_sync ...")
            logger.info(f"... {e!r}")
        else:
            raise
    share_data.update_collection_temporary_visibility(collection_name)
def build_data_remove(buffer):
    """Remove the datablock identified by the uuid carried in buffer."""
    if not share_data.use_experimental_sync():
        return

    uuid, cursor = decode_string(buffer, 0)
    debug_info, cursor = decode_string(buffer, cursor)
    logger.info("build_data_remove: %s (%s)", uuid, debug_info)
    share_data.bpy_data_proxy.remove_datablock(uuid)

    # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
    share_data.set_dirty()
def send_data_renames(renames: RenameChangeset):
    """Send one BLENDER_DATA_RENAME command per renamed datablock."""
    if not share_data.use_experimental_sync():
        return

    for uuid, new_name, debug_info in renames:
        logger.info("send_rename: %s (%s) into %s", uuid, debug_info, new_name)
        payload = (
            common.encode_string(uuid)
            + common.encode_string(new_name)
            + common.encode_string(debug_info)
        )
        command = common.Command(common.MessageType.BLENDER_DATA_RENAME, payload, 0)
        share_data.client.add_command(command)
def send_data_removals(removals: List[Tuple[str, str]]):
    """Send one BLENDER_DATA_REMOVE command per (collection, key) removal."""
    if not share_data.use_experimental_sync():
        return

    for collection_name, key in removals:
        logger.info("send_removal: %s[%s]", collection_name, key)
        payload = common.encode_string(collection_name) + common.encode_string(key)
        command = common.Command(common.MessageType.BLENDER_DATA_REMOVE, payload, 0)
        share_data.client.add_command(command)
def build_data_remove(buffer):
    """Remove one proxy entry identified by (collection name, key)."""
    if not share_data.use_experimental_sync():
        return

    collection_name, cursor = common.decode_string(buffer, 0)
    key, cursor = common.decode_string(buffer, cursor)
    logger.info("build_data_remove: %s[%s]", collection_name, key)

    # Update through the proxy so that it updates itself and does not trigger removals
    share_data.proxy.remove_one(collection_name, key)

    # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
    share_data.set_dirty()
def send_data_renames(renames: RenameChangeset):
    """Batch all renames into a single BLENDER_DATA_RENAME command."""
    if not renames:
        return
    if not share_data.use_experimental_sync():
        return

    flat = []
    for uuid, old_name, new_name, debug_info in renames:
        logger.info("send_rename: %s (%s) into %s", uuid, debug_info, new_name)
        flat += [uuid, old_name, new_name]

    payload = encode_string_array(flat)
    share_data.client.add_command(Command(MessageType.BLENDER_DATA_RENAME, payload, 0))
def build_scene_renamed(data):
    """Rename a scene (VRtist protocol; dropped in generic mode).

    TODO check if obsolete
    """
    old_name, index = common.decode_string(data, 0)
    new_name, _ = common.decode_string(data, index)

    # This message is not emitted by VRtist, only by Blender, so it is used only for Blender/Blender sync.
    # In generic mode, it conflicts with generic messages, so drop it
    if share_data.use_experimental_sync():
        logger.warning("build_scene_renamed %s to %s", old_name, new_name)
        return

    logger.info("build_scene_renamed %s to %s", old_name, new_name)
    scene = share_data.blender_scenes.get(old_name)
    # bug fix: guard against a missing scene instead of raising AttributeError on None
    if scene is None:
        logger.warning("build_scene_renamed: scene %s not found", old_name)
        return
    scene.name = new_name
    share_data.blender_scenes_dirty = True
def build_remove_collection_from_collection(data):
    """Unlink a child collection from its parent (VRtist protocol; dropped in generic mode)."""
    parent_name, cursor = common.decode_string(data, 0)
    child_name, _ = common.decode_string(data, cursor)

    # Only Blender emits this message (Blender/Blender sync); in generic
    # mode it conflicts with generic messages, so it is ignored.
    if share_data.use_experimental_sync():
        logger.warning(
            "build_remove_collection_from_collection %s <- %s, ignore in experimental mode",
            parent_name,
            child_name,
        )
        return

    logger.info(
        "build_remove_collection_from_collection %s <- %s", parent_name, child_name
    )
    collections = share_data.blender_collections
    collections[parent_name].children.unlink(collections[child_name])
def build_rename(self, data):
    """Rename an object (VRtist protocol).

    Object rename only: renaming the data referenced by Object.data
    (Light, Camera, ...) is not supported.
    """
    old_path, index = common.decode_string(data, 0)
    new_path, index = common.decode_string(data, index)
    logger.info("build_rename %s into %s", old_path, new_path)
    old_name = old_path.split("/")[-1]
    new_name = new_path.split("/")[-1]
    old_object = share_data.blender_objects.get(old_name)
    if old_object is not None:
        # bug fix: reuse the object already fetched instead of a redundant
        # second dict lookup (which could also race with a concurrent removal)
        old_object.name = new_name
    else:
        if share_data.use_experimental_sync():
            # Renamed by the Blender Protocol
            logger.info(f"build_rename(): old object {old_name} not found. Safe in experimental mode")
        else:
            logger.info(f"build_rename(): old object {old_name} not found.")
    share_data.blender_objects_dirty = True
    share_data.old_objects = share_data.blender_objects
def send_data_updates(updates: UpdateChangeset):
    """Encode each update (plus its SOA buffers) and send BLENDER_DATA_UPDATE."""
    if not share_data.use_experimental_sync():
        return

    codec = Codec()
    for update in updates:
        logger.info("%s %s", "send_data_update", update)
        try:
            encoded = codec.encode(update)
        except Exception:
            logger.error(f"send_data_update: encode exception for {update}")
            for line in traceback.format_exc().splitlines():
                logger.error(line)
            continue

        parts: List[bytes] = [encode_string(encoded)]
        parts.extend(soa_buffers(update.value))
        share_data.client.add_command(
            Command(MessageType.BLENDER_DATA_UPDATE, b"".join(parts), 0)
        )
def build_add_object_to_collection(data):
    """Link an object into a collection (VRtist protocol; dropped in generic mode)."""
    collection_name, cursor = common.decode_string(data, 0)
    object_name, _ = common.decode_string(data, cursor)

    # Only Blender emits this message (Blender/Blender sync); in generic
    # mode it conflicts with generic messages, so it is ignored.
    if share_data.use_experimental_sync():
        logger.warning(
            "build_add_object_to_collection %s <- %s, ignore in experimental mode",
            collection_name,
            object_name,
        )
        return

    logger.info("build_add_object_to_collection %s <- %s", collection_name, object_name)
    collection = share_data.blender_collections[collection_name]

    # We may have received an object creation message before this collection link message
    # and object creation will have created and linked the collection if needed
    if collection.objects.get(object_name) is None:
        collection.objects.link(share_data.blender_objects[object_name])
def build_add_object_to_scene(data):
    """Link an object into a scene's master collection (VRtist protocol)."""
    scene_name, index = common.decode_string(data, 0)
    object_name, _ = common.decode_string(data, index)
    logger.info("build_add_object_to_scene %s <- %s", scene_name, object_name)
    try:
        scene = share_data.blender_scenes[scene_name]
    except KeyError:
        if share_data.use_experimental_sync():
            # Removed by the Blender Protocol
            # bug fix: the log previously named the wrong handler (build_collection_to_scene)
            logger.info(
                f"build_add_object_to_scene(): scene not found {scene_name}. Safe in experimental_sync ..."
            )
            return
        else:
            raise

    # We may have received an object creation message before this collection link message
    # and object creation will have created and linked the collection if needed
    if scene.collection.objects.get(object_name) is None:
        object_ = share_data.blender_objects[object_name]
        scene.collection.objects.link(object_)
def build_collection_instance(data):
    """Create an empty object that instances a collection (VRtist protocol)."""
    instance_name, cursor = common.decode_string(data, 0)
    instantiated_name, _ = common.decode_string(data, cursor)

    # Only Blender emits this message (Blender/Blender sync); in generic
    # mode it conflicts with generic messages, so it is ignored.
    if share_data.use_experimental_sync():
        logger.warning(
            "build_collection_instance %s <- %s, ignore in experimental mode",
            instantiated_name,
            instance_name,
        )
        return

    logger.info("build_collection_instance %s from %s", instantiated_name, instance_name)
    # Look up the instanced collection first so a missing name raises before
    # an orphan object is created
    instantiated = share_data.blender_collections[instantiated_name]
    instancer = bpy.data.objects.new(name=instance_name, object_data=None)
    instancer.instance_collection = instantiated
    instancer.instance_type = "COLLECTION"
    share_data.blender_objects[instance_name] = instancer
def _send_data_create_or_update(proxies: Union[CreationChangeset, UpdateChangeset], display_name: str, message: common.MessageType):
    """Encode each proxy and send it with the given message type.

    Shared implementation for datablock create and update sends.
    """
    if not share_data.use_experimental_sync():
        return

    codec = Codec()
    for proxy in proxies:
        logger.info("%s %s", display_name, proxy)
        try:
            payload = codec.encode(proxy)
        except Exception:
            logger.error(f"{display_name}: encode exception for {proxy}")
            for line in traceback.format_exc().splitlines():
                logger.error(line)
            continue

        share_data.client.add_command(
            common.Command(message, common.encode_string(payload), 0)
        )
def build_data_rename(buffer):
    """Apply datablock renames received as a flat string array."""
    if not share_data.use_experimental_sync():
        return

    strings, _ = decode_string_array(buffer, 0)

    # (uuid1, old1, new1, uuid2, old2, new2, ...) to ((uuid1, old1, new1), (uuid2, old2, new2), ...)
    chunks = [iter(strings)] * 3
    # materialize so the logging loop does not consume the triples
    triples = list(itertools.zip_longest(*chunks))
    for uuid, old_name, new_name in triples:
        logger.info("build_data_rename: %s (%s) into %s", uuid, old_name, new_name)

    rename_changeset = share_data.bpy_data_proxy.rename_datablocks(triples)

    # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
    share_data.set_dirty()

    if rename_changeset:
        send_data_renames(rename_changeset)
def build_collection_removed(data):
    """Remove a collection (VRtist protocol; dropped in generic mode)."""
    name_full, index = common.decode_string(data, 0)

    # This message is not emitted by VRtist, only by Blender, so it is used only for Blender/Blender sync.
    # In generic mode, it conflicts with generic messages, so drop it
    if share_data.use_experimental_sync():
        logger.warning(
            "build_collection_remove %s, ignore in experimental mode", name_full)
        return

    # Blender/Blender in VRtist (non generic) mode
    # bug fix: log message used an inconsistent name (build_collectionRemove)
    logger.info("build_collection_removed %s", name_full)
    collection = share_data.blender_collections.get(name_full)
    if collection is None:
        # already removed by the Blender protocol
        return
    try:
        del share_data.blender_collections[name_full]
        bpy.data.collections.remove(collection)
    except Exception as e:
        # bug fix: the message previously named the wrong handler
        # (build_remove_collection_from_scene) and said "unlink" for a remove
        logger.info(
            "build_collection_removed: exception during remove... "
        )
        logger.info(f"... {e!r} ")
def send_data_creations(proxies: CreationChangeset):
    """Encode each new datablock (plus media and SOA buffers) and send BLENDER_DATA_CREATE."""
    if not share_data.use_experimental_sync():
        return

    codec = Codec()
    for datablock_proxy in proxies:
        logger.info("%s %s", "send_data_create", datablock_proxy)
        try:
            encoded = codec.encode(datablock_proxy)
        except Exception:
            logger.error(
                f"send_data_create: encode exception for {datablock_proxy}")
            for line in traceback.format_exc().splitlines():
                logger.error(line)
            continue

        # creation so that it is available at bpy_data_ctor() time
        send_media_creations(datablock_proxy)

        parts: List[bytes] = [encode_string(encoded)]
        parts.extend(soa_buffers(datablock_proxy))
        share_data.client.add_command(
            Command(MessageType.BLENDER_DATA_CREATE, b"".join(parts), 0)
        )