def build_add_object_to_scene(data):
    """Link an object into a scene's master collection."""
    scene_name, cursor = common.decode_string(data, 0)
    object_name, _ = common.decode_string(data, cursor)
    logger.info("build_add_object_to_scene %s <- %s", scene_name, object_name)

    scene = share_data.blender_scenes[scene_name]
    # An object creation message may have arrived first and already created
    # and linked the collection, so only link when the object is missing.
    if scene.collection.objects.get(object_name) is None:
        scene.collection.objects.link(share_data.blender_objects[object_name])
def build_collection_to_scene(data):
    """Link a collection as a child of a scene's master collection."""
    scene_name, cursor = common.decode_string(data, 0)
    collection_name, _ = common.decode_string(data, cursor)
    logger.info("build_collection_to_scene %s <- %s", scene_name, collection_name)

    target_scene = share_data.blender_scenes[scene_name]
    child = share_data.blender_collections[collection_name]
    target_scene.collection.children.link(child)
    share_data.update_collection_temporary_visibility(collection_name)
def build_rename(self, data):
    """Rename an object (object rename only).

    Renaming the data referenced by Object.data (Light, Camera, ...) is not
    supported.
    """
    old_path, index = common.decode_string(data, 0)
    new_path, index = common.decode_string(data, index)
    logger.info("build_rename %s into %s", old_path, new_path)
    # Paths encode the object hierarchy; the leaf segment is the object name
    old_name = old_path.split("/")[-1]
    new_name = new_path.split("/")[-1]
    obj = share_data.blender_objects.get(old_name)
    if obj is None:
        # Fixed: .get(old_name).name raised AttributeError when the object
        # was unknown (e.g. a stale message); log and ignore instead.
        logger.warning("build_rename: object %s not found, ignored", old_name)
        return
    obj.name = new_name
    share_data.blender_objects_dirty = True
    share_data.old_objects = share_data.blender_objects
def build_data_remove(buffer):
    """Remove one datablock from a blenddata collection (experimental sync only)."""
    if not share_data.use_experimental_sync():
        return
    collection_name, cursor = common.decode_string(buffer, 0)
    key, cursor = common.decode_string(buffer, cursor)
    logger.info("build_data_remove: %s[%s]", collection_name, key)
    # Go through the proxy so that it updates itself and does not trigger removals
    share_data.proxy.remove_one(collection_name, key)

    # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
    share_data.set_dirty()
def build_remove_keyframe(self, data):
    """Delete one keyframe on an animation channel of the named object."""
    name, cursor = common.decode_string(data, 0)
    if name not in share_data.blender_objects:
        return name
    animated = share_data.blender_objects[name]
    channel, cursor = common.decode_string(data, cursor)
    channel_index, cursor = common.decode_int(data, cursor)
    # When the channel lives on the data block (Light, Camera, ...), target it
    if not hasattr(animated, channel):
        animated = animated.data
    animated.keyframe_delete(channel, index=channel_index)
    return name
def build_collection_instance(data):
    """Create an empty object that instances an existing collection."""
    instance_name, cursor = common.decode_string(data, 0)
    instantiated_name, _ = common.decode_string(data, cursor)
    logger.info("build_collection_instance %s from %s", instantiated_name, instance_name)

    source_collection = share_data.blender_collections[instantiated_name]
    instance = bpy.data.objects.new(name=instance_name, object_data=None)
    instance.instance_collection = source_collection
    instance.instance_type = "COLLECTION"
    share_data.blender_objects[instance_name] = instance
def build_data_rename(buffer):
    """Rename a datablock identified by uuid (experimental sync only)."""
    if not share_data.use_experimental_sync():
        return
    uuid, cursor = common.decode_string(buffer, 0)
    new_name, cursor = common.decode_string(buffer, cursor)
    debug_info, cursor = common.decode_string(buffer, cursor)
    logger.info("build_data_rename: %s (%s) into %s", uuid, debug_info, new_name)
    share_data.bpy_data_proxy.rename_datablock(uuid, new_name)

    # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
    share_data.set_dirty()
def build_assign_material(data):
    """Assign a material (created on demand) to all slots of an object."""
    object_name, cursor = common.decode_string(data, 0)
    material_name, _ = common.decode_string(data, cursor)
    holder = share_data.blender_objects[object_name]
    material = get_or_create_material(material_name)
    # If the mesh hasn't any material slot, just append the material (this will create the material)
    if not holder.material_slots:
        holder.data.materials.append(material)
    # Else assign the material to all slots
    else:
        for slot in holder.material_slots:
            slot.material = material
def build_remove_object_from_collection(data):
    """Unlink an object from a collection, tolerating an already-removed object."""
    collection_name, cursor = common.decode_string(data, 0)
    object_name, _ = common.decode_string(data, cursor)
    logger.info("build_remove_object_from_collection %s <- %s", collection_name, object_name)
    collection = share_data.blender_collections[collection_name]
    obj = share_data.blender_objects.get(object_name)
    if not obj:
        # otherwise already removed by Blender protocol
        return
    try:
        collection.objects.unlink(obj)
    except Exception as e:
        logger.info("build_remove_object_from_collection: exception during unlink... ")
        logger.info(f"... {e} ")
def build_add_constraint(data):
    """Add a parent or look-at constraint from one object to a target object."""
    constraint_type, cursor = common.decode_int(data, 0)
    object_name, cursor = common.decode_string(data, cursor)
    target_name, cursor = common.decode_string(data, cursor)

    constrained = share_data.blender_objects[object_name]
    target = share_data.blender_objects[target_name]
    if constraint_type == ConstraintType.PARENT:
        add_parent_constraint(constrained, target)
    elif constraint_type == ConstraintType.LOOK_AT:
        add_lookat_constraint(constrained, target)
    else:
        logger.warning(f"Unknown constraint {constraint_type}")
def build_remove_object_from_scene(data):
    """Unlink an object from a scene's master collection, tolerating races."""
    scene_name, cursor = common.decode_string(data, 0)
    object_name, _ = common.decode_string(data, cursor)
    logger.info("build_remove_object_from_scene %s <- %s", scene_name, object_name)
    scene = share_data.blender_scenes[scene_name]
    obj = share_data.blender_objects.get(object_name)
    if not obj:
        # otherwise already removed by Blender protocol
        return
    try:
        scene.collection.objects.unlink(obj)
    except Exception as e:
        logger.warning(f"build_remove_object_from_scene: exception during unlink... ")
        logger.warning(f"... {e} ")
def _build_data_update_or_create(buffer, display_name: str, func: Callable[[BpyBlendProxy], BpyIDProxy]):
    """
    Process a datablock update request.

    Args:
        buffer: encoded message payload (a length-prefixed proxy string)
        display_name: label used in log messages
        func: proxy operation applied to the decoded BpyIDProxy
    """

    def log_exception(when: str):
        # Log the traceback plus the head and tail of the offending buffer
        logger.error(f"Exception during {display_name}, decode")
        for line in traceback.format_exc().splitlines():
            logger.error(line)
        logger.error(f"During {when}")
        logger.error(buffer[0:200])
        logger.error("...")
        # Fixed: buffer[-200:0] is always an empty slice, the tail was never logged
        logger.error(buffer[-200:])
        logger.error("ignored")

    if not share_data.use_experimental_sync():
        return

    buffer, _ = common.decode_string(buffer, 0)
    codec = Codec()
    try:
        id_proxy = codec.decode(buffer)
    except Exception:
        log_exception("decode")
        # Fixed: without this return, the code below raised NameError on id_proxy
        return

    logger.info("%s: %s", display_name, id_proxy)
    try:
        # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
        share_data.set_dirty()
        return func(share_data.bpy_data_proxy, id_proxy)
    except Exception:
        log_exception(f"processing of buffer for {id_proxy}")
def build_remove_collection_from_scene(data):
    """Unlink a child collection from a scene, tolerating an already-removed one."""
    scene_name, cursor = common.decode_string(data, 0)
    collection_name, _ = common.decode_string(data, cursor)
    logger.info("build_remove_collection_from_scene %s <- %s", scene_name, collection_name)
    scene = share_data.blender_scenes[scene_name]
    collection = share_data.blender_collections.get(collection_name)
    if not collection:
        # otherwise already removed by Blender protocol
        return
    try:
        scene.collection.children.unlink(collection)
    except Exception as e:
        logger.info(f"build_remove_collection_from_scene: exception during unlink... ")
        logger.info(f"... {e} ")
def build_scene_renamed(data):
    """Rename a scene (Blender/Blender sync only).

    TODO check if obsolete
    """
    old_name, index = common.decode_string(data, 0)
    new_name, _ = common.decode_string(data, index)

    # This message is not emitted by VRtist, only by Blender, so it is used only
    # for Blender/Blender sync. In generic mode, it conflicts with generic
    # messages, so drop it.
    if not share_data.use_vrtist_protocol():
        logger.warning("build_scene_renamed %s to %s", old_name, new_name)
        return

    logger.info("build_scene_renamed %s to %s", old_name, new_name)
    scene = share_data.blender_scenes.get(old_name)
    if scene is None:
        # Fixed: scene.name raised AttributeError when the scene was unknown
        logger.warning("build_scene_renamed: scene %s not found, ignored", old_name)
        return
    scene.name = new_name
    share_data.blender_scenes_dirty = True
def merge_command():
    """ Add the command to the room list, possibly merge with the previous command. """
    # NOTE(review): `command` and `self` come from the enclosing scope — this is
    # a closure, presumably defined inside a method receiving `command`; confirm
    # against the enclosing definition.
    command_type = command.type
    # Only message types above OPTIMIZED_COMMANDS carry a path and are mergeable
    if command_type.value > common.MessageType.OPTIMIZED_COMMANDS.value:
        command_path = common.decode_string(command.data, 0)[0]
        if self.command_count() > 0:
            stored_command = self._commands[-1]
            # Same type and same target path: the new command supersedes the
            # stored one, so drop the stored command and deduct its byte count.
            if (command_type == stored_command.type
                    and command_path == common.decode_string(stored_command.data, 0)[0]):
                self._commands.pop()
                self.byte_size -= stored_command.byte_size()
    # Always append the incoming command and account for its size
    self._commands.append(command)
    self.byte_size += command.byte_size()
def build_data_update(buffer):
    """Decode a datablock proxy and apply it as an update (experimental sync only)."""
    if not share_data.use_experimental_sync():
        return

    buffer, _ = common.decode_string(buffer, 0)
    codec = Codec()
    # Pre-bind names used in the except block so a decode failure cannot
    # raise NameError while logging (they were unbound before).
    id_proxy = None
    collection_name = None
    key = None
    try:
        id_proxy = codec.decode(buffer)
        try:
            collection_name, key = blenddata_path(id_proxy)
        except InvalidPath:
            logger.error("... update ignored")
            return
        uuid = id_proxy.mixer_uuid()
        logger.info("build_data_update: %s[%s] %s", collection_name, key, uuid)
        share_data.proxy.update_one(id_proxy)
        # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
        share_data.set_dirty()
    except Exception:
        logger.error("Exception during build_data_update")
        log_traceback(logger.error)
        # id_proxy is None when codec.decode itself failed
        blenddata_path_info = id_proxy._blenddata_path if id_proxy is not None else "<decode failed>"
        logger.error(f"During processing of buffer with blenddata_path {blenddata_path_info}")
        logger.error(buffer[0:200])
        logger.error("...")
        # Fixed: buffer[-200:0] is always an empty slice, the tail was never logged
        logger.error(buffer[-200:])
        logger.error(f"Creation or update of bpy.data.{collection_name}[{key}] was ignored")
def assert_stream_equals(self, a_stream: CommandStream, b_stream: CommandStream, msg: str = None):
    """Compare the Blenddata messages of two command streams, content-wise."""
    a, b = a_stream.data, b_stream.data
    self.assertEqual(a.keys(), b.keys())

    # Only these message types are compared
    keep = [
        MessageType.BLENDER_DATA_REMOVE,
        MessageType.BLENDER_DATA_RENAME,
        MessageType.BLENDER_DATA_UPDATE,
    ]
    for k in a.keys():
        if k not in keep:
            continue

        message_type = str(MessageType(k))
        message_count = len(a[k])
        # self.assertEqual(message_count, len(b[k]), f"len mismatch for {message_type}")
        if message_count != 0:
            logger.info(f"Message count for {message_type:16} : {message_count}")

        expected_count = self.expected_counts.get(k)
        if expected_count is not None:
            self.assertEqual(
                expected_count,
                message_count,
                f"Unexpected message count for message {message_type}. Expected {expected_count}: found {message_count}",
            )

        # Message payloads are JSON documents: compare them structurally
        for i, buffers in enumerate(zip(a[k], b[k])):
            json_docs = [json.loads(decode_string(buffer, 0)[0]) for buffer in buffers]
            self.assertDictAlmostEqual(*json_docs, f"content mismatch for {message_type} {i}")
def build_collection(data):
    """Create or update a collection from a VRtist collection message."""
    name_full, cursor = common.decode_string(data, 0)

    # This message is not emitted by VRtist, only by Blender, so it is used only
    # for Blender/Blender sync. In generic mode, it conflicts with generic
    # messages, so drop it.
    if share_data.use_experimental_sync():
        logger.warning("build_collection %s, ignored in experimental mode", name_full)
        return

    # Blender/Blender in VRtist (non generic) mode
    visible, cursor = common.decode_bool(data, cursor)
    hide_viewport = not visible
    instance_offset, cursor = common.decode_vector3(data, cursor)
    temporary_visibility, cursor = common.decode_bool(data, cursor)

    logger.info("build_collection %s", name_full)
    collection = share_data.blender_collections.get(name_full)
    if collection is None:
        collection = bpy.data.collections.new(name_full)
        share_data.blender_collections[name_full] = collection
    collection.hide_viewport = hide_viewport
    collection.instance_offset = instance_offset

    layer_collection = share_data.blender_layer_collections.get(name_full)
    if layer_collection:
        layer_collection.hide_viewport = not temporary_visibility
    else:
        # if the layer collection does not exists, store its state for later
        share_data.blender_collection_temporary_visibility[name_full] = temporary_visibility
def add_and_process_command(
    self, command: common.Command, expected_response_type: common.MessageType = None
):
    """Send a command, then wait for and print a response of the expected type.

    Fixed: the inner loop used to reuse the name `command`, shadowing (and
    clobbering) the parameter; the incoming command now has its own name.
    """
    if not self.send_command(command):
        self.disconnect()
        return

    received = None
    while received is None or (
        expected_response_type is not None and received.type != expected_response_type
    ):
        for incoming in self.fetch_incoming_commands():
            if incoming.type == common.MessageType.SEND_ERROR:
                # The server reported an error: log it and give up waiting
                logger.error(common.decode_string(incoming.data, 0)[0])
                return
            elif incoming.type == expected_response_type or expected_response_type is None:
                received = incoming
                break
            else:
                logger.info("Ignoring command %s", incoming.type)

    if expected_response_type is not None:
        print(self.formatter.format(received))
def build_data_media(buffer: bytes):
    """Store received media bytes in the local cache file for the decoded path."""
    # TODO save to resolved path.
    # The packed data will be saved to file, not a problem
    path, payload_start = decode_string(buffer, 0)
    payload = buffer[payload_start:]
    # TODO this does not overwrite outdated local files
    get_local_or_create_cache_file(path, payload)
def build_data_create(buffer):
    """Decode a datablock proxy and create the datablock (experimental sync only)."""
    if not share_data.use_experimental_sync():
        return

    proxy_string, index = decode_string(buffer, 0)
    codec = Codec()
    rename_changeset = None
    try:
        datablock_proxy: DatablockProxy = codec.decode(proxy_string)
        logger.info("%s: %s", "build_data_create", datablock_proxy)
        # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
        share_data.set_dirty()
        _, rename_changeset = share_data.bpy_data_proxy.create_datablock(datablock_proxy)
        # The soa (structure of arrays) payloads follow the proxy string in the buffer
        _decode_and_build_soas(datablock_proxy.mixer_uuid(), buffer, index)
    except Exception:
        logger.error("Exception during build_data_create")
        for line in traceback.format_exc().splitlines():
            logger.error(line)
        logger.error(buffer[0:200])
        logger.error("...")
        # Fixed: buffer[-200:0] is always an empty slice, the tail was never logged
        logger.error(buffer[-200:])
        logger.error("ignored")

    if rename_changeset:
        send_data_renames(rename_changeset)
def build_data_update(buffer: bytes):
    """Decode a delta and apply it as a datablock update (experimental sync only)."""
    if not share_data.use_experimental_sync():
        return

    proxy_string, index = decode_string(buffer, 0)
    codec = Codec()
    # Pre-bind delta so the except block cannot raise NameError when
    # codec.decode itself fails (it was unbound before).
    delta = None
    try:
        delta: DeltaUpdate = codec.decode(proxy_string)
        logger.info("%s: %s", "build_data_update", delta)
        # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
        share_data.set_dirty()
        share_data.bpy_data_proxy.update_datablock(delta)
        datablock_proxy = delta.value
        if datablock_proxy is not None:
            # The soa (structure of arrays) payloads follow the proxy string
            _decode_and_build_soas(datablock_proxy.mixer_uuid(), buffer, index)
    except Exception:
        logger.error("Exception during build_data_update")
        for line in traceback.format_exc().splitlines():
            logger.error(line)
        logger.error(f"During processing of buffer for {delta}")
        logger.error(buffer[0:200])
        logger.error("...")
        # Fixed: buffer[-200:0] is always an empty slice, the tail was never logged
        logger.error(buffer[-200:])
        logger.error("ignored")
def build_clear_animations(self, data):
    """Clear animation data on the named object and, when present, on its data."""
    name, _ = common.decode_string(data, 0)
    animated = share_data.blender_objects[name]
    animated.animation_data_clear()
    if animated.data:
        animated.data.animation_data_clear()
def build_restore_from_trash(self, data):
    """Move an object out of __Trash__ back to its recorded collections and parent."""
    name, cursor = common.decode_string(data, 0)
    path, cursor = common.decode_string(data, cursor)

    obj = share_data.blender_objects[name]
    trash = self.get_or_create_collection("__Trash__")
    trash.hide_viewport = True
    trash.objects.unlink(obj)

    # Re-link into every collection the object belonged to before trashing
    for collection_name in share_data.restore_to_collections[obj.name_full]:
        self.get_or_create_collection(collection_name).objects.link(obj)
    del share_data.restore_to_collections[obj.name_full]

    # The path encodes the parent hierarchy; its leaf is the parent object name
    if len(path) > 0:
        parent_name = path.split("/")[-1]
        obj.parent = share_data.blender_objects.get(parent_name, None)
def _handle_client_disconnected(self, command: common.Command):
    """Remove the attributes of a client that disconnected."""
    client_id, _ = common.decode_string(command.data, 0)
    attributes = self.clients_attributes
    if client_id not in attributes:
        logger.warning("Client %s disconnected but no attributes in internal view.", client_id)
        return
    del attributes[client_id]
def _handle_room_deleted(self, command: common.Command):
    """Remove the attributes of a room that was deleted."""
    room_name, _ = common.decode_string(command.data, 0)
    attributes = self.rooms_attributes
    if room_name not in attributes:
        logger.warning("Room %s deleted but no attributes in internal view.", room_name)
        return
    del attributes[room_name]
def build_scene_removed(data):
    """Delete the named scene.

    TODO check if obsolete
    """
    scene_name, _ = common.decode_string(data, 0)
    logger.info("build_scene_removed %s", scene_name)
    scene = share_data.blender_scenes.get(scene_name)
    if scene is None:
        # Fixed: delete_scene(None) was called when the scene was unknown
        # (e.g. already removed); log and ignore instead.
        logger.warning("build_scene_removed: scene %s not found, ignored", scene_name)
        return
    delete_scene(scene)
    share_data.blender_scenes_dirty = True
def build_remove_collection_from_collection(data):
    """Unlink a child collection from its parent collection (VRtist mode only)."""
    parent_name, cursor = common.decode_string(data, 0)
    child_name, _ = common.decode_string(data, cursor)

    # This message is not emitted by VRtist, only by Blender, so it is used only
    # for Blender/Blender sync. In generic mode, it conflicts with generic
    # messages, so drop it.
    if share_data.use_experimental_sync():
        logger.warning(
            "build_remove_collection_from_collection %s <- %s, ignore in experimental mode",
            parent_name,
            child_name,
        )
        return

    logger.info("build_remove_collection_from_collection %s <- %s", parent_name, child_name)
    share_data.blender_collections[parent_name].children.unlink(
        share_data.blender_collections[child_name]
    )
def build_grease_pencil_material(data):
    """Create or update a grease pencil material from the message payload."""
    material_name, cursor = common.decode_string(data, 0)
    material = share_data.blender_materials.get(material_name)
    if not material:
        material = bpy.data.materials.new(material_name)
        share_data._blender_materials[material.name_full] = material
    # Ensure the grease pencil settings block exists before writing to it
    if not material.grease_pencil:
        bpy.data.materials.create_gpencil_data(material)

    gp = material.grease_pencil
    gp.show_stroke, cursor = common.decode_bool(data, cursor)
    gp.mode, cursor = common.decode_string(data, cursor)
    gp.stroke_style, cursor = common.decode_string(data, cursor)
    gp.color, cursor = common.decode_color(data, cursor)
    gp.use_overlap_strokes, cursor = common.decode_bool(data, cursor)
    gp.show_fill, cursor = common.decode_bool(data, cursor)
    gp.fill_style, cursor = common.decode_string(data, cursor)
    gp.fill_color, cursor = common.decode_color(data, cursor)
def decode_arrays(buffer: bytes, index) -> Tuple[ArrayGroups, int]:
    """Decode a serialized ArrayGroups structure, returning it and the new index."""
    group_count, index = decode_int(buffer, index)
    if group_count == 0:
        return {}, index

    groups: ArrayGroups = {}
    for _ in range(group_count):
        group_name, index = decode_string(buffer, index)
        item_count, index = decode_int(buffer, index)
        items: ArrayGroup = []
        for _ in range(item_count):
            # Each item is a JSON-encoded key followed by a packed array
            key_string, index = decode_string(buffer, index)
            array_, index = decode_py_array(buffer, index)
            items.append((json.loads(key_string), array_))
        groups[group_name] = items
    return groups, index