def execute(self, context):
    """Join the room currently selected in the room list (operator entry point).

    Logs the remote client's Blender/Mixer versions, clears local undo
    history, and joins with the shared folders configured in preferences.

    Returns:
        {"FINISHED"} (Blender operator status).
    """
    # Joining while already in a room is a programming error.
    assert not share_data.client.current_room
    share_data.set_dirty()

    props = get_mixer_props()
    room = props.rooms[props.room_index].name
    logger.warning(f"JoinRoomOperator.execute({room})")

    room_attributes = get_selected_room_dict()
    logger.warning(f"Client Blender version: {room_attributes.get(RoomAttributes.BLENDER_VERSION, '')}")
    logger.warning(f"Client Mixer version: {room_attributes.get(RoomAttributes.MIXER_VERSION, '')}")

    # Joining a room invalidates the local undo history.
    clear_undo_history()

    mixer_prefs = get_mixer_prefs()
    # Idiom: build the list with a comprehension instead of an append loop.
    shared_folders = [item.shared_folder for item in mixer_prefs.shared_folders]
    join_room(
        room,
        # GENERIC_PROTOCOL defaults to True, so "not generic" selects the VRtist protocol.
        not room_attributes.get(RoomAttributes.GENERIC_PROTOCOL, True),
        shared_folders,
        mixer_prefs.ignore_version_check,
    )
    return {"FINISHED"}
def build_data_update(buffer: bytes):
    """Apply a received BLENDER_DATA_UPDATE message (generic protocol only).

    Decodes the Delta carried by the message and applies it through the
    bpy_data_proxy. Decode failures and unexpected exceptions are logged
    and the message is ignored.
    """
    if share_data.use_vrtist_protocol():
        return

    share_data.set_dirty()
    codec = Codec()
    # Pre-initialize so the generic except handler below can reference it
    # even when decoding fails before the assignment (avoids NameError).
    delta = None
    try:
        message = BlenderDataMessage()
        message.decode(buffer)
        delta: Delta = codec.decode(message.proxy_string)
        logger.debug("%s: %s", "build_data_update", delta)
        delta.value.arrays = message.arrays
        share_data.bpy_data_proxy.update_datablock(delta)

        datablock_proxy = delta.value
        if datablock_proxy is not None:
            _build_soas(datablock_proxy.mixer_uuid, message.soas)
    except DecodeError as e:
        logger.error(f"Decode error for {str(e.args[1])[:100]} . Possible causes...")
        logger.error("... user error: version mismatch")
        logger.error("... internal error: Proxy class not registered. Import it in blender_data.__init__.py")
    except Exception:
        logger.error("Exception during build_data_update")
        for line in traceback.format_exc().splitlines():
            logger.error(line)
        logger.error(f"During processing of buffer for {delta}")
        logger.error(buffer[0:200])
        logger.error("...")
        # Bug fix: buffer[-200:0] is always an empty slice; use buffer[-200:]
        # to actually log the trailing bytes of the offending buffer.
        logger.error(buffer[-200:])
        logger.error("ignored")
def handler_send_scene_data_to_server(scene, dummy):
    """Depsgraph-update handler: forward scene changes to the server.

    Guards against re-entrant invocation via the module-level
    processing_depsgraph_handler flag, then dispatches to the
    protocol-specific sender.
    """
    global processing_depsgraph_handler

    if processing_depsgraph_handler:
        logger.error("Depsgraph handler recursion attempt")
        return

    processing_depsgraph_handler = True
    try:
        logger.debug("handler_send_scene_data_to_server")

        # Ensure we will rebuild accessors when a depsgraph update happens
        # todo investigate why we need this...
        share_data.set_dirty()

        if share_data.client.block_signals:
            logger.debug("handler_send_scene_data_to_server canceled (block_signals = True)")
            return

        # Pick the protocol-specific implementation, then invoke it.
        if share_data.use_vrtist_protocol():
            sender = send_scene_data_to_server
        else:
            sender = generic.send_scene_data_to_server
        sender(scene, dummy)
    finally:
        processing_depsgraph_handler = False
def handler_on_undo_redo_post(scene, dummy):
    """Undo/redo handler: recompute the scene diff and resend the delta.

    Rebuilds the old-object name mapping after the undo remap, then runs
    the remove/add/update passes in dependency order and resends the data
    of the current selection.
    """
    logger.info("on_undo_redo_post")

    share_data.set_dirty()
    share_data.clear_lists()

    # apply only in object mode
    if not is_in_object_mode():
        return

    # Idiom: dict.fromkeys replaces the manual (k, None) comprehension;
    # values default to None, which is all we need here.
    old_objects_name = dict.fromkeys(share_data.old_objects)  # value not needed

    remap_objects_info()
    for k, v in share_data.old_objects.items():
        if k in old_objects_name:
            old_objects_name[k] = v

    update_object_state(old_objects_name, share_data.old_objects)
    update_collections_state()
    update_scenes_state()

    # Removals first, then additions, then parameter/topology updates:
    # later passes assume the earlier ones have already run.
    remove_objects_from_scenes()
    remove_objects_from_collections()
    remove_collections_from_scenes()
    remove_collections_from_collections()
    remove_collections()
    remove_scenes()

    add_scenes()
    add_objects()
    add_collections()
    add_collections_to_scenes()
    add_collections_to_collections()
    add_objects_to_collections()
    add_objects_to_scenes()

    update_collections_parameters()
    create_vrtist_objects()
    delete_scene_objects()
    rename_objects()
    update_objects_visibility()
    update_objects_transforms()
    reparent_objects()

    # send selection content (including data)
    materials = set()
    for obj in bpy.context.selected_objects:
        update_transform(obj)
        if hasattr(obj, "data"):
            update_params(obj)
        if hasattr(obj, "material_slots"):
            for slot in obj.material_slots[:]:
                materials.add(slot.material)

    for material in materials:
        share_data.client.send_material(material)

    share_data.update_current_data()
def build_data_create(buffer):
    """Apply a received datablock-creation message (experimental sync only).

    Decodes a DatablockProxy, creates the datablock through bpy_data_proxy,
    then builds the structure-of-arrays payload. Failures are logged and the
    message is ignored.
    """
    if not share_data.use_experimental_sync():
        return

    proxy_string, index = decode_string(buffer, 0)
    codec = Codec()
    rename_changeset = None
    try:
        datablock_proxy: DatablockProxy = codec.decode(proxy_string)
        logger.info("%s: %s", "build_data_create", datablock_proxy)

        # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
        share_data.set_dirty()
        _, rename_changeset = share_data.bpy_data_proxy.create_datablock(datablock_proxy)

        _decode_and_build_soas(datablock_proxy.mixer_uuid(), buffer, index)
    except Exception:
        logger.error("Exception during build_data_create")
        for line in traceback.format_exc().splitlines():
            logger.error(line)
        logger.error(buffer[0:200])
        logger.error("...")
        # Bug fix: buffer[-200:0] is always an empty slice; use buffer[-200:]
        # to actually log the trailing bytes.
        logger.error(buffer[-200:])
        logger.error("ignored")

    if rename_changeset:
        send_data_renames(rename_changeset)
def build_data_update(buffer):
    """Apply a received datablock update (experimental sync only).

    Decodes an ID proxy and routes it through share_data.proxy.update_one.
    An invalid blenddata path is logged and ignored; any other failure is
    logged with a dump of the buffer head/tail.
    """
    if not share_data.use_experimental_sync():
        return

    buffer, _ = common.decode_string(buffer, 0)
    codec = Codec()
    # Pre-initialize the names referenced by the error report so the except
    # handler cannot itself raise NameError when decoding fails early.
    id_proxy = None
    collection_name = key = None
    try:
        id_proxy = codec.decode(buffer)
        try:
            collection_name, key = blenddata_path(id_proxy)
        except InvalidPath:
            logger.error("... update ignored")
            return
        uuid = id_proxy.mixer_uuid()
        logger.info("build_data_update: %s[%s] %s", collection_name, key, uuid)
        share_data.proxy.update_one(id_proxy)

        # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
        share_data.set_dirty()
    except Exception:
        logger.error("Exception during build_data_update")
        log_traceback(logger.error)
        # getattr: id_proxy may be None (decode failure) or may lack the attribute.
        logger.error(f"During processing of buffer with blenddata_path {getattr(id_proxy, '_blenddata_path', None)}")
        logger.error(buffer[0:200])
        logger.error("...")
        # Bug fix: buffer[-200:0] is always an empty slice; use buffer[-200:]
        # to actually log the trailing bytes.
        logger.error(buffer[-200:])
        logger.error(f"Creation or update of bpy.data.{collection_name}[{key}] was ignored")
def _build_data_update_or_create(buffer, display_name: str, func: Callable[[BpyBlendProxy], BpyIDProxy]):
    """
    Process a datablock update request.

    Args:
        buffer: encoded message payload (string-prefixed proxy data)
        display_name: human-readable operation name, used in log messages
        func: proxy method applied to (bpy_data_proxy, decoded id_proxy)
    """

    def log_exception(when: str):
        # Dump the traceback plus the head and tail of the offending buffer.
        logger.error(f"Exception during {display_name}, decode")
        for line in traceback.format_exc().splitlines():
            logger.error(line)
        logger.error(f"During {when}")
        logger.error(buffer[0:200])
        logger.error("...")
        # Bug fix: buffer[-200:0] is always an empty slice; use buffer[-200:]
        # to actually log the trailing bytes.
        logger.error(buffer[-200:])
        logger.error("ignored")

    if not share_data.use_experimental_sync():
        return

    buffer, _ = common.decode_string(buffer, 0)
    codec = Codec()
    try:
        id_proxy = codec.decode(buffer)
    except Exception:
        log_exception("decode")
        # Bug fix: without this return the code below would reference the
        # unbound id_proxy and raise NameError after a decode failure.
        return

    logger.info("%s: %s", display_name, id_proxy)
    try:
        # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
        share_data.set_dirty()
        return func(share_data.bpy_data_proxy, id_proxy)
    except Exception:
        log_exception(f"processing of buffer for {id_proxy}")
def build_data_update(buffer: bytes):
    """Apply a received datablock delta update (experimental sync only).

    Decodes a DeltaUpdate, applies it through bpy_data_proxy, then builds
    the structure-of-arrays payload. Failures are logged and ignored.
    """
    if not share_data.use_experimental_sync():
        return

    proxy_string, index = decode_string(buffer, 0)
    codec = Codec()
    # Pre-initialize so the except handler can reference it even when
    # decoding fails before the assignment (avoids NameError).
    delta = None
    try:
        delta: DeltaUpdate = codec.decode(proxy_string)
        logger.info("%s: %s", "build_data_update", delta)

        # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
        share_data.set_dirty()
        share_data.bpy_data_proxy.update_datablock(delta)

        datablock_proxy = delta.value
        if datablock_proxy is not None:
            _decode_and_build_soas(datablock_proxy.mixer_uuid(), buffer, index)
    except Exception:
        logger.error("Exception during build_data_update")
        for line in traceback.format_exc().splitlines():
            logger.error(line)
        logger.error(f"During processing of buffer for {delta}")
        logger.error(buffer[0:200])
        logger.error("...")
        # Bug fix: buffer[-200:0] is always an empty slice; use buffer[-200:]
        # to actually log the trailing bytes.
        logger.error(buffer[-200:])
        logger.error("ignored")
def build_data_create(buffer):
    """Apply a received BLENDER_DATA_CREATE message (generic protocol only).

    Decodes the datablock proxy carried by the message, creates the
    datablock, then builds its structure-of-arrays payload. Decode errors
    and other failures are logged and the message is ignored.
    """
    if share_data.use_vrtist_protocol():
        return

    share_data.set_dirty()
    rename_changeset = None
    codec = Codec()
    try:
        message = BlenderDataMessage()
        message.decode(buffer)
        datablock_proxy = codec.decode(message.proxy_string)
        logger.info("%s %s", "build_data_create", datablock_proxy)
        datablock_proxy.arrays = message.arrays
        _, rename_changeset = share_data.bpy_data_proxy.create_datablock(datablock_proxy)
        _build_soas(datablock_proxy.mixer_uuid, message.soas)
    except DecodeError as e:
        logger.error(f"Decode error for {str(e.args[1])[:100]} ...")
        logger.error("... possible version mismatch")
        return
    except Exception:
        logger.error("Exception during build_data_create")
        for line in traceback.format_exc().splitlines():
            logger.error(line)
        logger.error(buffer[0:200])
        logger.error("...")
        # Bug fix: buffer[-200:0] is always an empty slice; use buffer[-200:]
        # to actually log the trailing bytes.
        logger.error(buffer[-200:])
        logger.error("ignored")
        return

    if rename_changeset:
        send_data_renames(rename_changeset)
def execute(self, context):
    """Join the room currently selected in the room list."""
    # Must not already be inside a room.
    assert not share_data.client.current_room
    share_data.set_dirty()

    mixer_props = get_mixer_props()
    selected_room = mixer_props.rooms[mixer_props.room_index]
    join_room(selected_room.name)
    return {"FINISHED"}
def build_data_remove(buffer):
    """Remove the datablock identified by the uuid in the message (experimental sync only)."""
    if not share_data.use_experimental_sync():
        return

    uuid, offset = decode_string(buffer, 0)
    debug_info, offset = decode_string(buffer, offset)
    logger.info("build_data_remove: %s (%s)", uuid, debug_info)
    share_data.bpy_data_proxy.remove_datablock(uuid)

    # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
    share_data.set_dirty()
def build_data_remove(buffer):
    """Remove the datablock named in a BLENDER_DATA_REMOVE message (generic protocol only)."""
    if share_data.use_vrtist_protocol():
        return

    removal = BlenderRemoveMessage()
    removal.decode(buffer)
    logger.info("build_data_remove: %s (%s)", removal.uuid, removal.debug_info)
    share_data.bpy_data_proxy.remove_datablock(removal.uuid)

    # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
    share_data.set_dirty()
def handler_send_scene_data_to_server(scene, dummy):
    """Depsgraph handler wrapper around send_scene_data_to_server()."""
    logger.debug("handler_send_scene_data_to_server")

    # Mark accessors dirty so they are rebuilt on this depsgraph update
    # (todo: investigate why this is required here).
    share_data.set_dirty()

    if not share_data.client.block_signals:
        send_scene_data_to_server(scene, dummy)
    else:
        logger.debug("handler_send_scene_data_to_server canceled (block_signals = True)")
def build_data_remove(buffer):
    """Remove one item from a bpy.data collection (experimental sync only)."""
    if not share_data.use_experimental_sync():
        return

    collection_name, offset = common.decode_string(buffer, 0)
    key, offset = common.decode_string(buffer, offset)
    logger.info("build_data_remove: %s[%s]", collection_name, key)

    # Update through the proxy so that it updates itself and does not trigger removals
    share_data.proxy.remove_one(collection_name, key)

    # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
    share_data.set_dirty()
def execute(self, context):
    """Join the selected room, honoring the experimental-sync preference."""
    # Must not already be inside a room.
    assert not share_data.client.current_room
    share_data.set_dirty()

    mixer_props = get_mixer_props()
    room_name = mixer_props.rooms[mixer_props.room_index].name
    logger.warning(f"JoinRoomOperator.execute({room_name})")
    join_room(room_name, get_mixer_prefs().experimental_sync)
    return {"FINISHED"}
def execute(self, context):
    """Join the selected room using the protocol chosen in the preferences."""
    # Must not already be inside a room.
    assert not share_data.client.current_room
    share_data.set_dirty()

    mixer_props = get_mixer_props()
    room_name = mixer_props.rooms[mixer_props.room_index].name
    logger.warning(f"JoinRoomOperator.execute({room_name})")
    join_room(room_name, get_mixer_prefs().vrtist_protocol)
    return {"FINISHED"}
def build_data_rename(buffer):
    """Apply a batch of datablock renames (experimental sync only)."""
    if not share_data.use_experimental_sync():
        return

    strings, _ = decode_string_array(buffer, 0)

    # The flat list (uuid1, old1, new1, uuid2, old2, new2, ...) is grouped
    # into triples by pulling three items per step from one shared iterator.
    # Materialize the result so the log loop does not consume it.
    triple_source = iter(strings)
    items = list(itertools.zip_longest(triple_source, triple_source, triple_source))
    for uuid, old_name, new_name in items:
        logger.info("build_data_rename: %s (%s) into %s", uuid, old_name, new_name)

    rename_changeset = share_data.bpy_data_proxy.rename_datablocks(items)

    # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
    share_data.set_dirty()

    if rename_changeset:
        send_data_renames(rename_changeset)
def build_data_rename(buffer):
    """Apply a BLENDER_DATA_RENAME message (generic protocol only)."""
    if share_data.use_vrtist_protocol():
        return

    message = BlenderRenamesMessage()
    message.decode(buffer)

    # The flat list (uuid1, old1, new1, uuid2, old2, new2, ...) is grouped
    # into triples by pulling three items per step from one shared iterator.
    # Materialize the result so the log loop does not consume it.
    triple_source = iter(message.renames)
    items = list(itertools.zip_longest(triple_source, triple_source, triple_source))
    for uuid, old_name, new_name in items:
        logger.info("build_data_rename: %s (%s) into %s", uuid, old_name, new_name)

    rename_changeset = share_data.bpy_data_proxy.rename_datablocks(items)

    # TODO temporary until VRtist protocol uses Blenddata instead of blender_objects & co
    share_data.set_dirty()

    if rename_changeset:
        send_data_renames(rename_changeset)
def send_scene_data_to_server(scene, dummy):
    """Collect the current scene delta and send it to the server.

    Runs the VRtist-protocol diff passes and, when experimental sync is
    enabled, also the full-Blender-protocol (proxy) diff. Each phase is
    timed via share_data.current_stats_timer. Bails out early on self
    events and when not in object mode.
    """
    logger.debug(
        "send_scene_data_to_server(): skip_next_depsgraph_update %s, pending_test_update %s",
        share_data.client.skip_next_depsgraph_update,
        share_data.pending_test_update,
    )

    timer = share_data.current_stats_timer

    if not share_data.client:
        logger.info("send_scene_data_to_server canceled (no client instance)")
        return

    share_data.set_dirty()
    with timer.child("clear_lists"):
        share_data.clear_lists()

    depsgraph = bpy.context.evaluated_depsgraph_get()
    if depsgraph.updates:
        logger.debug("Current dg updates ...")
        for update in depsgraph.updates:
            logger.debug(" ......%s", update.id.original)

    # prevent processing self events, but always process test updates
    if not share_data.pending_test_update and share_data.client.skip_next_depsgraph_update:
        share_data.client.skip_next_depsgraph_update = False
        logger.debug("send_scene_data_to_server canceled (skip_next_depsgraph_update = True) ...")
        return

    share_data.pending_test_update = False

    if not is_in_object_mode():
        logger.info("send_scene_data_to_server canceled (not is_in_object_mode)")
        return

    update_object_state(share_data.old_objects, share_data.blender_objects)

    with timer.child("update_scenes_state"):
        update_scenes_state()

    with timer.child("update_collections_state"):
        update_collections_state()

    changed = False
    with timer.child("checkForChangeAndSendUpdates"):
        # Removals before additions: the add passes assume stale entries
        # have already been dropped.
        changed |= remove_objects_from_collections()
        changed |= remove_objects_from_scenes()
        changed |= remove_collections_from_collections()
        changed |= remove_collections_from_scenes()
        changed |= remove_collections()
        changed |= remove_scenes()
        changed |= add_scenes()
        changed |= add_collections()
        changed |= add_objects()

    # Updates from the VRtist protocol and from the full Blender protocol must be carefully intermixed
    # This is an unfortunate requirement from the current coexistence status of
    # both protocols
    #
    # After creation of meshes : meshes are not yet supported by full Blender protocol,
    # but needed to properly create objects
    # Before creation of objects : the VRtist protocol will implicitly create objects with
    # inappropriate default values (e.g. transform creates an object with no data)
    if share_data.use_experimental_sync():
        # Compute the difference between the proxy state and the Blender state
        # It is a coarse difference at the ID level(created, removed, renamed)
        diff = BpyBlendDiff()
        diff.diff(share_data.proxy, safe_context)

        # Ask the proxy to compute the list of elements to synchronize and update itself
        depsgraph = bpy.context.evaluated_depsgraph_get()
        updates, removals = share_data.proxy.update(diff, safe_context, depsgraph.updates)

        # Send the data update messages (includes serialization)
        data_api.send_data_removals(removals)
        data_api.send_data_updates(updates)
        share_data.proxy.debug_check_id_proxies()

    # send the VRtist transforms after full Blender protocol has the opportunity to create the object data
    # that is not handled by VRtist protocol, otherwise the receiver creates an empty when it receives a transform
    changed |= update_transforms()
    changed |= add_collections_to_scenes()
    changed |= add_collections_to_collections()
    changed |= add_objects_to_collections()
    changed |= add_objects_to_scenes()
    changed |= update_collections_parameters()
    changed |= create_vrtist_objects()
    changed |= delete_scene_objects()
    changed |= rename_objects()
    changed |= update_objects_visibility()
    changed |= update_objects_transforms()
    changed |= reparent_objects()
    changed |= shot_manager.check_montage_mode()

    # Only scan object data for changes when no structural change was
    # detected above.
    if not changed:
        with timer.child("update_objects_data"):
            update_objects_data()

    # update for next change
    with timer.child("update_current_data"):
        share_data.update_current_data()

    logger.debug("send_scene_data_to_server: end")
def network_consumer(self):
    """
    This method can be considered the entry point of this class. It is meant to be called regularly to send
    pending commands to the server, and receive then process new ones.

    Pending commands are accumulated with add_command(), most calls originate from handlers function.

    Incoming commands are read from the socket and directly processed here to update Blender's data. This can
    be costly and a possible optimization in the future would be to split the processing accross several timer
    run. This can be challenging because we need to keep the current update state. Maybe this can be solved
    naturally with coroutines.

    We call it from the timer registered by the addon.
    """

    from mixer.bl_panels import redraw as redraw_panels, update_ui_lists

    assert self.is_connected()

    set_draw_handlers()

    # Loop remains infinite while we have GROUP_BEGIN commands without their corresponding GROUP_END received
    # todo Change this -> probably not a good idea because the sending client might disconnect before GROUP_END occurs
    # or it needs to be guaranteed by the server
    group_count = 0
    while True:
        received_commands = self.fetch_commands(get_mixer_prefs().commands_send_interval)

        set_dirty = True
        # Process all received commands
        for command in received_commands:
            # While joining, track received bytes to report a join-progress
            # percentage in the UI.
            if self._joining and command.type.value > common.MessageType.COMMAND.value:
                self._received_byte_size += command.byte_size()
                self._received_command_count += 1
                if self._joining_room_name in self.rooms_attributes:
                    get_mixer_props().joining_percentage = (
                        self._received_byte_size
                        / self.rooms_attributes[self._joining_room_name][RoomAttributes.BYTE_SIZE]
                    )
                    redraw_panels()

            if command.type == MessageType.GROUP_BEGIN:
                group_count += 1
                continue

            if command.type == MessageType.GROUP_END:
                group_count -= 1
                continue

            if self.has_default_handler(command.type):
                if command.type == MessageType.JOIN_ROOM and self._joining:
                    self._joining = False
                    get_mixer_props().joining_percentage = 1

                update_ui_lists()
                self.block_signals = False  # todo investigate why we should but this to false here
                continue

            # Mark dirty once per fetch batch, before the first data command.
            if set_dirty:
                share_data.set_dirty()
                set_dirty = False

            self.block_signals = True

            try:
                # manage wrapped commands with this blender id
                # time synced command for now
                # Consume messages with its client_id to receive commands from other clients
                # like play/pause. Ignore all other client_id.
                if command.type == MessageType.CLIENT_ID_WRAPPER:
                    id, index = common.decode_string(command.data, 0)
                    if id != share_data.client.client_id:
                        continue
                    command_type, index = common.decode_int(command.data, index)
                    command_data = command.data[index:]
                    # Unwrap: replace the wrapper with the inner command.
                    command = common.Command(command_type, command_data)

                if command.type == MessageType.CONTENT:
                    # The server asks for scene content (at room creation)
                    try:
                        assert share_data.client.current_room is not None
                        self.set_room_attributes(
                            share_data.client.current_room,
                            # Documentation to update if you change "experimental_sync": doc/protocol.md
                            {"experimental_sync": get_mixer_prefs().experimental_sync},
                        )
                        send_scene_content()
                        # Inform end of content
                        self.add_command(common.Command(MessageType.CONTENT))
                    except Exception as e:
                        raise SendSceneContentFailed() from e
                    continue

                # Put this to true by default
                # todo Check build commands that do not trigger depsgraph update
                # because it can lead to ignoring real updates when a false positive is encountered
                command_triggers_depsgraph_update = True

                # Dispatch the command to its build handler.
                if command.type == MessageType.GREASE_PENCIL_MESH:
                    grease_pencil_api.build_grease_pencil_mesh(command.data)
                elif command.type == MessageType.GREASE_PENCIL_MATERIAL:
                    grease_pencil_api.build_grease_pencil_material(command.data)
                elif command.type == MessageType.GREASE_PENCIL_CONNECTION:
                    grease_pencil_api.build_grease_pencil_connection(command.data)
                elif command.type == MessageType.CLEAR_CONTENT:
                    # Room content is about to be (re)streamed: reset join-progress tracking.
                    clear_scene_content()
                    self._joining = True
                    self._received_command_count = 0
                    self._received_byte_size = 0
                    get_mixer_props().joining_percentage = 0
                    redraw_panels()
                elif command.type == MessageType.MESH:
                    self.build_mesh(command.data)
                elif command.type == MessageType.TRANSFORM:
                    self.build_transform(command.data)
                elif command.type == MessageType.MATERIAL:
                    material_api.build_material(command.data)
                elif command.type == MessageType.ASSIGN_MATERIAL:
                    material_api.build_assign_material(command.data)
                elif command.type == MessageType.DELETE:
                    self.build_delete(command.data)
                elif command.type == MessageType.CAMERA:
                    camera_api.build_camera(command.data)
                elif command.type == MessageType.LIGHT:
                    light_api.build_light(command.data)
                elif command.type == MessageType.RENAME:
                    self.build_rename(command.data)
                elif command.type == MessageType.DUPLICATE:
                    self.build_duplicate(command.data)
                elif command.type == MessageType.SEND_TO_TRASH:
                    self.build_send_to_trash(command.data)
                elif command.type == MessageType.RESTORE_FROM_TRASH:
                    self.build_restore_from_trash(command.data)
                elif command.type == MessageType.TEXTURE:
                    self.build_texture_file(command.data)
                elif command.type == MessageType.COLLECTION:
                    collection_api.build_collection(command.data)
                elif command.type == MessageType.COLLECTION_REMOVED:
                    collection_api.build_collection_removed(command.data)
                elif command.type == MessageType.INSTANCE_COLLECTION:
                    collection_api.build_collection_instance(command.data)
                elif command.type == MessageType.ADD_COLLECTION_TO_COLLECTION:
                    collection_api.build_collection_to_collection(command.data)
                elif command.type == MessageType.REMOVE_COLLECTION_FROM_COLLECTION:
                    collection_api.build_remove_collection_from_collection(command.data)
                elif command.type == MessageType.ADD_OBJECT_TO_COLLECTION:
                    collection_api.build_add_object_to_collection(command.data)
                elif command.type == MessageType.REMOVE_OBJECT_FROM_COLLECTION:
                    collection_api.build_remove_object_from_collection(command.data)
                elif command.type == MessageType.ADD_COLLECTION_TO_SCENE:
                    scene_api.build_collection_to_scene(command.data)
                elif command.type == MessageType.REMOVE_COLLECTION_FROM_SCENE:
                    scene_api.build_remove_collection_from_scene(command.data)
                elif command.type == MessageType.ADD_OBJECT_TO_SCENE:
                    scene_api.build_add_object_to_scene(command.data)
                elif command.type == MessageType.REMOVE_OBJECT_FROM_SCENE:
                    scene_api.build_remove_object_from_scene(command.data)
                elif command.type == MessageType.SCENE:
                    scene_api.build_scene(command.data)
                elif command.type == MessageType.SCENE_REMOVED:
                    scene_api.build_scene_removed(command.data)
                elif command.type == MessageType.SCENE_RENAMED:
                    scene_api.build_scene_renamed(command.data)
                elif command.type == MessageType.OBJECT_VISIBILITY:
                    object_api.build_object_visibility(command.data)
                elif command.type == MessageType.FRAME:
                    self.build_frame(command.data)
                elif command.type == MessageType.QUERY_CURRENT_FRAME:
                    self.query_current_frame()
                elif command.type == MessageType.PLAY:
                    self.build_play(command.data)
                elif command.type == MessageType.PAUSE:
                    self.build_pause(command.data)
                elif command.type == MessageType.ADD_KEYFRAME:
                    self.build_add_keyframe(command.data)
                elif command.type == MessageType.REMOVE_KEYFRAME:
                    self.build_remove_keyframe(command.data)
                elif command.type == MessageType.QUERY_OBJECT_DATA:
                    self.build_query_object_data(command.data)
                elif command.type == MessageType.CLEAR_ANIMATIONS:
                    self.build_clear_animations(command.data)
                elif command.type == MessageType.SHOT_MANAGER_MONTAGE_MODE:
                    self.build_montage_mode(command.data)
                elif command.type == MessageType.SHOT_MANAGER_ACTION:
                    shot_manager.build_shot_manager_action(command.data)
                elif command.type == MessageType.BLENDER_DATA_UPDATE:
                    data_api.build_data_update(command.data)
                elif command.type == MessageType.BLENDER_DATA_REMOVE:
                    data_api.build_data_remove(command.data)
                else:
                    # Command is ignored, so no depsgraph update can be triggered
                    command_triggers_depsgraph_update = False

                if command_triggers_depsgraph_update:
                    self.skip_next_depsgraph_update = True

            except Exception as e:
                logger.warning(f"Exception during processing of message {str(command.type)}")
                log_traceback(logger.warning)
                # Re-raise in development, and always for failed content sends.
                if get_mixer_prefs().env == "development" or isinstance(e, SendSceneContentFailed):
                    raise

            finally:
                self.block_signals = False

        # Exit once all GROUP_BEGIN commands have been balanced by GROUP_END.
        if group_count == 0:
            break

    # set_dirty was cleared iff at least one data command was processed.
    if not set_dirty:
        share_data.update_current_data()

    # Some objects may have been obtained before their parent
    # In that case we resolve parenting here
    # todo Parenting strategy should be changed: we should store the name of the parent in the command instead of
    # having a path as name
    if len(share_data.pending_parenting) > 0:
        remaining_parentings = set()
        for path in share_data.pending_parenting:
            path_elem = path.split("/")
            ob = None
            parent = None
            for elem in path_elem:
                ob = share_data.blender_objects.get(elem)
                if not ob:
                    # An ancestor is still missing: retry this path next run.
                    remaining_parentings.add(path)
                    break
                if ob.parent != parent:  # do it only if needed, otherwise it resets matrix_parent_inverse
                    ob.parent = parent
                parent = ob
        share_data.pending_parenting = remaining_parentings

    self.set_client_attributes(self.compute_client_custom_attributes())
def send_scene_data_to_server(scene, dummy):
    """Collect the current scene delta and send it to the server (VRtist protocol).

    Skips self-triggered depsgraph events (unless a test update is pending)
    and bails out when not in object mode. Runs the remove/add/update diff
    passes in dependency order; object data is only re-scanned when no
    structural change was detected.
    """
    logger.debug(
        "send_scene_data_to_server(): skip_next_depsgraph_update %s, pending_test_update %s",
        share_data.client.skip_next_depsgraph_update,
        share_data.pending_test_update,
    )
    if not share_data.client:
        logger.info("send_scene_data_to_server canceled (no client instance)")
        return

    share_data.set_dirty()
    share_data.clear_lists()

    depsgraph = bpy.context.evaluated_depsgraph_get()
    if depsgraph.updates:
        logger.debug("Current dg updates ...")
        for update in depsgraph.updates:
            logger.debug(" ......%s", update.id.original)

    # prevent processing self events, but always process test updates
    if not share_data.pending_test_update and share_data.client.skip_next_depsgraph_update:
        share_data.client.skip_next_depsgraph_update = False
        logger.debug("send_scene_data_to_server canceled (skip_next_depsgraph_update = True) ...")
        return

    share_data.pending_test_update = False

    if not is_in_object_mode():
        if depsgraph.updates:
            logger.info("send_scene_data_to_server canceled (not is_in_object_mode). Skipping updates")
            for update in depsgraph.updates:
                logger.info(" ......%s", update.id.original)
        return

    update_object_state(share_data.old_objects, share_data.blender_objects)
    update_scenes_state()
    update_collections_state()

    changed = False
    # Removals before additions: the add passes assume stale entries have
    # already been dropped.
    # NOTE(review): unlike other variants of this function, remove_scenes()
    # is not called here — confirm this is intentional.
    changed |= remove_objects_from_collections()
    changed |= remove_objects_from_scenes()
    changed |= remove_collections_from_collections()
    changed |= remove_collections_from_scenes()
    changed |= remove_collections()
    changed |= add_scenes()
    changed |= add_collections()
    changed |= add_objects()
    changed |= update_transforms()
    changed |= add_collections_to_scenes()
    changed |= add_collections_to_collections()
    changed |= add_objects_to_collections()
    changed |= add_objects_to_scenes()
    changed |= update_collections_parameters()
    changed |= create_vrtist_objects()
    changed |= delete_scene_objects()
    changed |= rename_objects()
    changed |= update_objects_visibility()
    changed |= update_objects_constraints()
    changed |= update_objects_transforms()
    changed |= reparent_objects()
    changed |= shot_manager.check_montage_mode()

    # Only scan object data when no structural change was detected above.
    if not changed:
        update_objects_data()

    # update for next change
    share_data.update_current_data()

    logger.debug("send_scene_data_to_server: end")
def handler_on_undo_redo_post(scene, dummy):
    """Undo/redo handler.

    Generic protocol: snapshot what was undone and reload all datablocks
    from the proxy. VRtist protocol: recompute the full scene diff and
    resend the delta, including the data of the current selection.
    """
    logger.error(f"Undo/redo post on {scene}")
    share_data.client.send_error(f"Undo/redo post from {get_mixer_prefs().user}")

    if not share_data.use_vrtist_protocol():
        # Generic sync: reload all datablocks
        undone = share_data.bpy_data_proxy.snapshot_undo_post()
        logger.warning(f"undone uuids : {undone}")
        share_data.bpy_data_proxy.reload_datablocks()
    else:
        share_data.set_dirty()
        share_data.clear_lists()

        # apply only in object mode
        if not is_in_object_mode():
            return

        old_objects_name = dict([(k, None) for k in share_data.old_objects.keys()])  # value not needed
        remap_objects_info()
        # Rebuild the name -> object mapping for objects that survived the undo remap.
        for k, v in share_data.old_objects.items():
            if k in old_objects_name:
                old_objects_name[k] = v

        update_object_state(old_objects_name, share_data.old_objects)

        update_collections_state()
        update_scenes_state()

        # Removals first, then additions, then parameter/topology updates:
        # later passes assume the earlier ones have already run.
        # NOTE(review): remove_scenes() is not called here unlike the older
        # variant of this handler — confirm intentional.
        remove_objects_from_scenes()
        remove_objects_from_collections()
        remove_collections_from_scenes()
        remove_collections_from_collections()
        remove_collections()

        add_scenes()
        add_objects()
        add_collections()
        add_collections_to_scenes()
        add_collections_to_collections()
        add_objects_to_collections()
        add_objects_to_scenes()

        update_collections_parameters()
        create_vrtist_objects()
        delete_scene_objects()
        rename_objects()
        update_objects_visibility()
        update_objects_constraints()
        update_objects_transforms()
        reparent_objects()

        # send selection content (including data)
        materials = set()
        for obj in bpy.context.selected_objects:
            update_transform(obj)
            if hasattr(obj, "data"):
                update_params(obj)
            if hasattr(obj, "material_slots"):
                for slot in obj.material_slots[:]:
                    materials.add(slot.material)

        for material in materials:
            share_data.client.send_material(material)

        share_data.update_current_data()