def test_loginAndLogout() -> None:
    preferences = Preferences()
    authorization_service = AuthorizationService(OAUTH_SETTINGS, preferences)
    authorization_service.onAuthenticationError.emit = MagicMock()
    authorization_service.onAuthStateChanged.emit = MagicMock()
    authorization_service.initialize()

    # Let the service think there was a successful response
    with patch.object(AuthorizationHelpers, "parseJWT", return_value=UserProfile()):
        authorization_service._onAuthStateChanged(SUCCESSFUL_AUTH_RESPONSE)

    # Ensure that the error signal was not triggered
    assert authorization_service.onAuthenticationError.emit.call_count == 0

    # Since we said that it went right this time, validate that we got a signal.
    assert authorization_service.onAuthStateChanged.emit.call_count == 1
    assert authorization_service.getUserProfile() is not None
    assert authorization_service.getAccessToken() == "beep"

    # Check that we stored the authentication data, so next time the user won't have to log in again.
    assert preferences.getValue("test/auth_data") is not None

    # We're logged in now, also check if logging out works
    authorization_service.deleteAuthData()
    assert authorization_service.onAuthStateChanged.emit.call_count == 2
    assert authorization_service.getUserProfile() is None

    # Ensure the data is gone after we logged out.
    assert preferences.getValue("test/auth_data") == "{}"

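# The OAuth tests in this file rely on module-level fixtures that are not part of this excerpt.
# Below is a minimal sketch of what they could look like, derived only from what the asserts use:
# OAUTH_SETTINGS must expose AUTH_DATA_PREFERENCE_KEY (the tests check "test/auth_data") and
# SUCCESSFUL_AUTH_RESPONSE must expose access_token (the tests expect "beep"). The class names
# OAuth2Settings and AuthenticationResponse and all other fields are assumptions for illustration.
OAUTH_SETTINGS = OAuth2Settings(
    OAUTH_SERVER_URL="https://account.example.com",     # hypothetical server URL
    CALLBACK_PORT=32118,                                # hypothetical local callback port
    CALLBACK_URL="http://localhost:32118/callback",     # hypothetical callback URL
    AUTH_DATA_PREFERENCE_KEY="test/auth_data")          # key asserted by the tests

SUCCESSFUL_AUTH_RESPONSE = AuthenticationResponse(
    success=True,            # hypothetical flag marking the response as successful
    access_token="beep",     # token the tests assert via getAccessToken()
    refresh_token="boop")    # hypothetical refresh token
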
def test_addPreference(preference):
    preferences = Preferences()

    preferences.addPreference(preference["key"], preference["default"])
    assert preferences.getValue(preference["key"]) == parseValue(preference["default"])

    # Attempt to add the preference again, but with a different default.
    preferences.addPreference(preference["key"], preference["key"])
    assert preferences.getValue(preference["key"]) == parseValue(preference["key"])

def test_deserialize():
    preferences = Preferences()
    path = Resources.getPath(Resources.Preferences, "preferences_test.cfg")
    with open(path, "r", encoding="utf-8") as f:
        preferences.deserialize(f.read())

    assert preferences.getValue("general/foo") == "omgzomg"
    assert preferences.getValue("general/derp") == True

def test_loginAndLogout() -> None:
    preferences = Preferences()
    authorization_service = AuthorizationService(OAUTH_SETTINGS, preferences)
    authorization_service.onAuthenticationError.emit = MagicMock()
    authorization_service.onAuthStateChanged.emit = MagicMock()
    authorization_service.initialize()

    mock_reply = Mock()  # The user profile that the service should respond with.
    mock_reply.error = Mock(return_value=QNetworkReply.NetworkError.NoError)
    http_mock = Mock()
    http_mock.get = lambda url, headers_dict, callback, error_callback: callback(mock_reply)
    http_mock.readJSON = Mock(return_value={"data": {"user_id": "di_resu", "username": "******"}})

    # Let the service think there was a successful response
    with patch("UM.TaskManagement.HttpRequestManager.HttpRequestManager.getInstance", MagicMock(return_value=http_mock)):
        authorization_service._onAuthStateChanged(SUCCESSFUL_AUTH_RESPONSE)

    # Ensure that the error signal was not triggered
    assert authorization_service.onAuthenticationError.emit.call_count == 0

    # Since we said that it went right this time, validate that we got a signal.
    assert authorization_service.onAuthStateChanged.emit.call_count == 1

    with patch("UM.TaskManagement.HttpRequestManager.HttpRequestManager.getInstance", MagicMock(return_value=http_mock)):
        def callback(profile):
            assert profile is not None
        authorization_service.getUserProfile(callback)

    assert authorization_service.getAccessToken() == "beep"

    # Check that we stored the authentication data, so next time the user won't have to log in again.
    assert preferences.getValue("test/auth_data") is not None

    # We're logged in now, also check if logging out works
    authorization_service.deleteAuthData()
    assert authorization_service.onAuthStateChanged.emit.call_count == 2

    with patch("UM.TaskManagement.HttpRequestManager.HttpRequestManager.getInstance", MagicMock(return_value=http_mock)):
        def callback(profile):
            assert profile is None
        authorization_service.getUserProfile(callback)

    # Ensure the data is gone after we logged out.
    assert preferences.getValue("test/auth_data") == "{}"

def test_setResetValue(new_value):
    preferences = Preferences()
    default_value = "omgzomg"
    preferences.preferenceChanged.emit = MagicMock()
    preferences.addPreference("test/test", default_value)
    assert preferences.preferenceChanged.emit.call_count == 0
    preferences.setValue("test/test", new_value)
    assert preferences.getValue("test/test") == parseValue(new_value)

    if new_value != default_value:
        assert preferences.preferenceChanged.emit.call_count == 1

    preferences.resetPreference("test/test")
    if new_value != default_value:
        assert preferences.preferenceChanged.emit.call_count == 2
    else:
        # The preference never changed. Neither the set nor the reset should trigger an emit.
        assert preferences.preferenceChanged.emit.call_count == 0

    assert preferences.getValue("test/test") == default_value

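# The Preferences tests in this file take "preference" and "new_value" from pytest parametrization
# and compare results through a small parseValue() helper; neither is part of this excerpt. Below is
# a minimal sketch of what they could look like. All concrete values are illustrative guesses and
# the helper is a hypothetical reconstruction, not the project's actual definition.
test_preference_data = [{"key": "test/zomg", "default": 10},
                        {"key": "test/foo", "default": "omgzomg"},
                        {"key": "test/bool", "default": False}]


def parseValue(value):
    # Hypothetical helper: preferences are stored as strings, so boolean-looking strings are turned
    # back into real booleans and everything else is passed through unchanged.
    if value == "True":
        return True
    if value == "False":
        return False
    return value

# The parametrized tests would then be decorated roughly as follows:
#     @pytest.mark.parametrize("preference", test_preference_data)
#     def test_addPreference(preference): ...
#     @pytest.mark.parametrize("new_value", ["omgzomg", 10, False])
#     def test_setResetValue(new_value): ...
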
def test_storeAuthData(get_user_profile) -> None:
    preferences = Preferences()
    authorization_service = AuthorizationService(OAUTH_SETTINGS, preferences)
    authorization_service.initialize()

    # Write stuff to the preferences.
    authorization_service._storeAuthData(SUCCESSFUL_AUTH_RESPONSE)
    preference_value = preferences.getValue(OAUTH_SETTINGS.AUTH_DATA_PREFERENCE_KEY)
    # Check that something was actually put in the preferences
    assert preference_value is not None and preference_value != {}

    # Create a second auth service, so we can load the data.
    second_auth_service = AuthorizationService(OAUTH_SETTINGS, preferences)
    second_auth_service.initialize()
    second_auth_service.loadAuthDataFromPreferences()
    assert second_auth_service.getAccessToken() == SUCCESSFUL_AUTH_RESPONSE.access_token

def test_readWrite():
    preferences = Preferences()
    path = Resources.getPath(Resources.Preferences, "preferences_test.cfg")
    preferences.readFromFile(path)

    # Check if it has been loaded correctly
    assert preferences.getValue("general/foo") == "omgzomg"
    assert preferences.getValue("general/derp") == True

    # Write the contents of the preferences to a buffer
    in_memory_storage = StringIO()
    preferences.writeToFile(in_memory_storage)  # type: ignore

    new_preferences = Preferences()
    # For some reason, if write was used, the read doesn't work. If we do it like this, it does work.
    new_preferences.readFromFile(StringIO(in_memory_storage.getvalue()))

    assert preferences.getValue("general/foo") == new_preferences.getValue("general/foo")
    assert preferences.getValue("test/more_test") == new_preferences.getValue("test/more_test")

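# test_deserialize and test_readWrite both load a "preferences_test.cfg" resource that is not part
# of this excerpt. Based only on the values the asserts expect ("general/foo", "general/derp" and
# "test/more_test"), the file could look roughly like the sketch below; the version number, the
# section layout and the value of test/more_test are assumptions.
#
#     [general]
#     version = 6
#     foo = omgzomg
#     derp = True
#
#     [test]
#     more_test = omgzomg
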
def read(self, file_name):
    archive = zipfile.ZipFile(file_name, "r")
    cura_file_names = [name for name in archive.namelist() if name.startswith("Cura/")]

    # Create a shadow copy of the preferences (we don't want all of the preferences, but we do want
    # to re-use its parsing code).
    temp_preferences = Preferences()
    temp_preferences.readFromFile(io.TextIOWrapper(archive.open("Cura/preferences.cfg")))  # We need to wrap it, else the archive parser breaks.

    # Copy a number of settings from the temp preferences to the global
    global_preferences = Preferences.getInstance()

    visible_settings = temp_preferences.getValue("general/visible_settings")
    if visible_settings is None:
        Logger.log("w", "Workspace did not contain visible settings. Leaving visibility unchanged")
    else:
        global_preferences.setValue("general/visible_settings", visible_settings)

    categories_expanded = temp_preferences.getValue("cura/categories_expanded")
    if categories_expanded is None:
        Logger.log("w", "Workspace did not contain expanded categories. Leaving them unchanged")
    else:
        global_preferences.setValue("cura/categories_expanded", categories_expanded)

    Application.getInstance().expandedCategoriesChanged.emit()  # Notify the GUI of the change

    self._id_mapping = {}

    # We don't add containers right away, but wait until right before the stack serialization.
    # We do this so that if something goes wrong, it's easier to clean up.
    containers_to_add = []

    # TODO: For the moment we use pretty naive existence checking. If the ID is the same, we assume in quite a few
    # TODO: cases that the container loaded is the same (most notably in materials & definitions).
    # TODO: It might be possible that we need to add smarter checking in the future.
    Logger.log("d", "Workspace loading is checking definitions...")
    # Get all the definition files & check if they exist. If not, add them.
    definition_container_files = [name for name in cura_file_names if name.endswith(self._definition_container_suffix)]
    for definition_container_file in definition_container_files:
        container_id = self._stripFileToId(definition_container_file)
        definitions = self._container_registry.findDefinitionContainers(id=container_id)
        if not definitions:
            definition_container = DefinitionContainer(container_id)
            definition_container.deserialize(archive.open(definition_container_file).read().decode("utf-8"))
            self._container_registry.addContainer(definition_container)
        Job.yieldThread()

    Logger.log("d", "Workspace loading is checking materials...")
    material_containers = []
    # Get all the material files and check if they exist. If not, add them.
    xml_material_profile = self._getXmlProfileClass()
    if self._material_container_suffix is None:
        self._material_container_suffix = ContainerRegistry.getMimeTypeForContainer(xml_material_profile).suffixes[0]
    if xml_material_profile:
        material_container_files = [name for name in cura_file_names if name.endswith(self._material_container_suffix)]
        for material_container_file in material_container_files:
            container_id = self._stripFileToId(material_container_file)
            materials = self._container_registry.findInstanceContainers(id=container_id)
            if not materials:
                material_container = xml_material_profile(container_id)
                material_container.deserialize(archive.open(material_container_file).read().decode("utf-8"))
                containers_to_add.append(material_container)
            else:
                if not materials[0].isReadOnly():  # Only create new materials if they are not read only.
                    if self._resolve_strategies["material"] == "override":
                        materials[0].deserialize(archive.open(material_container_file).read().decode("utf-8"))
                    elif self._resolve_strategies["material"] == "new":
                        # Note that we *must* deserialize it with a new ID, as multiple containers will be
                        # auto created & added.
                        material_container = xml_material_profile(self.getNewId(container_id))
                        material_container.deserialize(archive.open(material_container_file).read().decode("utf-8"))
                        containers_to_add.append(material_container)
                        material_containers.append(material_container)
            Job.yieldThread()

    Logger.log("d", "Workspace loading is checking instance containers...")
    # Get quality_changes and user profiles saved in the workspace
    instance_container_files = [name for name in cura_file_names if name.endswith(self._instance_container_suffix)]
    user_instance_containers = []
    quality_changes_instance_containers = []
    for instance_container_file in instance_container_files:
        container_id = self._stripFileToId(instance_container_file)
        instance_container = InstanceContainer(container_id)

        # Deserialize InstanceContainer by converting read data from bytes to string
        instance_container.deserialize(archive.open(instance_container_file).read().decode("utf-8"))
        container_type = instance_container.getMetaDataEntry("type")
        Job.yieldThread()
        if container_type == "user":
            # Check if the user container already exists.
            user_containers = self._container_registry.findInstanceContainers(id=container_id)
            if not user_containers:
                containers_to_add.append(instance_container)
            else:
                if self._resolve_strategies["machine"] == "override" or self._resolve_strategies["machine"] is None:
                    user_containers[0].deserialize(archive.open(instance_container_file).read().decode("utf-8"))
                elif self._resolve_strategies["machine"] == "new":
                    # The machine is going to get a spiffy new name, so ensure that the id's of user settings match.
                    extruder_id = instance_container.getMetaDataEntry("extruder", None)
                    if extruder_id:
                        new_id = self.getNewId(extruder_id) + "_current_settings"
                        instance_container._id = new_id
                        instance_container.setName(new_id)
                        instance_container.setMetaDataEntry("extruder", self.getNewId(extruder_id))
                        containers_to_add.append(instance_container)

                    machine_id = instance_container.getMetaDataEntry("machine", None)
                    if machine_id:
                        new_id = self.getNewId(machine_id) + "_current_settings"
                        instance_container._id = new_id
                        instance_container.setName(new_id)
                        instance_container.setMetaDataEntry("machine", self.getNewId(machine_id))
                        containers_to_add.append(instance_container)
            user_instance_containers.append(instance_container)
        elif container_type == "quality_changes":
            # Check if quality changes already exists.
            quality_changes = self._container_registry.findInstanceContainers(id=container_id)
            if not quality_changes:
                containers_to_add.append(instance_container)
            else:
                if self._resolve_strategies["quality_changes"] == "override":
                    quality_changes[0].deserialize(archive.open(instance_container_file).read().decode("utf-8"))
                elif self._resolve_strategies["quality_changes"] is None:
                    # The ID already exists, but nothing in the values changed, so do nothing.
                    pass
            quality_changes_instance_containers.append(instance_container)
        else:
            continue

    # Add all the containers right before we try to add / serialize the stack
    for container in containers_to_add:
        self._container_registry.addContainer(container)
        container.setDirty(True)

    # Get the stack(s) saved in the workspace.
    Logger.log("d", "Workspace loading is checking stacks containers...")
    container_stack_files = [name for name in cura_file_names if name.endswith(self._container_stack_suffix)]
    global_stack = None
    extruder_stacks = []
    container_stacks_added = []
    try:
        for container_stack_file in container_stack_files:
            container_id = self._stripFileToId(container_stack_file)

            # Check if a stack by this ID already exists.
            container_stacks = self._container_registry.findContainerStacks(id=container_id)
            if container_stacks:
                stack = container_stacks[0]
                if self._resolve_strategies["machine"] == "override":
                    # TODO: HACK
                    # There is a machine; check if it has authentication data. If so, keep that data.
                    network_authentication_id = container_stacks[0].getMetaDataEntry("network_authentication_id")
                    network_authentication_key = container_stacks[0].getMetaDataEntry("network_authentication_key")
                    container_stacks[0].deserialize(archive.open(container_stack_file).read().decode("utf-8"))
                    if network_authentication_id:
                        container_stacks[0].addMetaDataEntry("network_authentication_id", network_authentication_id)
                    if network_authentication_key:
                        container_stacks[0].addMetaDataEntry("network_authentication_key", network_authentication_key)
                elif self._resolve_strategies["machine"] == "new":
                    new_id = self.getNewId(container_id)
                    stack = ContainerStack(new_id)
                    stack.deserialize(archive.open(container_stack_file).read().decode("utf-8"))

                    # Ensure a unique ID and name
                    stack._id = new_id

                    # Extruder stacks are "bound" to a machine. If we add the machine as a new one, the id of the
                    # bound machine also needs to change.
                    if stack.getMetaDataEntry("machine", None):
                        stack.setMetaDataEntry("machine", self.getNewId(stack.getMetaDataEntry("machine")))

                    if stack.getMetaDataEntry("type") != "extruder_train":
                        # Only machines need a new name, stacks may be non-unique
                        stack.setName(self._container_registry.uniqueName(stack.getName()))
                    container_stacks_added.append(stack)
                    self._container_registry.addContainer(stack)
                else:
                    Logger.log("w", "Resolve strategy of %s for machine is not supported", self._resolve_strategies["machine"])
            else:
                stack = ContainerStack(container_id)
                # Deserialize stack by converting read data from bytes to string
                stack.deserialize(archive.open(container_stack_file).read().decode("utf-8"))
                container_stacks_added.append(stack)
                self._container_registry.addContainer(stack)

            if stack.getMetaDataEntry("type") == "extruder_train":
                extruder_stacks.append(stack)
            else:
                global_stack = stack
            Job.yieldThread()
    except:
        Logger.logException("w", "We failed to deserialize the stack. Trying to clean up.")
        # Something went really wrong. Try to remove any data that we added.
        for container in containers_to_add:
            self._container_registry.getInstance().removeContainer(container.getId())

        for container in container_stacks_added:
            self._container_registry.getInstance().removeContainer(container.getId())
        return None

    if self._resolve_strategies["machine"] == "new":
        # A new machine was made, but it was serialized with the wrong user container. Fix that now.
        for container in user_instance_containers:
            extruder_id = container.getMetaDataEntry("extruder", None)
            if extruder_id:
                for extruder in extruder_stacks:
                    if extruder.getId() == extruder_id:
                        extruder.replaceContainer(0, container)
                        continue

            machine_id = container.getMetaDataEntry("machine", None)
            if machine_id:
                if global_stack.getId() == machine_id:
                    global_stack.replaceContainer(0, container)
                    continue

    if self._resolve_strategies["quality_changes"] == "new":
        # Quality changes needs to get a new ID, added to registry and to the right stacks
        for container in quality_changes_instance_containers:
            old_id = container.getId()
            container.setName(self._container_registry.uniqueName(container.getName()))
            # We're not really supposed to change the ID in normal cases, but this is an exception.
            container._id = self.getNewId(container.getId())

            # The container was not added yet, as it didn't have a unique ID. It does now, so add it.
            self._container_registry.addContainer(container)

            # Replace the quality changes container
            old_container = global_stack.findContainer({"type": "quality_changes"})
            if old_container.getId() == old_id:
                quality_changes_index = global_stack.getContainerIndex(old_container)
                global_stack.replaceContainer(quality_changes_index, container)
                continue

            for stack in extruder_stacks:
                old_container = stack.findContainer({"type": "quality_changes"})
                if old_container.getId() == old_id:
                    quality_changes_index = stack.getContainerIndex(old_container)
                    stack.replaceContainer(quality_changes_index, container)

    if self._resolve_strategies["material"] == "new":
        for material in material_containers:
            old_material = global_stack.findContainer({"type": "material"})
            if old_material.getId() in self._id_mapping:
                material_index = global_stack.getContainerIndex(old_material)
                global_stack.replaceContainer(material_index, material)
                continue

            for stack in extruder_stacks:
                old_material = stack.findContainer({"type": "material"})
                if old_material.getId() in self._id_mapping:
                    material_index = stack.getContainerIndex(old_material)
                    stack.replaceContainer(material_index, material)
                    continue

    for stack in extruder_stacks:
        ExtruderManager.getInstance().registerExtruder(stack, global_stack.getId())
    else:
        # Machine has no extruders, but it needs to be registered with the extruder manager.
        ExtruderManager.getInstance().registerExtruder(None, global_stack.getId())

    Logger.log("d", "Workspace loading is notifying rest of the code of changes...")
    # Notify everything/one that is to notify about changes.
    global_stack.containersChanged.emit(global_stack.getTop())
    for stack in extruder_stacks:
        stack.setNextStack(global_stack)
        stack.containersChanged.emit(stack.getTop())

    # Actually change the active machine.
    Application.getInstance().setGlobalContainerStack(global_stack)

    # Load all the nodes / meshdata of the workspace
    nodes = self._3mf_mesh_reader.read(file_name)
    if nodes is None:
        nodes = []
    return nodes

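# The workspace reader above leans on a couple of small helpers that are not part of this excerpt,
# most notably _stripFileToId() and getNewId(). A minimal sketch of what they could look like, based
# purely on how they are used here (turning an archive entry such as "Cura/foo.inst.cfg" into a
# container ID, and handing out renamed IDs while recording them in self._id_mapping); the actual
# implementations may differ.
def _stripFileToId(self, file_name):
    # Hypothetical: drop the "Cura/" archive prefix and the file extension to get the container ID.
    return file_name.replace("Cura/", "").split(".")[0]

def getNewId(self, old_id):
    # Hypothetical: create (and remember) a unique replacement ID so later passes can map the old
    # ID to the new one via self._id_mapping.
    if old_id not in self._id_mapping:
        self._id_mapping[old_id] = self._container_registry.uniqueName(old_id)
    return self._id_mapping[old_id]
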
def test_removePreference(preference):
    preferences = Preferences()
    preferences.addPreference(preference["key"], preference["default"])

    preferences.removePreference(preference["key"])
    assert preferences.getValue(preference["key"]) is None

def preRead(self, file_name):
    self._3mf_mesh_reader = Application.getInstance().getMeshFileHandler().getReaderForFile(file_name)
    if self._3mf_mesh_reader and self._3mf_mesh_reader.preRead(file_name) == WorkspaceReader.PreReadResult.accepted:
        pass
    else:
        Logger.log("w", "Could not find reader that was able to read the scene data for 3MF workspace")
        return WorkspaceReader.PreReadResult.failed

    machine_name = ""
    # Check if there are any conflicts, so we can ask the user.
    archive = zipfile.ZipFile(file_name, "r")
    cura_file_names = [name for name in archive.namelist() if name.startswith("Cura/")]
    container_stack_files = [name for name in cura_file_names if name.endswith(self._container_stack_suffix)]
    self._resolve_strategies = {"machine": None, "quality_changes": None, "material": None}
    machine_conflict = False
    quality_changes_conflict = False
    for container_stack_file in container_stack_files:
        container_id = self._stripFileToId(container_stack_file)
        serialized = archive.open(container_stack_file).read().decode("utf-8")
        if machine_name == "":
            machine_name = self._getMachineNameFromSerializedStack(serialized)
        stacks = self._container_registry.findContainerStacks(id=container_id)
        if stacks:
            # Check if there are any changes at all in any of the container stacks.
            id_list = self._getContainerIdListFromSerialized(serialized)
            for index, container_id in enumerate(id_list):
                if stacks[0].getContainer(index).getId() != container_id:
                    machine_conflict = True
        Job.yieldThread()

    material_labels = []
    material_conflict = False
    xml_material_profile = self._getXmlProfileClass()
    if self._material_container_suffix is None:
        self._material_container_suffix = ContainerRegistry.getMimeTypeForContainer(xml_material_profile).preferredSuffix
    if xml_material_profile:
        material_container_files = [name for name in cura_file_names if name.endswith(self._material_container_suffix)]
        for material_container_file in material_container_files:
            container_id = self._stripFileToId(material_container_file)
            materials = self._container_registry.findInstanceContainers(id=container_id)
            material_labels.append(self._getMaterialLabelFromSerialized(archive.open(material_container_file).read().decode("utf-8")))
            if materials and not materials[0].isReadOnly():  # Only non read-only materials can be in conflict
                material_conflict = True
            Job.yieldThread()

    # Check if any quality_changes instance container is in conflict.
    instance_container_files = [name for name in cura_file_names if name.endswith(self._instance_container_suffix)]
    quality_name = ""
    quality_type = ""
    num_settings_overriden_by_quality_changes = 0  # How many settings are changed by the quality changes
    for instance_container_file in instance_container_files:
        container_id = self._stripFileToId(instance_container_file)
        instance_container = InstanceContainer(container_id)

        # Deserialize InstanceContainer by converting read data from bytes to string
        instance_container.deserialize(archive.open(instance_container_file).read().decode("utf-8"))
        container_type = instance_container.getMetaDataEntry("type")
        if container_type == "quality_changes":
            quality_name = instance_container.getName()
            num_settings_overriden_by_quality_changes += len(instance_container._instances)

            # Check if quality changes already exists.
            quality_changes = self._container_registry.findInstanceContainers(id=container_id)
            if quality_changes:
                # Check if there really is a conflict by comparing the values
                if quality_changes[0] != instance_container:
                    quality_changes_conflict = True
        elif container_type == "quality":
            # If the quality name is not set (either by quality or changes), set it now.
            # Quality changes should always override this (as they are "on top")
            if quality_name == "":
                quality_name = instance_container.getName()
            quality_type = instance_container.getName()
        Job.yieldThread()

    num_visible_settings = 0
    try:
        temp_preferences = Preferences()
        temp_preferences.readFromFile(io.TextIOWrapper(archive.open("Cura/preferences.cfg")))  # We need to wrap it, else the archive parser breaks.

        visible_settings_string = temp_preferences.getValue("general/visible_settings")
        if visible_settings_string is not None:
            num_visible_settings = len(visible_settings_string.split(";"))
        active_mode = temp_preferences.getValue("cura/active_mode")
        if not active_mode:
            active_mode = Preferences.getInstance().getValue("cura/active_mode")
    except KeyError:
        # If there is no preferences file, it's not a workspace, so notify user of failure.
        Logger.log("w", "File %s is not a valid workspace.", file_name)
        return WorkspaceReader.PreReadResult.failed

    # Show the dialog, informing the user what is about to happen.
    self._dialog.setMachineConflict(machine_conflict)
    self._dialog.setQualityChangesConflict(quality_changes_conflict)
    self._dialog.setMaterialConflict(material_conflict)
    self._dialog.setNumVisibleSettings(num_visible_settings)
    self._dialog.setQualityName(quality_name)
    self._dialog.setQualityType(quality_type)
    self._dialog.setNumSettingsOverridenByQualityChanges(num_settings_overriden_by_quality_changes)
    self._dialog.setActiveMode(active_mode)
    self._dialog.setMachineName(machine_name)
    self._dialog.setMaterialLabels(material_labels)
    self._dialog.setHasObjectsOnPlate(Application.getInstance().getPlatformActivity)
    self._dialog.show()

    # Block until the dialog is closed.
    self._dialog.waitForClose()

    if self._dialog.getResult() == {}:
        return WorkspaceReader.PreReadResult.cancelled

    self._resolve_strategies = self._dialog.getResult()

    return WorkspaceReader.PreReadResult.accepted

def preRead(self, file_name, show_dialog=True, *args, **kwargs):
    self._3mf_mesh_reader = Application.getInstance().getMeshFileHandler().getReaderForFile(file_name)
    if self._3mf_mesh_reader and self._3mf_mesh_reader.preRead(file_name) == WorkspaceReader.PreReadResult.accepted:
        pass
    else:
        Logger.log("w", "Could not find reader that was able to read the scene data for 3MF workspace")
        return WorkspaceReader.PreReadResult.failed

    machine_name = ""
    machine_type = ""
    variant_type_name = i18n_catalog.i18nc("@label", "Nozzle")

    # Check if there are any conflicts, so we can ask the user.
    archive = zipfile.ZipFile(file_name, "r")
    cura_file_names = [name for name in archive.namelist() if name.startswith("Cura/")]

    # A few lists of containers in this project file.
    # When loading the global stack file, it may be associated with those containers, which may or may not be
    # in Cura already, so we need to provide them as alternative search lists.
    definition_container_list = []
    instance_container_list = []
    material_container_list = []

    #
    # Read definition containers
    #
    machine_definition_container_count = 0
    extruder_definition_container_count = 0
    definition_container_files = [name for name in cura_file_names if name.endswith(self._definition_container_suffix)]
    for each_definition_container_file in definition_container_files:
        container_id = self._stripFileToId(each_definition_container_file)
        definitions = self._container_registry.findDefinitionContainers(id=container_id)

        if not definitions:
            definition_container = DefinitionContainer(container_id)
            definition_container.deserialize(archive.open(each_definition_container_file).read().decode("utf-8"))
        else:
            definition_container = definitions[0]
        definition_container_list.append(definition_container)

        definition_container_type = definition_container.getMetaDataEntry("type")
        if definition_container_type == "machine":
            machine_type = definition_container.getName()
            variant_type_name = definition_container.getMetaDataEntry("variants_name", variant_type_name)
            machine_definition_container_count += 1
        elif definition_container_type == "extruder":
            extruder_definition_container_count += 1
        else:
            Logger.log("w", "Unknown definition container type %s for %s",
                       definition_container_type, each_definition_container_file)
        Job.yieldThread()

    # Sanity check: there must be exactly one machine definition container.
    if machine_definition_container_count != 1:
        msg = "Expecting one machine definition container but got %s" % machine_definition_container_count
        Logger.log("e", msg)
        raise RuntimeError(msg)

    material_labels = []
    material_conflict = False
    xml_material_profile = self._getXmlProfileClass()
    if self._material_container_suffix is None:
        self._material_container_suffix = ContainerRegistry.getMimeTypeForContainer(xml_material_profile).preferredSuffix
    if xml_material_profile:
        material_container_files = [name for name in cura_file_names if name.endswith(self._material_container_suffix)]
        for material_container_file in material_container_files:
            container_id = self._stripFileToId(material_container_file)
            materials = self._container_registry.findInstanceContainers(id=container_id)
            material_labels.append(self._getMaterialLabelFromSerialized(archive.open(material_container_file).read().decode("utf-8")))
            if materials and not materials[0].isReadOnly():  # Only non read-only materials can be in conflict
                material_conflict = True
            Job.yieldThread()

    # Check if any quality_changes instance container is in conflict.
    instance_container_files = [name for name in cura_file_names if name.endswith(self._instance_container_suffix)]
    quality_name = ""
    quality_type = ""
    num_settings_overriden_by_quality_changes = 0  # How many settings are changed by the quality changes
    num_settings_overriden_by_definition_changes = 0  # How many settings are changed by the definition changes
    num_user_settings = 0
    quality_changes_conflict = False
    definition_changes_conflict = False

    for each_instance_container_file in instance_container_files:
        container_id = self._stripFileToId(each_instance_container_file)
        instance_container = InstanceContainer(container_id)

        # Deserialize InstanceContainer by converting read data from bytes to string
        instance_container.deserialize(archive.open(each_instance_container_file).read().decode("utf-8"))
        instance_container_list.append(instance_container)

        container_type = instance_container.getMetaDataEntry("type")
        if container_type == "quality_changes":
            quality_name = instance_container.getName()
            num_settings_overriden_by_quality_changes += len(instance_container._instances)

            # Check if quality changes already exists.
            quality_changes = self._container_registry.findInstanceContainers(id=container_id)
            if quality_changes:
                # Check if there really is a conflict by comparing the values
                if quality_changes[0] != instance_container:
                    quality_changes_conflict = True
        elif container_type == "definition_changes":
            definition_name = instance_container.getName()
            num_settings_overriden_by_definition_changes += len(instance_container._instances)

            definition_changes = self._container_registry.findDefinitionContainers(id=container_id)
            if definition_changes:
                if definition_changes[0] != instance_container:
                    definition_changes_conflict = True
        elif container_type == "user":
            num_user_settings += len(instance_container._instances)
        elif container_type in self._ignored_instance_container_types:
            # Ignore certain instance container types
            Logger.log("w", "Ignoring instance container [%s] with type [%s]", container_id, container_type)
            continue

        Job.yieldThread()

    # Load ContainerStack files and ExtruderStack files
    global_stack_file, extruder_stack_files = self._determineGlobalAndExtruderStackFiles(file_name, cura_file_names)

    self._resolve_strategies = {"machine": None, "quality_changes": None, "material": None}
    machine_conflict = False
    for container_stack_file in [global_stack_file] + extruder_stack_files:
        container_id = self._stripFileToId(container_stack_file)
        serialized = archive.open(container_stack_file).read().decode("utf-8")
        if machine_name == "":
            machine_name = self._getMachineNameFromSerializedStack(serialized)
        stacks = self._container_registry.findContainerStacks(id=container_id)
        if stacks:
            # Check if there are any changes at all in any of the container stacks.
            id_list = self._getContainerIdListFromSerialized(serialized)
            for index, container_id in enumerate(id_list):
                if stacks[0].getContainer(index).getId() != container_id:
                    machine_conflict = True
        Job.yieldThread()

    num_visible_settings = 0
    try:
        temp_preferences = Preferences()
        temp_preferences.readFromFile(io.TextIOWrapper(archive.open("Cura/preferences.cfg")))  # We need to wrap it, else the archive parser breaks.

        visible_settings_string = temp_preferences.getValue("general/visible_settings")
        if visible_settings_string is not None:
            num_visible_settings = len(visible_settings_string.split(";"))
        active_mode = temp_preferences.getValue("cura/active_mode")
        if not active_mode:
            active_mode = Preferences.getInstance().getValue("cura/active_mode")
    except KeyError:
        # If there is no preferences file, it's not a workspace, so notify user of failure.
        Logger.log("w", "File %s is not a valid workspace.", file_name)
        return WorkspaceReader.PreReadResult.failed

    # In case we use preRead() to check if a file is a valid project file, we don't want to show a dialog.
    if not show_dialog:
        return WorkspaceReader.PreReadResult.accepted

    # Prepare data for the dialog
    num_extruders = extruder_definition_container_count
    if num_extruders == 0:
        num_extruders = 1  # No extruder stacks found, which means there is one extruder

    extruders = num_extruders * [""]

    # Show the dialog, informing the user what is about to happen.
    self._dialog.setMachineConflict(machine_conflict)
    self._dialog.setQualityChangesConflict(quality_changes_conflict)
    self._dialog.setDefinitionChangesConflict(definition_changes_conflict)
    self._dialog.setMaterialConflict(material_conflict)
    self._dialog.setNumVisibleSettings(num_visible_settings)
    self._dialog.setQualityName(quality_name)
    self._dialog.setQualityType(quality_type)
    self._dialog.setNumSettingsOverridenByQualityChanges(num_settings_overriden_by_quality_changes)
    self._dialog.setNumUserSettings(num_user_settings)
    self._dialog.setActiveMode(active_mode)
    self._dialog.setMachineName(machine_name)
    self._dialog.setMaterialLabels(material_labels)
    self._dialog.setMachineType(machine_type)
    self._dialog.setExtruders(extruders)
    self._dialog.setVariantType(variant_type_name)
    self._dialog.setHasObjectsOnPlate(Application.getInstance().platformActivity)
    self._dialog.show()

    # Block until the dialog is closed.
    self._dialog.waitForClose()

    if self._dialog.getResult() == {}:
        return WorkspaceReader.PreReadResult.cancelled

    self._resolve_strategies = self._dialog.getResult()
    #
    # There can be 3 resolve strategies coming from the dialog:
    #  - new:      create a new container
    #  - override: override the existing container
    #  - None:     There is no conflict, which means containers with the same IDs may or may not be there already.
    #              If they are there, there is no conflict between them.
    #              In this case, you can either create a new one, or safely override the existing one.
    #
    # Default values
    for k, v in self._resolve_strategies.items():
        if v is None:
            self._resolve_strategies[k] = "new"

    return WorkspaceReader.PreReadResult.accepted

def preRead(self, file_name, show_dialog=True, *args, **kwargs):
    self._3mf_mesh_reader = Application.getInstance().getMeshFileHandler().getReaderForFile(file_name)
    if self._3mf_mesh_reader and self._3mf_mesh_reader.preRead(file_name) == WorkspaceReader.PreReadResult.accepted:
        pass
    else:
        Logger.log("w", "Could not find reader that was able to read the scene data for 3MF workspace")
        return WorkspaceReader.PreReadResult.failed

    machine_name = ""
    machine_type = ""
    variant_type_name = i18n_catalog.i18nc("@label", "Nozzle")

    # Check if there are any conflicts, so we can ask the user.
    archive = zipfile.ZipFile(file_name, "r")
    cura_file_names = [name for name in archive.namelist() if name.startswith("Cura/")]

    # A few lists of containers in this project file.
    # When loading the global stack file, it may be associated with those containers, which may or may not be
    # in Cura already, so we need to provide them as alternative search lists.
    definition_container_list = []
    instance_container_list = []
    material_container_list = []

    resolve_strategy_keys = ["machine", "material", "quality_changes"]
    self._resolve_strategies = {k: None for k in resolve_strategy_keys}
    containers_found_dict = {k: False for k in resolve_strategy_keys}

    #
    # Read definition containers
    #
    machine_definition_container_count = 0
    extruder_definition_container_count = 0
    definition_container_files = [name for name in cura_file_names if name.endswith(self._definition_container_suffix)]
    for each_definition_container_file in definition_container_files:
        container_id = self._stripFileToId(each_definition_container_file)
        definitions = self._container_registry.findDefinitionContainers(id=container_id)

        if not definitions:
            definition_container = DefinitionContainer(container_id)
            definition_container.deserialize(archive.open(each_definition_container_file).read().decode("utf-8"))
        else:
            definition_container = definitions[0]
        definition_container_list.append(definition_container)

        definition_container_type = definition_container.getMetaDataEntry("type")
        if definition_container_type == "machine":
            machine_type = definition_container.getName()
            variant_type_name = definition_container.getMetaDataEntry("variants_name", variant_type_name)
            machine_definition_container_count += 1
        elif definition_container_type == "extruder":
            extruder_definition_container_count += 1
        else:
            Logger.log("w", "Unknown definition container type %s for %s",
                       definition_container_type, each_definition_container_file)
        Job.yieldThread()

    # Sanity check: there must be exactly one machine definition container.
    if machine_definition_container_count != 1:
        msg = "Expecting one machine definition container but got %s" % machine_definition_container_count
        Logger.log("e", msg)
        raise RuntimeError(msg)

    material_labels = []
    material_conflict = False
    xml_material_profile = self._getXmlProfileClass()
    if self._material_container_suffix is None:
        self._material_container_suffix = ContainerRegistry.getMimeTypeForContainer(xml_material_profile).preferredSuffix
    if xml_material_profile:
        material_container_files = [name for name in cura_file_names if name.endswith(self._material_container_suffix)]
        for material_container_file in material_container_files:
            container_id = self._stripFileToId(material_container_file)
            materials = self._container_registry.findInstanceContainers(id=container_id)
            material_labels.append(self._getMaterialLabelFromSerialized(archive.open(material_container_file).read().decode("utf-8")))
            if materials:
                containers_found_dict["material"] = True
                if not materials[0].isReadOnly():  # Only non read-only materials can be in conflict
                    material_conflict = True
            Job.yieldThread()

    # Check if any quality_changes instance container is in conflict.
    instance_container_files = [name for name in cura_file_names if name.endswith(self._instance_container_suffix)]
    quality_name = ""
    quality_type = ""
    num_settings_overriden_by_quality_changes = 0  # How many settings are changed by the quality changes
    num_settings_overriden_by_definition_changes = 0  # How many settings are changed by the definition changes
    num_user_settings = 0
    quality_changes_conflict = False
    definition_changes_conflict = False

    for each_instance_container_file in instance_container_files:
        container_id = self._stripFileToId(each_instance_container_file)
        instance_container = InstanceContainer(container_id)

        # Deserialize InstanceContainer by converting read data from bytes to string
        instance_container.deserialize(archive.open(each_instance_container_file).read().decode("utf-8"))
        instance_container_list.append(instance_container)

        container_type = instance_container.getMetaDataEntry("type")
        if container_type == "quality_changes":
            quality_name = instance_container.getName()
            num_settings_overriden_by_quality_changes += len(instance_container._instances)

            # Check if quality changes already exists.
            quality_changes = self._container_registry.findInstanceContainers(id=container_id)
            if quality_changes:
                containers_found_dict["quality_changes"] = True
                # Check if there really is a conflict by comparing the values
                if quality_changes[0] != instance_container:
                    quality_changes_conflict = True
        elif container_type == "definition_changes":
            definition_name = instance_container.getName()
            num_settings_overriden_by_definition_changes += len(instance_container._instances)

            definition_changes = self._container_registry.findDefinitionContainers(id=container_id)
            if definition_changes:
                if definition_changes[0] != instance_container:
                    definition_changes_conflict = True
        elif container_type == "user":
            num_user_settings += len(instance_container._instances)
        elif container_type in self._ignored_instance_container_types:
            # Ignore certain instance container types
            Logger.log("w", "Ignoring instance container [%s] with type [%s]", container_id, container_type)
            continue

        Job.yieldThread()

    # Load ContainerStack files and ExtruderStack files
    global_stack_file, extruder_stack_files = self._determineGlobalAndExtruderStackFiles(file_name, cura_file_names)

    machine_conflict = False
    # Because there can be cases as follows:
    #  - the global stack exists but some/all of the extruder stacks DON'T exist
    #  - the global stack DOESN'T exist but some/all of the extruder stacks exist
    # To simplify this, only check if the global stack exists or not
    container_id = self._stripFileToId(global_stack_file)
    serialized = archive.open(global_stack_file).read().decode("utf-8")
    machine_name = self._getMachineNameFromSerializedStack(serialized)
    stacks = self._container_registry.findContainerStacks(id=container_id)
    if stacks:
        global_stack = stacks[0]
        containers_found_dict["machine"] = True
        # Check if there are any changes at all in any of the container stacks.
        id_list = self._getContainerIdListFromSerialized(serialized)
        for index, container_id in enumerate(id_list):
            # Take into account the old empty container IDs
            container_id = self._old_empty_profile_id_dict.get(container_id, container_id)
            if global_stack.getContainer(index).getId() != container_id:
                machine_conflict = True
                break
    Job.yieldThread()

    # If the global stack is found, we check if there are conflicts in the extruder stacks
    if containers_found_dict["machine"] and not machine_conflict:
        for extruder_stack_file in extruder_stack_files:
            container_id = self._stripFileToId(extruder_stack_file)
            serialized = archive.open(extruder_stack_file).read().decode("utf-8")
            parser = configparser.ConfigParser()
            parser.read_string(serialized)

            # The check should be done for the extruder stack that's associated with the existing global stack,
            # and those extruder stacks may have different IDs.
            # So we check according to the positions
            position = str(parser["metadata"]["position"])
            if position not in global_stack.extruders:
                # The extruder position defined in the project doesn't exist in this global stack.
                # We can say that it is a machine conflict, but it is very hard to override the machine in this
                # case because we need to override the existing extruders and add the non-existing extruders.
                #
                # HACK:
                # To make this simple, we simply say that there is no machine conflict and create a new machine
                # by default.
                machine_conflict = False
                break

            existing_extruder_stack = global_stack.extruders[position]
            # Check if there are any changes at all in any of the container stacks.
            id_list = self._getContainerIdListFromSerialized(serialized)
            for index, container_id in enumerate(id_list):
                # Take into account the old empty container IDs
                container_id = self._old_empty_profile_id_dict.get(container_id, container_id)
                if existing_extruder_stack.getContainer(index).getId() != container_id:
                    machine_conflict = True
                    break

    num_visible_settings = 0
    try:
        temp_preferences = Preferences()
        temp_preferences.readFromFile(io.TextIOWrapper(archive.open("Cura/preferences.cfg")))  # We need to wrap it, else the archive parser breaks.

        visible_settings_string = temp_preferences.getValue("general/visible_settings")
        if visible_settings_string is not None:
            num_visible_settings = len(visible_settings_string.split(";"))
        active_mode = temp_preferences.getValue("cura/active_mode")
        if not active_mode:
            active_mode = Preferences.getInstance().getValue("cura/active_mode")
    except KeyError:
        # If there is no preferences file, it's not a workspace, so notify user of failure.
        Logger.log("w", "File %s is not a valid workspace.", file_name)
        return WorkspaceReader.PreReadResult.failed

    # In case we use preRead() to check if a file is a valid project file, we don't want to show a dialog.
    if not show_dialog:
        return WorkspaceReader.PreReadResult.accepted

    # Prepare data for the dialog
    num_extruders = extruder_definition_container_count
    if num_extruders == 0:
        num_extruders = 1  # No extruder stacks found, which means there is one extruder

    extruders = num_extruders * [""]

    # Show the dialog, informing the user what is about to happen.
    self._dialog.setMachineConflict(machine_conflict)
    self._dialog.setQualityChangesConflict(quality_changes_conflict)
    self._dialog.setDefinitionChangesConflict(definition_changes_conflict)
    self._dialog.setMaterialConflict(material_conflict)
    self._dialog.setNumVisibleSettings(num_visible_settings)
    self._dialog.setQualityName(quality_name)
    self._dialog.setQualityType(quality_type)
    self._dialog.setNumSettingsOverridenByQualityChanges(num_settings_overriden_by_quality_changes)
    self._dialog.setNumUserSettings(num_user_settings)
    self._dialog.setActiveMode(active_mode)
    self._dialog.setMachineName(machine_name)
    self._dialog.setMaterialLabels(material_labels)
    self._dialog.setMachineType(machine_type)
    self._dialog.setExtruders(extruders)
    self._dialog.setVariantType(variant_type_name)
    self._dialog.setHasObjectsOnPlate(Application.getInstance().platformActivity)
    self._dialog.show()

    # Block until the dialog is closed.
    self._dialog.waitForClose()

    if self._dialog.getResult() == {}:
        return WorkspaceReader.PreReadResult.cancelled

    self._resolve_strategies = self._dialog.getResult()
    #
    # There can be 3 resolve strategies coming from the dialog:
    #  - new:      create a new container
    #  - override: override the existing container
    #  - None:     There is no conflict, which means containers with the same IDs may or may not be there already.
    #              If there is an existing container, there is no conflict between them, and we default to "override".
    #              If there is no existing container, we default to "new".
    #
    # Default values
    for key, strategy in self._resolve_strategies.items():
        if key not in containers_found_dict or strategy is not None:
            continue
        self._resolve_strategies[key] = "override" if containers_found_dict[key] else "new"

    return WorkspaceReader.PreReadResult.accepted

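# The last loop above resolves the "None" strategies: when a container with the same ID already
# exists in Cura, it defaults to "override"; when it does not, it defaults to "new". A small
# self-contained illustration of that rule (the dictionaries are made-up example data):
resolve_strategies = {"machine": None, "material": "new", "quality_changes": None}
containers_found = {"machine": True, "material": True, "quality_changes": False}
for key, strategy in resolve_strategies.items():
    if key not in containers_found or strategy is not None:
        continue
    resolve_strategies[key] = "override" if containers_found[key] else "new"
# resolve_strategies is now {"machine": "override", "material": "new", "quality_changes": "new"}
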
def test_addPreference(preference):
    preferences = Preferences()

    preferences.addPreference(preference["key"], preference["default"])
    assert preferences.getValue(preference["key"]) == parseValue(preference["default"])

def read(self, file_name): # Load all the nodes / meshdata of the workspace nodes = self._3mf_mesh_reader.read(file_name) if nodes is None: nodes = [] archive = zipfile.ZipFile(file_name, "r") cura_file_names = [name for name in archive.namelist() if name.startswith("Cura/")] # Create a shadow copy of the preferences (we don't want all of the preferences, but we do want to re-use its # parsing code. temp_preferences = Preferences() temp_preferences.readFromFile(io.TextIOWrapper(archive.open("Cura/preferences.cfg"))) # We need to wrap it, else the archive parser breaks. # Copy a number of settings from the temp preferences to the global global_preferences = Preferences.getInstance() global_preferences.setValue("general/visible_settings", temp_preferences.getValue("general/visible_settings")) global_preferences.setValue("cura/categories_expanded", temp_preferences.getValue("cura/categories_expanded")) Application.getInstance().expandedCategoriesChanged.emit() # Notify the GUI of the change self._id_mapping = {} # We don't add containers right away, but wait right until right before the stack serialization. # We do this so that if something goes wrong, it's easier to clean up. containers_to_add = [] # TODO: For the moment we use pretty naive existence checking. If the ID is the same, we assume in quite a few # TODO: cases that the container loaded is the same (most notable in materials & definitions). # TODO: It might be possible that we need to add smarter checking in the future. Logger.log("d", "Workspace loading is checking definitions...") # Get all the definition files & check if they exist. If not, add them. definition_container_files = [name for name in cura_file_names if name.endswith(self._definition_container_suffix)] for definition_container_file in definition_container_files: container_id = self._stripFileToId(definition_container_file) definitions = self._container_registry.findDefinitionContainers(id=container_id) if not definitions: definition_container = DefinitionContainer(container_id) definition_container.deserialize(archive.open(definition_container_file).read().decode("utf-8")) self._container_registry.addContainer(definition_container) Logger.log("d", "Workspace loading is checking materials...") material_containers = [] # Get all the material files and check if they exist. If not, add them. xml_material_profile = self._getXmlProfileClass() if self._material_container_suffix is None: self._material_container_suffix = ContainerRegistry.getMimeTypeForContainer(xml_material_profile).suffixes[0] if xml_material_profile: material_container_files = [name for name in cura_file_names if name.endswith(self._material_container_suffix)] for material_container_file in material_container_files: container_id = self._stripFileToId(material_container_file) materials = self._container_registry.findInstanceContainers(id=container_id) if not materials: material_container = xml_material_profile(container_id) material_container.deserialize(archive.open(material_container_file).read().decode("utf-8")) containers_to_add.append(material_container) else: if not materials[0].isReadOnly(): # Only create new materials if they are not read only. if self._resolve_strategies["material"] == "override": materials[0].deserialize(archive.open(material_container_file).read().decode("utf-8")) elif self._resolve_strategies["material"] == "new": # Note that we *must* deserialize it with a new ID, as multiple containers will be # auto created & added. 
material_container = xml_material_profile(self.getNewId(container_id)) material_container.deserialize(archive.open(material_container_file).read().decode("utf-8")) containers_to_add.append(material_container) material_containers.append(material_container) Logger.log("d", "Workspace loading is checking instance containers...") # Get quality_changes and user profiles saved in the workspace instance_container_files = [name for name in cura_file_names if name.endswith(self._instance_container_suffix)] user_instance_containers = [] quality_changes_instance_containers = [] for instance_container_file in instance_container_files: container_id = self._stripFileToId(instance_container_file) instance_container = InstanceContainer(container_id) # Deserialize InstanceContainer by converting read data from bytes to string instance_container.deserialize(archive.open(instance_container_file).read().decode("utf-8")) container_type = instance_container.getMetaDataEntry("type") if container_type == "user": # Check if quality changes already exists. user_containers = self._container_registry.findInstanceContainers(id=container_id) if not user_containers: containers_to_add.append(instance_container) else: if self._resolve_strategies["machine"] == "override": user_containers[0].deserialize(archive.open(instance_container_file).read().decode("utf-8")) elif self._resolve_strategies["machine"] == "new": # The machine is going to get a spiffy new name, so ensure that the id's of user settings match. extruder_id = instance_container.getMetaDataEntry("extruder", None) if extruder_id: new_id = self.getNewId(extruder_id) + "_current_settings" instance_container._id = new_id instance_container.setName(new_id) instance_container.setMetaDataEntry("extruder", self.getNewId(extruder_id)) containers_to_add.append(instance_container) machine_id = instance_container.getMetaDataEntry("machine", None) if machine_id: new_id = self.getNewId(machine_id) + "_current_settings" instance_container._id = new_id instance_container.setName(new_id) instance_container.setMetaDataEntry("machine", self.getNewId(machine_id)) containers_to_add.append(instance_container) user_instance_containers.append(instance_container) elif container_type == "quality_changes": # Check if quality changes already exists. quality_changes = self._container_registry.findInstanceContainers(id = container_id) if not quality_changes: containers_to_add.append(instance_container) else: if self._resolve_strategies["quality_changes"] == "override": quality_changes[0].deserialize(archive.open(instance_container_file).read().decode("utf-8")) elif self._resolve_strategies["quality_changes"] is None: # The ID already exists, but nothing in the values changed, so do nothing. pass quality_changes_instance_containers.append(instance_container) else: continue # Add all the containers right before we try to add / serialize the stack for container in containers_to_add: self._container_registry.addContainer(container) # Get the stack(s) saved in the workspace. 
Logger.log("d", "Workspace loading is checking stacks containers...") container_stack_files = [name for name in cura_file_names if name.endswith(self._container_stack_suffix)] global_stack = None extruder_stacks = [] container_stacks_added = [] try: for container_stack_file in container_stack_files: container_id = self._stripFileToId(container_stack_file) # Check if a stack by this ID already exists; container_stacks = self._container_registry.findContainerStacks(id=container_id) if container_stacks: stack = container_stacks[0] if self._resolve_strategies["machine"] == "override": container_stacks[0].deserialize(archive.open(container_stack_file).read().decode("utf-8")) elif self._resolve_strategies["machine"] == "new": new_id = self.getNewId(container_id) stack = ContainerStack(new_id) stack.deserialize(archive.open(container_stack_file).read().decode("utf-8")) # Ensure a unique ID and name stack._id = new_id # Extruder stacks are "bound" to a machine. If we add the machine as a new one, the id of the # bound machine also needs to change. if stack.getMetaDataEntry("machine", None): stack.setMetaDataEntry("machine", self.getNewId(stack.getMetaDataEntry("machine"))) if stack.getMetaDataEntry("type") != "extruder_train": # Only machines need a new name, stacks may be non-unique stack.setName(self._container_registry.uniqueName(stack.getName())) container_stacks_added.append(stack) self._container_registry.addContainer(stack) else: Logger.log("w", "Resolve strategy of %s for machine is not supported", self._resolve_strategies["machine"]) else: stack = ContainerStack(container_id) # Deserialize stack by converting read data from bytes to string stack.deserialize(archive.open(container_stack_file).read().decode("utf-8")) container_stacks_added.append(stack) self._container_registry.addContainer(stack) if stack.getMetaDataEntry("type") == "extruder_train": extruder_stacks.append(stack) else: global_stack = stack except: Logger.log("W", "We failed to serialize the stack. Trying to clean up.") # Something went really wrong. Try to remove any data that we added. for container in containers_to_add: self._container_registry.getInstance().removeContainer(container.getId()) for container in container_stacks_added: self._container_registry.getInstance().removeContainer(container.getId()) return None if self._resolve_strategies["machine"] == "new": # A new machine was made, but it was serialized with the wrong user container. Fix that now. for container in user_instance_containers: extruder_id = container.getMetaDataEntry("extruder", None) if extruder_id: for extruder in extruder_stacks: if extruder.getId() == extruder_id: extruder.replaceContainer(0, container) continue machine_id = container.getMetaDataEntry("machine", None) if machine_id: if global_stack.getId() == machine_id: global_stack.replaceContainer(0, container) continue if self._resolve_strategies["quality_changes"] == "new": # Quality changes needs to get a new ID, added to registry and to the right stacks for container in quality_changes_instance_containers: old_id = container.getId() container.setName(self._container_registry.uniqueName(container.getName())) # We're not really supposed to change the ID in normal cases, but this is an exception. container._id = self.getNewId(container.getId()) # The container was not added yet, as it didn't have an unique ID. It does now, so add it. 
            self._container_registry.addContainer(container)

            # Replace the quality changes container
            old_container = global_stack.findContainer({"type": "quality_changes"})
            if old_container.getId() == old_id:
                quality_changes_index = global_stack.getContainerIndex(old_container)
                global_stack.replaceContainer(quality_changes_index, container)
                continue

            for stack in extruder_stacks:
                old_container = stack.findContainer({"type": "quality_changes"})
                if old_container.getId() == old_id:
                    quality_changes_index = stack.getContainerIndex(old_container)
                    stack.replaceContainer(quality_changes_index, container)

    if self._resolve_strategies["material"] == "new":
        for material in material_containers:
            old_material = global_stack.findContainer({"type": "material"})
            if old_material.getId() in self._id_mapping:
                material_index = global_stack.getContainerIndex(old_material)
                global_stack.replaceContainer(material_index, material)
                continue

            for stack in extruder_stacks:
                old_material = stack.findContainer({"type": "material"})
                if old_material.getId() in self._id_mapping:
                    material_index = stack.getContainerIndex(old_material)
                    stack.replaceContainer(material_index, material)
                    continue

    if extruder_stacks:
        for stack in extruder_stacks:
            ExtruderManager.getInstance().registerExtruder(stack, global_stack.getId())
    else:
        # Machine has no extruders, but it needs to be registered with the extruder manager.
        ExtruderManager.getInstance().registerExtruder(None, global_stack.getId())

    Logger.log("d", "Workspace loading is notifying rest of the code of changes...")

    # Notify everything/one that is to notify about changes.
    for container in global_stack.getContainers():
        global_stack.containersChanged.emit(container)

    for stack in extruder_stacks:
        stack.setNextStack(global_stack)
        for container in stack.getContainers():
            stack.containersChanged.emit(container)

    # Actually change the active machine.
    Application.getInstance().setGlobalContainerStack(global_stack)
    return nodes
class Application: """Central object responsible for running the main event loop and creating other central objects. The Application object is a central object for accessing other important objects. It is also responsible for starting the main event loop. It is passed on to plugins so it can be easily used to access objects required for those plugins. """ def __init__(self, name: str, version: str, api_version: str, app_display_name: str = "", build_type: str = "", is_debug_mode: bool = False, **kwargs) -> None: """Init method :param name: :type{string} The name of the application. :param version: :type{string} Version, formatted as major.minor.rev :param build_type: Additional version info on the type of build this is, such as "master". :param is_debug_mode: Whether to run in debug mode. """ if Application.__instance is not None: raise RuntimeError("Try to create singleton '%s' more than once" % self.__class__.__name__) super().__init__() # Call super to make multiple inheritance work. Application.__instance = self self._api_version = Version(api_version) # type: Version self._app_name = name # type: str self._app_display_name = app_display_name if app_display_name else name # type: str self._version = version # type: str self._build_type = build_type # type: str self._is_debug_mode = is_debug_mode # type: bool self._is_headless = False # type: bool self._use_external_backend = False # type: bool self._just_updated_from_old_version = False # type: bool self._config_lock_filename = "{name}.lock".format( name=self._app_name) # type: str self._cli_args = None # type: argparse.Namespace self._cli_parser = argparse.ArgumentParser( prog=self._app_name, add_help=False) # type: argparse.ArgumentParser self._main_thread = threading.current_thread( ) # type: threading.Thread self.default_theme = self._app_name # type: str # Default theme is the application name self._default_language = "en_US" # type: str self.change_log_url: str = "https://github.com/Ultimaker/Uranium" # Where to find a more detailed description of the recent updates. self.beta_change_log_url: str = "https://github.com/Ultimaker/Uranium" # Where to find a more detailed description of proposed updates. self._preferences_filename = None # type: str self._preferences = None # type: Preferences self._extensions = [] # type: List[Extension] self._file_providers = [] # type: List[FileProvider] self._required_plugins = [] # type: List[str] self._package_manager_class = PackageManager # type: type self._package_manager = None # type: PackageManager self._plugin_registry = None # type: PluginRegistry self._container_registry_class = ContainerRegistry # type: type self._container_registry = None # type: ContainerRegistry self._global_container_stack = None # type: Optional[ContainerStack] self._file_provider_model = FileProviderModel( application=self) # type: Optional[FileProviderModel] self._controller = None # type: Controller self._backend = None # type: Backend self._output_device_manager = None # type: OutputDeviceManager self._operation_stack = None # type: OperationStack self._visible_messages = [] # type: List[Message] self._message_lock = threading.Lock() # type: threading.Lock self._app_install_dir = self.getInstallPrefix() # type: str # Intended for keeping plugin workspace metadata that is going to be saved in and retrieved from workspace files. # When the workspace is stored, all workspace readers will need to ensure that the workspace metadata is correctly # stored to the output file. 
The same also holds when loading a workspace; the existing data will be cleared # and replaced with the data recovered from the file (if any). self._workspace_metadata_storage = WorkspaceMetadataStorage( ) # type: WorkspaceMetadataStorage # Intended for keeping plugin workspace information that is only temporary. The information added in this structure # is NOT saved to and retrieved from workspace files. self._current_workspace_information = WorkspaceMetadataStorage( ) # type: WorkspaceMetadataStorage def getAPIVersion(self) -> "Version": return self._api_version def getWorkspaceMetadataStorage(self) -> WorkspaceMetadataStorage: return self._workspace_metadata_storage def getCurrentWorkspaceInformation(self) -> WorkspaceMetadataStorage: return self._current_workspace_information # Adds the command line options that can be parsed by the command line parser. # Can be overridden to add additional command line options to the parser. def addCommandLineOptions(self) -> None: self._cli_parser.add_argument("--version", action="version", version="%(prog)s version: {0}".format( self._version)) self._cli_parser.add_argument( "--external-backend", action="store_true", default=False, help= "Use an externally started backend instead of starting it automatically. This is a debug feature to make it possible to run the engine with debug options enabled." ) self._cli_parser.add_argument('--headless', action='store_true', default=False, help="Hides all GUI elements.") self._cli_parser.add_argument( "--debug", action="store_true", default=False, help="Turn on the debug mode by setting this option.") def parseCliOptions(self) -> None: self._cli_args = self._cli_parser.parse_args() self._is_headless = self._cli_args.headless self._is_debug_mode = self._cli_args.debug or self._is_debug_mode self._use_external_backend = self._cli_args.external_backend # Performs initialization that must be done before start. def initialize(self) -> None: Logger.log("d", "Initializing %s", self._app_display_name) Logger.log("d", "App Version %s", self._version) Logger.log("d", "Api Version %s", self._api_version) Logger.log("d", "Build type %s", self._build_type or "None") # For Ubuntu Unity this makes Qt use its own menu bar rather than pass it on to Unity. os.putenv("UBUNTU_MENUPROXY", "0") # Custom signal handling Signal._app = self Signal._signalQueue = self # Initialize Resources. Set the application name and version here because we can only know the actual info # after the __init__() has been called. 
Resources.ApplicationIdentifier = self._app_name Resources.ApplicationVersion = self._version app_root = os.path.abspath( os.path.join(os.path.dirname(sys.executable))) Resources.addSecureSearchPath( os.path.join(app_root, "share", "uranium", "resources")) Resources.addSecureSearchPath( os.path.join(os.path.dirname(sys.executable), "resources")) Resources.addSecureSearchPath( os.path.join(self._app_install_dir, "share", "uranium", "resources")) Resources.addSecureSearchPath( os.path.join(self._app_install_dir, "Resources", "uranium", "resources")) Resources.addSecureSearchPath( os.path.join(self._app_install_dir, "Resources", self._app_name, "resources")) if not hasattr(sys, "frozen"): Resources.addSearchPath( os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", "resources")) # local Conan cache Resources.addSearchPath( os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", "..", "resources")) Resources.addSearchPath( os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", "..", "plugins")) # venv site-packages Resources.addSearchPath( os.path.join(os.path.dirname(sys.executable), "..", "share", "uranium", "resources")) i18nCatalog.setApplication(self) PluginRegistry.addType("backend", self.setBackend) PluginRegistry.addType("logger", Logger.addLogger) PluginRegistry.addType("extension", self.addExtension) PluginRegistry.addType("file_provider", self.addFileProvider) self._preferences = Preferences() self._preferences.addPreference("general/language", self._default_language) self._preferences.addPreference("general/visible_settings", "") self._preferences.addPreference("general/plugins_to_remove", "") self._preferences.addPreference("general/disabled_plugins", "") self._controller = Controller(self) self._output_device_manager = OutputDeviceManager() self._operation_stack = OperationStack(self._controller) self._plugin_registry = PluginRegistry(self) self._plugin_registry.addPluginLocation( os.path.join(app_root, "share", "uranium", "plugins")) self._plugin_registry.addPluginLocation( os.path.join(app_root, "share", "cura", "plugins")) self._plugin_registry.addPluginLocation( os.path.join(self._app_install_dir, "lib", "uranium")) self._plugin_registry.addPluginLocation( os.path.join(self._app_install_dir, "lib64", "uranium")) self._plugin_registry.addPluginLocation( os.path.join(self._app_install_dir, "lib32", "uranium")) self._plugin_registry.addPluginLocation( os.path.join(os.path.dirname(sys.executable), "plugins")) self._plugin_registry.addPluginLocation( os.path.join(self._app_install_dir, "Resources", "uranium", "plugins")) self._plugin_registry.addPluginLocation( os.path.join(self._app_install_dir, "Resources", self._app_name, "plugins")) # Locally installed plugins local_path = os.path.join( Resources.getStoragePath(Resources.Resources), "plugins") # Ensure the local plugins directory exists try: os.makedirs(local_path) except OSError: pass self._plugin_registry.addPluginLocation(local_path) if not hasattr(sys, "frozen"): self._plugin_registry.addPluginLocation( os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", "plugins")) self._container_registry = self._container_registry_class(self) UM.Settings.InstanceContainer.setContainerRegistry( self._container_registry) UM.Settings.ContainerStack.setContainerRegistry( self._container_registry) self.showMessageSignal.connect(self.showMessage) self.hideMessageSignal.connect(self.hideMessage) def startSplashWindowPhase(self) -> None: pass def startPostSplashWindowPhase(self) -> None: pass # Indicates if 
we have just updated from an older application version. def hasJustUpdatedFromOldVersion(self) -> bool: return self._just_updated_from_old_version def run(self): """Run the main event loop. This method should be re-implemented by subclasses to start the main event loop. :exception NotImplementedError: """ self.addCommandLineOptions() self.parseCliOptions() self.initialize() self.startSplashWindowPhase() self.startPostSplashWindowPhase() def getContainerRegistry(self) -> ContainerRegistry: return self._container_registry def getApplicationLockFilename(self) -> str: """Get the lock filename""" return self._config_lock_filename applicationShuttingDown = Signal() """Emitted when the application window was closed and we need to shut down the application""" showMessageSignal = Signal() hideMessageSignal = Signal() globalContainerStackChanged = Signal() workspaceLoaded = Signal() def setGlobalContainerStack(self, stack: Optional["ContainerStack"]) -> None: if self._global_container_stack != stack: self._global_container_stack = stack self.globalContainerStackChanged.emit() def getGlobalContainerStack(self) -> Optional["ContainerStack"]: return self._global_container_stack def hideMessage(self, message: Message) -> None: raise NotImplementedError def showMessage(self, message: Message) -> None: raise NotImplementedError def showToastMessage(self, title: str, message: str) -> None: raise NotImplementedError def getVersion(self) -> str: """Get the version of the application""" return self._version def getBuildType(self) -> str: """Get the build type of the application""" return self._build_type def getIsDebugMode(self) -> bool: return self._is_debug_mode def getIsHeadLess(self) -> bool: return self._is_headless def getUseExternalBackend(self) -> bool: return self._use_external_backend visibleMessageAdded = Signal() def hideMessageById(self, message_id: int) -> None: """Hide message by ID (as provided by built-in id function)""" # If a user and the application tries to close same message dialog simultaneously, message_id could become an empty # string, and then the application will raise an error when trying to do "int(message_id)". # So we check the message_id here. if not message_id: return found_message = None with self._message_lock: for message in self._visible_messages: if id(message) == int(message_id): found_message = message if found_message is not None: self.hideMessageSignal.emit(found_message) visibleMessageRemoved = Signal() def getVisibleMessages(self) -> List[Message]: """Get list of all visible messages""" with self._message_lock: return self._visible_messages def _loadPlugins(self) -> None: """Function that needs to be overridden by child classes with a list of plugins it needs.""" pass def getApplicationName(self) -> str: """Get name of the application. :returns: app_name """ return self._app_name def getApplicationDisplayName(self) -> str: return self._app_display_name def getPreferences(self) -> Preferences: """Get the preferences. :return: preferences """ return self._preferences def savePreferences(self) -> None: if self._preferences_filename: self._preferences.writeToFile(self._preferences_filename) else: Logger.log("i", "Preferences filename not set. Unable to save file.") def getApplicationLanguage(self) -> str: """Get the currently used IETF language tag. The returned tag is during runtime used to translate strings. :returns: Language tag. 
""" language = os.getenv("URANIUM_LANGUAGE") if not language: language = self._preferences.getValue("general/language") if not language: language = os.getenv("LANGUAGE") if not language: language = self._default_language return language def getRequiredPlugins(self) -> List[str]: """Application has a list of plugins that it *must* have. If it does not have these, it cannot function. These plugins can not be disabled in any way. """ return self._required_plugins def setRequiredPlugins(self, plugin_names: List[str]) -> None: """Set the plugins that the application *must* have in order to function. :param plugin_names: List of strings with the names of the required plugins """ self._required_plugins = plugin_names def setBackend(self, backend: "Backend") -> None: """Set the backend of the application (the program that does the heavy lifting).""" self._backend = backend def getBackend(self) -> "Backend": """Get the backend of the application (the program that does the heavy lifting). :returns: Backend """ return self._backend def getPluginRegistry(self) -> PluginRegistry: """Get the PluginRegistry of this application. :returns: PluginRegistry """ return self._plugin_registry def getController(self) -> Controller: """Get the Controller of this application. :returns: Controller """ return self._controller def getOperationStack(self) -> OperationStack: return self._operation_stack def getOutputDeviceManager(self) -> OutputDeviceManager: return self._output_device_manager def getRenderer(self) -> Renderer: """Return an application-specific Renderer object. :exception NotImplementedError """ raise NotImplementedError( "getRenderer must be implemented by subclasses.") def functionEvent(self, event: CallFunctionEvent) -> None: """Post a function event onto the event loop. This takes a CallFunctionEvent object and puts it into the actual event loop. :exception NotImplementedError """ raise NotImplementedError( "functionEvent must be implemented by subclasses.") def callLater(self, func: Callable[..., Any], *args, **kwargs) -> None: """Call a function the next time the event loop runs. You can't get the result of this function directly. It won't block. :param func: The function to call. :param args: The positional arguments to pass to the function. :param kwargs: The keyword arguments to pass to the function. """ event = CallFunctionEvent(func, args, kwargs) self.functionEvent(event) def getMainThread(self) -> threading.Thread: """Get the application's main thread.""" return self._main_thread def addExtension(self, extension: "Extension") -> None: self._extensions.append(extension) def getExtensions(self) -> List["Extension"]: return self._extensions def addFileProvider(self, file_provider: "FileProvider") -> None: self._file_providers.append(file_provider) def getFileProviders(self) -> List["FileProvider"]: return self._file_providers # Returns the path to the folder of the app itself, e.g.: '/root/blah/programs/Cura'. @staticmethod def getAppFolderPrefix() -> str: if "python" in os.path.basename(sys.executable): executable = sys.argv[0] else: executable = sys.executable try: return os.path.dirname(os.path.realpath(executable)) except EnvironmentError: # Symlinks can't be dereferenced. 
            return os.path.dirname(executable)

    # Returns the path to the folder the app is installed _in_, e.g.: '/root/blah/programs'
    @staticmethod
    def getInstallPrefix() -> str:
        return os.path.abspath(os.path.join(Application.getAppFolderPrefix(), ".."))

    __instance = None  # type: Application

    @classmethod
    def getInstance(cls, *args, **kwargs) -> "Application":
        return cls.__instance
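# --- Illustrative sketch (added for clarity; not part of the original sources) ---
# The Application class above is a singleton: the first instance registers itself
# and getInstance() returns it. Subclasses are expected to call the base run()
# (which parses CLI options and calls initialize()) and then start their own event
# loop, and to implement functionEvent() and getRenderer() themselves.
# "ExampleApplication" below is a hypothetical name used only for illustration.
class ExampleApplication(Application):
    def __init__(self) -> None:
        super().__init__(name = "example_app", version = "1.0.0", api_version = "8.0.0")

    def run(self) -> None:
        super().run()  # parse CLI options, initialize resources, plugins and preferences
        # A real subclass would enter its GUI / event loop here.

# Usage sketch: the first (and only) instance becomes the singleton.
#   app = ExampleApplication()
#   app.run()
#   assert Application.getInstance() is app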
def read(self, file_name): archive = zipfile.ZipFile(file_name, "r") cura_file_names = [name for name in archive.namelist() if name.startswith("Cura/")] # Create a shadow copy of the preferences (we don't want all of the preferences, but we do want to re-use its # parsing code. temp_preferences = Preferences() temp_preferences.readFromFile(io.TextIOWrapper(archive.open("Cura/preferences.cfg"))) # We need to wrap it, else the archive parser breaks. # Copy a number of settings from the temp preferences to the global global_preferences = Preferences.getInstance() visible_settings = temp_preferences.getValue("general/visible_settings") if visible_settings is None: Logger.log("w", "Workspace did not contain visible settings. Leaving visibility unchanged") else: global_preferences.setValue("general/visible_settings", visible_settings) categories_expanded = temp_preferences.getValue("cura/categories_expanded") if categories_expanded is None: Logger.log("w", "Workspace did not contain expanded categories. Leaving them unchanged") else: global_preferences.setValue("cura/categories_expanded", categories_expanded) Application.getInstance().expandedCategoriesChanged.emit() # Notify the GUI of the change self._id_mapping = {} # We don't add containers right away, but wait right until right before the stack serialization. # We do this so that if something goes wrong, it's easier to clean up. containers_to_add = [] global_stack_file, extruder_stack_files = self._determineGlobalAndExtruderStackFiles(file_name, cura_file_names) global_stack = None extruder_stacks = [] extruder_stacks_added = [] container_stacks_added = [] containers_added = [] global_stack_id_original = self._stripFileToId(global_stack_file) global_stack_id_new = global_stack_id_original global_stack_need_rename = False extruder_stack_id_map = {} # new and old ExtruderStack IDs map if self._resolve_strategies["machine"] == "new": # We need a new id if the id already exists if self._container_registry.findContainerStacks(id = global_stack_id_original): global_stack_id_new = self.getNewId(global_stack_id_original) global_stack_need_rename = True for each_extruder_stack_file in extruder_stack_files: old_container_id = self._stripFileToId(each_extruder_stack_file) new_container_id = old_container_id if self._container_registry.findContainerStacks(id = old_container_id): # get a new name for this extruder new_container_id = self.getNewId(old_container_id) extruder_stack_id_map[old_container_id] = new_container_id # TODO: For the moment we use pretty naive existence checking. If the ID is the same, we assume in quite a few # TODO: cases that the container loaded is the same (most notable in materials & definitions). # TODO: It might be possible that we need to add smarter checking in the future. Logger.log("d", "Workspace loading is checking definitions...") # Get all the definition files & check if they exist. If not, add them. 
definition_container_files = [name for name in cura_file_names if name.endswith(self._definition_container_suffix)] for definition_container_file in definition_container_files: container_id = self._stripFileToId(definition_container_file) definitions = self._container_registry.findDefinitionContainers(id = container_id) if not definitions: definition_container = DefinitionContainer(container_id) definition_container.deserialize(archive.open(definition_container_file).read().decode("utf-8")) self._container_registry.addContainer(definition_container) Job.yieldThread() Logger.log("d", "Workspace loading is checking materials...") material_containers = [] # Get all the material files and check if they exist. If not, add them. xml_material_profile = self._getXmlProfileClass() if self._material_container_suffix is None: self._material_container_suffix = ContainerRegistry.getMimeTypeForContainer(xml_material_profile).suffixes[0] if xml_material_profile: material_container_files = [name for name in cura_file_names if name.endswith(self._material_container_suffix)] for material_container_file in material_container_files: container_id = self._stripFileToId(material_container_file) materials = self._container_registry.findInstanceContainers(id = container_id) if not materials: material_container = xml_material_profile(container_id) material_container.deserialize(archive.open(material_container_file).read().decode("utf-8")) containers_to_add.append(material_container) else: material_container = materials[0] if not material_container.isReadOnly(): # Only create new materials if they are not read only. if self._resolve_strategies["material"] == "override": material_container.deserialize(archive.open(material_container_file).read().decode("utf-8")) elif self._resolve_strategies["material"] == "new": # Note that we *must* deserialize it with a new ID, as multiple containers will be # auto created & added. material_container = xml_material_profile(self.getNewId(container_id)) material_container.deserialize(archive.open(material_container_file).read().decode("utf-8")) containers_to_add.append(material_container) material_containers.append(material_container) Job.yieldThread() Logger.log("d", "Workspace loading is checking instance containers...") # Get quality_changes and user profiles saved in the workspace instance_container_files = [name for name in cura_file_names if name.endswith(self._instance_container_suffix)] user_instance_containers = [] quality_and_definition_changes_instance_containers = [] for instance_container_file in instance_container_files: container_id = self._stripFileToId(instance_container_file) serialized = archive.open(instance_container_file).read().decode("utf-8") # HACK! we ignore "quality" and "variant" instance containers! 
parser = configparser.ConfigParser() parser.read_string(serialized) if not parser.has_option("metadata", "type"): Logger.log("w", "Cannot find metadata/type in %s, ignoring it", instance_container_file) continue if parser.get("metadata", "type") in self._ignored_instance_container_types: continue instance_container = InstanceContainer(container_id) # Deserialize InstanceContainer by converting read data from bytes to string instance_container.deserialize(serialized) container_type = instance_container.getMetaDataEntry("type") Job.yieldThread() # # IMPORTANT: # If an instance container (or maybe other type of container) exists, and user chooses "Create New", # we need to rename this container and all references to it, and changing those references are VERY # HARD. # if container_type in self._ignored_instance_container_types: # Ignore certain instance container types Logger.log("w", "Ignoring instance container [%s] with type [%s]", container_id, container_type) continue elif container_type == "user": # Check if quality changes already exists. user_containers = self._container_registry.findInstanceContainers(id = container_id) if not user_containers: containers_to_add.append(instance_container) else: if self._resolve_strategies["machine"] == "override" or self._resolve_strategies["machine"] is None: instance_container = user_containers[0] instance_container.deserialize(archive.open(instance_container_file).read().decode("utf-8")) instance_container.setDirty(True) elif self._resolve_strategies["machine"] == "new": # The machine is going to get a spiffy new name, so ensure that the id's of user settings match. old_extruder_id = instance_container.getMetaDataEntry("extruder", None) if old_extruder_id: new_extruder_id = extruder_stack_id_map[old_extruder_id] new_id = new_extruder_id + "_current_settings" instance_container._id = new_id instance_container.setName(new_id) instance_container.setMetaDataEntry("extruder", new_extruder_id) containers_to_add.append(instance_container) machine_id = instance_container.getMetaDataEntry("machine", None) if machine_id: new_machine_id = self.getNewId(machine_id) new_id = new_machine_id + "_current_settings" instance_container._id = new_id instance_container.setName(new_id) instance_container.setMetaDataEntry("machine", new_machine_id) containers_to_add.append(instance_container) user_instance_containers.append(instance_container) elif container_type in ("quality_changes", "definition_changes"): # Check if quality changes already exists. changes_containers = self._container_registry.findInstanceContainers(id = container_id) if not changes_containers: # no existing containers with the same ID, so we can safely add the new one containers_to_add.append(instance_container) else: # we have found existing container with the same ID, so we need to resolve according to the # selected strategy. if self._resolve_strategies[container_type] == "override": instance_container = changes_containers[0] instance_container.deserialize(archive.open(instance_container_file).read().decode("utf-8")) instance_container.setDirty(True) elif self._resolve_strategies[container_type] == "new": # TODO: how should we handle the case "new" for quality_changes and definition_changes? instance_container.setName(self._container_registry.uniqueName(instance_container.getName())) new_changes_container_id = self.getNewId(instance_container.getId()) instance_container._id = new_changes_container_id # TODO: we don't know the following is correct or not, need to verify # AND REFACTOR!!! 
if self._resolve_strategies["machine"] == "new": # The machine is going to get a spiffy new name, so ensure that the id's of user settings match. old_extruder_id = instance_container.getMetaDataEntry("extruder", None) if old_extruder_id: new_extruder_id = extruder_stack_id_map[old_extruder_id] instance_container.setMetaDataEntry("extruder", new_extruder_id) machine_id = instance_container.getMetaDataEntry("machine", None) if machine_id: new_machine_id = self.getNewId(machine_id) instance_container.setMetaDataEntry("machine", new_machine_id) containers_to_add.append(instance_container) elif self._resolve_strategies[container_type] is None: # The ID already exists, but nothing in the values changed, so do nothing. pass quality_and_definition_changes_instance_containers.append(instance_container) else: existing_container = self._container_registry.findInstanceContainers(id = container_id) if not existing_container: containers_to_add.append(instance_container) if global_stack_need_rename: if instance_container.getMetaDataEntry("machine"): instance_container.setMetaDataEntry("machine", global_stack_id_new) # Add all the containers right before we try to add / serialize the stack for container in containers_to_add: self._container_registry.addContainer(container) container.setDirty(True) containers_added.append(container) # Get the stack(s) saved in the workspace. Logger.log("d", "Workspace loading is checking stacks containers...") # -- # load global stack file try: if self._resolve_strategies["machine"] == "override": container_stacks = self._container_registry.findContainerStacks(id = global_stack_id_original) stack = container_stacks[0] # HACK # There is a machine, check if it has authentication data. If so, keep that data. network_authentication_id = container_stacks[0].getMetaDataEntry("network_authentication_id") network_authentication_key = container_stacks[0].getMetaDataEntry("network_authentication_key") container_stacks[0].deserialize(archive.open(global_stack_file).read().decode("utf-8")) if network_authentication_id: container_stacks[0].addMetaDataEntry("network_authentication_id", network_authentication_id) if network_authentication_key: container_stacks[0].addMetaDataEntry("network_authentication_key", network_authentication_key) elif self._resolve_strategies["machine"] == "new": # create a new global stack stack = GlobalStack(global_stack_id_new) # Deserialize stack by converting read data from bytes to string stack.deserialize(archive.open(global_stack_file).read().decode("utf-8")) # Ensure a unique ID and name stack._id = global_stack_id_new # Extruder stacks are "bound" to a machine. If we add the machine as a new one, the id of the # bound machine also needs to change. if stack.getMetaDataEntry("machine", None): stack.setMetaDataEntry("machine", global_stack_id_new) # Only machines need a new name, stacks may be non-unique stack.setName(global_stack_id_new) container_stacks_added.append(stack) self._container_registry.addContainer(stack) containers_added.append(stack) else: Logger.log("e", "Resolve strategy of %s for machine is not supported", self._resolve_strategies["machine"]) global_stack = stack Job.yieldThread() except: Logger.logException("w", "We failed to serialize the stack. Trying to clean up.") # Something went really wrong. Try to remove any data that we added. 
for container in containers_added: self._container_registry.removeContainer(container.getId()) return # -- # load extruder stack files try: for extruder_stack_file in extruder_stack_files: container_id = self._stripFileToId(extruder_stack_file) extruder_file_content = archive.open(extruder_stack_file, "r").read().decode("utf-8") if self._resolve_strategies["machine"] == "override": # deserialize new extruder stack over the current ones stack = self._overrideExtruderStack(global_stack, extruder_file_content) elif self._resolve_strategies["machine"] == "new": new_id = extruder_stack_id_map[container_id] stack = ExtruderStack(new_id) # HACK: the global stack can have a new name, so we need to make sure that this extruder stack # references to the new name instead of the old one. Normally, this can be done after # deserialize() by setting the metadata, but in the case of ExtruderStack, deserialize() # also does addExtruder() to its machine stack, so we have to make sure that it's pointing # to the right machine BEFORE deserialization. extruder_config = configparser.ConfigParser() extruder_config.read_string(extruder_file_content) extruder_config.set("metadata", "machine", global_stack_id_new) tmp_string_io = io.StringIO() extruder_config.write(tmp_string_io) extruder_file_content = tmp_string_io.getvalue() stack.deserialize(extruder_file_content) # Ensure a unique ID and name stack._id = new_id self._container_registry.addContainer(stack) extruder_stacks_added.append(stack) containers_added.append(stack) else: Logger.log("w", "Unknown resolve strategy: %s", self._resolve_strategies["machine"]) extruder_stacks.append(stack) except: Logger.logException("w", "We failed to serialize the stack. Trying to clean up.") # Something went really wrong. Try to remove any data that we added. for container in containers_added: self._container_registry.removeContainer(container.getId()) return # # Replacing the old containers if resolve is "new". # When resolve is "new", some containers will get renamed, so all the other containers that reference to those # MUST get updated too. # if self._resolve_strategies["machine"] == "new": # A new machine was made, but it was serialized with the wrong user container. Fix that now. for container in user_instance_containers: # replacing the container ID for user instance containers for the extruders extruder_id = container.getMetaDataEntry("extruder", None) if extruder_id: for extruder in extruder_stacks: if extruder.getId() == extruder_id: extruder.userChanges = container continue # replacing the container ID for user instance containers for the machine machine_id = container.getMetaDataEntry("machine", None) if machine_id: if global_stack.getId() == machine_id: global_stack.userChanges = container continue for changes_container_type in ("quality_changes", "definition_changes"): if self._resolve_strategies[changes_container_type] == "new": # Quality changes needs to get a new ID, added to registry and to the right stacks for each_changes_container in quality_and_definition_changes_instance_containers: # NOTE: The renaming and giving new IDs are possibly redundant because they are done in the # instance container loading part. 
new_id = each_changes_container.getId() # Find the old (current) changes container in the global stack if changes_container_type == "quality_changes": old_container = global_stack.qualityChanges elif changes_container_type == "definition_changes": old_container = global_stack.definitionChanges # sanity checks # NOTE: The following cases SHOULD NOT happen!!!! if not old_container: Logger.log("e", "We try to get [%s] from the global stack [%s] but we got None instead!", changes_container_type, global_stack.getId()) # Replace the quality/definition changes container if it's in the GlobalStack # NOTE: we can get an empty container here, but the IDs will not match, # so this comparison is fine. if self._id_mapping.get(old_container.getId()) == new_id: if changes_container_type == "quality_changes": global_stack.qualityChanges = each_changes_container elif changes_container_type == "definition_changes": global_stack.definitionChanges = each_changes_container continue # Replace the quality/definition changes container if it's in one of the ExtruderStacks for each_extruder_stack in extruder_stacks: changes_container = None if changes_container_type == "quality_changes": changes_container = each_extruder_stack.qualityChanges elif changes_container_type == "definition_changes": changes_container = each_extruder_stack.definitionChanges # sanity checks # NOTE: The following cases SHOULD NOT happen!!!! if not changes_container: Logger.log("e", "We try to get [%s] from the extruder stack [%s] but we got None instead!", changes_container_type, each_extruder_stack.getId()) # NOTE: we can get an empty container here, but the IDs will not match, # so this comparison is fine. if self._id_mapping.get(changes_container.getId()) == new_id: if changes_container_type == "quality_changes": each_extruder_stack.qualityChanges = each_changes_container elif changes_container_type == "definition_changes": each_extruder_stack.definitionChanges = each_changes_container if self._resolve_strategies["material"] == "new": for each_material in material_containers: old_material = global_stack.material # check if the old material container has been renamed to this material container ID # if the container hasn't been renamed, we do nothing. new_id = self._id_mapping.get(old_material.getId()) if new_id is None or new_id != each_material.getId(): continue if old_material.getId() in self._id_mapping: global_stack.material = each_material for each_extruder_stack in extruder_stacks: old_material = each_extruder_stack.material # check if the old material container has been renamed to this material container ID # if the container hasn't been renamed, we do nothing. new_id = self._id_mapping.get(old_material.getId()) if new_id is None or new_id != each_material.getId(): continue if old_material.getId() in self._id_mapping: each_extruder_stack.material = each_material if extruder_stacks: for stack in extruder_stacks: ExtruderManager.getInstance().registerExtruder(stack, global_stack.getId()) else: # Machine has no extruders, but it needs to be registered with the extruder manager. ExtruderManager.getInstance().registerExtruder(None, global_stack.getId()) Logger.log("d", "Workspace loading is notifying rest of the code of changes...") if self._resolve_strategies["machine"] == "new": for stack in extruder_stacks: stack.setNextStack(global_stack) stack.containersChanged.emit(stack.getTop()) # Actually change the active machine. 
    Application.getInstance().setGlobalContainerStack(global_stack)

    # Notify everything/one that is to notify about changes.
    global_stack.containersChanged.emit(global_stack.getTop())

    # Load all the nodes / meshdata of the workspace
    nodes = self._3mf_mesh_reader.read(file_name)
    if nodes is None:
        nodes = []

    return nodes
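# --- Illustrative sketch (added for clarity; not part of the original sources) ---
# read() is driven by self._resolve_strategies, which preRead() (below) fills in
# from the conflict dialog. Each key maps a conflict type to a strategy:
#   "override" - deserialize the workspace data over the existing container,
#   "new"      - create a renamed copy (old ID -> new ID recorded in self._id_mapping),
#   None       - the container already matches, so nothing needs to change.
# The helper below is only a simplified stand-in for the getNewId() bookkeeping
# used throughout read(); the real implementation may differ.
def _illustrative_get_new_id(id_mapping, container_registry, old_id):
    if old_id not in id_mapping:
        # uniqueName() is the same registry helper used above to rename stacks.
        id_mapping[old_id] = container_registry.uniqueName(old_id)
    return id_mapping[old_id]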
def preRead(self, file_name, show_dialog=True, *args, **kwargs): self._3mf_mesh_reader = Application.getInstance().getMeshFileHandler( ).getReaderForFile(file_name) if self._3mf_mesh_reader and self._3mf_mesh_reader.preRead( file_name) == WorkspaceReader.PreReadResult.accepted: pass else: Logger.log( "w", "Could not find reader that was able to read the scene data for 3MF workspace" ) return WorkspaceReader.PreReadResult.failed machine_name = "" machine_type = "" variant_type_name = i18n_catalog.i18nc("@label", "Nozzle") num_extruders = 0 # Check if there are any conflicts, so we can ask the user. archive = zipfile.ZipFile(file_name, "r") cura_file_names = [ name for name in archive.namelist() if name.startswith("Cura/") ] container_stack_files = [ name for name in cura_file_names if name.endswith(self._container_stack_suffix) ] self._resolve_strategies = { "machine": None, "quality_changes": None, "material": None } machine_conflict = False quality_changes_conflict = False for container_stack_file in container_stack_files: container_id = self._stripFileToId(container_stack_file) serialized = archive.open(container_stack_file).read().decode( "utf-8") if machine_name == "": machine_name = self._getMachineNameFromSerializedStack( serialized) stacks = self._container_registry.findContainerStacks( id=container_id) if stacks: # Check if there are any changes at all in any of the container stacks. id_list = self._getContainerIdListFromSerialized(serialized) for index, container_id in enumerate(id_list): if stacks[0].getContainer(index).getId() != container_id: machine_conflict = True Job.yieldThread() definition_container_files = [ name for name in cura_file_names if name.endswith(self._definition_container_suffix) ] for definition_container_file in definition_container_files: container_id = self._stripFileToId(definition_container_file) definitions = self._container_registry.findDefinitionContainers( id=container_id) if not definitions: definition_container = DefinitionContainer(container_id) definition_container.deserialize( archive.open(definition_container_file).read().decode( "utf-8")) else: definition_container = definitions[0] if definition_container.getMetaDataEntry("type") != "extruder": machine_type = definition_container.getName() variant_type_name = definition_container.getMetaDataEntry( "variants_name", variant_type_name) else: num_extruders += 1 Job.yieldThread() if num_extruders == 0: num_extruders = 1 # No extruder stacks found, which means there is one extruder extruders = num_extruders * [""] material_labels = [] material_conflict = False xml_material_profile = self._getXmlProfileClass() if self._material_container_suffix is None: self._material_container_suffix = ContainerRegistry.getMimeTypeForContainer( xml_material_profile).preferredSuffix if xml_material_profile: material_container_files = [ name for name in cura_file_names if name.endswith(self._material_container_suffix) ] for material_container_file in material_container_files: container_id = self._stripFileToId(material_container_file) materials = self._container_registry.findInstanceContainers( id=container_id) material_labels.append( self._getMaterialLabelFromSerialized( archive.open(material_container_file).read().decode( "utf-8"))) if materials and not materials[0].isReadOnly( ): # Only non readonly materials can be in conflict material_conflict = True Job.yieldThread() # Check if any quality_changes instance container is in conflict. 
instance_container_files = [ name for name in cura_file_names if name.endswith(self._instance_container_suffix) ] quality_name = "" quality_type = "" num_settings_overriden_by_quality_changes = 0 # How many settings are changed by the quality changes num_user_settings = 0 for instance_container_file in instance_container_files: container_id = self._stripFileToId(instance_container_file) instance_container = InstanceContainer(container_id) # Deserialize InstanceContainer by converting read data from bytes to string instance_container.deserialize( archive.open(instance_container_file).read().decode("utf-8")) container_type = instance_container.getMetaDataEntry("type") if container_type == "quality_changes": quality_name = instance_container.getName() num_settings_overriden_by_quality_changes += len( instance_container._instances) # Check if quality changes already exists. quality_changes = self._container_registry.findInstanceContainers( id=container_id) if quality_changes: # Check if there really is a conflict by comparing the values if quality_changes[0] != instance_container: quality_changes_conflict = True elif container_type == "quality": # If the quality name is not set (either by quality or changes, set it now) # Quality changes should always override this (as they are "on top") if quality_name == "": quality_name = instance_container.getName() quality_type = instance_container.getName() elif container_type == "user": num_user_settings += len(instance_container._instances) Job.yieldThread() num_visible_settings = 0 try: temp_preferences = Preferences() temp_preferences.readFromFile( io.TextIOWrapper(archive.open("Cura/preferences.cfg")) ) # We need to wrap it, else the archive parser breaks. visible_settings_string = temp_preferences.getValue( "general/visible_settings") if visible_settings_string is not None: num_visible_settings = len(visible_settings_string.split(";")) active_mode = temp_preferences.getValue("cura/active_mode") if not active_mode: active_mode = Preferences.getInstance().getValue( "cura/active_mode") except KeyError: # If there is no preferences file, it's not a workspace, so notify user of failure. Logger.log("w", "File %s is not a valid workspace.", file_name) return WorkspaceReader.PreReadResult.failed # In case we use preRead() to check if a file is a valid project file, we don't want to show a dialog. if not show_dialog: return WorkspaceReader.PreReadResult.accepted # Show the dialog, informing the user what is about to happen. self._dialog.setMachineConflict(machine_conflict) self._dialog.setQualityChangesConflict(quality_changes_conflict) self._dialog.setMaterialConflict(material_conflict) self._dialog.setNumVisibleSettings(num_visible_settings) self._dialog.setQualityName(quality_name) self._dialog.setQualityType(quality_type) self._dialog.setNumSettingsOverridenByQualityChanges( num_settings_overriden_by_quality_changes) self._dialog.setNumUserSettings(num_user_settings) self._dialog.setActiveMode(active_mode) self._dialog.setMachineName(machine_name) self._dialog.setMaterialLabels(material_labels) self._dialog.setMachineType(machine_type) self._dialog.setExtruders(extruders) self._dialog.setVariantType(variant_type_name) self._dialog.setHasObjectsOnPlate( Application.getInstance().platformActivity) self._dialog.show() # Block until the dialog is closed. self._dialog.waitForClose() if self._dialog.getResult() == {}: return WorkspaceReader.PreReadResult.cancelled self._resolve_strategies = self._dialog.getResult() return WorkspaceReader.PreReadResult.accepted
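# --- Illustrative note (added for clarity; not part of the original sources) ---
# preRead() ends by storing the dialog result in self._resolve_strategies, which
# read() then consults. Based on how the result is used, it is expected to map each
# conflict type to the chosen strategy; the concrete values below are assumptions
# shown only as an example. An empty dict means the user cancelled the dialog, in
# which case preRead() returns PreReadResult.cancelled and read() is never invoked.
example_resolve_strategies = {"machine": "override", "quality_changes": "new", "material": None}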
def read(self, file_name): archive = zipfile.ZipFile(file_name, "r") cura_file_names = [name for name in archive.namelist() if name.startswith("Cura/")] # Create a shadow copy of the preferences (we don't want all of the preferences, but we do want to re-use its # parsing code. temp_preferences = Preferences() temp_preferences.readFromFile(io.TextIOWrapper(archive.open("Cura/preferences.cfg"))) # We need to wrap it, else the archive parser breaks. # Copy a number of settings from the temp preferences to the global global_preferences = Preferences.getInstance() visible_settings = temp_preferences.getValue("general/visible_settings") if visible_settings is None: Logger.log("w", "Workspace did not contain visible settings. Leaving visibility unchanged") else: global_preferences.setValue("general/visible_settings", visible_settings) categories_expanded = temp_preferences.getValue("cura/categories_expanded") if categories_expanded is None: Logger.log("w", "Workspace did not contain expanded categories. Leaving them unchanged") else: global_preferences.setValue("cura/categories_expanded", categories_expanded) Application.getInstance().expandedCategoriesChanged.emit() # Notify the GUI of the change self._id_mapping = {} # We don't add containers right away, but wait right until right before the stack serialization. # We do this so that if something goes wrong, it's easier to clean up. containers_to_add = [] global_stack_file, extruder_stack_files = self._determineGlobalAndExtruderStackFiles(file_name, cura_file_names) global_stack = None extruder_stacks = [] extruder_stacks_added = [] container_stacks_added = [] containers_added = [] global_stack_id_original = self._stripFileToId(global_stack_file) global_stack_id_new = global_stack_id_original global_stack_need_rename = False extruder_stack_id_map = {} # new and old ExtruderStack IDs map if self._resolve_strategies["machine"] == "new": # We need a new id if the id already exists if self._container_registry.findContainerStacks(id = global_stack_id_original): global_stack_id_new = self.getNewId(global_stack_id_original) global_stack_need_rename = True for each_extruder_stack_file in extruder_stack_files: old_container_id = self._stripFileToId(each_extruder_stack_file) new_container_id = old_container_id if self._container_registry.findContainerStacks(id = old_container_id): # get a new name for this extruder new_container_id = self.getNewId(old_container_id) extruder_stack_id_map[old_container_id] = new_container_id # TODO: For the moment we use pretty naive existence checking. If the ID is the same, we assume in quite a few # TODO: cases that the container loaded is the same (most notable in materials & definitions). # TODO: It might be possible that we need to add smarter checking in the future. Logger.log("d", "Workspace loading is checking definitions...") # Get all the definition files & check if they exist. If not, add them. 
definition_container_files = [name for name in cura_file_names if name.endswith(self._definition_container_suffix)] for definition_container_file in definition_container_files: container_id = self._stripFileToId(definition_container_file) definitions = self._container_registry.findDefinitionContainers(id = container_id) if not definitions: definition_container = DefinitionContainer(container_id) definition_container.deserialize(archive.open(definition_container_file).read().decode("utf-8")) self._container_registry.addContainer(definition_container) Job.yieldThread() Logger.log("d", "Workspace loading is checking materials...") material_containers = [] # Get all the material files and check if they exist. If not, add them. xml_material_profile = self._getXmlProfileClass() if self._material_container_suffix is None: self._material_container_suffix = ContainerRegistry.getMimeTypeForContainer(xml_material_profile).suffixes[0] if xml_material_profile: material_container_files = [name for name in cura_file_names if name.endswith(self._material_container_suffix)] for material_container_file in material_container_files: container_id = self._stripFileToId(material_container_file) materials = self._container_registry.findInstanceContainers(id = container_id) if not materials: material_container = xml_material_profile(container_id) material_container.deserialize(archive.open(material_container_file).read().decode("utf-8")) containers_to_add.append(material_container) else: material_container = materials[0] if not material_container.isReadOnly(): # Only create new materials if they are not read only. if self._resolve_strategies["material"] == "override": material_container.deserialize(archive.open(material_container_file).read().decode("utf-8")) elif self._resolve_strategies["material"] == "new": # Note that we *must* deserialize it with a new ID, as multiple containers will be # auto created & added. material_container = xml_material_profile(self.getNewId(container_id)) material_container.deserialize(archive.open(material_container_file).read().decode("utf-8")) containers_to_add.append(material_container) material_containers.append(material_container) Job.yieldThread() Logger.log("d", "Workspace loading is checking instance containers...") # Get quality_changes and user profiles saved in the workspace instance_container_files = [name for name in cura_file_names if name.endswith(self._instance_container_suffix)] user_instance_containers = [] quality_and_definition_changes_instance_containers = [] for instance_container_file in instance_container_files: container_id = self._stripFileToId(instance_container_file) serialized = archive.open(instance_container_file).read().decode("utf-8") # HACK! we ignore "quality" and "variant" instance containers! 
parser = configparser.ConfigParser() parser.read_string(serialized) if not parser.has_option("metadata", "type"): Logger.log("w", "Cannot find metadata/type in %s, ignoring it", instance_container_file) continue if parser.get("metadata", "type") in self._ignored_instance_container_types: continue instance_container = InstanceContainer(container_id) # Deserialize InstanceContainer by converting read data from bytes to string instance_container.deserialize(serialized) container_type = instance_container.getMetaDataEntry("type") Job.yieldThread() # # IMPORTANT: # If an instance container (or maybe other type of container) exists, and user chooses "Create New", # we need to rename this container and all references to it, and changing those references are VERY # HARD. # if container_type in self._ignored_instance_container_types: # Ignore certain instance container types Logger.log("w", "Ignoring instance container [%s] with type [%s]", container_id, container_type) continue elif container_type == "user": # Check if quality changes already exists. user_containers = self._container_registry.findInstanceContainers(id = container_id) if not user_containers: containers_to_add.append(instance_container) else: if self._resolve_strategies["machine"] == "override" or self._resolve_strategies["machine"] is None: instance_container = user_containers[0] instance_container.deserialize(archive.open(instance_container_file).read().decode("utf-8")) instance_container.setDirty(True) elif self._resolve_strategies["machine"] == "new": # The machine is going to get a spiffy new name, so ensure that the id's of user settings match. old_extruder_id = instance_container.getMetaDataEntry("extruder", None) if old_extruder_id: new_extruder_id = extruder_stack_id_map[old_extruder_id] new_id = new_extruder_id + "_current_settings" instance_container._id = new_id instance_container.setName(new_id) instance_container.setMetaDataEntry("extruder", new_extruder_id) containers_to_add.append(instance_container) machine_id = instance_container.getMetaDataEntry("machine", None) if machine_id: new_machine_id = self.getNewId(machine_id) new_id = new_machine_id + "_current_settings" instance_container._id = new_id instance_container.setName(new_id) instance_container.setMetaDataEntry("machine", new_machine_id) containers_to_add.append(instance_container) user_instance_containers.append(instance_container) elif container_type in ("quality_changes", "definition_changes"): # Check if quality changes already exists. changes_containers = self._container_registry.findInstanceContainers(id = container_id) if not changes_containers: # no existing containers with the same ID, so we can safely add the new one containers_to_add.append(instance_container) else: # we have found existing container with the same ID, so we need to resolve according to the # selected strategy. if self._resolve_strategies[container_type] == "override": instance_container = changes_containers[0] instance_container.deserialize(archive.open(instance_container_file).read().decode("utf-8")) instance_container.setDirty(True) elif self._resolve_strategies[container_type] == "new": # TODO: how should we handle the case "new" for quality_changes and definition_changes? instance_container.setName(self._container_registry.uniqueName(instance_container.getName())) new_changes_container_id = self.getNewId(instance_container.getId()) instance_container._id = new_changes_container_id # TODO: we don't know the following is correct or not, need to verify # AND REFACTOR!!! 
if self._resolve_strategies["machine"] == "new": # The machine is going to get a spiffy new name, so ensure that the id's of user settings match. old_extruder_id = instance_container.getMetaDataEntry("extruder", None) if old_extruder_id: new_extruder_id = extruder_stack_id_map[old_extruder_id] instance_container.setMetaDataEntry("extruder", new_extruder_id) machine_id = instance_container.getMetaDataEntry("machine", None) if machine_id: new_machine_id = self.getNewId(machine_id) instance_container.setMetaDataEntry("machine", new_machine_id) containers_to_add.append(instance_container) elif self._resolve_strategies[container_type] is None: # The ID already exists, but nothing in the values changed, so do nothing. pass quality_and_definition_changes_instance_containers.append(instance_container) else: existing_container = self._container_registry.findInstanceContainers(id = container_id) if not existing_container: containers_to_add.append(instance_container) if global_stack_need_rename: if instance_container.getMetaDataEntry("machine"): instance_container.setMetaDataEntry("machine", global_stack_id_new) # Add all the containers right before we try to add / serialize the stack for container in containers_to_add: self._container_registry.addContainer(container) container.setDirty(True) containers_added.append(container) # Get the stack(s) saved in the workspace. Logger.log("d", "Workspace loading is checking stacks containers...") # -- # load global stack file try: # Check if a stack by this ID already exists; container_stacks = self._container_registry.findContainerStacks(id = global_stack_id_original) if container_stacks: stack = container_stacks[0] if self._resolve_strategies["machine"] == "override": # TODO: HACK # There is a machine, check if it has authentication data. If so, keep that data. network_authentication_id = container_stacks[0].getMetaDataEntry("network_authentication_id") network_authentication_key = container_stacks[0].getMetaDataEntry("network_authentication_key") container_stacks[0].deserialize(archive.open(global_stack_file).read().decode("utf-8")) if network_authentication_id: container_stacks[0].addMetaDataEntry("network_authentication_id", network_authentication_id) if network_authentication_key: container_stacks[0].addMetaDataEntry("network_authentication_key", network_authentication_key) elif self._resolve_strategies["machine"] == "new": stack = GlobalStack(global_stack_id_new) stack.deserialize(archive.open(global_stack_file).read().decode("utf-8")) # Ensure a unique ID and name stack._id = global_stack_id_new # Extruder stacks are "bound" to a machine. If we add the machine as a new one, the id of the # bound machine also needs to change. 
if stack.getMetaDataEntry("machine", None): stack.setMetaDataEntry("machine", global_stack_id_new) # Only machines need a new name, stacks may be non-unique stack.setName(self._container_registry.uniqueName(stack.getName())) container_stacks_added.append(stack) self._container_registry.addContainer(stack) else: Logger.log("w", "Resolve strategy of %s for machine is not supported", self._resolve_strategies["machine"]) else: # no existing container stack, so we create a new one stack = GlobalStack(global_stack_id_new) # Deserialize stack by converting read data from bytes to string stack.deserialize(archive.open(global_stack_file).read().decode("utf-8")) container_stacks_added.append(stack) self._container_registry.addContainer(stack) containers_added.append(stack) global_stack = stack Job.yieldThread() except: Logger.logException("w", "We failed to serialize the stack. Trying to clean up.") # Something went really wrong. Try to remove any data that we added. for container in containers_added: self._container_registry.removeContainer(container.getId()) return # -- # load extruder stack files try: for index, extruder_stack_file in enumerate(extruder_stack_files): container_id = self._stripFileToId(extruder_stack_file) extruder_file_content = archive.open(extruder_stack_file, "r").read().decode("utf-8") container_stacks = self._container_registry.findContainerStacks(id = container_id) if container_stacks: # this container stack already exists, try to resolve stack = container_stacks[0] if self._resolve_strategies["machine"] == "override": # NOTE: This is the same code as those in the lower part # deserialize new extruder stack over the current ones stack = self._overrideExtruderStack(global_stack, extruder_file_content) elif self._resolve_strategies["machine"] == "new": # create a new extruder stack from this one new_id = extruder_stack_id_map[container_id] stack = ExtruderStack(new_id) # HACK: the global stack can have a new name, so we need to make sure that this extruder stack # references to the new name instead of the old one. Normally, this can be done after # deserialize() by setting the metadata, but in the case of ExtruderStack, deserialize() # also does addExtruder() to its machine stack, so we have to make sure that it's pointing # to the right machine BEFORE deserialization. extruder_config = configparser.ConfigParser() extruder_config.read_string(extruder_file_content) extruder_config.set("metadata", "machine", global_stack_id_new) tmp_string_io = io.StringIO() extruder_config.write(tmp_string_io) extruder_file_content = tmp_string_io.getvalue() stack.deserialize(extruder_file_content) # Ensure a unique ID and name stack._id = new_id self._container_registry.addContainer(stack) extruder_stacks_added.append(stack) containers_added.append(stack) else: # No extruder stack with the same ID can be found if self._resolve_strategies["machine"] == "override": # deserialize new extruder stack over the current ones stack = self._overrideExtruderStack(global_stack, extruder_file_content) elif self._resolve_strategies["machine"] == "new": # container not found, create a new one stack = ExtruderStack(container_id) # HACK: the global stack can have a new name, so we need to make sure that this extruder stack # references to the new name instead of the old one. 
Normally, this can be done after # deserialize() by setting the metadata, but in the case of ExtruderStack, deserialize() # also does addExtruder() to its machine stack, so we have to make sure that it's pointing # to the right machine BEFORE deserialization. extruder_config = configparser.ConfigParser() extruder_config.read_string(extruder_file_content) extruder_config.set("metadata", "machine", global_stack_id_new) tmp_string_io = io.StringIO() extruder_config.write(tmp_string_io) extruder_file_content = tmp_string_io.getvalue() stack.deserialize(extruder_file_content) self._container_registry.addContainer(stack) extruder_stacks_added.append(stack) containers_added.append(stack) else: Logger.log("w", "Unknown resolve strategy: %s" % str(self._resolve_strategies["machine"])) extruder_stacks.append(stack) except: Logger.logException("w", "We failed to serialize the stack. Trying to clean up.") # Something went really wrong. Try to remove any data that we added. for container in containers_added: self._container_registry.removeContainer(container.getId()) return # # Replacing the old containers if resolve is "new". # When resolve is "new", some containers will get renamed, so all the other containers that reference to those # MUST get updated too. # if self._resolve_strategies["machine"] == "new": # A new machine was made, but it was serialized with the wrong user container. Fix that now. for container in user_instance_containers: # replacing the container ID for user instance containers for the extruders extruder_id = container.getMetaDataEntry("extruder", None) if extruder_id: for extruder in extruder_stacks: if extruder.getId() == extruder_id: extruder.userChanges = container continue # replacing the container ID for user instance containers for the machine machine_id = container.getMetaDataEntry("machine", None) if machine_id: if global_stack.getId() == machine_id: global_stack.userChanges = container continue for changes_container_type in ("quality_changes", "definition_changes"): if self._resolve_strategies[changes_container_type] == "new": # Quality changes needs to get a new ID, added to registry and to the right stacks for each_changes_container in quality_and_definition_changes_instance_containers: # NOTE: The renaming and giving new IDs are possibly redundant because they are done in the # instance container loading part. new_id = each_changes_container.getId() # Find the old (current) changes container in the global stack if changes_container_type == "quality_changes": old_container = global_stack.qualityChanges elif changes_container_type == "definition_changes": old_container = global_stack.definitionChanges # sanity checks # NOTE: The following cases SHOULD NOT happen!!!! if not old_container: Logger.log("e", "We try to get [%s] from the global stack [%s] but we got None instead!", changes_container_type, global_stack.getId()) # Replace the quality/definition changes container if it's in the GlobalStack # NOTE: we can get an empty container here, but the IDs will not match, # so this comparison is fine. 
if self._id_mapping.get(old_container.getId()) == new_id: if changes_container_type == "quality_changes": global_stack.qualityChanges = each_changes_container elif changes_container_type == "definition_changes": global_stack.definitionChanges = each_changes_container continue # Replace the quality/definition changes container if it's in one of the ExtruderStacks for each_extruder_stack in extruder_stacks: changes_container = None if changes_container_type == "quality_changes": changes_container = each_extruder_stack.qualityChanges elif changes_container_type == "definition_changes": changes_container = each_extruder_stack.definitionChanges # sanity checks # NOTE: The following cases SHOULD NOT happen!!!! if not changes_container: Logger.log("e", "We try to get [%s] from the extruder stack [%s] but we got None instead!", changes_container_type, each_extruder_stack.getId()) # NOTE: we can get an empty container here, but the IDs will not match, # so this comparison is fine. if self._id_mapping.get(changes_container.getId()) == new_id: if changes_container_type == "quality_changes": each_extruder_stack.qualityChanges = each_changes_container elif changes_container_type == "definition_changes": each_extruder_stack.definitionChanges = each_changes_container if self._resolve_strategies["material"] == "new": for each_material in material_containers: old_material = global_stack.material # check if the old material container has been renamed to this material container ID # if the container hasn't been renamed, we do nothing. new_id = self._id_mapping.get(old_material.getId()) if new_id is None or new_id != each_material.getId(): continue if old_material.getId() in self._id_mapping: global_stack.material = each_material for each_extruder_stack in extruder_stacks: old_material = each_extruder_stack.material # check if the old material container has been renamed to this material container ID # if the container hasn't been renamed, we do nothing. new_id = self._id_mapping.get(old_material.getId()) if new_id is None or new_id != each_material.getId(): continue if old_material.getId() in self._id_mapping: each_extruder_stack.material = each_material if extruder_stacks: for stack in extruder_stacks: ExtruderManager.getInstance().registerExtruder(stack, global_stack.getId()) else: # Machine has no extruders, but it needs to be registered with the extruder manager. ExtruderManager.getInstance().registerExtruder(None, global_stack.getId()) Logger.log("d", "Workspace loading is notifying rest of the code of changes...") if self._resolve_strategies["machine"] == "new": for stack in extruder_stacks: stack.setNextStack(global_stack) stack.containersChanged.emit(stack.getTop()) # Actually change the active machine. Application.getInstance().setGlobalContainerStack(global_stack) # Notify everything/one that is to notify about changes. global_stack.containersChanged.emit(global_stack.getTop()) # Load all the nodes / meshdata of the workspace nodes = self._3mf_mesh_reader.read(file_name) if nodes is None: nodes = [] return nodes
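The trickiest step in the extruder-stack handling above is rewriting the serialized stack's [metadata] machine entry *before* deserialize() is called, because deserialization also binds the extruder to its machine stack. Below is a minimal, standalone sketch of just that rewrite step; the helper name retarget_serialized_stack is made up for illustration, and the stack classes themselves are assumed rather than reproduced.

import configparser
import io

def retarget_serialized_stack(serialized: str, new_machine_id: str) -> str:
    # Parse the serialized container stack (an INI-style document).
    # interpolation=None is an extra safeguard (not taken in the code above)
    # so that any literal '%' in values survives the round trip untouched.
    config = configparser.ConfigParser(interpolation = None)
    config.read_string(serialized)

    # Point the extruder at the renamed machine *before* it is deserialized,
    # so that deserialization registers it with the correct global stack.
    if not config.has_section("metadata"):
        config.add_section("metadata")
    config.set("metadata", "machine", new_machine_id)

    # Write the modified document back out to a string.
    buffer = io.StringIO()
    config.write(buffer)
    return buffer.getvalue()

# Example usage with a made-up serialized extruder stack:
example = "[general]\nname = extruder_left\n\n[metadata]\nmachine = old_machine\n"
print(retarget_serialized_stack(example, "old_machine_#2"))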
class Application: ## Init method # # \param name \type{string} The name of the application. # \param version \type{string} Version, formatted as major.minor.rev # \param build_type Additional version info on the type of build this is, such as "master". # \param is_debug_mode Whether to run in debug mode. def __init__(self, name: str, version: str, api_version: str, app_display_name: str = "", build_type: str = "", is_debug_mode: bool = False, **kwargs) -> None: if Application.__instance is not None: raise RuntimeError("Try to create singleton '%s' more than once" % self.__class__.__name__) Application.__instance = self super().__init__() # Call super to make multiple inheritance work. self._api_version = Version(api_version) # type: Version self._app_name = name #type: str self._app_display_name = app_display_name if app_display_name else name # type: str self._version = version #type: str self._build_type = build_type #type: str self._is_debug_mode = is_debug_mode #type: bool self._is_headless = False #type: bool self._use_external_backend = False #type: bool self._config_lock_filename = "{name}.lock".format( name=self._app_name) # type: str self._cli_args = None #type: argparse.Namespace self._cli_parser = argparse.ArgumentParser( prog=self._app_name, add_help=False) #type: argparse.ArgumentParser self._main_thread = threading.current_thread() #type: threading.Thread self.default_theme = self._app_name #type: str # Default theme is the application name self._default_language = "en_US" #type: str self.change_log_url = "https://github.com/Ultimaker/Uranium" # Where to find a more detailed description of the recent updates. self._preferences_filename = None #type: str self._preferences = None #type: Preferences self._extensions = [] #type: List[Extension] self._required_plugins = [] #type: List[str] self._package_manager_class = PackageManager # type: type self._package_manager = None # type: PackageManager self._plugin_registry = None #type: PluginRegistry self._container_registry_class = ContainerRegistry #type: type self._container_registry = None #type: ContainerRegistry self._global_container_stack = None #type: ContainerStack self._controller = None #type: Controller self._backend = None #type: Backend self._output_device_manager = None #type: OutputDeviceManager self._operation_stack = None #type: OperationStack self._visible_messages = [] #type: List[Message] self._message_lock = threading.Lock() #type: threading.Lock self._app_install_dir = self.getInstallPrefix() #type: str def getAPIVersion(self) -> "Version": return self._api_version # Adds the command line options that can be parsed by the command line parser. # Can be overridden to add additional command line options to the parser. def addCommandLineOptions(self) -> None: self._cli_parser.add_argument("--version", action="version", version="%(prog)s version: {0}".format( self._version)) self._cli_parser.add_argument( "--external-backend", action="store_true", default=False, help= "Use an externally started backend instead of starting it automatically. This is a debug feature to make it possible to run the engine with debug options enabled." 
) self._cli_parser.add_argument('--headless', action='store_true', default=False, help="Hides all GUI elements.") self._cli_parser.add_argument( "--debug", action="store_true", default=False, help="Turn on the debug mode by setting this option.") def parseCliOptions(self) -> None: self._cli_args = self._cli_parser.parse_args() self._is_headless = self._cli_args.headless self._is_debug_mode = self._cli_args.debug or self._is_debug_mode self._use_external_backend = self._cli_args.external_backend # Performs initialization that must be done before start. def initialize(self) -> None: # For Ubuntu Unity this makes Qt use its own menu bar rather than pass it on to Unity. os.putenv("UBUNTU_MENUPROXY", "0") # Custom signal handling Signal._app = self Signal._signalQueue = self # Initialize Resources. Set the application name and version here because we can only know the actual info # after the __init__() has been called. Resources.ApplicationIdentifier = self._app_name Resources.ApplicationVersion = self._version Resources.addSearchPath( os.path.join(os.path.dirname(sys.executable), "resources")) Resources.addSearchPath( os.path.join(self._app_install_dir, "share", "uranium", "resources")) Resources.addSearchPath( os.path.join(self._app_install_dir, "Resources", "uranium", "resources")) Resources.addSearchPath( os.path.join(self._app_install_dir, "Resources", self._app_name, "resources")) if not hasattr(sys, "frozen"): Resources.addSearchPath( os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", "resources")) i18nCatalog.setApplication(self) PluginRegistry.addType("backend", self.setBackend) PluginRegistry.addType("logger", Logger.addLogger) PluginRegistry.addType("extension", self.addExtension) self._preferences = Preferences() self._preferences.addPreference("general/language", self._default_language) self._preferences.addPreference("general/visible_settings", "") self._preferences.addPreference("general/plugins_to_remove", "") self._preferences.addPreference("general/disabled_plugins", "") self._controller = Controller(self) self._output_device_manager = OutputDeviceManager() self._operation_stack = OperationStack( self._controller) # type: OperationStack self._plugin_registry = PluginRegistry(self) #type: PluginRegistry self._plugin_registry.addPluginLocation( os.path.join(self._app_install_dir, "lib", "uranium")) self._plugin_registry.addPluginLocation( os.path.join(self._app_install_dir, "lib64", "uranium")) self._plugin_registry.addPluginLocation( os.path.join(self._app_install_dir, "lib32", "uranium")) self._plugin_registry.addPluginLocation( os.path.join(os.path.dirname(sys.executable), "plugins")) self._plugin_registry.addPluginLocation( os.path.join(self._app_install_dir, "Resources", "uranium", "plugins")) self._plugin_registry.addPluginLocation( os.path.join(self._app_install_dir, "Resources", self._app_name, "plugins")) # Locally installed plugins local_path = os.path.join( Resources.getStoragePath(Resources.Resources), "plugins") # Ensure the local plugins directory exists try: os.makedirs(local_path) except OSError: pass self._plugin_registry.addPluginLocation(local_path) if not hasattr(sys, "frozen"): self._plugin_registry.addPluginLocation( os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", "plugins")) self._container_registry = self._container_registry_class(self) UM.Settings.InstanceContainer.setContainerRegistry( self._container_registry) UM.Settings.ContainerStack.setContainerRegistry( self._container_registry) # Initialize the package manager to remove 
and install scheduled packages. self._package_manager = self._package_manager_class(self) self.showMessageSignal.connect(self.showMessage) self.hideMessageSignal.connect(self.hideMessage) def startSplashWindowPhase(self) -> None: pass def startPostSplashWindowPhase(self) -> None: pass ## Run the main event loop. # This method should be re-implemented by subclasses to start the main event loop. # \exception NotImplementedError def run(self): self.addCommandLineOptions() self.parseCliOptions() self.initialize() self.startSplashWindowPhase() self.startPostSplashWindowPhase() def getContainerRegistry(self): return self._container_registry ## Get the lock filename def getApplicationLockFilename(self) -> str: return self._config_lock_filename ## Emitted when the application window was closed and we need to shut down the application applicationShuttingDown = Signal() showMessageSignal = Signal() hideMessageSignal = Signal() globalContainerStackChanged = Signal() workspaceLoaded = Signal() def setGlobalContainerStack(self, stack: "ContainerStack") -> None: if self._global_container_stack != stack: self._global_container_stack = stack self.globalContainerStackChanged.emit() def getGlobalContainerStack(self) -> Optional["ContainerStack"]: return self._global_container_stack def hideMessage(self, message: Message) -> None: raise NotImplementedError def showMessage(self, message: Message) -> None: raise NotImplementedError def showToastMessage(self, title: str, message: str) -> None: raise NotImplementedError ## Get the version of the application def getVersion(self) -> str: return self._version ## Get the build type of the application def getBuildType(self) -> str: return self._build_type def getIsDebugMode(self) -> bool: return self._is_debug_mode def getIsHeadLess(self) -> bool: return self._is_headless def getUseExternalBackend(self) -> bool: return self._use_external_backend visibleMessageAdded = Signal() ## Hide message by ID (as provided by built-in id function) def hideMessageById(self, message_id: int) -> None: # If a user and the application tries to close same message dialog simultaneously, message_id could become an empty # string, and then the application will raise an error when trying to do "int(message_id)". # So we check the message_id here. if not message_id: return found_message = None with self._message_lock: for message in self._visible_messages: if id(message) == int(message_id): found_message = message if found_message is not None: self.hideMessageSignal.emit(found_message) visibleMessageRemoved = Signal() ## Get list of all visible messages def getVisibleMessages(self) -> List[Message]: with self._message_lock: return self._visible_messages ## Function that needs to be overridden by child classes with a list of plugins it needs. def _loadPlugins(self) -> None: pass ## Get name of the application. # \returns app_name \type{string} def getApplicationName(self) -> str: return self._app_name def getApplicationDisplayName(self) -> str: return self._app_display_name ## Get the preferences. # \return preferences \type{Preferences} def getPreferences(self) -> Preferences: return self._preferences def savePreferences(self) -> None: if self._preferences_filename: self._preferences.writeToFile(self._preferences_filename) else: Logger.log("i", "Preferences filename not set. Unable to save file.") ## Get the currently used IETF language tag. # The returned tag is during runtime used to translate strings. # \returns Language tag. 
def getApplicationLanguage(self) -> str: language = os.getenv("URANIUM_LANGUAGE") if not language: language = self._preferences.getValue("general/language") if not language: language = os.getenv("LANGUAGE") if not language: language = self._default_language return language ## Application has a list of plugins that it *must* have. If it does not have these, it cannot function. # These plugins can not be disabled in any way. def getRequiredPlugins(self) -> List[str]: return self._required_plugins ## Set the plugins that the application *must* have in order to function. # \param plugin_names \type{list} List of strings with the names of the required plugins def setRequiredPlugins(self, plugin_names: List[str]) -> None: self._required_plugins = plugin_names ## Set the backend of the application (the program that does the heavy lifting). def setBackend(self, backend: "Backend") -> None: self._backend = backend ## Get the backend of the application (the program that does the heavy lifting). # \returns Backend \type{Backend} def getBackend(self) -> "Backend": return self._backend ## Get the PluginRegistry of this application. # \returns PluginRegistry \type{PluginRegistry} def getPluginRegistry(self) -> PluginRegistry: return self._plugin_registry ## Get the Controller of this application. # \returns Controller \type{Controller} def getController(self) -> Controller: return self._controller def getOperationStack(self) -> OperationStack: return self._operation_stack def getOutputDeviceManager(self) -> OutputDeviceManager: return self._output_device_manager ## Return an application-specific Renderer object. # \exception NotImplementedError def getRenderer(self) -> Renderer: raise NotImplementedError( "getRenderer must be implemented by subclasses.") ## Post a function event onto the event loop. # # This takes a CallFunctionEvent object and puts it into the actual event loop. # \exception NotImplementedError def functionEvent(self, event: CallFunctionEvent) -> None: raise NotImplementedError( "functionEvent must be implemented by subclasses.") ## Call a function the next time the event loop runs. # # You can't get the result of this function directly. It won't block. # \param function The function to call. # \param args The positional arguments to pass to the function. # \param kwargs The keyword arguments to pass to the function. def callLater(self, func: Callable[..., Any], *args, **kwargs) -> None: event = CallFunctionEvent(func, args, kwargs) self.functionEvent(event) ## Get the application's main thread. def getMainThread(self) -> threading.Thread: return self._main_thread def addExtension(self, extension: "Extension") -> None: self._extensions.append(extension) def getExtensions(self) -> List["Extension"]: return self._extensions @staticmethod def getInstallPrefix() -> str: if "python" in os.path.basename(sys.executable): return os.path.abspath( os.path.join(os.path.dirname(sys.argv[0]), "..")) else: return os.path.abspath( os.path.join(os.path.dirname(sys.executable), "..")) __instance = None # type: Application @classmethod def getInstance(cls, *args, **kwargs) -> "Application": return cls.__instance
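The Application class above is an explicit singleton: the constructor raises if an instance already exists, and getInstance() simply returns whatever was created first. A stripped-down sketch of the same guard, independent of the UM framework (class and attribute names here are illustrative, not part of the library):

import threading

class SingletonService:
    __instance = None  # the one allowed instance

    def __init__(self, name: str) -> None:
        if SingletonService.__instance is not None:
            raise RuntimeError("Tried to create singleton '%s' more than once" % self.__class__.__name__)
        SingletonService.__instance = self
        self._name = name
        self._main_thread = threading.current_thread()

    @classmethod
    def getInstance(cls) -> "SingletonService":
        return cls.__instance

# First construction succeeds; a second attempt raises RuntimeError.
service = SingletonService("uranium_like_app")
assert SingletonService.getInstance() is service
try:
    SingletonService("another")
except RuntimeError as error:
    print(error)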
class Application: ## Init method # # \param name \type{string} The name of the application. # \param version \type{string} Version, formatted as major.minor.rev # \param build_type Additional version info on the type of build this is, such as "master". # \param is_debug_mode Whether to run in debug mode. def __init__(self, name: str, version: str, api_version: str, app_display_name: str = "", build_type: str = "", is_debug_mode: bool = False, **kwargs) -> None: if Application.__instance is not None: raise RuntimeError("Try to create singleton '%s' more than once" % self.__class__.__name__) Application.__instance = self super().__init__() # Call super to make multiple inheritance work. self._api_version = Version(api_version) # type: Version self._app_name = name # type: str self._app_display_name = app_display_name if app_display_name else name # type: str self._version = version # type: str self._build_type = build_type # type: str self._is_debug_mode = is_debug_mode # type: bool self._is_headless = False # type: bool self._use_external_backend = False # type: bool self._just_updated_from_old_version = False # type: bool self._config_lock_filename = "{name}.lock".format(name = self._app_name) # type: str self._cli_args = None # type: argparse.Namespace self._cli_parser = argparse.ArgumentParser(prog = self._app_name, add_help = False) # type: argparse.ArgumentParser self._main_thread = threading.current_thread() # type: threading.Thread self.default_theme = self._app_name # type: str # Default theme is the application name self._default_language = "en_US" # type: str self.change_log_url = "https://github.com/Ultimaker/Uranium" # Where to find a more detailed description of the recent updates. self._preferences_filename = None # type: str self._preferences = None # type: Preferences self._extensions = [] # type: List[Extension] self._required_plugins = [] # type: List[str] self._package_manager_class = PackageManager # type: type self._package_manager = None # type: PackageManager self._plugin_registry = None # type: PluginRegistry self._container_registry_class = ContainerRegistry # type: type self._container_registry = None # type: ContainerRegistry self._global_container_stack = None # type: ContainerStack self._controller = None # type: Controller self._backend = None # type: Backend self._output_device_manager = None # type: OutputDeviceManager self._operation_stack = None # type: OperationStack self._visible_messages = [] # type: List[Message] self._message_lock = threading.Lock() # type: threading.Lock self._app_install_dir = self.getInstallPrefix() # type: str def getAPIVersion(self) -> "Version": return self._api_version # Adds the command line options that can be parsed by the command line parser. # Can be overridden to add additional command line options to the parser. def addCommandLineOptions(self) -> None: self._cli_parser.add_argument("--version", action = "version", version = "%(prog)s version: {0}".format(self._version)) self._cli_parser.add_argument("--external-backend", action = "store_true", default = False, help = "Use an externally started backend instead of starting it automatically. 
This is a debug feature to make it possible to run the engine with debug options enabled.") self._cli_parser.add_argument('--headless', action = 'store_true', default = False, help = "Hides all GUI elements.") self._cli_parser.add_argument("--debug", action = "store_true", default = False, help = "Turn on the debug mode by setting this option.") def parseCliOptions(self) -> None: self._cli_args = self._cli_parser.parse_args() self._is_headless = self._cli_args.headless self._is_debug_mode = self._cli_args.debug or self._is_debug_mode self._use_external_backend = self._cli_args.external_backend # Performs initialization that must be done before start. def initialize(self) -> None: # For Ubuntu Unity this makes Qt use its own menu bar rather than pass it on to Unity. os.putenv("UBUNTU_MENUPROXY", "0") # Custom signal handling Signal._app = self Signal._signalQueue = self # Initialize Resources. Set the application name and version here because we can only know the actual info # after the __init__() has been called. Resources.ApplicationIdentifier = self._app_name Resources.ApplicationVersion = self._version Resources.addSearchPath(os.path.join(os.path.dirname(sys.executable), "resources")) Resources.addSearchPath(os.path.join(self._app_install_dir, "share", "uranium", "resources")) Resources.addSearchPath(os.path.join(self._app_install_dir, "Resources", "uranium", "resources")) Resources.addSearchPath(os.path.join(self._app_install_dir, "Resources", self._app_name, "resources")) if not hasattr(sys, "frozen"): Resources.addSearchPath(os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", "resources")) i18nCatalog.setApplication(self) PluginRegistry.addType("backend", self.setBackend) PluginRegistry.addType("logger", Logger.addLogger) PluginRegistry.addType("extension", self.addExtension) self._preferences = Preferences() self._preferences.addPreference("general/language", self._default_language) self._preferences.addPreference("general/visible_settings", "") self._preferences.addPreference("general/plugins_to_remove", "") self._preferences.addPreference("general/disabled_plugins", "") self._controller = Controller(self) self._output_device_manager = OutputDeviceManager() self._operation_stack = OperationStack(self._controller) # type: OperationStack self._plugin_registry = PluginRegistry(self) #type: PluginRegistry self._plugin_registry.addPluginLocation(os.path.join(self._app_install_dir, "lib", "uranium")) self._plugin_registry.addPluginLocation(os.path.join(self._app_install_dir, "lib64", "uranium")) self._plugin_registry.addPluginLocation(os.path.join(self._app_install_dir, "lib32", "uranium")) self._plugin_registry.addPluginLocation(os.path.join(os.path.dirname(sys.executable), "plugins")) self._plugin_registry.addPluginLocation(os.path.join(self._app_install_dir, "Resources", "uranium", "plugins")) self._plugin_registry.addPluginLocation(os.path.join(self._app_install_dir, "Resources", self._app_name, "plugins")) # Locally installed plugins local_path = os.path.join(Resources.getStoragePath(Resources.Resources), "plugins") # Ensure the local plugins directory exists try: os.makedirs(local_path) except OSError: pass self._plugin_registry.addPluginLocation(local_path) if not hasattr(sys, "frozen"): self._plugin_registry.addPluginLocation(os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", "plugins")) self._container_registry = self._container_registry_class(self) UM.Settings.InstanceContainer.setContainerRegistry(self._container_registry) 
UM.Settings.ContainerStack.setContainerRegistry(self._container_registry) self.showMessageSignal.connect(self.showMessage) self.hideMessageSignal.connect(self.hideMessage) def startSplashWindowPhase(self) -> None: pass def startPostSplashWindowPhase(self) -> None: pass # Indicates if we have just updated from an older application version. def hasJustUpdatedFromOldVersion(self) -> bool: return self._just_updated_from_old_version ## Run the main event loop. # This method should be re-implemented by subclasses to start the main event loop. # \exception NotImplementedError def run(self): self.addCommandLineOptions() self.parseCliOptions() self.initialize() self.startSplashWindowPhase() self.startPostSplashWindowPhase() def getContainerRegistry(self): return self._container_registry ## Get the lock filename def getApplicationLockFilename(self) -> str: return self._config_lock_filename ## Emitted when the application window was closed and we need to shut down the application applicationShuttingDown = Signal() showMessageSignal = Signal() hideMessageSignal = Signal() globalContainerStackChanged = Signal() workspaceLoaded = Signal() def setGlobalContainerStack(self, stack: "ContainerStack") -> None: if self._global_container_stack != stack: self._global_container_stack = stack self.globalContainerStackChanged.emit() def getGlobalContainerStack(self) -> Optional["ContainerStack"]: return self._global_container_stack def hideMessage(self, message: Message) -> None: raise NotImplementedError def showMessage(self, message: Message) -> None: raise NotImplementedError def showToastMessage(self, title: str, message: str) -> None: raise NotImplementedError ## Get the version of the application def getVersion(self) -> str: return self._version ## Get the build type of the application def getBuildType(self) -> str: return self._build_type def getIsDebugMode(self) -> bool: return self._is_debug_mode def getIsHeadLess(self) -> bool: return self._is_headless def getUseExternalBackend(self) -> bool: return self._use_external_backend visibleMessageAdded = Signal() ## Hide message by ID (as provided by built-in id function) def hideMessageById(self, message_id: int) -> None: # If a user and the application tries to close same message dialog simultaneously, message_id could become an empty # string, and then the application will raise an error when trying to do "int(message_id)". # So we check the message_id here. if not message_id: return found_message = None with self._message_lock: for message in self._visible_messages: if id(message) == int(message_id): found_message = message if found_message is not None: self.hideMessageSignal.emit(found_message) visibleMessageRemoved = Signal() ## Get list of all visible messages def getVisibleMessages(self) -> List[Message]: with self._message_lock: return self._visible_messages ## Function that needs to be overridden by child classes with a list of plugins it needs. def _loadPlugins(self) -> None: pass ## Get name of the application. # \returns app_name \type{string} def getApplicationName(self) -> str: return self._app_name def getApplicationDisplayName(self) -> str: return self._app_display_name ## Get the preferences. # \return preferences \type{Preferences} def getPreferences(self) -> Preferences: return self._preferences def savePreferences(self) -> None: if self._preferences_filename: self._preferences.writeToFile(self._preferences_filename) else: Logger.log("i", "Preferences filename not set. Unable to save file.") ## Get the currently used IETF language tag. 
# The returned tag is during runtime used to translate strings. # \returns Language tag. def getApplicationLanguage(self) -> str: language = os.getenv("URANIUM_LANGUAGE") if not language: language = self._preferences.getValue("general/language") if not language: language = os.getenv("LANGUAGE") if not language: language = self._default_language return language ## Application has a list of plugins that it *must* have. If it does not have these, it cannot function. # These plugins can not be disabled in any way. def getRequiredPlugins(self) -> List[str]: return self._required_plugins ## Set the plugins that the application *must* have in order to function. # \param plugin_names \type{list} List of strings with the names of the required plugins def setRequiredPlugins(self, plugin_names: List[str]) -> None: self._required_plugins = plugin_names ## Set the backend of the application (the program that does the heavy lifting). def setBackend(self, backend: "Backend") -> None: self._backend = backend ## Get the backend of the application (the program that does the heavy lifting). # \returns Backend \type{Backend} def getBackend(self) -> "Backend": return self._backend ## Get the PluginRegistry of this application. # \returns PluginRegistry \type{PluginRegistry} def getPluginRegistry(self) -> PluginRegistry: return self._plugin_registry ## Get the Controller of this application. # \returns Controller \type{Controller} def getController(self) -> Controller: return self._controller def getOperationStack(self) -> OperationStack: return self._operation_stack def getOutputDeviceManager(self) -> OutputDeviceManager: return self._output_device_manager ## Return an application-specific Renderer object. # \exception NotImplementedError def getRenderer(self) -> Renderer: raise NotImplementedError("getRenderer must be implemented by subclasses.") ## Post a function event onto the event loop. # # This takes a CallFunctionEvent object and puts it into the actual event loop. # \exception NotImplementedError def functionEvent(self, event: CallFunctionEvent) -> None: raise NotImplementedError("functionEvent must be implemented by subclasses.") ## Call a function the next time the event loop runs. # # You can't get the result of this function directly. It won't block. # \param function The function to call. # \param args The positional arguments to pass to the function. # \param kwargs The keyword arguments to pass to the function. def callLater(self, func: Callable[..., Any], *args, **kwargs) -> None: event = CallFunctionEvent(func, args, kwargs) self.functionEvent(event) ## Get the application's main thread. def getMainThread(self) -> threading.Thread: return self._main_thread def addExtension(self, extension: "Extension") -> None: self._extensions.append(extension) def getExtensions(self) -> List["Extension"]: return self._extensions @staticmethod def getInstallPrefix() -> str: if "python" in os.path.basename(sys.executable): executable = sys.argv[0] else: executable = sys.executable return os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(executable)), "..")) __instance = None # type: Application @classmethod def getInstance(cls, *args, **kwargs) -> "Application": return cls.__instance
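In both versions above, getApplicationLanguage() walks a fixed fallback chain: an application-specific environment variable, then the stored preference, then the generic LANGUAGE variable, then the hard-coded default. A small sketch of that chain with the preference lookup stubbed out (the Preferences class is assumed; resolve_language is a made-up helper name):

import os
from typing import Callable, Optional

def resolve_language(get_preference: Callable[[], Optional[str]],
                     default_language: str = "en_US") -> str:
    # 1. An explicit override from the environment wins.
    language = os.getenv("URANIUM_LANGUAGE")
    # 2. Otherwise use the language stored in the preferences, if any.
    if not language:
        language = get_preference()
    # 3. Otherwise fall back to the generic LANGUAGE environment variable.
    if not language:
        language = os.getenv("LANGUAGE")
    # 4. Finally fall back to the built-in default.
    if not language:
        language = default_language
    return language

# Example: preference stored as "nl_NL" vs. no preference at all.
print(resolve_language(lambda: "nl_NL"))   # -> "nl_NL" (unless URANIUM_LANGUAGE is set)
print(resolve_language(lambda: None))      # -> LANGUAGE value, or "en_US"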