def test_roundtrip_stack(tmpdir, process_count, loaded_container_registry):
    definition = loaded_container_registry.findDefinitionContainers(id="multiple_settings")[0]
    instances = loaded_container_registry.findInstanceContainers(id="setting_values")[0]
    container_stack = ContainerStack("test_stack")
    container_stack.setName("Test Container Stack")
    container_stack.setMetaDataEntry("test", "test")
    container_stack.addContainer(definition)
    container_stack.addContainer(instances)
    temp_file = tmpdir.join("container_stack_test")

    mp_run(process_count, write_data, temp_file, container_stack)
    assert len(list(tmpdir.listdir())) == 1

    results = mp_run(process_count, read_data, temp_file)
    for result in results:
        deserialized_stack = ContainerStack("test_stack")
        deserialized_stack.deserialize(result)
        assert deserialized_stack.getName() == container_stack.getName()
        assert deserialized_stack.getMetaData() == container_stack.getMetaData()
        assert deserialized_stack.getBottom() == container_stack.getBottom()
        assert deserialized_stack.getTop() == container_stack.getTop()

def test_roundtrip_stack(tmpdir, process_count, loaded_container_registry):
    definition = loaded_container_registry.findDefinitionContainers(id = "multiple_settings")[0]
    instances = loaded_container_registry.findInstanceContainers(id = "setting_values")[0]
    container_stack = ContainerStack("test_stack")
    container_stack.setName("Test Container Stack")
    container_stack.addMetaDataEntry("test", "test")
    container_stack.addContainer(definition)
    container_stack.addContainer(instances)
    temp_file = tmpdir.join("container_stack_test")

    mp_run(process_count, write_data, temp_file, container_stack)
    assert len(list(tmpdir.listdir())) == 1

    results = mp_run(process_count, read_data, temp_file)
    for result in results:
        deserialized_stack = ContainerStack("test_stack")
        deserialized_stack.deserialize(result)
        assert deserialized_stack.getName() == container_stack.getName()
        assert deserialized_stack.getMetaData() == container_stack.getMetaData()
        assert deserialized_stack.getBottom() == container_stack.getBottom()
        assert deserialized_stack.getTop() == container_stack.getTop()

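The helpers used by both tests above (mp_run, write_data and read_data) are not part of this section. A minimal sketch of what they are assumed to do, so the round-trip is easier to follow; the bodies below are illustrative, not the actual test fixtures:

from multiprocessing import Pool

def write_data(temp_file, container):
    # Assumed helper: serialize the container stack and write it to the temp file.
    temp_file.write_text(container.serialize(), encoding = "utf-8")

def read_data(temp_file):
    # Assumed helper: read the serialized text back so it can be deserialized again.
    return temp_file.read_text(encoding = "utf-8")

def mp_run(process_count, function, *args):
    # Assumed helper: run the same call in several processes at once to exercise
    # concurrent access, returning one result per process.
    with Pool(process_count) as pool:
        return pool.starmap(function, [args] * process_count)
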
class CustomStackProxy(QObject):
    def __init__(self, parent: QObject = None) -> None:
        super().__init__(parent)
        self._container_ids = []  # type: List[str]
        self._stack = ContainerStack("CustomStack" + str(id(self)))
        self._stack_id = self._stack.id
        self._stack.setDirty(False)  # never save this stack
        Application.getInstance().getContainerRegistry().addContainer(self._stack)

    @pyqtProperty(str, constant=True)
    def stackId(self):
        return self._stack_id

    ##  Set the containerIds property.
    def setContainerIds(self, container_ids: List[str]):
        if container_ids == self._container_ids:
            return
        self._container_ids = container_ids

        while self._stack.getContainers():
            self._stack.removeContainer(0)
        for container_id in container_ids:
            containers = Application.getInstance().getContainerRegistry().findContainers(id=container_id)
            if containers:
                self._stack.addContainer(containers[0])
        self._stack.setDirty(False)  # never save this stack
        self.containerIdsChanged.emit()

    ##  Emitted when the containerIds property changes.
    containerIdsChanged = pyqtSignal()

    ##  The ID of the container we should query for property values.
    @pyqtProperty("QVariantList", fset=setContainerIds, notify=containerIdsChanged)
    def containerIds(self):
        return self._container_ids

    @pyqtSlot(str)
    def removeInstanceFromTop(self, key):
        self._stack.getTop().removeInstance(key)
        self._stack.getTop().setDirty(True)

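CustomStackProxy is a QObject wrapper around a throw-away ContainerStack, so QML can assemble a stack from container IDs and query it. A sketch of how such a proxy is typically exposed to QML; the module URI, version and registration hook below are assumptions for illustration, not taken from the source:

from PyQt5.QtQml import qmlRegisterType

def registerQmlTypes():
    # Illustrative registration: the URI "Example.Settings" and version 1.0 are made up.
    # Afterwards QML can declare `CustomStackProxy { containerIds: [...] }` and read stackId.
    qmlRegisterType(CustomStackProxy, "Example.Settings", 1, 0, "CustomStackProxy")
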
def _checkStackForErrors(self, stack: ContainerStack) -> bool:
    top_of_stack = cast(InstanceContainer, stack.getTop())  # Cache for efficiency.
    changed_setting_keys = top_of_stack.getAllKeys()

    # Add all relations to changed settings as well.
    for key in top_of_stack.getAllKeys():
        instance = top_of_stack.getInstance(key)
        if instance is None:
            continue
        self._addRelations(changed_setting_keys, instance.definition.relations)
        Job.yieldThread()

    for changed_setting_key in changed_setting_keys:
        validation_state = stack.getProperty(changed_setting_key, "validationState")
        if validation_state is None:
            definition = cast(SettingDefinition, stack.getSettingDefinition(changed_setting_key))
            validator_type = SettingDefinition.getValidatorForType(definition.type)
            if validator_type:
                validator = validator_type(changed_setting_key)
                validation_state = validator(stack)
        if validation_state in (ValidatorState.Exception, ValidatorState.MaximumError, ValidatorState.MinimumError, ValidatorState.Invalid):
            Logger.log("w", "Setting %s is not valid, but %s. Aborting slicing.", changed_setting_key, validation_state)
            return True
        Job.yieldThread()

    return False

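_addRelations is referenced above but not included in this section. A rough sketch of what it is assumed to do, so the validation loop reads straight through; the filtering rule is an assumption:

def _addRelations(self, relations_set, relations):
    # Illustrative sketch: collect the keys of settings related to a changed setting so
    # they get validated too. Assumes only "value" relations can influence validity.
    for relation in relations:
        if relation.role != "value":  # assumption: skip relations that don't affect the value
            continue
        relations_set.add(relation.target.key)
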
def _settingIsOverwritingInheritance(self, key: str, stack: ContainerStack = None) -> bool:
    """Check if a setting has an inheritance function that is overwritten"""

    has_setting_function = False
    if not stack:
        stack = self._active_container_stack
    if not stack:  # No active container stack yet!
        return False

    if self._active_container_stack is None:
        return False
    all_keys = self._active_container_stack.getAllKeys()

    containers = []  # type: List[ContainerInterface]

    # Check if the setting has a user state. If not, it is never overwritten.
    has_user_state = stack.getProperty(key, "state") == InstanceState.User
    if not has_user_state:
        return False

    # If a setting is not enabled, don't label it as overwritten (It's never visible anyway).
    if not stack.getProperty(key, "enabled"):
        return False

    # Also check if the top container is not a setting function (this happens if the inheritance is restored).
    user_container = stack.getTop()
    if user_container and isinstance(user_container.getProperty(key, "value"), SettingFunction):
        return False

    ##  Mash all containers for all the stacks together.
    while stack:
        containers.extend(stack.getContainers())
        stack = stack.getNextStack()
    has_non_function_value = False
    for container in containers:
        try:
            value = container.getProperty(key, "value")
        except AttributeError:
            continue
        if value is not None:
            # If a setting doesn't use any keys, it won't change its value, so treat it as if it's a fixed value.
            has_setting_function = isinstance(value, SettingFunction)
            if has_setting_function:
                for setting_key in value.getUsedSettingKeys():
                    if setting_key in all_keys:
                        break  # We found an actual setting. So has_setting_function can remain true.
                else:
                    # All of the setting_keys turned out to not be setting keys at all!
                    # This can happen due to enum keys also being marked as settings.
                    has_setting_function = False

            if has_setting_function is False:
                has_non_function_value = True
                continue

        if has_setting_function:
            break  # There is a setting function somewhere, stop looking deeper.
    return has_setting_function and has_non_function_value

def _settingIsOverwritingInheritance(self, key: str, stack: ContainerStack = None) -> bool:
    has_setting_function = False
    if not stack:
        stack = self._active_container_stack
    if not stack:  # No active container stack yet!
        return False
    containers = []

    ##  Check if the setting has a user state. If not, it is never overwritten.
    has_user_state = stack.getProperty(key, "state") == InstanceState.User
    if not has_user_state:
        return False

    ##  If a setting is not enabled, don't label it as overwritten (It's never visible anyway).
    if not stack.getProperty(key, "enabled"):
        return False

    ##  Also check if the top container is not a setting function (this happens if the inheritance is restored).
    if isinstance(stack.getTop().getProperty(key, "value"), SettingFunction):
        return False

    ##  Mash all containers for all the stacks together.
    while stack:
        containers.extend(stack.getContainers())
        stack = stack.getNextStack()
    has_non_function_value = False
    for container in containers:
        try:
            value = container.getProperty(key, "value")
        except AttributeError:
            continue
        if value is not None:
            # If a setting doesn't use any keys, it won't change its value, so treat it as if it's a fixed value.
            has_setting_function = isinstance(value, SettingFunction)
            if has_setting_function:
                for setting_key in value.getUsedSettingKeys():
                    if setting_key in self._active_container_stack.getAllKeys():
                        break  # We found an actual setting. So has_setting_function can remain true.
                else:
                    # All of the setting_keys turned out to not be setting keys at all!
                    # This can happen due to enum keys also being marked as settings.
                    has_setting_function = False

            if has_setting_function is False:
                has_non_function_value = True
                continue

        if has_setting_function:
            break  # There is a setting function somewhere, stop looking deeper.
    return has_setting_function and has_non_function_value

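Both versions above hinge on whether a container value is a SettingFunction whose expression really references other settings. A minimal illustration of that distinction, using the SettingFunction calls relied on above; the expressions themselves are invented for the example:

from UM.Settings.SettingFunction import SettingFunction

inherited_value = SettingFunction("line_width * 2")  # expression made up for the example
print(inherited_value.getUsedSettingKeys())          # references "line_width", so it counts as a real inheritance function

constant_like = SettingFunction("0.4 * 2")           # uses no setting keys at all
print(constant_like.getUsedSettingKeys())            # empty, so the method treats it like a fixed value
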
def _checkStackForErrors(self, stack: ContainerStack) -> bool:
    if stack is None:
        return False

    # if there are no per-object settings we don't need to check the other settings here
    stack_top = stack.getTop()
    if stack_top is None or not stack_top.getAllKeys():
        return False

    for key in stack.getAllKeys():
        validation_state = stack.getProperty(key, "validationState")
        if validation_state in (ValidatorState.Exception, ValidatorState.MaximumError, ValidatorState.MinimumError, ValidatorState.Invalid):
            Logger.log("w", "Setting %s is not valid, but %s. Aborting slicing.", key, validation_state)
            return True
        Job.yieldThread()
    return False

def read(self, file_name):
    archive = zipfile.ZipFile(file_name, "r")

    cura_file_names = [name for name in archive.namelist() if name.startswith("Cura/")]

    # Create a shadow copy of the preferences (we don't want all of the preferences, but we do want to re-use its
    # parsing code).
    temp_preferences = Preferences()
    temp_preferences.readFromFile(io.TextIOWrapper(archive.open("Cura/preferences.cfg")))  # We need to wrap it, else the archive parser breaks.

    # Copy a number of settings from the temp preferences to the global preferences.
    global_preferences = Preferences.getInstance()

    visible_settings = temp_preferences.getValue("general/visible_settings")
    if visible_settings is None:
        Logger.log("w", "Workspace did not contain visible settings. Leaving visibility unchanged")
    else:
        global_preferences.setValue("general/visible_settings", visible_settings)

    categories_expanded = temp_preferences.getValue("cura/categories_expanded")
    if categories_expanded is None:
        Logger.log("w", "Workspace did not contain expanded categories. Leaving them unchanged")
    else:
        global_preferences.setValue("cura/categories_expanded", categories_expanded)
    Application.getInstance().expandedCategoriesChanged.emit()  # Notify the GUI of the change.

    self._id_mapping = {}

    # We don't add containers right away, but wait until right before the stack serialization.
    # We do this so that if something goes wrong, it's easier to clean up.
    containers_to_add = []

    # TODO: For the moment we use pretty naive existence checking. If the ID is the same, we assume in quite a few
    # TODO: cases that the container loaded is the same (most notably in materials & definitions).
    # TODO: It might be possible that we need to add smarter checking in the future.
    Logger.log("d", "Workspace loading is checking definitions...")
    # Get all the definition files & check if they exist. If not, add them.
    definition_container_files = [name for name in cura_file_names if name.endswith(self._definition_container_suffix)]
    for definition_container_file in definition_container_files:
        container_id = self._stripFileToId(definition_container_file)
        definitions = self._container_registry.findDefinitionContainers(id = container_id)
        if not definitions:
            definition_container = DefinitionContainer(container_id)
            definition_container.deserialize(archive.open(definition_container_file).read().decode("utf-8"))
            self._container_registry.addContainer(definition_container)
        Job.yieldThread()

    Logger.log("d", "Workspace loading is checking materials...")
    material_containers = []
    # Get all the material files and check if they exist. If not, add them.
    xml_material_profile = self._getXmlProfileClass()
    if self._material_container_suffix is None:
        self._material_container_suffix = ContainerRegistry.getMimeTypeForContainer(xml_material_profile).suffixes[0]
    if xml_material_profile:
        material_container_files = [name for name in cura_file_names if name.endswith(self._material_container_suffix)]
        for material_container_file in material_container_files:
            container_id = self._stripFileToId(material_container_file)
            materials = self._container_registry.findInstanceContainers(id = container_id)
            if not materials:
                material_container = xml_material_profile(container_id)
                material_container.deserialize(archive.open(material_container_file).read().decode("utf-8"))
                containers_to_add.append(material_container)
            else:
                if not materials[0].isReadOnly():  # Only create new materials if they are not read-only.
                    if self._resolve_strategies["material"] == "override":
                        materials[0].deserialize(archive.open(material_container_file).read().decode("utf-8"))
                    elif self._resolve_strategies["material"] == "new":
                        # Note that we *must* deserialize it with a new ID, as multiple containers will be
                        # auto created & added.
                        material_container = xml_material_profile(self.getNewId(container_id))
                        material_container.deserialize(archive.open(material_container_file).read().decode("utf-8"))
                        containers_to_add.append(material_container)

            material_containers.append(material_container)
            Job.yieldThread()

    Logger.log("d", "Workspace loading is checking instance containers...")
    # Get quality_changes and user profiles saved in the workspace.
    instance_container_files = [name for name in cura_file_names if name.endswith(self._instance_container_suffix)]
    user_instance_containers = []
    quality_changes_instance_containers = []
    for instance_container_file in instance_container_files:
        container_id = self._stripFileToId(instance_container_file)
        instance_container = InstanceContainer(container_id)

        # Deserialize InstanceContainer by converting read data from bytes to string.
        instance_container.deserialize(archive.open(instance_container_file).read().decode("utf-8"))
        container_type = instance_container.getMetaDataEntry("type")
        Job.yieldThread()
        if container_type == "user":
            # Check if this user container already exists.
            user_containers = self._container_registry.findInstanceContainers(id = container_id)
            if not user_containers:
                containers_to_add.append(instance_container)
            else:
                if self._resolve_strategies["machine"] == "override" or self._resolve_strategies["machine"] is None:
                    user_containers[0].deserialize(archive.open(instance_container_file).read().decode("utf-8"))
                elif self._resolve_strategies["machine"] == "new":
                    # The machine is going to get a spiffy new name, so ensure that the IDs of user settings match.
                    extruder_id = instance_container.getMetaDataEntry("extruder", None)
                    if extruder_id:
                        new_id = self.getNewId(extruder_id) + "_current_settings"
                        instance_container._id = new_id
                        instance_container.setName(new_id)
                        instance_container.setMetaDataEntry("extruder", self.getNewId(extruder_id))
                        containers_to_add.append(instance_container)

                    machine_id = instance_container.getMetaDataEntry("machine", None)
                    if machine_id:
                        new_id = self.getNewId(machine_id) + "_current_settings"
                        instance_container._id = new_id
                        instance_container.setName(new_id)
                        instance_container.setMetaDataEntry("machine", self.getNewId(machine_id))
                        containers_to_add.append(instance_container)
            user_instance_containers.append(instance_container)
        elif container_type == "quality_changes":
            # Check if quality changes already exists.
            quality_changes = self._container_registry.findInstanceContainers(id = container_id)
            if not quality_changes:
                containers_to_add.append(instance_container)
            else:
                if self._resolve_strategies["quality_changes"] == "override":
                    quality_changes[0].deserialize(archive.open(instance_container_file).read().decode("utf-8"))
                elif self._resolve_strategies["quality_changes"] is None:
                    # The ID already exists, but nothing in the values changed, so do nothing.
                    pass
            quality_changes_instance_containers.append(instance_container)
        else:
            continue

    # Add all the containers right before we try to add / serialize the stack.
    for container in containers_to_add:
        self._container_registry.addContainer(container)
        container.setDirty(True)

    # Get the stack(s) saved in the workspace.
    Logger.log("d", "Workspace loading is checking stacks containers...")
    container_stack_files = [name for name in cura_file_names if name.endswith(self._container_stack_suffix)]
    global_stack = None
    extruder_stacks = []
    container_stacks_added = []
    try:
        for container_stack_file in container_stack_files:
            container_id = self._stripFileToId(container_stack_file)

            # Check if a stack by this ID already exists.
            container_stacks = self._container_registry.findContainerStacks(id = container_id)
            if container_stacks:
                stack = container_stacks[0]
                if self._resolve_strategies["machine"] == "override":
                    # TODO: HACK
                    # There is a machine, check if it has authentication data. If so, keep that data.
                    network_authentication_id = container_stacks[0].getMetaDataEntry("network_authentication_id")
                    network_authentication_key = container_stacks[0].getMetaDataEntry("network_authentication_key")
                    container_stacks[0].deserialize(archive.open(container_stack_file).read().decode("utf-8"))
                    if network_authentication_id:
                        container_stacks[0].addMetaDataEntry("network_authentication_id", network_authentication_id)
                    if network_authentication_key:
                        container_stacks[0].addMetaDataEntry("network_authentication_key", network_authentication_key)
                elif self._resolve_strategies["machine"] == "new":
                    new_id = self.getNewId(container_id)
                    stack = ContainerStack(new_id)
                    stack.deserialize(archive.open(container_stack_file).read().decode("utf-8"))

                    # Ensure a unique ID and name.
                    stack._id = new_id

                    # Extruder stacks are "bound" to a machine. If we add the machine as a new one, the ID of the
                    # bound machine also needs to change.
                    if stack.getMetaDataEntry("machine", None):
                        stack.setMetaDataEntry("machine", self.getNewId(stack.getMetaDataEntry("machine")))

                    if stack.getMetaDataEntry("type") != "extruder_train":
                        # Only machines need a new name, stacks may be non-unique.
                        stack.setName(self._container_registry.uniqueName(stack.getName()))
                    container_stacks_added.append(stack)
                    self._container_registry.addContainer(stack)
                else:
                    Logger.log("w", "Resolve strategy of %s for machine is not supported", self._resolve_strategies["machine"])
            else:
                stack = ContainerStack(container_id)
                # Deserialize stack by converting read data from bytes to string.
                stack.deserialize(archive.open(container_stack_file).read().decode("utf-8"))
                container_stacks_added.append(stack)
                self._container_registry.addContainer(stack)

            if stack.getMetaDataEntry("type") == "extruder_train":
                extruder_stacks.append(stack)
            else:
                global_stack = stack
            Job.yieldThread()
    except:
        Logger.logException("w", "We failed to deserialize the stack. Trying to clean up.")
        # Something went really wrong. Try to remove any data that we added.
        for container in containers_to_add:
            self._container_registry.getInstance().removeContainer(container.getId())

        for container in container_stacks_added:
            self._container_registry.getInstance().removeContainer(container.getId())

        return None

    if self._resolve_strategies["machine"] == "new":
        # A new machine was made, but it was serialized with the wrong user container. Fix that now.
        for container in user_instance_containers:
            extruder_id = container.getMetaDataEntry("extruder", None)
            if extruder_id:
                for extruder in extruder_stacks:
                    if extruder.getId() == extruder_id:
                        extruder.replaceContainer(0, container)
                        continue

            machine_id = container.getMetaDataEntry("machine", None)
            if machine_id:
                if global_stack.getId() == machine_id:
                    global_stack.replaceContainer(0, container)
                    continue

    if self._resolve_strategies["quality_changes"] == "new":
        # Quality changes need a new ID and must be added to the registry and to the right stacks.
        for container in quality_changes_instance_containers:
            old_id = container.getId()
            container.setName(self._container_registry.uniqueName(container.getName()))
            # We're not really supposed to change the ID in normal cases, but this is an exception.
            container._id = self.getNewId(container.getId())

            # The container was not added yet, as it didn't have a unique ID. It does now, so add it.
            self._container_registry.addContainer(container)

            # Replace the quality changes container.
            old_container = global_stack.findContainer({"type": "quality_changes"})
            if old_container.getId() == old_id:
                quality_changes_index = global_stack.getContainerIndex(old_container)
                global_stack.replaceContainer(quality_changes_index, container)
                continue

            for stack in extruder_stacks:
                old_container = stack.findContainer({"type": "quality_changes"})
                if old_container.getId() == old_id:
                    quality_changes_index = stack.getContainerIndex(old_container)
                    stack.replaceContainer(quality_changes_index, container)

    if self._resolve_strategies["material"] == "new":
        for material in material_containers:
            old_material = global_stack.findContainer({"type": "material"})
            if old_material.getId() in self._id_mapping:
                material_index = global_stack.getContainerIndex(old_material)
                global_stack.replaceContainer(material_index, material)
                continue

            for stack in extruder_stacks:
                old_material = stack.findContainer({"type": "material"})
                if old_material.getId() in self._id_mapping:
                    material_index = stack.getContainerIndex(old_material)
                    stack.replaceContainer(material_index, material)
                    continue

    if extruder_stacks:
        for stack in extruder_stacks:
            ExtruderManager.getInstance().registerExtruder(stack, global_stack.getId())
    else:
        # Machine has no extruders, but it needs to be registered with the extruder manager.
        ExtruderManager.getInstance().registerExtruder(None, global_stack.getId())

    Logger.log("d", "Workspace loading is notifying rest of the code of changes...")
    # Notify everything/everyone that needs to know about the changes.
    global_stack.containersChanged.emit(global_stack.getTop())
    for stack in extruder_stacks:
        stack.setNextStack(global_stack)
        stack.containersChanged.emit(stack.getTop())

    # Actually change the active machine.
    Application.getInstance().setGlobalContainerStack(global_stack)

    # Load all the nodes / mesh data of the workspace.
    nodes = self._3mf_mesh_reader.read(file_name)
    if nodes is None:
        nodes = []
    return nodes

def read(self, file_name):
    archive = zipfile.ZipFile(file_name, "r")

    cura_file_names = [name for name in archive.namelist() if name.startswith("Cura/")]

    # Create a shadow copy of the preferences (we don't want all of the preferences, but we do want to re-use its
    # parsing code).
    temp_preferences = Preferences()
    temp_preferences.readFromFile(io.TextIOWrapper(archive.open("Cura/preferences.cfg")))  # We need to wrap it, else the archive parser breaks.

    # Copy a number of settings from the temp preferences to the global preferences.
    global_preferences = Preferences.getInstance()

    visible_settings = temp_preferences.getValue("general/visible_settings")
    if visible_settings is None:
        Logger.log("w", "Workspace did not contain visible settings. Leaving visibility unchanged")
    else:
        global_preferences.setValue("general/visible_settings", visible_settings)

    categories_expanded = temp_preferences.getValue("cura/categories_expanded")
    if categories_expanded is None:
        Logger.log("w", "Workspace did not contain expanded categories. Leaving them unchanged")
    else:
        global_preferences.setValue("cura/categories_expanded", categories_expanded)
    Application.getInstance().expandedCategoriesChanged.emit()  # Notify the GUI of the change.

    self._id_mapping = {}

    # We don't add containers right away, but wait until right before the stack serialization.
    # We do this so that if something goes wrong, it's easier to clean up.
    containers_to_add = []

    # TODO: For the moment we use pretty naive existence checking. If the ID is the same, we assume in quite a few
    # TODO: cases that the container loaded is the same (most notably in materials & definitions).
    # TODO: It might be possible that we need to add smarter checking in the future.
    Logger.log("d", "Workspace loading is checking definitions...")
    # Get all the definition files & check if they exist. If not, add them.
    definition_container_files = [name for name in cura_file_names if name.endswith(self._definition_container_suffix)]
    for definition_container_file in definition_container_files:
        container_id = self._stripFileToId(definition_container_file)
        definitions = self._container_registry.findDefinitionContainers(id = container_id)
        if not definitions:
            definition_container = DefinitionContainer(container_id)
            definition_container.deserialize(archive.open(definition_container_file).read().decode("utf-8"))
            self._container_registry.addContainer(definition_container)
        Job.yieldThread()

    Logger.log("d", "Workspace loading is checking materials...")
    material_containers = []
    # Get all the material files and check if they exist. If not, add them.
    xml_material_profile = self._getXmlProfileClass()
    if self._material_container_suffix is None:
        self._material_container_suffix = ContainerRegistry.getMimeTypeForContainer(xml_material_profile).suffixes[0]
    if xml_material_profile:
        material_container_files = [name for name in cura_file_names if name.endswith(self._material_container_suffix)]
        for material_container_file in material_container_files:
            container_id = self._stripFileToId(material_container_file)
            materials = self._container_registry.findInstanceContainers(id = container_id)
            if not materials:
                material_container = xml_material_profile(container_id)
                material_container.deserialize(archive.open(material_container_file).read().decode("utf-8"))
                containers_to_add.append(material_container)
            else:
                if not materials[0].isReadOnly():  # Only create new materials if they are not read-only.
                    if self._resolve_strategies["material"] == "override":
                        materials[0].deserialize(archive.open(material_container_file).read().decode("utf-8"))
                    elif self._resolve_strategies["material"] == "new":
                        # Note that we *must* deserialize it with a new ID, as multiple containers will be
                        # auto created & added.
                        material_container = xml_material_profile(self.getNewId(container_id))
                        material_container.deserialize(archive.open(material_container_file).read().decode("utf-8"))
                        containers_to_add.append(material_container)

            material_containers.append(material_container)
            Job.yieldThread()

    Logger.log("d", "Workspace loading is checking instance containers...")
    # Get quality_changes and user profiles saved in the workspace.
    instance_container_files = [name for name in cura_file_names if name.endswith(self._instance_container_suffix)]
    user_instance_containers = []
    quality_changes_instance_containers = []
    for instance_container_file in instance_container_files:
        container_id = self._stripFileToId(instance_container_file)
        instance_container = InstanceContainer(container_id)

        # Deserialize InstanceContainer by converting read data from bytes to string.
        instance_container.deserialize(archive.open(instance_container_file).read().decode("utf-8"))
        container_type = instance_container.getMetaDataEntry("type")
        Job.yieldThread()
        if container_type == "user":
            # Check if this user container already exists.
            user_containers = self._container_registry.findInstanceContainers(id = container_id)
            if not user_containers:
                containers_to_add.append(instance_container)
            else:
                if self._resolve_strategies["machine"] == "override" or self._resolve_strategies["machine"] is None:
                    user_containers[0].deserialize(archive.open(instance_container_file).read().decode("utf-8"))
                elif self._resolve_strategies["machine"] == "new":
                    # The machine is going to get a spiffy new name, so ensure that the IDs of user settings match.
                    extruder_id = instance_container.getMetaDataEntry("extruder", None)
                    if extruder_id:
                        new_id = self.getNewId(extruder_id) + "_current_settings"
                        instance_container._id = new_id
                        instance_container.setName(new_id)
                        instance_container.setMetaDataEntry("extruder", self.getNewId(extruder_id))
                        containers_to_add.append(instance_container)

                    machine_id = instance_container.getMetaDataEntry("machine", None)
                    if machine_id:
                        new_id = self.getNewId(machine_id) + "_current_settings"
                        instance_container._id = new_id
                        instance_container.setName(new_id)
                        instance_container.setMetaDataEntry("machine", self.getNewId(machine_id))
                        containers_to_add.append(instance_container)
            user_instance_containers.append(instance_container)
        elif container_type == "quality_changes":
            # Check if quality changes already exists.
            quality_changes = self._container_registry.findInstanceContainers(id = container_id)
            if not quality_changes:
                containers_to_add.append(instance_container)
            else:
                if self._resolve_strategies["quality_changes"] == "override":
                    quality_changes[0].deserialize(archive.open(instance_container_file).read().decode("utf-8"))
                elif self._resolve_strategies["quality_changes"] is None:
                    # The ID already exists, but nothing in the values changed, so do nothing.
                    pass
            quality_changes_instance_containers.append(instance_container)
        else:
            continue

    # Add all the containers right before we try to add / serialize the stack.
    for container in containers_to_add:
        self._container_registry.addContainer(container)

    # Get the stack(s) saved in the workspace.
    Logger.log("d", "Workspace loading is checking stacks containers...")
    container_stack_files = [name for name in cura_file_names if name.endswith(self._container_stack_suffix)]
    global_stack = None
    extruder_stacks = []
    container_stacks_added = []
    try:
        for container_stack_file in container_stack_files:
            container_id = self._stripFileToId(container_stack_file)

            # Check if a stack by this ID already exists.
            container_stacks = self._container_registry.findContainerStacks(id = container_id)
            if container_stacks:
                stack = container_stacks[0]
                if self._resolve_strategies["machine"] == "override":
                    # TODO: HACK
                    # There is a machine, check if it has authentication data. If so, keep that data.
                    network_authentication_id = container_stacks[0].getMetaDataEntry("network_authentication_id")
                    network_authentication_key = container_stacks[0].getMetaDataEntry("network_authentication_key")
                    container_stacks[0].deserialize(archive.open(container_stack_file).read().decode("utf-8"))
                    if network_authentication_id:
                        container_stacks[0].addMetaDataEntry("network_authentication_id", network_authentication_id)
                    if network_authentication_key:
                        container_stacks[0].addMetaDataEntry("network_authentication_key", network_authentication_key)
                elif self._resolve_strategies["machine"] == "new":
                    new_id = self.getNewId(container_id)
                    stack = ContainerStack(new_id)
                    stack.deserialize(archive.open(container_stack_file).read().decode("utf-8"))

                    # Ensure a unique ID and name.
                    stack._id = new_id

                    # Extruder stacks are "bound" to a machine. If we add the machine as a new one, the ID of the
                    # bound machine also needs to change.
                    if stack.getMetaDataEntry("machine", None):
                        stack.setMetaDataEntry("machine", self.getNewId(stack.getMetaDataEntry("machine")))

                    if stack.getMetaDataEntry("type") != "extruder_train":
                        # Only machines need a new name, stacks may be non-unique.
                        stack.setName(self._container_registry.uniqueName(stack.getName()))
                    container_stacks_added.append(stack)
                    self._container_registry.addContainer(stack)
                else:
                    Logger.log("w", "Resolve strategy of %s for machine is not supported", self._resolve_strategies["machine"])
            else:
                stack = ContainerStack(container_id)
                # Deserialize stack by converting read data from bytes to string.
                stack.deserialize(archive.open(container_stack_file).read().decode("utf-8"))
                container_stacks_added.append(stack)
                self._container_registry.addContainer(stack)

            if stack.getMetaDataEntry("type") == "extruder_train":
                extruder_stacks.append(stack)
            else:
                global_stack = stack
            Job.yieldThread()
    except:
        Logger.logException("w", "We failed to deserialize the stack. Trying to clean up.")
        # Something went really wrong. Try to remove any data that we added.
        for container in containers_to_add:
            self._container_registry.getInstance().removeContainer(container.getId())

        for container in container_stacks_added:
            self._container_registry.getInstance().removeContainer(container.getId())

        return None

    if self._resolve_strategies["machine"] == "new":
        # A new machine was made, but it was serialized with the wrong user container. Fix that now.
        for container in user_instance_containers:
            extruder_id = container.getMetaDataEntry("extruder", None)
            if extruder_id:
                for extruder in extruder_stacks:
                    if extruder.getId() == extruder_id:
                        extruder.replaceContainer(0, container)
                        continue

            machine_id = container.getMetaDataEntry("machine", None)
            if machine_id:
                if global_stack.getId() == machine_id:
                    global_stack.replaceContainer(0, container)
                    continue

    if self._resolve_strategies["quality_changes"] == "new":
        # Quality changes need a new ID and must be added to the registry and to the right stacks.
        for container in quality_changes_instance_containers:
            old_id = container.getId()
            container.setName(self._container_registry.uniqueName(container.getName()))
            # We're not really supposed to change the ID in normal cases, but this is an exception.
            container._id = self.getNewId(container.getId())

            # The container was not added yet, as it didn't have a unique ID. It does now, so add it.
            self._container_registry.addContainer(container)

            # Replace the quality changes container.
            old_container = global_stack.findContainer({"type": "quality_changes"})
            if old_container.getId() == old_id:
                quality_changes_index = global_stack.getContainerIndex(old_container)
                global_stack.replaceContainer(quality_changes_index, container)
                continue

            for stack in extruder_stacks:
                old_container = stack.findContainer({"type": "quality_changes"})
                if old_container.getId() == old_id:
                    quality_changes_index = stack.getContainerIndex(old_container)
                    stack.replaceContainer(quality_changes_index, container)

    if self._resolve_strategies["material"] == "new":
        for material in material_containers:
            old_material = global_stack.findContainer({"type": "material"})
            if old_material.getId() in self._id_mapping:
                material_index = global_stack.getContainerIndex(old_material)
                global_stack.replaceContainer(material_index, material)
                continue

            for stack in extruder_stacks:
                old_material = stack.findContainer({"type": "material"})
                if old_material.getId() in self._id_mapping:
                    material_index = stack.getContainerIndex(old_material)
                    stack.replaceContainer(material_index, material)
                    continue

    if extruder_stacks:
        for stack in extruder_stacks:
            ExtruderManager.getInstance().registerExtruder(stack, global_stack.getId())
    else:
        # Machine has no extruders, but it needs to be registered with the extruder manager.
        ExtruderManager.getInstance().registerExtruder(None, global_stack.getId())

    Logger.log("d", "Workspace loading is notifying rest of the code of changes...")
    # Notify everything/everyone that needs to know about the changes.
    global_stack.containersChanged.emit(global_stack.getTop())
    for stack in extruder_stacks:
        stack.setNextStack(global_stack)
        stack.containersChanged.emit(stack.getTop())

    # Actually change the active machine.
    Application.getInstance().setGlobalContainerStack(global_stack)

    # Load all the nodes / mesh data of the workspace.
    nodes = self._3mf_mesh_reader.read(file_name)
    if nodes is None:
        nodes = []
    return nodes

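Both copies of read() above lean on two small helpers that are not shown in this section. A minimal sketch of what they are assumed to do; getNewId mirrors how _id_mapping is used above, while the _stripFileToId body is purely illustrative:

import os

def _stripFileToId(self, file_name):
    # Illustrative: reduce "Cura/<id>.<ext>" to just "<id>" by dropping the folder and extension.
    return os.path.splitext(os.path.basename(file_name))[0]

def getNewId(self, old_id):
    # Assumed behaviour: hand out (and remember) a unique replacement ID, so every
    # later reference to old_id resolves to the same new ID via self._id_mapping.
    if old_id not in self._id_mapping:
        self._id_mapping[old_id] = self._container_registry.uniqueName(old_id)
    return self._id_mapping[old_id]
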