def test_getStoragePathForType_Linux(self):
    # Registration contract of Resources storage types: querying an
    # unregistered type raises; after addStorageType the registered path
    # is returned verbatim.
    with pytest.raises(ResourceTypeError):
        # No types have been added, so this should break!
        Resources.getAllResourcesOfType(0)
    with pytest.raises(UnsupportedStorageTypeError):
        # We still haven't added it, so it should fail (again)
        Resources.getStoragePathForType(0)
    Resources.addStorageType(0, "/test")
    assert Resources.getStoragePathForType(0) == "/test"
def loadProfiles(self):
    ## Discover profile files in every profile search path, register any
    #  profile whose name is not yet known, then re-activate the profile
    #  remembered in the preferences and notify listeners.
    writable_path = Resources.getStoragePathForType(Resources.Profiles)
    for search_dir in Resources.getAllPathsForType(Resources.Profiles):
        if not os.path.isdir(search_dir):
            continue
        # Anything outside the writable storage location is read-only.
        is_read_only = search_dir != writable_path
        for entry in os.listdir(search_dir):
            entry_path = os.path.join(search_dir, entry)
            if os.path.isdir(entry_path):
                continue
            loaded_profile = Profile(self, is_read_only)
            try:
                loaded_profile.loadFromFile(entry_path)
            except Exception as e:
                Logger.log("e", "An exception occurred loading Profile %s: %s", entry_path, str(e))
                continue
            if self.findProfile(loaded_profile.getName()):
                continue  # A profile with this name is already registered.
            self._profiles.append(loaded_profile)
            loaded_profile.nameChanged.connect(self._onProfileNameChanged)
    active = self.findProfile(Preferences.getInstance().getValue("machines/active_profile"))
    if active:
        self.setActiveProfile(active)
    self.profilesChanged.emit()
def load(self):
    # Load all container files of the registered resource types into the
    # registry, ordered so containers are loaded before anything that
    # depends on them (definitions, then instances, then stacks).
    files = []
    for resource_type in self._resource_types:
        resources = Resources.getAllResourcesOfType(resource_type)
        try:
            resource_storage_path = Resources.getStoragePathForType(resource_type)
        except UnsupportedStorageTypeError:
            resource_storage_path = ""

        # Pre-process the list of files to insert relevant data
        # Most importantly, we need to ensure the loading order is DefinitionContainer, InstanceContainer, ContainerStack
        for path in resources:
            mime = MimeTypeDatabase.getMimeTypeForFile(path)
            container_type = self.__mime_type_map.get(mime.name)
            if not container_type:
                Logger.log("w", "Could not determine container type for file %s, ignoring", path)
                continue
            type_priority = 2  # Default priority: ContainerStack, loaded last.
            if issubclass(container_type, DefinitionContainer.DefinitionContainer):
                type_priority = 0
            if issubclass(container_type, InstanceContainer.InstanceContainer):
                type_priority = 1
            # Since we have the mime type and resource type here, process these two properties so we do not
            # need to look up mime types etc. again.
            container_id = urllib.parse.unquote_plus(mime.stripExtension(os.path.basename(path)))
            # Files outside the writable storage path for their type are read-only.
            read_only = os.path.dirname(path) != resource_storage_path
            files.append((type_priority, container_id, path, read_only, container_type))

    # Sort the list of files by type_priority so we can ensure correct loading order.
    files = sorted(files, key = lambda i: i[0])
    for _, container_id, file_path, read_only, container_type in files:
        # Already known (e.g. loaded earlier from the cache): skip duplicates.
        if container_id in self._id_container_cache:
            continue
        try:
            if issubclass(container_type, DefinitionContainer.DefinitionContainer):
                # Try the pre-parsed definition cache first; fall back to a
                # full deserialize only on a cache miss.
                definition = self._loadCachedDefinition(container_id, file_path)
                if definition:
                    self.addContainer(definition)
                    continue
            new_container = container_type(container_id)
            with open(file_path, encoding = "utf-8") as f:
                new_container.deserialize(f.read())
            new_container.setReadOnly(read_only)
            if issubclass(container_type, DefinitionContainer.DefinitionContainer):
                self._saveCachedDefinition(new_container)
            self.addContainer(new_container)
        except Exception as e:
            Logger.logException("e", "Could not deserialize container %s", container_id)
def loadProfiles(self):
    # Recursively scan every profile search path, registering profiles not
    # already known for the active machine; then load each machine
    # instance's working profile and restore/select the active profile.
    storage_path = Resources.getStoragePathForType(Resources.Profiles)
    dirs = Resources.getAllPathsForType(Resources.Profiles)
    for dir in dirs:
        if not os.path.isdir(dir):
            continue
        # Profiles outside the writable storage path are read-only.
        read_only = dir != storage_path
        for root, dirs, files in os.walk(dir):
            for file_name in files:
                path = os.path.join(root, file_name)
                if os.path.isdir(path):
                    continue
                profile = Profile(self, read_only)
                try:
                    profile.loadFromFile(path)
                except Exception as e:
                    Logger.log("e", "An exception occurred loading Profile %s: %s", path, str(e))
                    continue
                # Deduplicate on (name, variant, material) for the active machine.
                if not self.findProfile(profile.getName(), variant_name = profile.getMachineVariantName(), material_name = profile.getMaterialName(), instance = self._active_machine):
                    self._profiles.append(profile)
                    profile.nameChanged.connect(self._onProfileNameChanged)

    for instance in self._machine_instances:
        try:
            file_name = urllib.parse.quote_plus(instance.getName()) + ".curaprofile"
            instance.getWorkingProfile().loadFromFile(Resources.getStoragePath(Resources.MachineInstanceProfiles, file_name))
        except Exception as e:
            Logger.log("w", "Could not load working profile: %s: %s", file_name, str(e))
            # NOTE(review): placement inferred -- falls back to a default
            # variant/material profile when the stored one fails to load.
            self._setDefaultVariantMaterialProfile(instance)

    # Guard against the working profile being overwritten while switching.
    self._protect_working_profile = True
    if self._active_machine:
        profile_name = self._active_machine.getActiveProfileName()
        if profile_name == "":
            profile_name = "Normal Quality"
        profile = self.findProfile(self._active_machine.getActiveProfileName(), instance = self._active_machine)
        if profile:
            self.setActiveProfile(profile)
        else:
            # Stored active profile not found; fall back to the first available one.
            profiles = self.getProfiles(instance = self._active_machine)
            if len(profiles) > 0:
                self.setActiveProfile(profiles[0])
    self.profilesChanged.emit()
    self._protect_working_profile = False
def loadProfiles(self):
    # Load profiles from all search paths and restore the active profile
    # from the preferences; when no active profile was stored, default to
    # the first profile found.
    storage_path = Resources.getStoragePathForType(Resources.Profiles)
    dirs = Resources.getAllPathsForType(Resources.Profiles)
    for dir in dirs:
        if not os.path.isdir(dir):
            continue
        # Profiles outside the writable storage path are read-only.
        read_only = dir != storage_path
        for file_name in os.listdir(dir):
            path = os.path.join(dir, file_name)
            if os.path.isdir(path):
                continue
            profile = Profile(self, read_only)
            try:
                profile.loadFromFile(path)
            except Exception as e:
                Logger.log("e", "An exception occurred loading Profile %s: %s", path, str(e))
                continue
            # First occurrence of a name wins; later duplicates are dropped.
            if not self.findProfile(profile.getName()):
                self._profiles.append(profile)
                profile.nameChanged.connect(self._onProfileNameChanged)

    profile = self.findProfile(Preferences.getInstance().getValue("machines/active_profile"))
    if profile:
        self.setActiveProfile(profile)
    else:
        if Preferences.getInstance().getValue("machines/active_profile") == "":
            for profile in self._profiles:
                self.setActiveProfile(profile)  #default to first profile you can find
                break
    self.profilesChanged.emit()
def load(self):
    """Load every resource file of the registered resource types into a container.

    Containers whose file lives outside the writable storage location for
    their resource type are marked read-only. Files whose MIME type is not
    mapped to a container type are skipped with a warning.
    """
    files = []
    files_resource_type = []
    for resource_type in self._resource_types:
        resources = Resources.getAllResourcesOfType(resource_type)
        files.extend(resources)
        files_resource_type.extend([resource_type] * len(resources))

    for file_path, resource_type in zip(files, files_resource_type):
        # BUGFIX: bind container_id before the try block. The except clause
        # below logs it, but the MIME lookup can raise before an id was
        # derived, which previously caused a NameError (or logged a stale id
        # from the previous iteration).
        container_id = os.path.basename(file_path)
        try:
            mime = MimeTypeDatabase.getMimeTypeForFile(file_path)
            container_type = self.__mime_type_map.get(mime.name)
            container_id = mime.stripExtension(os.path.basename(file_path))
            ## Ensure that all special characters are encoded back.
            container_id = urllib.parse.unquote_plus(container_id)

            # Files outside their type's writable storage path are read-only;
            # types without a storage path default to read-only.
            read_only = True
            try:
                read_only = os.path.dirname(file_path) != Resources.getStoragePathForType(resource_type)
            except UnsupportedStorageTypeError:
                pass

            if container_type is None:
                Logger.log("w", "Unable to detect container type for %s", mime.name)
                continue

            new_container = container_type(container_id)
            with open(file_path, encoding = "utf-8") as f:
                new_container.deserialize(f.read())
            new_container.setReadOnly(read_only)
            self._containers.append(new_container)
        except Exception as e:
            Logger.logException("e", "Could not deserialize container %s", container_id)
def configFixer(self):
    #This finds all the config cache files and runs the previous two functions
    # Returns True when any of the material/quality/intent diffs is non-empty.
    dMats, dVars, dQuals, dIntents = self.diffMaker()
    # Idiomatic truthiness check instead of three len(...) > 0 comparisons.
    # NOTE(review): dVars is patched below but deliberately(?) excluded from
    # this check -- confirm whether variant-only changes should also return True.
    truth = bool(dMats or dQuals or dIntents)

    path = Resources.getStoragePathForType(Resources.Resources)
    Logger.log("i", "Cleaning cache")
    files = []
    for dirpath, dirnames, filenames in os.walk(path):
        for file in filenames:
            #don't mess with anything in the plugins directory
            if "autilus" in file and "autilus" not in dirpath and file.endswith(".cfg"):
                #Logger.log("i","!!!@"+os.path.join(dirpath,file))
                files.append(os.path.join(dirpath, file))
    Logger.log("i", "There are " + str(len(files)) + " cache files to mess with")

    # Apply every diff category to the collected cache files.
    self.cachePatch(dMats, files)
    self.cachePatch(dQuals, files)
    self.cachePatch(dVars, files)
    self.cachePatch(dIntents, files)
    return truth
def load(self) -> None:
    # Load every container file of the registered resource types into the
    # registry, ordered by each container type's declared loading priority.
    files = []
    for resource_type in self._resource_types:
        resources = Resources.getAllResourcesOfType(resource_type)
        try:
            resource_storage_path = Resources.getStoragePathForType(resource_type)
        except UnsupportedStorageTypeError:
            resource_storage_path = ""

        # Pre-process the list of files to insert relevant data
        # Most importantly, we need to ensure the loading order is DefinitionContainer, InstanceContainer, ContainerStack
        for path in resources:
            try:
                mime = MimeTypeDatabase.getMimeTypeForFile(path)
            except MimeTypeDatabase.MimeTypeNotFoundError:
                # No valid mime type found for file, ignore it.
                continue
            container_type = self.__mime_type_map.get(mime.name)
            if not container_type:
                Logger.log("w", "Could not determine container type for file %s, ignoring", path)
                continue
            type_priority = container_type.getLoadingPriority()
            # Since we have the mime type and resource type here, process these two properties so we do not
            # need to look up mime types etc. again.
            container_id = urllib.parse.unquote_plus(mime.stripExtension(os.path.basename(path)))
            # realpath on both sides so symlinked storage paths compare equal.
            read_only = os.path.realpath(os.path.dirname(path)) != os.path.realpath(resource_storage_path)
            files.append((type_priority, container_id, path, read_only, container_type))

    # Sort the list of files by type_priority so we can ensure correct loading order.
    files = sorted(files, key = lambda i: i[0])
    resource_start_time = time.time()
    for _, container_id, file_path, read_only, container_type in files:
        # Duplicate IDs are an error in the data; keep the first, report loudly.
        if container_id in self._id_container_cache:
            Logger.log("c", "Found a container with a duplicate ID: %s", container_id)
            Logger.log("c", "Existing container is %s, trying to load %s from %s", self._id_container_cache[container_id], container_type, file_path)
            continue
        try:
            if issubclass(container_type, DefinitionContainer):
                # Prefer the pre-parsed definition cache over a full parse.
                definition = self._loadCachedDefinition(container_id, file_path)
                if definition:
                    self.addContainer(definition)
                    continue
            new_container = container_type(container_id)
            with open(file_path, encoding = "utf-8") as f:
                new_container.deserialize(f.read())
            new_container.setReadOnly(read_only)
            new_container.setPath(file_path)
            if issubclass(container_type, DefinitionContainer):
                self._saveCachedDefinition(new_container)
            self.addContainer(new_container)
        except Exception as e:
            Logger.logException("e", "Could not deserialize container %s", container_id)
    Logger.log("d", "Loading data into container registry took %s seconds", time.time() - resource_start_time)
def load(self) -> None:
    """Load all container files of the registered resource types into the registry.

    Files under an ``.../old/<version>/...`` backup directory are skipped, and
    files are processed in order of their container type's loading priority.
    """
    # We disable the garbage collection while loading, as this speeds up the loading.
    # Since there is so much going on (lots of objects being created), it's better to have it wait a bit until
    # the dust settles down.
    gc.disable()
    # BUGFIX: re-enable GC in a finally block. Previously an exception that
    # escaped the loop (e.g. from processEvents or lockCache) left garbage
    # collection disabled for the rest of the application's lifetime.
    try:
        files = []
        # Matches paths inside an ".../old/<number>/..." backup directory.
        old_file_expression = re.compile(r"\{sep}old\{sep}\d+\{sep}".format(sep = os.sep))
        for resource_type in self._resource_types:
            resources = Resources.getAllResourcesOfType(resource_type)
            try:
                resource_storage_path = Resources.getStoragePathForType(resource_type)
            except UnsupportedStorageTypeError:
                resource_storage_path = ""

            # Pre-process the list of files to insert relevant data
            # Most importantly, we need to ensure the loading order is DefinitionContainer, InstanceContainer, ContainerStack
            for path in resources:
                if old_file_expression.search(path):
                    # This is a backup file, ignore it.
                    continue
                try:
                    mime = MimeTypeDatabase.getMimeTypeForFile(path)
                except MimeTypeDatabase.MimeTypeNotFoundError:
                    # No valid mime type found for file, ignore it.
                    continue
                container_type = self.__mime_type_map.get(mime.name)
                if not container_type:
                    Logger.log("w", "Could not determine container type for file %s, ignoring", path)
                    continue
                type_priority = container_type.getLoadingPriority()
                # Since we have the mime type and resource type here, process these two properties so we do not
                # need to look up mime types etc. again.
                container_id = urllib.parse.unquote_plus(mime.stripExtension(os.path.basename(path)))
                # realpath on both sides so symlinked storage paths compare equal.
                read_only = os.path.realpath(os.path.dirname(path)) != os.path.realpath(resource_storage_path)
                files.append((type_priority, container_id, path, read_only, container_type))

        # Sort the list of files by type_priority so we can ensure correct loading order.
        files = sorted(files, key = lambda i: i[0])
        resource_start_time = time.time()
        with self.lockCache():  #Because we might be writing cache files.
            for _, container_id, file_path, read_only, container_type in files:
                # Enable the rest of the application to get UI updates.
                UM.Qt.QtApplication.QtApplication.processEvents()
                if container_id in self._id_container_cache:
                    Logger.log("c", "Found a container with a duplicate ID: %s", container_id)
                    Logger.log("c", "Existing container is %s, trying to load %s from %s", self._id_container_cache[container_id], container_type, file_path)
                    continue
                try:
                    if issubclass(container_type, DefinitionContainer):
                        # Prefer the pre-parsed definition cache over a full parse.
                        definition = self._loadCachedDefinition(container_id, file_path)
                        if definition:
                            self.addContainer(definition)
                            continue
                    new_container = container_type(container_id)
                    with open(file_path, encoding = "utf-8") as f:
                        new_container.deserialize(f.read())
                    new_container.setReadOnly(read_only)
                    new_container.setPath(file_path)
                    if issubclass(container_type, DefinitionContainer):
                        self._saveCachedDefinition(new_container)
                    self.addContainer(new_container)
                except Exception as e:
                    Logger.logException("e", "Could not deserialize container %s", container_id)
        Logger.log("d", "Loading data into container registry took %s seconds", time.time() - resource_start_time)
    finally:
        gc.enable()
def __init__(self):
    """Set up the Nautilus plugin: resolve local resource directories, fetch
    the latest configuration release metadata, and (re)install the bundled
    configuration files when the install state or versions require it."""
    super().__init__()
    self._application = CuraApplication.getInstance()
    self._setting_keyword = ";SETTING_"
    #self._application.initializationFinished.connect(self._onInitialized)

    #def _onInitialized(self):
    self.this_plugin_path = os.path.join(Resources.getStoragePath(Resources.Resources), "plugins", "Nautilus", "Nautilus")
    self._preferences_window = None
    self._guides = None
    self._ready = False
    self.local_meshes_path = None
    self.local_printer_def_path = None
    self.local_materials_path = None
    self.local_quality_path = None
    self.local_extruder_path = None
    self.local_variants_path = None
    self.local_setvis_path = None
    self.local_global_dir = None
    self.local_intent_path = None
    Logger.log("i", "Nautilus Plugin setting up")

    # All writable configuration directories live under the user's Resources
    # storage location; resolve it once instead of once per directory.
    resources_storage = Resources.getStoragePath(Resources.Resources)
    self.local_meshes_path = os.path.join(Resources.getStoragePathForType(Resources.Resources), "meshes")
    self.local_printer_def_path = Resources.getStoragePath(Resources.DefinitionContainers)  #os.path.join(Resources.getStoragePath(Resources.Resources),"definitions")
    self.local_materials_path = os.path.join(resources_storage, "materials")
    self.local_quality_path = os.path.join(resources_storage, "quality")
    self.local_extruder_path = os.path.join(resources_storage, "extruders")
    self.local_variants_path = os.path.join(resources_storage, "variants")
    self.local_setvis_path = os.path.join(resources_storage, "setting_visibility")
    self.local_global_dir = os.path.join(resources_storage, "machine_instances")
    self.local_intent_path = os.path.join(resources_storage, "intent")

    self.setvers = self._application.getPreferences().getValue("metadata/setting_version")
    self.gitUrl = 'https://api.github.com/repos/HydraResearchLLC/Nautilus-Configuration-Macros/releases/latest'
    # NOTE(review): synchronous network request during plugin construction;
    # an offline failure or timeout here raises out of __init__ -- confirm
    # whether this should be deferred/guarded.
    self.fullJson = json.loads(requests.get(self.gitUrl).text)

    preferences = self._application.getPreferences()
    # if the plugin was never installed, then force installation
    if preferences.getValue("Nautilus/install_status") is None:
        self._ready = True
        preferences.addPreference("Nautilus/install_status", "unknown")
        Logger.log("i", "first install")
        preferences.addPreference("Nautilus/configversion", "1.0.0")

    # if something got messed up, force installation
    # BUGFIX: string comparisons below used "is", which tests object identity
    # and is unreliable (and a SyntaxWarning) for string literals; use "==".
    if not self.isInstalled() and preferences.getValue("Nautilus/install_status") == "installed":
        preferences.setValue("Nautilus/install_status", "unknown")
        Logger.log("i", "weird error, config uninstalled, preference incorrect")

    # if it's installed, and it's listed as uninstalled, then change that to reflect the truth
    if self.isInstalled() and preferences.getValue("Nautilus/install_status") == "uninstalled":
        preferences.setValue("Nautilus/install_status", "installed")
        Logger.log("i", "weird error, config installed, preference incorrect")

    # if the version isn't the same, then force installation
    if not self.versionsMatch():
        preferences.setValue("Nautilus/install_status", "unknown")
        Logger.log("i", "Version's don't match")

    # Check the preferences to see if the user uninstalled the files -
    # if so don't automatically install them
    if preferences.getValue("Nautilus/install_status") == "unknown":
        # if the user never installed the files, then automatically install it
        Logger.log("i", "Time to install!")
        self.installPluginFiles()

    if not self.configVersionsMatch():
        self.messageMaker()
        Logger.log("i", "time for a config update!")

    #This is the signal for machines changing
    self._application.globalContainerStackChanged.connect(self.updateMachineName)
    Duet = NautilusDuet.NautilusDuet()
    self.addMenuItem(catalog.i18nc("@item:inmenu", "Nautilus Connections"), Duet.showSettingsDialog)
    self.addMenuItem(catalog.i18nc("@item:inmenu", "Resources and Guides"), self.showGuides)
    self.addMenuItem(catalog.i18nc("@item:inmenu", "Preferences"), self.showPreferences)
    # finally save the cura.cfg file
    #self._application.getPreferences().writeToFile(Resources.getStoragePath(Resources.Preferences, self._application.getApplicationName() + ".cfg"))
    Application.getInstance().engineCreatedSignal.connect(self.addMatCosts)
def _upgradeFile(self, storage_path_absolute: str, configuration_file: str, old_configuration_type: str) -> bool:
    # Upgrade one configuration file to the current format version.
    # Returns True when the file was upgraded and rewritten, False when it
    # could not be read/recognised/upgraded or was already current.
    configuration_file_absolute = os.path.join(storage_path_absolute, configuration_file)

    # Read the old file.
    try:
        with open(configuration_file_absolute, encoding = "utf-8", errors = "ignore") as file_handle:
            files_data = [file_handle.read()]
    except MemoryError:  # File is too big. It might be the log.
        return False
    except FileNotFoundError:  # File was already moved to an /old directory.
        return False
    except IOError:
        Logger.log("w", "Can't open configuration file %s for reading.", configuration_file_absolute)
        return False

    # Get the version number of the old file.
    try:
        old_version = self._get_version_functions[old_configuration_type](files_data[0])
    except:  # Version getter gives an exception. Not a valid file. Can't upgrade it then.
        return False
    version = old_version
    configuration_type = old_configuration_type

    # Get the actual MIME type object, from the name.
    try:
        mime_type = UM.MimeTypeDatabase.MimeTypeDatabase.getMimeTypeForFile(configuration_file)
    except UM.MimeTypeDatabase.MimeTypeNotFoundError:
        return False
    filenames_without_extension = [self._stripMimeTypeExtension(mime_type, configuration_file)]
    # Run the actual upgrade chain; an upgrade may fan one file out into several.
    result_data = self.updateFilesData(configuration_type, version, files_data, filenames_without_extension)
    if not result_data:
        return False
    configuration_type, version, files_data, filenames_without_extension = result_data

    # If the version changed, save the new files.
    if version != old_version or configuration_type != old_configuration_type:
        # Finding out where to store these files.
        resource_type, mime_type_name = self._current_versions[(configuration_type, version)]
        storage_path = Resources.getStoragePathForType(resource_type)
        mime_type = UM.MimeTypeDatabase.MimeTypeDatabase.getMimeType(mime_type_name)  # Get the actual MIME type object, from the name.
        if mime_type.preferredSuffix:
            extension = "." + mime_type.preferredSuffix
        elif mime_type.suffixes:
            extension = "." + mime_type.suffixes[0]
        else:
            extension = ""  # No known suffix. Put no extension behind it.
        new_filenames = [filename + extension for filename in filenames_without_extension]
        configuration_files_absolute = [os.path.join(storage_path, filename) for filename in new_filenames]

        for file_idx, configuration_file_absolute in enumerate(configuration_files_absolute):
            try:
                with open(os.path.join(configuration_file_absolute), "w", encoding = "utf-8") as file_handle:
                    file_handle.write(files_data[file_idx])  # Save the new file.
            except IOError:
                Logger.log("w", "Couldn't write new configuration file to %s.", configuration_file_absolute)
                return False
        Logger.log("i", "Upgraded %s to version %s.", configuration_file, str(version))
        return True
    return False  # Version didn't change. Was already current.
def loadProfiles(self):
    # Recursively scan the profile search paths for .cfg/.curaprofile files
    # and register them all (no name deduplication in this variant); then
    # load each machine instance's working profile and restore the active one.
    storage_path = Resources.getStoragePathForType(Resources.Profiles)
    dirs = Resources.getAllPathsForType(Resources.Profiles)
    for dir in dirs:
        if not os.path.isdir(dir):
            continue
        # Profiles outside the writable storage path are read-only.
        read_only = dir != storage_path
        for root, dirs, files in os.walk(dir):
            for file_name in files:
                path = os.path.join(root, file_name)
                if os.path.isdir(path):
                    continue
                # Bit of a hack, but we should only use cfg or curaprofile files in the profile folder.
                try:
                    extension = path.split(".")[-1]
                    if extension != "cfg" and extension != "curaprofile":
                        continue
                except:
                    continue  # profile has no extension
                profile = Profile(self, read_only)
                try:
                    profile.loadFromFile(path)
                except Exception as e:
                    Logger.log("e", "An exception occurred loading Profile %s: %s", path, str(e))
                    continue
                self._profiles.append(profile)
                profile.nameChanged.connect(self._onProfileNameChanged)

    for instance in self._machine_instances:
        try:
            file_name = urllib.parse.quote_plus(instance.getName()) + ".curaprofile"
            instance.getWorkingProfile().loadFromFile(Resources.getStoragePath(Resources.MachineInstanceProfiles, file_name))
        except Exception as e:
            Logger.log("w", "Could not load working profile: %s: %s", file_name, str(e))
            # NOTE(review): placement inferred -- falls back to a default
            # variant/material profile when the stored one fails to load.
            self._setDefaultVariantMaterialProfile(instance)

    # Guard against the working profile being overwritten while switching.
    self._protect_working_profile = True
    if self._active_machine:
        profile_name = self._active_machine.getActiveProfileName()
        if profile_name == "":
            # No active profile stored for the machine: use the definition's
            # preferred default. (The "prefered_profile" key spelling comes
            # from the machine definition files; do not "fix" it here.)
            profile_name = self._active_machine.getMachineDefinition().getPreference("prefered_profile")
        profile = self.findProfile(self._active_machine.getActiveProfileName(), instance = self._active_machine)
        if profile:
            self.setActiveProfile(profile)
        else:
            # Fall back to the first available profile for this machine.
            profiles = self.getProfiles(instance = self._active_machine)
            if len(profiles) > 0:
                self.setActiveProfile(profiles[0])
    self.profilesChanged.emit()
    self._protect_working_profile = False
def loadProfiles(self):
    # Walk all profile search paths, registering profiles that are not yet
    # known for the active machine (deduplicated on name/variant/material);
    # then load the per-instance working profiles and pick the active profile.
    storage_path = Resources.getStoragePathForType(Resources.Profiles)
    dirs = Resources.getAllPathsForType(Resources.Profiles)
    for dir in dirs:
        if not os.path.isdir(dir):
            continue
        # Profiles outside the writable storage path are read-only.
        read_only = dir != storage_path
        for root, dirs, files in os.walk(dir):
            for file_name in files:
                path = os.path.join(root, file_name)
                if os.path.isdir(path):
                    continue
                profile = Profile(self, read_only)
                try:
                    profile.loadFromFile(path)
                except Exception as e:
                    Logger.log("e", "An exception occurred loading Profile %s: %s", path, str(e))
                    continue
                if not self.findProfile(profile.getName(), variant_name = profile.getMachineVariantName(), material_name = profile.getMaterialName(), instance = self._active_machine):
                    self._profiles.append(profile)
                    profile.nameChanged.connect(self._onProfileNameChanged)

    for instance in self._machine_instances:
        try:
            file_name = urllib.parse.quote_plus(instance.getName()) + ".curaprofile"
            instance.getWorkingProfile().loadFromFile(Resources.getStoragePath(Resources.MachineInstanceProfiles, file_name))
        except Exception as e:
            Logger.log("w", "Could not load working profile: %s: %s", file_name, str(e))
            # NOTE(review): placement inferred -- falls back to a default
            # variant/material profile when the stored one fails to load.
            self._setDefaultVariantMaterialProfile(instance)

    # Guard against the working profile being overwritten while switching.
    self._protect_working_profile = True
    if self._active_machine:
        profile_name = self._active_machine.getActiveProfileName()
        if profile_name == "":
            profile_name = "Normal Quality"
        profile = self.findProfile(self._active_machine.getActiveProfileName(), instance = self._active_machine)
        if profile:
            self.setActiveProfile(profile)
        else:
            # Fall back to the first available profile for this machine.
            profiles = self.getProfiles(instance = self._active_machine)
            if len(profiles) > 0:
                self.setActiveProfile(profiles[0])
    self.profilesChanged.emit()
    self._protect_working_profile = False
def _upgradeFile(self, storage_path_absolute, configuration_file, old_configuration_type, paths):
    # Upgrade one configuration file step by step along the 'paths' upgrade
    # graph until it reaches a current (type, version). Returns True when
    # the file was rewritten, False when it failed or was already current.
    configuration_file_absolute = os.path.join(storage_path_absolute, configuration_file)

    #Read the old file.
    try:
        with open(configuration_file_absolute, encoding = "utf-8", errors = "ignore") as file_handle:
            configuration = file_handle.read()
    except IOError:
        Logger.log("w", "Can't open configuration file %s for reading.", configuration_file_absolute)
        return False

    #Get the version number of the old file.
    try:
        old_version = self._get_version_functions[old_configuration_type](configuration)
    except:  #Version getter gives an exception. Not a valid file. Can't upgrade it then.
        Logger.log("w", "Invalid %s file: %s", old_configuration_type, configuration_file_absolute)
        return False
    version = old_version
    configuration_type = old_configuration_type
    filename_without_extension = os.path.splitext(configuration_file)[0]

    #Keep converting the file until it's at one of the current versions.
    while (configuration_type, version) not in self._current_versions:
        if (configuration_type, version) not in paths:
            Logger.log("w", "File %s (%s, %s) could not be upgraded to the most recent version. No upgrade plug-in can do it.", configuration_file, configuration_type, str(version))
            return False
        new_type, new_version, upgrade = paths[(configuration_type, version)]
        try:
            filename_without_extension, configuration = upgrade(configuration, filename_without_extension)
        except Exception as e:  #Upgrade failed due to a coding error in the plug-in.
            Logger.logException("w", "Exception in %s upgrade with %s: %s", old_configuration_type, upgrade.__module__, str(e))
            return False
        if not configuration:  #Upgrade failed.
            return False
        version = new_version
        configuration_type = new_type

    #If the version changed, save the new file.
    if version != old_version or configuration_type != old_configuration_type:
        # Keep a backup of the original before writing the upgraded file.
        self._storeOldFile(storage_path_absolute, configuration_file, old_version)

        #Finding out where to store this file.
        resource_type, mime_type = self._current_versions[(configuration_type, version)]
        storage_path = Resources.getStoragePathForType(resource_type)
        mime_type = UM.MimeTypeDatabase.getMimeType(mime_type)  #Get the actual MIME type object, from the name.
        new_filename = filename_without_extension
        if mime_type.preferredSuffix:
            new_filename += "." + mime_type.preferredSuffix
        elif mime_type.suffixes:
            new_filename += "." + mime_type.suffixes[0]
        configuration_file_absolute = os.path.join(storage_path, new_filename)

        try:
            with open(os.path.join(configuration_file_absolute), "w", encoding = "utf-8") as file_handle:
                file_handle.write(configuration)  #Save the new file.
        except IOError:
            Logger.log("w", "Couldn't write new configuration file to %s.", configuration_file_absolute)
            return False
        Logger.log("i", "Upgraded %s to version %s.", configuration_file, str(version))
        return True
    return False  #Version didn't change. Was already current.
def _upgradeFile(self, storage_path_absolute, configuration_file, old_configuration_type):
    # Upgrade one configuration file along self._upgrade_routes until it
    # reaches a current (type, version). A single input file may fan out
    # into multiple output files during an upgrade step. Returns True when
    # rewritten, False on failure or when the file was already current.
    configuration_file_absolute = os.path.join(storage_path_absolute, configuration_file)

    #Read the old file.
    try:
        with open(configuration_file_absolute, encoding = "utf-8", errors = "ignore") as file_handle:
            files_data = [file_handle.read()]
    except FileNotFoundError:  #File was already moved to an /old directory.
        return False
    except IOError:
        Logger.log("w", "Can't open configuration file %s for reading.", configuration_file_absolute)
        return False

    #Get the version number of the old file.
    try:
        old_version = self._get_version_functions[old_configuration_type](files_data[0])
    except:  #Version getter gives an exception. Not a valid file. Can't upgrade it then.
        return False
    version = old_version
    configuration_type = old_configuration_type
    filenames_without_extension = [os.path.splitext(configuration_file)[0]]

    #Keep converting the file until it's at one of the current versions.
    while (configuration_type, version) not in self._current_versions:
        if (configuration_type, version) not in self._upgrade_routes:
            #No version upgrade plug-in claims to be able to upgrade this file.
            return False
        new_type, new_version, upgrade_step = self._upgrade_routes[(configuration_type, version)]
        new_filenames_without_extension = []
        new_files_data = []
        for file_idx, file_data in enumerate(files_data):
            try:
                this_filenames_without_extension, this_files_data = upgrade_step(file_data, filenames_without_extension[file_idx])
            except Exception as e:  #Upgrade failed due to a coding error in the plug-in.
                Logger.logException("w", "Exception in %s upgrade with %s: %s", old_configuration_type, upgrade_step.__module__, str(e))
                return False
            if not this_files_data:  #Upgrade failed.
                return False
            # Accumulate the (possibly multiple) outputs of this step.
            new_filenames_without_extension += this_filenames_without_extension
            new_files_data += this_files_data
        filenames_without_extension = new_filenames_without_extension
        files_data = new_files_data
        version = new_version
        configuration_type = new_type

    #If the version changed, save the new files.
    if version != old_version or configuration_type != old_configuration_type:
        # Keep a backup of the original before writing the upgraded files.
        self._storeOldFile(storage_path_absolute, configuration_file, old_version)

        #Finding out where to store these files.
        resource_type, mime_type = self._current_versions[(configuration_type, version)]
        storage_path = Resources.getStoragePathForType(resource_type)
        mime_type = UM.MimeTypeDatabase.getMimeType(mime_type)  #Get the actual MIME type object, from the name.
        if mime_type.preferredSuffix:
            extension = "." + mime_type.preferredSuffix
        elif mime_type.suffixes:
            extension = "." + mime_type.suffixes[0]
        else:
            extension = ""  #No known suffix. Put no extension behind it.
        new_filenames = [filename + extension for filename in filenames_without_extension]
        configuration_files_absolute = [os.path.join(storage_path, filename) for filename in new_filenames]

        for file_idx, configuration_file_absolute in enumerate(configuration_files_absolute):
            try:
                with open(os.path.join(configuration_file_absolute), "w", encoding = "utf-8") as file_handle:
                    file_handle.write(files_data[file_idx])  #Save the new file.
            except IOError:
                Logger.log("w", "Couldn't write new configuration file to %s.", configuration_file_absolute)
                return False
        Logger.log("i", "Upgraded %s to version %s.", configuration_file, str(version))
        return True
    return False  #Version didn't change. Was already current.
def _upgradeFile(self, storage_path_absolute: str, configuration_file: str, old_configuration_type: str) -> bool:
    # Bring a single configuration file up to the current format version via
    # updateFilesData. Returns True if it was upgraded and written back,
    # False if unreadable/unrecognised/unupgradeable or already current.
    configuration_file_absolute = os.path.join(storage_path_absolute, configuration_file)

    # Read the old file.
    try:
        with open(configuration_file_absolute, encoding = "utf-8", errors = "ignore") as file_handle:
            files_data = [file_handle.read()]
    except MemoryError:  # File is too big. It might be the log.
        return False
    except FileNotFoundError:  # File was already moved to an /old directory.
        return False
    except IOError:
        Logger.log("w", "Can't open configuration file %s for reading.", configuration_file_absolute)
        return False

    # Get the version number of the old file.
    try:
        old_version = self._get_version_functions[old_configuration_type](files_data[0])
    except:  # Version getter gives an exception. Not a valid file. Can't upgrade it then.
        return False
    version = old_version
    configuration_type = old_configuration_type

    # Get the actual MIME type object, from the name.
    try:
        mime_type = UM.MimeTypeDatabase.MimeTypeDatabase.getMimeTypeForFile(configuration_file)
    except UM.MimeTypeDatabase.MimeTypeNotFoundError:
        return False
    filenames_without_extension = [self._stripMimeTypeExtension(mime_type, configuration_file)]
    # Run the upgrade chain; one file may fan out into several outputs.
    result_data = self.updateFilesData(configuration_type, version, files_data, filenames_without_extension)
    if not result_data:
        return False
    configuration_type, version, files_data, filenames_without_extension = result_data

    # If the version changed, save the new files.
    if version != old_version or configuration_type != old_configuration_type:
        # Finding out where to store these files.
        resource_type, mime_type_name = self._current_versions[(configuration_type, version)]
        storage_path = Resources.getStoragePathForType(resource_type)
        mime_type = UM.MimeTypeDatabase.MimeTypeDatabase.getMimeType(mime_type_name)  # Get the actual MIME type object, from the name.
        if mime_type.preferredSuffix:
            extension = "." + mime_type.preferredSuffix
        elif mime_type.suffixes:
            extension = "." + mime_type.suffixes[0]
        else:
            extension = ""  # No known suffix. Put no extension behind it.
        new_filenames = [filename + extension for filename in filenames_without_extension]
        configuration_files_absolute = [os.path.join(storage_path, filename) for filename in new_filenames]

        for file_idx, configuration_file_absolute in enumerate(configuration_files_absolute):
            try:
                with open(os.path.join(configuration_file_absolute), "w", encoding = "utf-8") as file_handle:
                    file_handle.write(files_data[file_idx])  # Save the new file.
            except IOError:
                Logger.log("w", "Couldn't write new configuration file to %s.", configuration_file_absolute)
                return False
        Logger.log("i", "Upgraded %s to version %s.", configuration_file, str(version))
        return True
    return False  # Version didn't change. Was already current.
def load(self) -> None:
    # Gather every container file of the registered resource types (skipping
    # ".../old/<n>/..." backup files), order them by each container type's
    # loading priority, and deserialize them into the registry.
    files = []
    # Matches paths inside an ".../old/<number>/..." backup directory.
    old_file_expression = re.compile(r"\{sep}old\{sep}\d+\{sep}".format(sep = os.sep))
    for resource_type in self._resource_types:
        resources = Resources.getAllResourcesOfType(resource_type)
        try:
            resource_storage_path = Resources.getStoragePathForType(resource_type)
        except UnsupportedStorageTypeError:
            resource_storage_path = ""

        # Pre-process the list of files to insert relevant data
        # Most importantly, we need to ensure the loading order is DefinitionContainer, InstanceContainer, ContainerStack
        for path in resources:
            if old_file_expression.search(path):
                # This is a backup file, ignore it.
                continue
            try:
                mime = MimeTypeDatabase.getMimeTypeForFile(path)
            except MimeTypeDatabase.MimeTypeNotFoundError:
                # No valid mime type found for file, ignore it.
                continue
            container_type = self.__mime_type_map.get(mime.name)
            if not container_type:
                Logger.log("w", "Could not determine container type for file %s, ignoring", path)
                continue
            type_priority = container_type.getLoadingPriority()
            # Since we have the mime type and resource type here, process these two properties so we do not
            # need to look up mime types etc. again.
            container_id = urllib.parse.unquote_plus(mime.stripExtension(os.path.basename(path)))
            # realpath on both sides so symlinked storage paths compare equal.
            read_only = os.path.realpath(os.path.dirname(path)) != os.path.realpath(resource_storage_path)
            files.append((type_priority, container_id, path, read_only, container_type))

    # Sort the list of files by type_priority so we can ensure correct loading order.
    files = sorted(files, key = lambda i: i[0])
    resource_start_time = time.time()
    for _, container_id, file_path, read_only, container_type in files:
        # Duplicate IDs are a data error; keep the first, report loudly.
        if container_id in self._id_container_cache:
            Logger.log("c", "Found a container with a duplicate ID: %s", container_id)
            Logger.log("c", "Existing container is %s, trying to load %s from %s", self._id_container_cache[container_id], container_type, file_path)
            continue
        try:
            if issubclass(container_type, DefinitionContainer):
                # Prefer the pre-parsed definition cache over a full parse.
                definition = self._loadCachedDefinition(container_id, file_path)
                if definition:
                    self.addContainer(definition)
                    continue
            new_container = container_type(container_id)
            with open(file_path, encoding = "utf-8") as f:
                new_container.deserialize(f.read())
            new_container.setReadOnly(read_only)
            new_container.setPath(file_path)
            if issubclass(container_type, DefinitionContainer):
                self._saveCachedDefinition(new_container)
            self.addContainer(new_container)
        except Exception as e:
            Logger.logException("e", "Could not deserialize container %s", container_id)
    Logger.log("d", "Loading data into container registry took %s seconds", time.time() - resource_start_time)