def removeMaterial(self, material_node: "MaterialNode") -> None:
    """Deletes a material from Cura.

    This function does not do any safety checking any more. Please call this function only if:
    - The material is not read-only.
    - The material is not used in any stacks.

    If the material was not lazy-loaded yet, this will fully load the container. When removing
    this material node, all other materials with the same base file will also be removed.

    :param material_node: The material to remove.
    """
    Logger.info(f"Removing material {material_node.container_id}")
    container_registry = CuraContainerRegistry.getInstance()
    materials_this_base_file = container_registry.findContainersMetadata(base_file = material_node.base_file)

    # The material containers belonging to the same material file are supposed to work together. This postponeSignals()
    # does two things:
    #   - optimizing the signal emitting.
    #   - making sure that the signals will only be emitted after all the material containers have been removed.
    with postponeSignals(container_registry.containerRemoved, compress = CompressTechnique.CompressPerParameterValue):
        # CURA-6886: Some containers may not have been loaded. If remove one material container, its material file
        # will be removed. If later we remove a sub-material container which hasn't been loaded previously, it will
        # crash because removeContainer() requires to load the container first, but the material file was already
        # gone.
        # First pass: force-load every sub-material so removal cannot hit a missing file.
        for material_metadata in materials_this_base_file:
            container_registry.findInstanceContainers(id = material_metadata["id"])
        # Second pass: actually remove them all.
        for material_metadata in materials_this_base_file:
            container_registry.removeContainer(material_metadata["id"])
def _removeAllScheduledPackages(self) -> None:
    """Removes all packages that were scheduled for removal on the next start-up.

    Each scheduled package is purged and dropped from the installed-packages
    dictionary; failures are collected and reported to the user in a single
    error message. Afterwards the removal schedule is cleared and persisted.
    """
    Logger.info(
        "Attempting to remove the following scheduled packages: {packages}"
        .format(packages="- " + "\n- ".join(self._to_remove_package_set)))
    remove_failures = set()
    for package_id in self._to_remove_package_set:
        try:
            self._purgePackage(package_id)
            del self._installed_package_dict[package_id]
        except Exception:  # Fixed: was a bare 'except:', which would also swallow SystemExit/KeyboardInterrupt.
            Logger.logException(
                "w", f"Failed to remove package: [{package_id}]")
            remove_failures.add(package_id)

    if remove_failures:
        # NOTE(review): the {packages} placeholder is formatted *before* the i18nc lookup,
        # which likely defeats translation of this message — confirm intended.
        message = Message(catalog.i18nc(
            "@error:uninstall",
            "There were some errors uninstalling the following packages: {packages}"
            .format(packages="- " + "\n- ".join(remove_failures))),
                          title=catalog.i18nc("@info:title", "Uninstalling errors"),
                          message_type=Message.MessageType.ERROR)
        message.show()

    if self._to_remove_package_set:
        # Only remove packages if something changed.
        self._to_remove_package_set = set()
        self._saveManagementData()
def initialize(self, check_if_trusted: bool = False) -> None:
    """Set up preferences, the package manager, file handlers, the job queue and the OpenGL context.

    :param check_if_trusted: When True, preferences that point to file-system paths
        (theme, backend location) are marked untrusted until ``self._isPathSecure`` accepts them.
    """
    super().initialize()

    preferences = Application.getInstance().getPreferences()
    if check_if_trusted:
        # Need to do this before the preferences are read for the first time, but after obj-creation, which is here.
        preferences.indicateUntrustedPreference("general", "theme",
            lambda value: self._isPathSecure(Resources.getPath(Resources.Themes, value)))
        preferences.indicateUntrustedPreference("backend", "location",
            lambda value: self._isPathSecure(os.path.abspath(value)))
    preferences.addPreference("view/force_empty_shader_cache", False)
    preferences.addPreference("view/opengl_version_detect", OpenGLContext.OpenGlVersionDetect.Autodetect)

    # Read preferences here (upgrade won't work) to get:
    #  - The language in use, so the splash window can be shown in the correct language.
    #  - The OpenGL 'force' parameters.
    try:
        self.readPreferencesFromConfiguration()
    except FileNotFoundError:
        Logger.log("i", "Preferences file not found, ignore and use default language '%s'", self._default_language)

    # Initialize the package manager to remove and install scheduled packages.
    self._package_manager = self._package_manager_class(self, parent = self)
    # If a plugin is removed, check if the matching package is also removed.
    self._plugin_registry.pluginRemoved.connect(lambda plugin_id: self._package_manager.removePackage(plugin_id))

    self._mesh_file_handler = MeshFileHandler(self)  # type: MeshFileHandler
    self._workspace_file_handler = WorkspaceFileHandler(self)  # type: WorkspaceFileHandler

    if preferences.getValue("view/force_empty_shader_cache"):
        self.setAttribute(Qt.ApplicationAttribute.AA_DisableShaderDiskCache)

    # Decide which OpenGL version to request; "ForceModern" skips probing entirely.
    if preferences.getValue("view/opengl_version_detect") != OpenGLContext.OpenGlVersionDetect.ForceModern:
        major_version, minor_version, profile = OpenGLContext.detectBestOpenGLVersion(
            preferences.getValue("view/opengl_version_detect") == OpenGLContext.OpenGlVersionDetect.ForceLegacy)
    else:
        Logger.info("Force 'modern' OpenGL (4.1 core) -- overrides 'force legacy opengl' preference.")
        major_version, minor_version, profile = (4, 1, QSurfaceFormat.OpenGLContextProfile.CoreProfile)

    if major_version is None or minor_version is None or profile is None:
        # Probing failed: without a usable GL context the application cannot run.
        Logger.log("e", "Startup failed because OpenGL version probing has failed: tried to create a 2.0 and 4.1 context. Exiting")
        if not self.getIsHeadLess():
            QMessageBox.critical(None, "Failed to probe OpenGL", "Could not probe OpenGL. This program requires OpenGL 2.0 or higher. Please check your video card drivers.")
        sys.exit(1)
    else:
        opengl_version_str = OpenGLContext.versionAsText(major_version, minor_version, profile)
        Logger.log("d", "Detected most suitable OpenGL context version: %s", opengl_version_str)
    if not self.getIsHeadLess():
        OpenGLContext.setDefaultFormat(major_version, minor_version, profile = profile)

    self._qml_import_paths.append(os.path.join(os.path.dirname(sys.executable), "qml"))
    self._qml_import_paths.append(os.path.join(self.getInstallPrefix(), "Resources", "qml"))

    Logger.log("i", "Initializing job queue ...")
    self._job_queue = JobQueue()
    self._job_queue.jobFinished.connect(self._onJobFinished)

    Logger.log("i", "Initializing version upgrade manager ...")
    self._version_upgrade_manager = VersionUpgradeManager(self)
def cancelDownload(self) -> None:
    """Abort the in-flight package download, if any, and reset the download state."""
    Logger.info(
        f"User cancelled the download of a package. request {self._download_request_data}"
    )
    request = self._download_request_data
    if request is not None:
        http_manager = self._application.getHttpRequestManager()
        http_manager.abortRequest(request)
        self._download_request_data = None
    # Always reset the progress/downloading flags, even if no request was active.
    self.resetDownload()
def initialize(self) -> None:
    """Set up preferences, the package manager, file handlers, the job queue and the OpenGL context (Qt 5 variant)."""
    super().initialize()

    preferences = Application.getInstance().getPreferences()
    preferences.addPreference("view/force_empty_shader_cache", False)
    preferences.addPreference("view/opengl_version_detect", OpenGLContext.OpenGlVersionDetect.Autodetect)

    # Read preferences here (upgrade won't work) to get:
    #  - The language in use, so the splash window can be shown in the correct language.
    #  - The OpenGL 'force' parameters.
    try:
        preferences_filename = Resources.getPath(Resources.Preferences, self._app_name + ".cfg")
        self._preferences.readFromFile(preferences_filename)
    except FileNotFoundError:
        Logger.log("i", "Preferences file not found, ignore and use default language '%s'", self._default_language)

    # Initialize the package manager to remove and install scheduled packages.
    self._package_manager = self._package_manager_class(self, parent = self)

    self._mesh_file_handler = MeshFileHandler(self)  # type: MeshFileHandler
    self._workspace_file_handler = WorkspaceFileHandler(self)  # type: WorkspaceFileHandler

    # Remove this and you will get Windows 95 style for all widgets if you are using Qt 5.10+
    self.setStyle("fusion")

    if preferences.getValue("view/force_empty_shader_cache"):
        self.setAttribute(Qt.AA_DisableShaderDiskCache)
    self.setAttribute(Qt.AA_UseDesktopOpenGL)

    # Decide which OpenGL version to request; "ForceModern" skips probing entirely.
    if preferences.getValue("view/opengl_version_detect") != OpenGLContext.OpenGlVersionDetect.ForceModern:
        major_version, minor_version, profile = OpenGLContext.detectBestOpenGLVersion(
            preferences.getValue("view/opengl_version_detect") == OpenGLContext.OpenGlVersionDetect.ForceLegacy)
    else:
        Logger.info("Force 'modern' OpenGL (4.1 core) -- overrides 'force legacy opengl' preference.")
        major_version, minor_version, profile = (4, 1, QSurfaceFormat.CoreProfile)

    if major_version is None or minor_version is None or profile is None:
        # Probing failed: without a usable GL context the application cannot run.
        Logger.log("e", "Startup failed because OpenGL version probing has failed: tried to create a 2.0 and 4.1 context. Exiting")
        if not self.getIsHeadLess():
            QMessageBox.critical(None, "Failed to probe OpenGL", "Could not probe OpenGL. This program requires OpenGL 2.0 or higher. Please check your video card drivers.")
        sys.exit(1)
    else:
        opengl_version_str = OpenGLContext.versionAsText(major_version, minor_version, profile)
        Logger.log("d", "Detected most suitable OpenGL context version: %s", opengl_version_str)
    if not self.getIsHeadLess():
        OpenGLContext.setDefaultFormat(major_version, minor_version, profile = profile)

    self._qml_import_paths.append(os.path.join(os.path.dirname(sys.executable), "qml"))
    self._qml_import_paths.append(os.path.join(self.getInstallPrefix(), "Resources", "qml"))

    Logger.log("i", "Initializing job queue ...")
    self._job_queue = JobQueue()
    self._job_queue.jobFinished.connect(self._onJobFinished)

    Logger.log("i", "Initializing version upgrade manager ...")
    self._version_upgrade_manager = VersionUpgradeManager(self)
def _getRemoteClusters(self) -> None:
    """Gets all remote clusters from the API."""
    if self._syncing:
        # A previous sync is still in flight; don't start another one.
        return

    Logger.info("Syncing cloud printer clusters")
    self._syncing = True
    self._account.setSyncState(self.SYNC_SERVICE_NAME, SyncState.SYNCING)
    on_success = self._onGetRemoteClustersFinished
    on_failure = self._onGetRemoteClusterFailed
    self._api.getClusters(on_success, on_failure)
def duplicateMaterial(self, material_node: "MaterialNode", new_base_id: Optional[str] = None,
                      new_metadata: Optional[Dict[str, Any]] = None) -> Optional[str]:
    """Creates a duplicate of a material with the same GUID and base_file metadata

    :param material_node: The node representing the material to duplicate.
    :param new_base_id: A new material ID for the base material. The IDs of the submaterials will be based off this
        one. If not provided, a material ID will be generated automatically.
    :param new_metadata: Metadata for the new material. If not provided, this will be duplicated from the original
        material.
    :return: The root material ID of the duplicate material.
    """
    Logger.info(f"Duplicating material {material_node.base_file} to {new_base_id}")
    # Delegate to the base-file variant; the node only contributes its base_file ID.
    base_file = material_node.base_file
    return self.duplicateMaterialByBaseFile(base_file, new_base_id, new_metadata)
def startDownload(self, url: str) -> None:
    """Begin downloading a package archive from *url* and start tracking its progress."""
    Logger.info(f"Attempting to download & install package from {url}.")
    # Kick off the HTTP GET; the request handle is kept so the download can be aborted later.
    self._download_request_data = self._application.getHttpRequestManager().get(
        url,
        callback=lambda reply: self._onDownloadFinished(reply),
        error_callback=lambda reply, error: self._onDownloadFailed(reply, error),
        download_progress_callback=self._onDownloadProgress,
        scope=self._cloud_scope)
    self.setDownloadProgress(0)
    self.setIsDownloading(True)
def reinstallPackage(self, package_id: str) -> bool:
    """Attempts to 'reinstall' a package which was scheduled for removal on the next start-up

    :param package_id: The package ID to be reinstalled
    :return: True if it was successfully 'reinstalled' False otherwise
    """
    # A package can only be "reinstalled" if it is scheduled for removal
    # AND its files are still present in the installed set.
    if package_id not in self._to_remove_package_set:
        return False
    if package_id not in self._installed_package_dict:
        return False

    # Un-schedule the removal, persist the change, and notify listeners.
    self._to_remove_package_set.remove(package_id)
    self._saveManagementData()
    self.installedPackagesChanged.emit()
    self.packageInstalled.emit(package_id)
    Logger.info(f"Reinstalled package [{package_id}]")
    return True
def store(cls, file_path: str, file_id: str, version: Version = Version("1.0.0")) -> None:
    """
    Store a new file into the central file storage. This file will get moved to a storage location that is not
    specific to this version of the application.

    If the file already exists, this will check if it's the same file. If the file is not the same, it raises a
    `FileExistsError`. If the file is the same, no error is raised and the file to store is simply deleted. It is
    a duplicate of the file already stored.

    :param file_path: The path to the file to store in the central file storage.
    :param file_id: A name for the file to store.
    :param version: A version number for the file.
    :raises FileExistsError: There is already a centrally stored file with that name and version, but it's
        different.
    """
    storage_root = cls._centralStorageLocation()
    if not os.path.exists(storage_root):
        os.makedirs(storage_root)

    if not os.path.exists(file_path):
        # Nothing at the source path: presumably it was already moved on a previous run.
        Logger.debug(
            f"{file_id} {str(version)} was already stored centrally.")
        return

    storage_path = cls._getFilePath(file_id, version)
    if not os.path.exists(storage_path):
        # First time this file/version combination is stored: just move it in.
        shutil.move(file_path, storage_path)
        Logger.info(f"Storing new file {file_id} {str(version)}.")
        return

    # A stored copy already exists; verify both copies are identical.
    # Compare sizes first as a cheap shortcut before hashing the full contents.
    if os.path.getsize(file_path) != os.path.getsize(storage_path):
        raise FileExistsError(
            f"Central file storage already has a file with ID {file_id} and version {str(version)}, but it's different."
        )
    if cls._hashFile(file_path) != cls._hashFile(storage_path):
        raise FileExistsError(
            f"Central file storage already has a file with ID {file_id} and version {str(version)}, but it's different."
        )
    # Same file: drop the duplicate source.
    os.remove(file_path)
    Logger.info(
        f"{file_id} {str(version)} was already stored centrally. Removing duplicate."
    )
def setSyncState(self, service_name: str, state: int) -> None:
    """
    Can be used to register sync services and update account sync states

    Contract: A sync service is expected exit syncing state in all cases, within reasonable time

    Example: `setSyncState("PluginSyncService", SyncState.SYNCING)`
    :param service_name: A unique name for your service, such as `plugins` or `backups`
    :param state: One of SyncState
    """
    Logger.info("Service {service} enters sync state {state}", service=service_name, state=state)

    prev_state = self._sync_state

    self._sync_services[service_name] = state

    # Aggregate the per-service states: SYNCING wins over everything, then ERROR,
    # otherwise the overall state is SUCCESS. Manual sync is only enabled on ERROR.
    if any(val == SyncState.SYNCING for val in self._sync_services.values()):
        self._sync_state = SyncState.SYNCING
        self._setManualSyncEnabled(False)
    elif any(val == SyncState.ERROR for val in self._sync_services.values()):
        self._sync_state = SyncState.ERROR
        self._setManualSyncEnabled(True)
    else:
        self._sync_state = SyncState.SUCCESS
        self._setManualSyncEnabled(False)

    if self._sync_state != prev_state:
        self.syncStateChanged.emit(self._sync_state)

        if self._sync_state == SyncState.SUCCESS:
            # Record when the last successful sync finished, for display purposes.
            self._last_sync_str = datetime.now().strftime("%d/%m/%Y %H:%M")
            self.lastSyncDateTimeChanged.emit()

    if self._sync_state != SyncState.SYNCING:
        # schedule new auto update after syncing completed (for whatever reason)
        if not self._update_timer.isActive():
            self._update_timer.start()
def refresh_everything():
    # Re-render the screenshots of every article, in every language.
    Logger.info("Refreshing ALL screenshots.")
    for language_articles in self.articles_source.values():
        for article_text in language_articles.values():
            ScreenshotTool.refresh_screenshots(article_text=article_text, refreshed_set=refreshed_set)
def _loadManagementData(self) -> None:
    """Load the bundled and user package management data from disk, then reconcile
    the user's installed/to-install packages against the (possibly newer) bundled ones."""
    # The bundled package management file should always be there
    if len(self._bundled_package_management_file_paths) == 0:
        Logger.log("w", "Bundled package management files could not be found!")
        return
    # Load the bundled packages:
    self._bundled_package_dict = {}
    for search_path in self._bundled_package_management_file_paths:
        try:
            with open(search_path, "r", encoding="utf-8") as f:
                self._bundled_package_dict.update(json.load(f))
                Logger.log("i", "Loaded bundled packages data from %s", search_path)
        except UnicodeDecodeError:
            Logger.logException(
                "e", "Can't decode package management files. File is corrupt.")
            return

    # Need to use the file lock here to prevent concurrent I/O from other processes/threads
    container_registry = self._application.getContainerRegistry()
    with container_registry.lockFile():
        try:
            # Load the user packages:
            with open(cast(str, self._user_package_management_file_path), "r", encoding="utf-8") as f:
                try:
                    management_dict = json.load(f)
                except (JSONDecodeError, UnicodeDecodeError):
                    # The file got corrupted, ignore it. This happens extremely infrequently.
                    # The file will get overridden once a user downloads something.
                    return
                self._installed_package_dict = management_dict.get("installed", {})
                self._to_remove_package_set = set(management_dict.get("to_remove", []))
                self._to_install_package_dict = management_dict.get("to_install", {})
                self._dismissed_packages = set(management_dict.get("dismissed", []))
                Logger.log("i", "Loaded user packages management file from %s",
                           self._user_package_management_file_path)
        except FileNotFoundError:
            Logger.log("i", "User package management file %s doesn't exist, do nothing",
                       self._user_package_management_file_path)
            return

    # For packages that become bundled in the new releases, but a lower version was installed previously, we need
    # to remove the old lower version that's installed in the user's folder.
    for package_id, installed_package_dict in self._installed_package_dict.items():
        bundled_package_dict = self._bundled_package_dict.get(package_id)
        if bundled_package_dict is None:
            continue
        should_install = self._shouldInstallCandidate(installed_package_dict["package_info"],
                                                      bundled_package_dict["package_info"])
        # The bundled package is newer
        if not should_install:
            self._to_remove_package_set.add(package_id)
            continue

    # Also check the to-install packages to avoid installing packages that have a lower version than the bundled
    # ones.
    # Collect first, then delete: we cannot delete from the dict while iterating it.
    to_remove_package_ids = set()
    for package_id, to_install_package_dict in self._to_install_package_dict.items():
        bundled_package_dict = self._bundled_package_dict.get(package_id)
        if bundled_package_dict is None:
            continue
        should_install = self._shouldInstallCandidate(to_install_package_dict["package_info"],
                                                      bundled_package_dict["package_info"])
        # The bundled package is newer
        if not should_install:
            Logger.info("Ignoring package {} since it's sdk or package version is lower than the bundled package",
                        package_id)
            to_remove_package_ids.add(package_id)
            continue
    for package_id in to_remove_package_ids:
        del self._to_install_package_dict[package_id]
def loadPlugin(self, plugin_id: str) -> None:
    """Load a single plugin: locate it, validate metadata and SDK compatibility,
    then register and enable it.

    :param plugin_id: The ID of the plugin to load.
    :raises PluginNotFoundError: If no plugin with this ID can be found on disk.
    """
    # If plugin has already been loaded, do not load it again:
    if plugin_id in self._plugins:
        Logger.log("w", "Plugin %s was already loaded", plugin_id)
        return

    # Find the actual plugin on drive, do security checks if necessary:
    plugin = self._findPlugin(plugin_id)

    # If not found, raise error:
    if not plugin:
        raise PluginNotFoundError(plugin_id)

    # If found, but isn't in the metadata dictionary, add it:
    if plugin_id not in self._metadata:
        try:
            self._populateMetaData(plugin_id)
        except InvalidMetaDataError:
            return

    # Do not load plugin that has been disabled
    if plugin_id in self._disabled_plugins:
        Logger.log("i", "Plugin [%s] has been disabled. Skip loading it.", plugin_id)
        return

    # If API version is incompatible, don't load it.
    supported_sdk_versions = self._metadata[plugin_id].get(
        "plugin", {}).get("supported_sdk_versions", [Version("0")])
    # Idiom fix: any() replaces the manual `|=`-accumulation loop; it short-circuits identically.
    is_plugin_supported = any(self.isPluginApiVersionCompatible(supported_sdk_version)
                              for supported_sdk_version in supported_sdk_versions)
    if not is_plugin_supported:
        Logger.log(
            "w",
            "Plugin [%s] with supported sdk versions [%s] is incompatible with the current sdk version [%s].",
            plugin_id, [str(version) for version in supported_sdk_versions],
            self._api_version)
        self._outdated_plugins.append(plugin_id)
        return

    try:
        to_register = plugin.register(
            self._application
        )  # type: ignore  # We catch AttributeError on this in case register() doesn't exist.
        if not to_register:
            Logger.log("w", "Plugin %s did not return any objects to register", plugin_id)
            return
        for plugin_type, plugin_object in to_register.items():
            # Idiom fix: isinstance() instead of `type(...) == list` (flagged by E721).
            if isinstance(plugin_object, list):
                for metadata_index, nested_plugin_object in enumerate(plugin_object):
                    nested_plugin_object.setVersion(
                        self._metadata[plugin_id].get("plugin", {}).get("version"))
                    all_metadata = self._metadata[plugin_id].get(plugin_type, [])
                    try:
                        nested_plugin_object.setMetaData(all_metadata[metadata_index])
                    except IndexError:
                        # Fewer metadata entries than objects: fall back to empty metadata.
                        nested_plugin_object.setMetaData({})
                    self._addPluginObject(nested_plugin_object, plugin_id, plugin_type)
            else:
                plugin_object.setVersion(self._metadata[plugin_id].get(
                    "plugin", {}).get("version"))
                metadata = self._metadata[plugin_id].get(plugin_type, {})
                if isinstance(metadata, list):
                    try:
                        metadata = metadata[0]
                    except IndexError:
                        metadata = {}
                plugin_object.setMetaData(metadata)
                self._addPluginObject(plugin_object, plugin_id, plugin_type)
        self._plugins[plugin_id] = plugin
        self.enablePlugin(plugin_id)
        Logger.info("Loaded plugin %s", plugin_id)
    except Exception:  # Fixed: the exception was bound to an unused `ex`; logException captures it anyway.
        Logger.logException("e", "Error loading plugin %s:", plugin_id)
def startSplashWindowPhase(self) -> None:
    """Runs the start-up work performed while the splash screen is visible:
    package manager initialization, plugin loading, configuration upgrade,
    preference (re)loading, version bookkeeping, recent files and the tray icon."""
    super().startSplashWindowPhase()

    i18n_catalog = i18nCatalog("uranium")

    self.showSplashMessage(i18n_catalog.i18nc("@info:progress", "Initializing package manager..."))
    self._package_manager.initialize()

    # Restore the default SIGINT handler so Ctrl+C terminates the application.
    signal.signal(signal.SIGINT, signal.SIG_DFL)

    # This is done here as a lot of plugins require a correct gl context. If you want to change the framework,
    # these checks need to be done in your <framework>Application.py class __init__().
    self._configuration_error_message = ConfigurationErrorMessage(self,
          i18n_catalog.i18nc("@info:status", "Your configuration seems to be corrupt."),
          lifetime = 0,
          title = i18n_catalog.i18nc("@info:title", "Configuration errors")
          )

    # Remove, install, and then loading plugins
    self.showSplashMessage(i18n_catalog.i18nc("@info:progress", "Loading plugins..."))
    # Remove and install the plugins that have been scheduled
    self._plugin_registry.initializeBeforePluginsAreLoaded()
    self._plugin_registry.pluginLoadStarted.connect(self._displayLoadingPluginSplashMessage)
    self._loadPlugins()
    self._plugin_registry.pluginLoadStarted.disconnect(self._displayLoadingPluginSplashMessage)
    self._plugin_registry.checkRequiredPlugins(self.getRequiredPlugins())
    self.pluginsLoaded.emit()

    self.showSplashMessage(i18n_catalog.i18nc("@info:progress", "Updating configuration..."))
    with self._container_registry.lockFile():
        VersionUpgradeManager.getInstance().upgrade()

    # Load preferences again because before we have loaded the plugins, we don't have the upgrade routine for
    # the preferences file. Now that we have, load the preferences file again so it can be upgraded and loaded.
    self.showSplashMessage(i18n_catalog.i18nc("@info:progress", "Loading preferences..."))
    try:
        preferences_filename = Resources.getPath(Resources.Preferences, self._app_name + ".cfg")
        with open(preferences_filename, "r", encoding = "utf-8") as f:
            serialized = f.read()
            # This performs the upgrade for Preferences
            self._preferences.deserialize(serialized)
            self._preferences.setValue("general/plugins_to_remove", "")
            self._preferences.writeToFile(preferences_filename)
    except (EnvironmentError, UnicodeDecodeError):
        Logger.log("i", "The preferences file cannot be opened or it is corrupted, so we will use default values")

    self.processEvents()
    # Force the configuration file to be written again since the list of plugins to remove maybe changed
    try:
        self.readPreferencesFromConfiguration()
    except FileNotFoundError:
        Logger.log("i", "The preferences file '%s' cannot be found, will use default values",
                   self._preferences_filename)
        self._preferences_filename = Resources.getStoragePath(Resources.Preferences, self._app_name + ".cfg")
    Logger.info("Completed loading preferences.")

    # FIXME: This is done here because we now use "plugins.json" to manage plugins instead of the Preferences file,
    # but the PluginRegistry will still import data from the Preferences files if present, such as disabled plugins,
    # so we need to reset those values AFTER the Preferences file is loaded.
    self._plugin_registry.initializeAfterPluginsAreLoaded()

    # Check if we have just updated from an older version
    self._preferences.addPreference("general/last_run_version", "")
    last_run_version_str = self._preferences.getValue("general/last_run_version")
    if not last_run_version_str:
        # First run (or pre-tracking version): treat the current version as the last-run version.
        last_run_version_str = self._version
    last_run_version = Version(last_run_version_str)
    current_version = Version(self._version)
    if last_run_version < current_version:
        self._just_updated_from_old_version = True
    self._preferences.setValue("general/last_run_version", str(current_version))
    self._preferences.writeToFile(self._preferences_filename)

    # Preferences: recent files
    self._preferences.addPreference("%s/recent_files" % self._app_name, "")
    file_names = self._preferences.getValue("%s/recent_files" % self._app_name).split(";")
    for file_name in file_names:
        # Silently drop entries that no longer exist on disk.
        if not os.path.isfile(file_name):
            continue
        self._recent_files.append(QUrl.fromLocalFile(file_name))

    if not self.getIsHeadLess():
        # Initialize System tray icon and make it invisible because it is used only to show pop up messages
        self._tray_icon = None
        if self._tray_icon_name:
            try:
                self._tray_icon = QIcon(Resources.getPath(Resources.Images, self._tray_icon_name))
                self._tray_icon_widget = QSystemTrayIcon(self._tray_icon)
                self._tray_icon_widget.setVisible(False)
                Logger.info("Created system tray icon.")
            except FileNotFoundError:
                Logger.log("w", "Could not find the icon %s", self._tray_icon_name)
def loadPlugin(self, plugin_id: str) -> None:
    """Load a single plugin: locate it, validate metadata and SDK compatibility,
    then register and enable it. On failure, shows an error message offering to
    remove the broken plugin.

    :param plugin_id: The ID of the plugin to load.
    :raises PluginNotFoundError: If no plugin with this ID can be found on disk.
    """
    # If plugin has already been loaded, do not load it again:
    if plugin_id in self._plugins:
        Logger.log("w", "Plugin %s was already loaded", plugin_id)
        return

    # Find the actual plugin on drive, do security checks if necessary:
    plugin = self._findPlugin(plugin_id)

    # If not found, raise error:
    if not plugin:
        raise PluginNotFoundError(plugin_id)

    # If found, but isn't in the metadata dictionary, add it:
    if plugin_id not in self._metadata:
        try:
            self._populateMetaData(plugin_id)
        except InvalidMetaDataError:
            return

    # Do not load plugin that has been disabled
    if plugin_id in self._disabled_plugins:
        Logger.log("i", "Plugin [%s] has been disabled. Skip loading it.", plugin_id)
        return

    # If API version is incompatible, don't load it.
    supported_sdk_versions = self._metadata[plugin_id].get(
        "plugin", {}).get("supported_sdk_versions", [Version("0")])
    is_plugin_supported = False
    for supported_sdk_version in supported_sdk_versions:
        is_plugin_supported |= self.isPluginApiVersionCompatible(supported_sdk_version)
        if is_plugin_supported:
            break
    if not is_plugin_supported:
        Logger.log(
            "w",
            "Plugin [%s] with supported sdk versions [%s] is incompatible with the current sdk version [%s].",
            plugin_id, [str(version) for version in supported_sdk_versions],
            self._api_version)
        self._outdated_plugins.append(plugin_id)
        return

    try:
        to_register = plugin.register(
            self._application
        )  # type: ignore  # We catch AttributeError on this in case register() doesn't exist.
        if not to_register:
            Logger.log("w", "Plugin %s did not return any objects to register", plugin_id)
            return
        for plugin_type, plugin_object in to_register.items():
            if type(plugin_object) == list:
                # A plugin may register a list of objects for one type; metadata entries are matched by index.
                for metadata_index, nested_plugin_object in enumerate(plugin_object):
                    nested_plugin_object.setVersion(
                        self._metadata[plugin_id].get("plugin", {}).get("version"))
                    all_metadata = self._metadata[plugin_id].get(plugin_type, [])
                    try:
                        nested_plugin_object.setMetaData(all_metadata[metadata_index])
                    except IndexError:
                        # Fewer metadata entries than objects: fall back to empty metadata.
                        nested_plugin_object.setMetaData({})
                    self._addPluginObject(nested_plugin_object, plugin_id, plugin_type)
            else:
                plugin_object.setVersion(self._metadata[plugin_id].get(
                    "plugin", {}).get("version"))
                metadata = self._metadata[plugin_id].get(plugin_type, {})
                if type(metadata) == list:
                    try:
                        metadata = metadata[0]
                    except IndexError:
                        metadata = {}
                plugin_object.setMetaData(metadata)
                self._addPluginObject(plugin_object, plugin_id, plugin_type)
        self._plugins[plugin_id] = plugin
        self.enablePlugin(plugin_id)
        Logger.info("Loaded plugin %s", plugin_id)
    except Exception:
        # Loading failed: tell the user and offer a one-click uninstall of the broken plugin.
        message_text = i18n_catalog.i18nc(
            "@error",
            "The plugin {} could not be loaded. Re-installing the plugin might solve "
            "the issue", plugin_id)
        unable_to_load_plugin_message = Message(
            text=message_text, message_type=Message.MessageType.ERROR)
        unable_to_load_plugin_message.addAction(
            "remove",
            name=i18n_catalog.i18nc("@action:button", "Remove plugin"),
            icon="",
            description="Remove the plugin",
            button_align=Message.ActionButtonAlignment.ALIGN_RIGHT)
        # Listen for the pyqt signal, since that one does support lambda's
        unable_to_load_plugin_message.pyQtActionTriggered.connect(
            lambda message, action: (self.uninstallPlugin(plugin_id), message.hide()))
        unable_to_load_plugin_message.show()
        Logger.logException("e", "Error loading plugin %s:", plugin_id)
def store(cls, path: str, path_id: str, version: Version = Version("1.0.0"), move_file: bool = True) -> None:
    """
    Store a new item (file or directory) into the central file storage. This item will get moved to a storage
    location that is not specific to this version of the application.

    If the item already exists, this will check if it's the same item. If the item is not the same, it raises a
    `FileExistsError`. If the item is the same, no error is raised and the item to store is simply deleted. It is
    a duplicate of the item already stored.

    Note that this function SHOULD NOT be called by plugins themselves. The central_storage.json should be used
    instead!

    :param path: The path to the item (file or directory) to store in the central file storage.
    :param path_id: A name for the item (file or directory) to store.
    :param version: A version number for the item (file or directory).
    :param move_file: Should the file be moved at all or just remembered for later retrieval
    :raises FileExistsError: There is already a centrally stored item (file or directory) with that name and
        version, but it's different.
    """
    if not move_file:
        # Only remember where the item lives; don't actually move it into central storage.
        full_identifier = path_id + str(version)
        if full_identifier not in cls._unmoved_files:
            cls._unmoved_files[full_identifier] = path
        return

    if not os.path.exists(cls.getCentralStorageLocation()):
        os.makedirs(cls.getCentralStorageLocation())
    if not os.path.exists(path):
        Logger.debug(
            f"{path_id} {str(version)} was already stored centrally or the provided path is not correct"
        )
        return

    storage_path = cls._getItemPath(path_id, version)

    if os.path.exists(storage_path):  # File already exists. Check if it's the same.
        # NOTE(review): os.path.getsize on a *directory* does not reflect its contents' size,
        # so the quick size check is only meaningful for files — confirm directory handling.
        if os.path.getsize(path) != os.path.getsize(
                storage_path):  # As quick check if files are the same, check their file sizes.
            raise FileExistsError(
                f"Central file storage already has an item (file or directory) with ID {path_id} and version {str(version)}, but it's different."
            )
        new_item_hash = cls._hashItem(path)
        stored_item_hash = cls._hashItem(storage_path)
        if new_item_hash != stored_item_hash:
            raise FileExistsError(
                f"Central file storage already has an item (file or directory) with ID {path_id} and version {str(version)}, but it's different."
            )
        # Same item: drop the duplicate source (file or directory).
        if os.path.isfile(path):
            os.remove(path)
        elif os.path.isdir(path):
            shutil.rmtree(path)
        Logger.info(
            f"{path_id} {str(version)} was already stored centrally. Removing duplicate."
        )
    else:
        shutil.move(path, storage_path)
        Logger.info(f"Storing new item {path_id}.{str(version)}.")
def load(self, path: str, is_first_call: bool = True) -> None:
    """Load a theme from a directory and apply it.

    Parses the theme's ``theme.json`` for colors, fonts, sizes, icons and
    images. If the theme declares ``metadata/inherits``, the parent theme is
    loaded first (recursively) so that this theme's entries override the
    parent's. The ``themeLoaded`` signal is emitted once, after the whole
    inheritance chain has been loaded.

    :param path: Directory containing the ``theme.json`` file.
    :param is_first_call: True for the externally requested theme; False when
        recursing into an inherited (parent) theme.
    """
    if path == self._path:
        return  # Already the active theme; nothing to do.

    theme_full_path = os.path.join(path, "theme.json")
    Logger.log(
        "d",
        "Loading theme file: {theme_full_path}".format(
            theme_full_path=theme_full_path))
    try:
        with open(theme_full_path, encoding="utf-8") as f:
            data = json.load(f)
    except EnvironmentError as e:
        Logger.error(
            "Unable to load theme file at {theme_full_path}: {err}".format(
                theme_full_path=theme_full_path, err=e))
        return
    except UnicodeDecodeError:
        Logger.error(
            "Theme file at {theme_full_path} is corrupt (invalid UTF-8 bytes)."
            .format(theme_full_path=theme_full_path))
        return
    except json.JSONDecodeError:
        Logger.error(
            "Theme file at {theme_full_path} is corrupt (invalid JSON syntax)."
            .format(theme_full_path=theme_full_path))
        return

    # Iteratively load inherited themes
    try:
        theme_id = data["metadata"]["inherits"]
        self.load(Resources.getPath(Resources.Themes, theme_id),
                  is_first_call=False)
    except FileNotFoundError:
        Logger.log("e", "Could not find inherited theme %s", theme_id)
    except KeyError:
        pass  # No metadata or no inherits keyword in the theme.json file

    if "colors" in data:
        for name, value in data["colors"].items():
            if not is_first_call and isinstance(value, str):
                # Keep parent theme string colors as strings and parse later
                self._colors[name] = value
                continue

            if isinstance(value, str) and is_first_call:
                # value is reference to base_colors color name
                try:
                    color = data["base_colors"][value]
                except KeyError:
                    # FIX: was `except IndexError`, but a dict lookup raises
                    # KeyError — a missing base colour used to crash the load.
                    Logger.log(
                        "w",
                        "Colour {value} could not be found in base_colors".
                        format(value=value))
                    continue
            else:
                color = value

            try:
                c = QColor(color[0], color[1], color[2], color[3])
            except IndexError:  # Color doesn't have enough components.
                Logger.log(
                    "w",
                    "Colour {name} doesn't have enough components. Need to have 4, but had {num_components}."
                    .format(name=name, num_components=len(color)))
                continue  # Skip this one then.
            self._colors[name] = c

    if "base_colors" in data:
        for name, color in data["base_colors"].items():
            try:
                c = QColor(color[0], color[1], color[2], color[3])
            except IndexError:  # Color doesn't have enough components.
                Logger.log(
                    "w",
                    "Colour {name} doesn't have enough components. Need to have 4, but had {num_components}."
                    .format(name=name, num_components=len(color)))
                continue  # Skip this one then.
            self._colors[name] = c

    if is_first_call and self._colors:
        # Convert all string value colors to their referenced color
        for name, color in self._colors.items():
            if isinstance(color, str):
                try:
                    self._colors[name] = self._colors[color]
                except KeyError:
                    # FIX: was a bare `except:` with a truncated log message.
                    Logger.log(
                        "w",
                        "Colour {name} references unknown colour {color}".
                        format(name=name, color=color))

    # Register any TrueType fonts shipped with the theme.
    fonts_dir = os.path.join(path, "fonts")
    if os.path.isdir(fonts_dir):
        for root, _dirnames, filenames in os.walk(fonts_dir):
            for filename in filenames:
                if filename.lower().endswith(".ttf"):
                    QFontDatabase.addApplicationFont(
                        os.path.join(root, filename))

    if "fonts" in data:
        system_font_size = QCoreApplication.instance().font().pointSize()
        for name, font in data["fonts"].items():
            q_font = QFont()
            q_font.setFamily(
                font.get("family",
                         QCoreApplication.instance().font().family()))
            if font.get("bold"):
                q_font.setBold(font.get("bold", False))
            else:
                q_font.setWeight(font.get("weight", 500))
            q_font.setLetterSpacing(QFont.SpacingType.AbsoluteSpacing,
                                    font.get("letterSpacing", 0))
            q_font.setItalic(font.get("italic", False))
            # Theme "size" is a multiplier on the system font size.
            q_font.setPointSize(int(
                font.get("size", 1) * system_font_size))
            q_font.setCapitalization(QFont.Capitalization.AllUppercase if
                                     font.get("capitalize", False) else
                                     QFont.Capitalization.MixedCase)
            self._fonts[name] = q_font

    if "sizes" in data:
        # Sizes are specified in em units; scale to pixels.
        for name, size in data["sizes"].items():
            s = QSizeF()
            s.setWidth(round(size[0] * self._em_width))
            s.setHeight(round(size[1] * self._em_height))
            self._sizes[name] = s

    iconsdir = os.path.join(path, "icons")
    if os.path.isdir(iconsdir):
        try:
            # Icons are grouped by detail level, one sub-directory per level.
            for base_path, _, icons in os.walk(iconsdir):
                detail_level = base_path.split(os.sep)[-1]
                if detail_level not in self._icons:
                    self._icons[detail_level] = {}
                for icon in icons:
                    name = os.path.splitext(icon)[0]
                    self._icons[detail_level][name] = QUrl.fromLocalFile(
                        os.path.join(base_path, icon))
        except EnvironmentError as err:  # Exception when calling os.walk, e.g. no access rights.
            Logger.error(
                f"Can't access icons of theme ({iconsdir}): {err}")
            # Won't get any icons then. Images will show as black squares.

        deprecated_icons_file = os.path.join(iconsdir,
                                             "deprecated_icons.json")
        if os.path.isfile(deprecated_icons_file):
            try:
                with open(deprecated_icons_file, encoding="utf-8") as f:
                    data = json.load(f)
                    for icon in data:
                        self._deprecated_icons[icon] = data[icon]
            except (UnicodeDecodeError, json.decoder.JSONDecodeError,
                    EnvironmentError):
                Logger.logException(
                    "w", "Could not parse deprecated icons list %s",
                    deprecated_icons_file)

    imagesdir = os.path.join(path, "images")
    if os.path.isdir(imagesdir):
        try:
            for image in os.listdir(imagesdir):
                name = os.path.splitext(image)[0]
                self._images[name] = QUrl.fromLocalFile(
                    os.path.join(imagesdir, image))
        except EnvironmentError as err:  # Exception when calling os.listdir, e.g. no access rights.
            Logger.error(
                f"Can't access image of theme ({imagesdir}): {err}")
            # Won't get any images then. They will show as black squares.

    Logger.log("d", "Loaded theme %s", path)
    Logger.info(f"System's em size is {self._em_height}px.")
    self._path = path

    # only emit the theme loaded signal once after all the themes in the inheritance chain have been loaded
    if is_first_call:
        self.themeLoaded.emit()
def exportContainer(
        self, container_id: str, file_type: str,
        file_url_or_string: Union[QUrl, str]) -> Dict[str, str]:
    """Export a container to a file.

    :param container_id: The ID of the container to export.
    :param file_type: The type of file to save as. Should be in the form of
        "description (*.extension, *.ext)".
    :param file_url_or_string: The URL where to save the file.
    :return: A dictionary containing a key "status" with a status code and a
        key "message" with a message explaining the status. The status code
        can be one of "error", "cancelled", "success".
    """
    if not (container_id and file_type and file_url_or_string):
        return {"status": "error", "message": "Invalid arguments"}

    if isinstance(file_url_or_string, QUrl):
        target_path = file_url_or_string.toLocalFile()
    else:
        target_path = file_url_or_string
    if not target_path:
        return {"status": "error", "message": "Invalid path"}

    # Resolve the mime type: prefer the registered name filter, otherwise
    # fall back to guessing from the file name.
    if file_type in self._container_name_filters:
        mime_type = self._container_name_filters[file_type]["mime"]
    else:
        try:
            mime_type = MimeTypeDatabase.getMimeTypeForFile(target_path)
        except MimeTypeNotFoundError:
            return {"status": "error", "message": "Unknown File Type"}

    registry = cura.CuraApplication.CuraApplication.getInstance(
    ).getContainerRegistry()
    matching_containers = registry.findContainers(id=container_id)
    if not matching_containers:
        return {"status": "error", "message": "Container not found"}
    container = matching_containers[0]

    # On OSX, strip whatever extension was typed before enforcing one of the
    # mime type's suffixes below.
    if Platform.isOSX() and "." in target_path:
        target_path = target_path[:target_path.rfind(".")]
    if not target_path.endswith(tuple(mime_type.suffixes)):
        target_path += "." + mime_type.preferredSuffix

    # Confirm overwriting an existing file (skipped on Windows).
    if not Platform.isWindows() and os.path.exists(target_path):
        answer = QMessageBox.question(
            None,
            catalog.i18nc("@title:window", "File Already Exists"),
            catalog.i18nc(
                "@label Don't translate the XML tag <filename>!",
                "The file <filename>{0}</filename> already exists. Are you sure you want to overwrite it?"
            ).format(target_path))
        if answer == QMessageBox.No:
            return {"status": "cancelled", "message": "User cancelled"}

    try:
        contents = container.serialize()
    except NotImplementedError:
        return {
            "status": "error",
            "message": "Unable to serialize container"
        }
    if contents is None:
        return {
            "status": "error",
            "message":
            "Serialization returned None. Unable to write to file"
        }

    try:
        with SaveFile(target_path, "w") as f:
            f.write(contents)
    except OSError:
        return {
            "status": "error",
            "message": "Unable to write to this location.",
            "path": target_path
        }

    Logger.info(
        "Successfully exported container to {path}".format(path=target_path))
    return {
        "status": "success",
        "message": "Successfully exported container",
        "path": target_path
    }
def importMaterialContainer(
        self, file_url_or_string: Union[QUrl, str]) -> Dict[str, str]:
    """Imports a material profile from a file.

    :param file_url_or_string: A URL (or plain path string) that points to
        the file to import.
    :return: :type{Dict} dict with a 'status' key containing the string
        'success' or 'error', and a 'message' key containing a message for
        the user.
    """
    if not file_url_or_string:
        return {"status": "error", "message": "Invalid path"}
    if isinstance(file_url_or_string, QUrl):
        file_url = file_url_or_string.toLocalFile()
    else:
        file_url = file_url_or_string
    Logger.info(f"Importing material from {file_url}")
    if not file_url or not os.path.exists(file_url):
        return {"status": "error", "message": "Invalid path"}

    try:
        mime_type = MimeTypeDatabase.getMimeTypeForFile(file_url)
    except MimeTypeNotFoundError:
        return {
            "status": "error",
            "message": "Could not determine mime type of file"
        }

    container_registry = cura.CuraApplication.CuraApplication.getInstance(
    ).getContainerRegistry()
    container_type = container_registry.getContainerForMimeType(mime_type)
    if not container_type:
        return {
            "status": "error",
            "message":
            "Could not find a container to handle the specified file."
        }
    if not issubclass(container_type, InstanceContainer):
        return {
            "status": "error",
            "message":
            "This is not a material container, but another type of file."
        }

    # Derive a unique container ID from the (URL-decoded) file name.
    container_id = urllib.parse.unquote_plus(
        mime_type.stripExtension(os.path.basename(file_url)))
    container_id = container_registry.uniqueName(container_id)

    container = container_type(container_id)
    try:
        with open(file_url, "rt", encoding="utf-8") as f:
            container.deserialize(f.read(), file_url)
    except PermissionError:
        return {
            "status": "error",
            "message": "Permission denied when trying to read the file."
        }
    except ContainerFormatError:
        # FIX: the key was "Message" (capital M), inconsistent with every
        # other return value, so callers reading result["message"] missed it.
        return {
            "status": "error",
            "message": "The material file appears to be corrupt."
        }
    except Exception as ex:
        # Broad catch at this API boundary: report any deserialization
        # failure to the user instead of crashing.
        return {"status": "error", "message": str(ex)}

    container.setDirty(True)
    container_registry.addContainer(container)
    return {
        "status":
        "success",
        "message":
        "Successfully imported container {0}".format(container.getName())
    }